[youtube:user:playlists] Add extractor (Closes #3817)

Sergey M․ 2015-11-22 04:17:07 +06:00
parent 0eebf34d9d
commit 0c14841585
2 changed files with 27 additions and 0 deletions

youtube_dl/extractor/__init__.py

@@ -834,6 +834,7 @@
     YoutubeTruncatedIDIE,
     YoutubeTruncatedURLIE,
     YoutubeUserIE,
+    YoutubeUserPlaylistsIE,
     YoutubeWatchLaterIE,
 )
 from .zapiks import ZapiksIE
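The new class only becomes reachable once it is listed in this import block: youtube-dl builds its extractor registry from these names and probes each class's suitable() against the input URL. A quick sanity check, sketched against the public gen_extractors() helper (the URL is the one from the test below):

    from youtube_dl.extractor import gen_extractors

    url = 'http://www.youtube.com/user/ThirstForScience/playlists'
    # Every registered extractor whose suitable() accepts the URL.
    names = [ie.IE_NAME for ie in gen_extractors() if ie.suitable(url)]
    assert 'youtube:user:playlists' in names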

youtube_dl/extractor/youtube.py

@@ -224,6 +224,17 @@ def extract_videos_from_page(self, page):
         return zip(ids_in_page, titles_in_page)
 
 
+class YoutubePlaylistsBaseInfoExtractor(InfoExtractor):
+    def _real_extract(self, url):
+        playlist_id = self._match_id(url)
+        webpage = self._download_webpage(url, playlist_id)
+        entries = [
+            self.url_result(compat_urlparse.urljoin(url, playlist), 'YoutubePlaylist')
+            for playlist in re.findall(r'href="(/playlist\?list=.+?)"', webpage)]
+        title = self._og_search_title(webpage, fatal=False)
+        return self.playlist_result(entries, playlist_id, title)
+
+
 class YoutubeIE(YoutubeBaseInfoExtractor):
     IE_DESC = 'YouTube.com'
     _VALID_URL = r"""(?x)^
@@ -1742,6 +1753,21 @@ def suitable(cls, url):
         return super(YoutubeUserIE, cls).suitable(url)
 
 
+class YoutubeUserPlaylistsIE(YoutubePlaylistsBaseInfoExtractor):
+    IE_DESC = 'YouTube.com user playlists'
+    _VALID_URL = r'https?://(?:\w+\.)?youtube\.com/user/(?P<id>[^/]+)/playlists'
+    IE_NAME = 'youtube:user:playlists'
+
+    _TEST = {
+        'url': 'http://www.youtube.com/user/ThirstForScience/playlists',
+        'playlist_mincount': 4,
+        'info_dict': {
+            'id': 'ThirstForScience',
+            'title': 'Thirst for Science',
+        },
+    }
+
+
 class YoutubeSearchIE(SearchInfoExtractor, YoutubePlaylistIE):
     IE_DESC = 'YouTube.com searches'
     # there doesn't appear to be a real limit, for example if you search for
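With the commit applied, the extractor can be exercised through the usual embedding API; a minimal sketch, assuming the standard YoutubeDL options of the time (extract_flat keeps the result to a flat list of playlist URLs instead of recursing into each one):

    import youtube_dl

    with youtube_dl.YoutubeDL({'extract_flat': 'in_playlist', 'quiet': True}) as ydl:
        info = ydl.extract_info(
            'http://www.youtube.com/user/ThirstForScience/playlists',
            download=False)

    print(info['id'], '-', info['title'])  # ThirstForScience - Thirst for Science
    for entry in info['entries']:
        print(entry['url'])                # one /playlist?list=... URL per playlist

The id and title printed above are exactly what the _TEST block asserts, which is why the test needs only playlist_mincount rather than a fixed entry list.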