Mirror of https://github.com/yt-dlp/yt-dlp.git, synced 2024-11-07 20:30:41 -05:00
Merge pull request #1531 from rg3/no-playlist
[youtube] implement --no-playlist to only download current video
commit bd8e5c7ca2
5 changed files with 24 additions and 1 deletion
@@ -50,6 +50,7 @@ ## Video Selection:
     --date DATE                download only videos uploaded in this date
     --datebefore DATE          download only videos uploaded before this date
     --dateafter DATE           download only videos uploaded after this date
+    --no-playlist              download only the currently playing video
 
 ## Download Options:
     -r, --rate-limit LIMIT     maximum download rate (e.g. 50k or 44.6m)
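Aside (usage note, not part of the diff): with this option in place, running youtube-dl --no-playlist against a watch URL that also carries a list= parameter (such as the one used in the new test below) downloads just that one video; without the flag the whole playlist is fetched, as before.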
@@ -27,6 +27,14 @@ def test_youtube_playlist(self):
         ytie_results = [YoutubeIE()._extract_id(url['url']) for url in result['entries']]
         self.assertEqual(ytie_results, [ 'bV9L5Ht9LgY', 'FXxLjLQi3Fg', 'tU3Bgo5qJZE'])
 
+    def test_youtube_playlist_noplaylist(self):
+        dl = FakeYDL()
+        dl.params['noplaylist'] = True
+        ie = YoutubePlaylistIE(dl)
+        result = ie.extract('https://www.youtube.com/watch?v=FXxLjLQi3Fg&list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re')
+        self.assertEqual(result['_type'], 'url')
+        self.assertEqual(YoutubeIE()._extract_id(result['url']), 'FXxLjLQi3Fg')
+
     def test_issue_673(self):
         dl = FakeYDL()
         ie = YoutubePlaylistIE(dl)
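Aside (a sketch, not part of the diff): the new test expects the playlist extractor to hand back a bare URL-type result instead of a playlist when noplaylist is set. Judging from the assertions, the dict returned by url_result() would look roughly like the following; the exact field set, in particular ie_key, is an assumption here rather than something the diff shows.

result = {
    '_type': 'url',                                        # tells YoutubeDL to re-dispatch this URL
    'url': 'https://www.youtube.com/watch?v=FXxLjLQi3Fg',  # rebuilt from the v= parameter
    'ie_key': 'Youtube',                                   # assumed: hints which extractor should handle it
}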
@@ -83,6 +83,7 @@ class YoutubeDL(object):
     skip_download:     Skip the actual download of the video file
     cachedir:          Location of the cache files in the filesystem.
                        None to disable filesystem cache.
+    noplaylist:        Download single video instead of a playlist if in doubt.
 
     The following parameters are not used by YoutubeDL itself, they are used by
     the FileDownloader:
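Aside (a minimal sketch, not part of the diff): the docstring entry above is the embedding-API side of the option. Something along these lines would exercise it from Python; the import path, the add_default_info_extractors() call and the constructor usage mirror what _real_main does elsewhere, but they are assumptions here, not part of this commit.

from youtube_dl.YoutubeDL import YoutubeDL

# Prefer the single video whenever a URL points at both a video and a playlist.
ydl = YoutubeDL({'noplaylist': True})
ydl.add_default_info_extractors()   # register the bundled extractors, as the CLI entry point does
ydl.download(['https://www.youtube.com/watch?v=FXxLjLQi3Fg&list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re'])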
@@ -187,6 +187,7 @@ def _hide_login_info(opts):
     selection.add_option('--date', metavar='DATE', dest='date', help='download only videos uploaded in this date', default=None)
     selection.add_option('--datebefore', metavar='DATE', dest='datebefore', help='download only videos uploaded before this date', default=None)
     selection.add_option('--dateafter', metavar='DATE', dest='dateafter', help='download only videos uploaded after this date', default=None)
+    selection.add_option('--no-playlist', action='store_true', dest='noplaylist', help='download only the currently playing video', default=False)
 
 
     authentication.add_option('-u', '--username',
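Aside (a self-contained sketch, not part of the diff): the selection group uses optparse, where action='store_true' turns a bare flag into a boolean attribute named after dest. With made-up parser and group objects standing in for the real ones:

import optparse

parser = optparse.OptionParser()
selection = optparse.OptionGroup(parser, 'Video Selection')
selection.add_option('--no-playlist', action='store_true', dest='noplaylist',
                     help='download only the currently playing video', default=False)
parser.add_option_group(selection)

opts, args = parser.parse_args(['--no-playlist'])
print(opts.noplaylist)   # True; stays False when the flag is absent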
@@ -599,6 +600,7 @@ def _real_main(argv=None):
         'progress_with_newline': opts.progress_with_newline,
         'playliststart': opts.playliststart,
         'playlistend': opts.playlistend,
+        'noplaylist': opts.noplaylist,
         'logtostderr': opts.outtmpl == '-',
         'consoletitle': opts.consoletitle,
         'nopart': opts.nopart,
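Aside (a sketch, not part of the diff): this dict becomes YoutubeDL.params, so the flag travels from the command line to the extractor as a plain dictionary entry and is read back with .get(), which returns a falsy None when the key was never set:

params = {'noplaylist': True}   # what _real_main builds from opts.noplaylist
params.get('noplaylist')        # True  -> take the single-video branch
{}.get('noplaylist')            # None  -> falsy, so playlists keep downloading by default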
@@ -23,6 +23,7 @@
     compat_urllib_error,
     compat_urllib_parse,
     compat_urllib_request,
+    compat_urlparse,
     compat_str,
 
     clean_html,
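Aside (a sketch, not part of the diff): compat_urlparse is one of the project's Python 2/3 shims, pulled in because the new extractor code calls urlparse() and parse_qs(). A typical definition of such a shim looks like the following; this is illustrative, not the project's exact utils code.

try:
    import urlparse as compat_urlparse       # Python 2
except ImportError:
    import urllib.parse as compat_urlparse   # Python 3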
@@ -1525,9 +1526,19 @@ def _real_extract(self, url):
         mobj = re.match(self._VALID_URL, url, re.VERBOSE)
         if mobj is None:
             raise ExtractorError(u'Invalid URL: %s' % url)
+        playlist_id = mobj.group(1) or mobj.group(2)
+
+        # Check if it's a video-specific URL
+        query_dict = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
+        if 'v' in query_dict:
+            video_id = query_dict['v'][0]
+            if self._downloader.params.get('noplaylist'):
+                self.to_screen(u'Downloading just video %s because of --no-playlist' % video_id)
+                return self.url_result('https://www.youtube.com/watch?v=' + video_id, 'Youtube')
+            else:
+                self.to_screen(u'Downloading playlist PL%s - add --no-playlist to just download video %s' % (playlist_id, video_id))
 
         # Download playlist videos from API
-        playlist_id = mobj.group(1) or mobj.group(2)
         videos = []
 
         for page_num in itertools.count(1):
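Aside (a standalone sketch, not part of the diff): the branch above hinges on parse_qs() pulling the v= parameter out of the URL's query string; when noplaylist is set, the extractor then returns url_result() so YoutubeDL re-dispatches the plain watch URL to the regular YouTube extractor. The same query check with the standard library directly, runnable outside the extractor (the try/except mirrors what compat_urlparse provides):

try:
    from urllib.parse import urlparse, parse_qs   # Python 3
except ImportError:
    from urlparse import urlparse, parse_qs       # Python 2

url = 'https://www.youtube.com/watch?v=FXxLjLQi3Fg&list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re'
query_dict = parse_qs(urlparse(url).query)
if 'v' in query_dict:
    video_id = query_dict['v'][0]   # parse_qs maps each key to a list of values
    print(video_id)                 # FXxLjLQi3Fg, the single video the URL points at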