Mirror of https://github.com/yt-dlp/yt-dlp.git, synced 2024-11-07 20:30:41 -05:00
[SponsorBlock] Obey extractor-retries and sleep-requests
parent 19b824f693
commit ef58c47637
1 changed file with 23 additions and 10 deletions
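The two options read by the post-processor below, 'extractor_retries' and 'sleep_interval_requests', are ordinary YoutubeDL parameters. A minimal sketch of how a caller embedding yt-dlp might set them (not part of this commit; the 'SponsorBlock' post-processor key, its 'categories' argument and the placeholder URL are assumptions for illustration only):

# Sketch only: configure the retry/sleep behaviour that SponsorBlockPP._get_json()
# starts honouring in this commit. The option names match the get_param() calls in
# the diff below; the post-processor key/arguments and URL are assumed, not prescribed.
import yt_dlp

opts = {
    'extractor_retries': 5,          # used as max_retries in _get_json
    'sleep_interval_requests': 2,    # used as sleep_interval (seconds) in _get_json
    'postprocessors': [
        {'key': 'SponsorBlock', 'categories': ['sponsor']},  # assumed key/args
    ],
}

with yt_dlp.YoutubeDL(opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=<video-id>'])  # placeholder URL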
yt_dlp/postprocessor/sponsorblock.py
@@ -1,6 +1,8 @@
+from hashlib import sha256
+import itertools
 import json
 import re
-from hashlib import sha256
+import time
 
 from .ffmpeg import FFmpegPostProcessor
 from ..compat import compat_urllib_parse_urlencode, compat_HTTPError
@@ -33,6 +35,7 @@ def run(self, info):
             self.to_screen(f'SponsorBlock is not supported for {extractor}')
             return [], info
 
+        self.to_screen('Fetching SponsorBlock segments')
         info['sponsorblock_chapters'] = self._get_sponsor_chapters(info, info['duration'])
         return [], info
 
@@ -79,18 +82,28 @@ def _get_sponsor_segments(self, video_id, service):
             'service': service,
             'categories': json.dumps(self._categories),
         })
+        self.write_debug(f'SponsorBlock query: {url}')
         for d in self._get_json(url):
             if d['videoID'] == video_id:
                 return d['segments']
         return []
 
     def _get_json(self, url):
-        self.write_debug(f'SponsorBlock query: {url}')
-        try:
-            rsp = self._downloader.urlopen(sanitized_Request(url))
-        except network_exceptions as e:
-            if isinstance(e, compat_HTTPError) and e.code == 404:
-                return []
-            raise PostProcessingError(f'Unable to communicate with SponsorBlock API - {e}')
-
-        return json.loads(rsp.read().decode(rsp.info().get_param('charset') or 'utf-8'))
+        # While this is not an extractor, it behaves similar to one and
+        # so obey extractor_retries and sleep_interval_requests
+        max_retries = self.get_param('extractor_retries', 3)
+        sleep_interval = self.get_param('sleep_interval_requests') or 0
+        for retries in itertools.count():
+            try:
+                rsp = self._downloader.urlopen(sanitized_Request(url))
+                return json.loads(rsp.read().decode(rsp.info().get_param('charset') or 'utf-8'))
+            except network_exceptions as e:
+                if isinstance(e, compat_HTTPError) and e.code == 404:
+                    return []
+                if retries < max_retries:
+                    self.report_warning(f'{e}. Retrying...')
+                    if sleep_interval > 0:
+                        self.to_screen(f'Sleeping {sleep_interval} seconds ...')
+                        time.sleep(sleep_interval)
+                    continue
+                raise PostProcessingError(f'Unable to communicate with SponsorBlock API: {e}')