2012-12-12 08:15:21 -05:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
2012-09-28 09:34:56 -04:00
|
|
|
import hashlib
|
2012-12-12 08:15:21 -05:00
|
|
|
import io
|
2012-09-28 09:34:56 -04:00
|
|
|
import os
|
2012-10-15 07:01:36 -04:00
|
|
|
import json
|
2012-11-28 09:09:56 -05:00
|
|
|
import unittest
|
|
|
|
import sys
|
2012-12-20 08:14:43 -05:00
|
|
|
import hashlib
|
2012-12-20 10:30:55 -05:00
|
|
|
import socket
|
2012-12-12 08:15:21 -05:00
|
|
|
|
|
|
|
# Allow direct execution: put the project root (the parent of this test
# directory) on sys.path so `import youtube_dl` works without installing.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
2012-11-28 09:09:56 -05:00
|
|
|
|
2012-12-12 08:15:21 -05:00
|
|
|
import youtube_dl.FileDownloader
|
|
|
|
import youtube_dl.InfoExtractors
|
|
|
|
from youtube_dl.utils import *
|
2012-12-11 21:55:06 -05:00
|
|
|
|
|
|
|
# Both data files live next to this script: the test definitions and the
# shared downloader parameters.
_TEST_DIR = os.path.dirname(os.path.abspath(__file__))
DEF_FILE = os.path.join(_TEST_DIR, 'tests.json')
PARAMETERS_FILE = os.path.join(_TEST_DIR, "parameters.json")
|
|
|
|
|
|
|
|
# General configuration (from __init__, not very elegant...)
# Install a module-wide urllib opener mirroring what youtube_dl's __init__
# sets up for real runs: cookie handling, proxy handling, and the project's
# own HTTP handler (YoutubeDLHandler — presumably provided by the
# `from youtube_dl.utils import *` above; verify).
jar = compat_cookiejar.CookieJar()
cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
proxy_handler = compat_urllib_request.ProxyHandler()
opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
compat_urllib_request.install_opener(opener)
|
|
|
|
|
|
|
|
class FileDownloader(youtube_dl.FileDownloader):
    """Test double for youtube_dl.FileDownloader.

    Differences from the real downloader:
      * stderr output is routed through to_screen so all test output goes
        to one stream;
      * every info dict passed to process_info is recorded in
        self.processed_info_dicts so generated tests can inspect it.
    """

    def __init__(self, *args, **kwargs):
        # Redirect error messages to the normal screen channel.
        self.to_stderr = self.to_screen
        # Filled by process_info(); read back by the generated tests.
        self.processed_info_dicts = []
        return youtube_dl.FileDownloader.__init__(self, *args, **kwargs)

    def process_info(self, info_dict):
        # Record the info dict, then defer to the real implementation.
        self.processed_info_dicts.append(info_dict)
        return youtube_dl.FileDownloader.process_info(self, info_dict)
|
2012-12-11 21:55:06 -05:00
|
|
|
|
2012-12-12 08:15:21 -05:00
|
|
|
def _file_md5(fn):
    """Return the hexadecimal MD5 digest of the file at path *fn*.

    Reads the file in fixed-size chunks so arbitrarily large downloaded
    files can be hashed without loading them into memory at once.
    """
    md5 = hashlib.md5()
    with open(fn, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            md5.update(chunk)
    return md5.hexdigest()
|
|
|
|
|
|
|
|
# Load the test definitions and the shared downloader parameters once at
# import time.
with io.open(DEF_FILE, encoding='utf-8') as _f:
    defs = json.load(_f)
with io.open(PARAMETERS_FILE, encoding='utf-8') as _f:
    parameters = json.load(_f)
|
2012-12-11 21:55:06 -05:00
|
|
|
|
2012-12-20 08:14:43 -05:00
|
|
|
|
2012-12-11 21:55:06 -05:00
|
|
|
class TestDownload(unittest.TestCase):
    """Container class; test_* methods are attached to it dynamically."""

    def setUp(self):
        """Expose the shared configuration and start from a clean state."""
        self.parameters = parameters
        self.defs = defs
        # Clear old files left over from a previous run.
        self.tearDown()

    def tearDown(self):
        """Delete every output file the test definitions may have produced."""
        for test in self.defs:
            fn = test.get('file', False)
            if fn and os.path.exists(fn):
                os.remove(fn)
|
2012-12-11 21:55:06 -05:00
|
|
|
|
|
|
|
|
2012-12-12 09:14:58 -05:00
|
|
|
### Dynamically generate tests
|
|
|
|
def generator(test_case):
    """Build one test method for a single entry of tests.json.

    The returned function downloads test_case['url'] with the recording
    FileDownloader, then checks the output file's existence, its optional
    MD5 checksum, and any expected info_dict fields.
    """

    def test_template(self):
        ie = getattr(youtube_dl.InfoExtractors, test_case['name'] + 'IE')
        if not ie._WORKING:
            print('Skipping: IE marked as not _WORKING')
            return
        if not test_case['file']:
            print('Skipping: No output file specified')
            return
        if 'skip' in test_case:
            print('Skipping: {0}'.format(test_case['skip']))
            return

        params = dict(self.parameters)  # Duplicate it locally
        for p in test_case.get('params', {}):
            params[p] = test_case['params'][p]

        fd = FileDownloader(params)
        fd.add_info_extractor(ie())
        # Some extractors delegate to others; register those as well.
        for ien in test_case.get('add_ie', []):
            fd.add_info_extractor(getattr(youtube_dl.InfoExtractors, ien + 'IE')())
        fd.download([test_case['url']])

        self.assertTrue(os.path.exists(test_case['file']))
        if 'md5' in test_case:
            md5_for_file = _file_md5(test_case['file'])
            self.assertEqual(md5_for_file, test_case['md5'])
        info_dict = fd.processed_info_dicts[0]
        for (info_field, value) in test_case.get('info_dict', {}).items():
            if value.startswith('md5:'):
                # An expected value of "md5:<hex>" means the definition stores
                # only a checksum of the (possibly long) field value.
                # hashlib.md5 requires bytes, so encode explicitly: on
                # Python 3 passing a str raises TypeError, and on Python 2
                # implicit ASCII encoding fails for non-ASCII values.
                md5_info_value = hashlib.md5(
                    info_dict.get(info_field, '').encode('utf-8')).hexdigest()
                self.assertEqual(value[3:], md5_info_value)
            else:
                self.assertEqual(value, info_dict.get(info_field))

    return test_template
|
2012-12-12 08:15:21 -05:00
|
|
|
|
2012-12-12 09:14:58 -05:00
|
|
|
### And add them to TestDownload
|
2012-12-12 08:15:21 -05:00
|
|
|
# Each definition becomes one test_<name> method on TestDownload.
for test_case in defs:
    method_name = "test_{0}".format(test_case["name"])
    test_method = generator(test_case)
    test_method.__name__ = method_name
    setattr(TestDownload, method_name, test_method)
    del test_method
|
2012-11-28 09:09:56 -05:00
|
|
|
|
|
|
|
|
|
|
|
# Run the dynamically generated download tests when invoked directly.
if __name__ == '__main__':
    unittest.main()
|