Mirror of https://github.com/ytdl-org/youtube-dl.git, synced 2024-12-22 16:57:40 +00:00
Adapt test_download to support playlists, and remove race conditions
commit 5c892b0ba9 (parent 6985325e01)
3 changed files with 54 additions and 29 deletions
@@ -35,6 +35,6 @@
     "username": null,
     "verbose": true,
     "writedescription": false,
-    "writeinfojson": false,
+    "writeinfojson": true,
     "writesubtitles": false
 }
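This hunk flips "writeinfojson" to true in the test parameters (presumably test/parameters.json), so each test download also leaves a <output file>.info.json metadata sidecar that the reworked test code reads back. A minimal sketch of that read-back, with an illustrative filename taken from the Steam test case further down:

    import io
    import json

    # Illustrative: "81300.flv" is the output file of the Steam test case; the
    # sidecar name simply appends ".info.json", as the test code below does.
    info_path = '81300.flv' + '.info.json'

    # youtube-dl writes the info dict as UTF-8 JSON, so it loads back directly.
    with io.open(info_path, encoding='utf-8') as infof:
        info_dict = json.load(infof)

    print(info_dict.get('title'))  # expected: "Terraria 1.1 Trailer"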
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+import errno
 import hashlib
 import io
 import os
@@ -26,6 +27,14 @@ proxy_handler = compat_urllib_request.ProxyHandler()
 opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
 compat_urllib_request.install_opener(opener)
 
+def _try_rm(filename):
+    """ Remove a file if it exists """
+    try:
+        os.remove(filename)
+    except OSError as ose:
+        if ose.errno != errno.ENOENT:
+            raise
+
 class FileDownloader(youtube_dl.FileDownloader):
     def __init__(self, *args, **kwargs):
         self.to_stderr = self.to_screen
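The new _try_rm helper is the race-condition fix from the commit title in miniature: instead of checking os.path.exists before deleting (as the old tearDown below did), it attempts the removal and ignores only ENOENT, so a file vanishing between the check and the delete can no longer break cleanup. A rough side-by-side, using a hypothetical leftover filename:

    import errno
    import os

    filename = 'leftover.flv'  # hypothetical stale file from a previous run

    # Old pattern: check-then-remove. The file can disappear between the two
    # calls, so os.remove may still raise.
    if os.path.exists(filename):
        os.remove(filename)

    # New pattern (_try_rm): just remove, and swallow only "no such file".
    try:
        os.remove(filename)
    except OSError as ose:
        if ose.errno != errno.ENOENT:
            raise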
@@ -50,15 +59,6 @@ class TestDownload(unittest.TestCase):
         self.parameters = parameters
         self.defs = defs
 
-        # Clear old files
-        self.tearDown()
-
-    def tearDown(self):
-        for fn in [ test.get('file', False) for test in self.defs ]:
-            if fn and os.path.exists(fn):
-                os.remove(fn)
-
-
 
 ### Dynamically generate tests
 def generator(test_case):
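Dropping the shared setUp/tearDown also removes a cross-test hazard: every setUp called tearDown, which deleted the output files of all defined test cases, not just the one about to run. Cleanup is now scoped to the current case and wrapped in try/finally inside the generated test (see the later hunks); the lifecycle is roughly the sketch below, where run_one_case and the filenames are illustrative:

    import errno
    import os

    def _try_rm(filename):
        """Remove a file if it exists (same helper as added above)."""
        try:
            os.remove(filename)
        except OSError as ose:
            if ose.errno != errno.ENOENT:
                raise

    def run_one_case(tc_files, download):
        # Clear leftovers for *this* case only, never other cases' files.
        for fn in tc_files:
            _try_rm(fn)
            _try_rm(fn + '.info.json')
        try:
            download()
            # ... assertions on tc_files and their .info.json sidecars ...
        finally:
            # Always clean up, even when the download or an assertion fails.
            for fn in tc_files:
                _try_rm(fn)
                _try_rm(fn + '.info.json')

    run_one_case(['81300.flv', '80859.flv'], download=lambda: None)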
@@ -67,7 +67,7 @@ def generator(test_case):
         if not ie._WORKING:
             print('Skipping: IE marked as not _WORKING')
             return
-        if not test_case['file']:
+        if 'playlist' not in test_case and not test_case['file']:
             print('Skipping: No output file specified')
             return
         if 'skip' in test_case:
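The relaxed skip condition reflects the new test-case shapes: a case now describes either a single output file or a playlist of them, and only cases defining neither are skipped. A sketch of the two shapes, using the entries from the tests JSON below, and of the normalization the next hunk applies (test_cases = test_case.get('playlist', [test_case])):

    # Single-file case: the case itself is the only entry to download and check.
    single = {
        'name': 'Ustream',
        'url': 'http://www.ustream.tv/recorded/20274954',
        'file': '20274954.flv',
        'md5': '088f151799e8f572f84eb62f17d73e5c',
    }

    # Playlist case: the per-file data lives under 'playlist' instead.
    playlist = {
        'name': 'Steam',
        'url': 'http://store.steampowered.com/video/105600/',
        'playlist': [
            {'file': '81300.flv', 'md5': 'f870007cee7065d7c76b88f0a45ecc07'},
            {'file': '80859.flv', 'md5': '61aaf31a5c5c3041afb58fb83cbb5751'},
        ],
    }

    for case in (single, playlist):
        # Same normalization as in the next hunk: a plain case becomes a
        # one-element list; a playlist contributes one entry per file.
        test_cases = case.get('playlist', [case])
        print(case['name'], [tc['file'] for tc in test_cases])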
@@ -82,19 +82,32 @@ def generator(test_case):
         fd.add_info_extractor(ie())
         for ien in test_case.get('add_ie', []):
             fd.add_info_extractor(getattr(youtube_dl.InfoExtractors, ien + 'IE')())
 
-        fd.download([test_case['url']])
-
-        self.assertTrue(os.path.exists(test_case['file']))
-        if 'md5' in test_case:
-            md5_for_file = _file_md5(test_case['file'])
-            self.assertEqual(md5_for_file, test_case['md5'])
-        info_dict = fd.processed_info_dicts[0]
-        for (info_field, value) in test_case.get('info_dict', {}).items():
-            if value.startswith('md5:'):
-                md5_info_value = hashlib.md5(info_dict.get(info_field, '')).hexdigest()
-                self.assertEqual(value[3:], md5_info_value)
-            else:
-                self.assertEqual(value, info_dict.get(info_field))
+        test_cases = test_case.get('playlist', [test_case])
+        for tc in test_cases:
+            _try_rm(tc['file'])
+            _try_rm(tc['file'] + '.info.json')
+        try:
+            fd.download([test_case['url']])
+
+            for tc in test_cases:
+                self.assertTrue(os.path.exists(tc['file']))
+                self.assertTrue(os.path.exists(tc['file'] + '.info.json'))
+                if 'md5' in tc:
+                    md5_for_file = _file_md5(tc['file'])
+                    self.assertEqual(md5_for_file, tc['md5'])
+                with io.open(tc['file'] + '.info.json', encoding='utf-8') as infof:
+                    info_dict = json.load(infof)
+                for (info_field, value) in tc.get('info_dict', {}).items():
+                    if value.startswith('md5:'):
+                        md5_info_value = hashlib.md5(info_dict.get(info_field, '')).hexdigest()
+                        self.assertEqual(value[3:], md5_info_value)
+                    else:
+                        self.assertEqual(value, info_dict.get(info_field))
+        finally:
+            for tc in test_cases:
+                _try_rm(tc['file'])
+                _try_rm(tc['file'] + '.info.json')
 
     return test_template
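The rewritten body keeps the existing 'md5:' convention for info_dict expectations: a value of the form 'md5:<hexdigest>' is compared against the MD5 of the extracted field rather than the literal text, which keeps long strings out of the test definitions. A standalone sketch of that check with made-up values (the .encode() call is only there so the sketch also runs on Python 3; the suite itself hashes Python 2 byte strings directly):

    import hashlib

    def md5_text(text):
        return hashlib.md5(text.encode('utf-8')).hexdigest()

    # Metadata as read back from the .info.json sidecar (illustrative values).
    info_dict = {
        'title': 'Terraria 1.1 Trailer',
        'description': 'A long description pinned only by its hash.',
    }

    # Expected values: plain strings compare directly, 'md5:' values by hash.
    expected = {
        'title': 'Terraria 1.1 Trailer',
        'description': 'md5:' + md5_text('A long description pinned only by its hash.'),
    }

    for info_field, value in expected.items():
        if value.startswith('md5:'):
            assert value[3:] == md5_text(info_dict.get(info_field, ''))
        else:
            assert value == info_dict.get(info_field)
    print('info_dict checks passed')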
@@ -124,16 +124,28 @@
   {
     "name": "Steam",
     "url": "http://store.steampowered.com/video/105600/",
-    "file": "81300.flv",
-    "md5": "f870007cee7065d7c76b88f0a45ecc07",
-    "info_dict": {
-      "title": "Terraria 1.1 Trailer"
-    }
+    "playlist": [
+      {
+        "file": "81300.flv",
+        "md5": "f870007cee7065d7c76b88f0a45ecc07",
+        "info_dict": {
+          "title": "Terraria 1.1 Trailer"
+        }
+      },
+      {
+        "file": "80859.flv",
+        "md5": "61aaf31a5c5c3041afb58fb83cbb5751",
+        "info_dict": {
+          "title": "Terraria Trailer"
+        }
+      }
+    ]
   },
   {
     "name": "Ustream",
     "url": "http://www.ustream.tv/recorded/20274954",
-    "files": [["20274954.flv", "088f151799e8f572f84eb62f17d73e5c" ]],
+    "file": "20274954.flv",
+    "md5": "088f151799e8f572f84eb62f17d73e5c",
     "info_dict": {
       "title": "Young Americans for Liberty February 7, 2012 2:28 AM"
     }
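After this change every entry in the tests JSON declares either a top-level "file"/"md5" pair (Ustream, previously using the old "files" list-of-pairs format) or a "playlist" of such dicts (Steam), which is exactly what the new skip condition and normalization in test_download.py expect. A quick consistency check one could run over the file (the test/tests.json path is assumed from the repository layout):

    import io
    import json

    # Path assumed; adjust if the definitions file lives elsewhere.
    with io.open('test/tests.json', encoding='utf-8') as f:
        defs = json.load(f)

    for test_case in defs:
        entries = test_case.get('playlist', [test_case])
        # Mirrors the updated skip condition: cases without any output file
        # would simply be skipped by the generated test.
        if not any(tc.get('file') for tc in entries):
            print('would be skipped (no output file):', test_case.get('name'))
        else:
            print(test_case.get('name'), [tc['file'] for tc in entries if 'file' in tc])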