commit bc3745c738
Author: Zil0
Date:   2024-04-25 14:36:25 +09:00 (committed via GitHub)

diff --git a/youtube_dl/YoutubeDL.py b/youtube_dl/YoutubeDL.py
--- a/youtube_dl/YoutubeDL.py
+++ b/youtube_dl/YoutubeDL.py

@@ -1829,10 +1829,13 @@ class YoutubeDL(object):
         if download:
             if len(formats_to_download) > 1:
                 self.to_screen('[info] %s: downloading video in %s formats' % (info_dict['id'], len(formats_to_download)))
+            filenames = []
             for format in formats_to_download:
                 new_info = dict(info_dict)
                 new_info.update(format)
                 self.process_info(new_info)
+                filenames.append(new_info.get('_filename'))
+            info_dict['filenames'] = filenames
         # We update the info dict with the best quality format (backwards compatibility)
         info_dict.update(formats_to_download[-1])
         return info_dict
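
With this hunk, each per-format call to process_info records the name of the file it wrote, and the aggregate list is exposed on the returned info dict. A minimal embedding sketch of how a caller could read it (assuming this patch is applied; the URL is youtube-dl's usual test video and 'bestvideo,bestaudio' is just one way to request more than one format):

import youtube_dl

ydl_opts = {'format': 'bestvideo,bestaudio'}  # comma-separated spec downloads each format separately
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=BaW_jenozKc', download=True)
    # Added by this patch: one entry per downloaded format.
    for name in info.get('filenames', []):
        print(name)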
@@ -2099,7 +2102,7 @@ class YoutubeDL(object):
                         self.report_warning(
                             'Requested formats are incompatible for merge and will be merged into mkv.')
                     # Ensure filename always has a correct extension for successful merge
-                    filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
+                    info_dict['_filename'] = filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
                     if os.path.exists(encodeFilename(filename)):
                         self.to_screen(
                             '[download] %s has already been downloaded and '
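
The one-line change above keeps info_dict['_filename'] in sync with the local filename when the merge falls back to mkv, so the name recorded for the download matches the file actually written. A hedged sketch of the observable effect (again assuming the patch is applied; the mkv fallback only triggers when the two selected formats cannot share a container):

import youtube_dl

ydl_opts = {
    'format': 'bestvideo+bestaudio',  # '+' requests two formats merged into one file
    'outtmpl': '%(title)s.%(ext)s',
}
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=BaW_jenozKc', download=True)
    # If the merge had to switch containers, the recorded name now ends in .mkv
    # instead of the originally templated extension.
    print(info.get('filenames'))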
@@ -2282,19 +2285,26 @@ class YoutubeDL(object):
         if ie_info.get('__postprocessors') is not None:
             pps_chain.extend(ie_info['__postprocessors'])
         pps_chain.extend(self._pps)
+        filepaths = []
         for pp in pps_chain:
             files_to_delete = []
             try:
                 files_to_delete, info = pp.run(info)
             except PostProcessingError as e:
                 self.report_error(e.msg)
-            if files_to_delete and not self.params.get('keepvideo', False):
-                for old_filename in files_to_delete:
-                    self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
-                    try:
-                        os.remove(encodeFilename(old_filename))
-                    except (IOError, OSError):
-                        self.report_warning('Unable to remove downloaded original file')
+            if files_to_delete:
+                if self.params.get('keepvideo'):
+                    filepaths.extend(files_to_delete)
+                else:
+                    for old_filename in files_to_delete:
+                        self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
+                        try:
+                            os.remove(encodeFilename(old_filename))
+                        except (IOError, OSError):
+                            self.report_warning('Unable to remove downloaded original file')
+        if info.get('filepath'):
+            filepaths.append(info['filepath'])
+        ie_info['_filename'] = filepaths or filename
 
     def _make_archive_id(self, info_dict):
         video_id = info_dict.get('id')
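
This post_process hunk collects every path the postprocessors produce or keep (including originals preserved via keepvideo) and stores the list back on the info dict as '_filename', falling back to the plain download filename when nothing extra was created. A sketch of an embedding call that would exercise this path (assuming the patch; the postprocessor config mirrors the README's FFmpegExtractAudio example):

import youtube_dl

ydl_opts = {
    'format': 'bestaudio/best',
    'keepvideo': True,  # kept originals are collected instead of deleted
    'postprocessors': [{
        'key': 'FFmpegExtractAudio',
        'preferredcodec': 'mp3',
    }],
}
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=BaW_jenozKc', download=True)
    # With the patch, an entry may itself be the list of files that post-processing
    # produced or kept for that download, rather than a single path.
    print(info.get('filenames'))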