Discard info_dict from memory if no longer needed

Closes #1399
pukkandan 2022-07-11 02:14:23 +05:30
parent 56b5b832bf
commit 134c913cca
GPG Key ID: 7EEE9E1E817D0A39
2 changed files with 31 additions and 4 deletions

yt_dlp/YoutubeDL.py

@@ -319,9 +319,14 @@ class YoutubeDL:
     default_search:    Prepend this string if an input url is not valid.
                        'auto' for elaborate guessing
     encoding:          Use this encoding instead of the system-specified.
-    extract_flat:      Do not resolve URLs, return the immediate result.
-                       Pass in 'in_playlist' to only show this behavior for
-                       playlist items.
+    extract_flat:      Whether to resolve and process url_results further
+                       * False:     Always process (default)
+                       * True:      Never process
+                       * 'in_playlist': Do not process inside playlist/multi_video
+                       * 'discard': Always process, but don't return the result
+                                    from inside playlist/multi_video
+                       * 'discard_in_playlist': Same as "discard", but only for
+                                    playlists (not multi_video)
     wait_for_video:    If given, wait for scheduled streams to become available.
                        The value should be a tuple containing the range
                        (min_secs, max_secs) to wait between retries
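
For context, the new values can also be passed through the embedding API. A minimal sketch (not part of this commit; the URL is a placeholder):

    import yt_dlp

    ydl_opts = {
        # 'discard': every entry is still fully processed (and downloaded when
        # requested), but its info_dict is not kept in the returned playlist result
        'extract_flat': 'discard',
    }
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://www.youtube.com/playlist?list=...'])  # placeholder URL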
@@ -1725,6 +1730,12 @@ def __process_playlist(self, ie_result, download):
         self.to_screen(f'[{ie_result["extractor"]}] Playlist {title}: Downloading {n_entries} videos'
                        f'{format_field(ie_result, "playlist_count", " of %s")}')
 
+        keep_resolved_entries = self.params.get('extract_flat') != 'discard'
+        if self.params.get('extract_flat') == 'discard_in_playlist':
+            keep_resolved_entries = ie_result['_type'] != 'playlist'
+        if keep_resolved_entries:
+            self.write_debug('The information of all playlist entries will be held in memory')
+
         failures = 0
         max_failures = self.params.get('skip_playlist_after_errors') or float('inf')
         for i, (playlist_index, entry) in enumerate(entries):
@@ -1765,6 +1776,7 @@ def __process_playlist(self, ie_result, download):
                     self.report_error(
                         f'Skipping the remaining entries in playlist "{title}" since {failures} items failed extraction')
                     break
-            resolved_entries[i] = (playlist_index, entry_result)
+            if keep_resolved_entries:
+                resolved_entries[i] = (playlist_index, entry_result)
 
         # Update with processed data
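
A toy illustration (simplified; not yt-dlp code) of why skipping the resolved_entries assignment matters: without it, each entry's info_dict can be garbage-collected as soon as the loop moves on, instead of every entry staying alive until the whole playlist finishes.

    def process_entry(entry):
        # stand-in for full extraction; returns a large per-entry info_dict
        return {'id': entry, 'formats': [0] * 100_000}

    def process_playlist(entries, keep_resolved_entries):
        resolved = []
        for entry in entries:
            result = process_entry(entry)
            if keep_resolved_entries:
                resolved.append(result)  # every info_dict stays in memory until the end
            # otherwise `result` is dropped each iteration and can be freed immediately
        return resolved

    process_playlist(range(1000), keep_resolved_entries=False)  # near-constant memory use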

yt_dlp/__init__.py

@@ -688,6 +688,21 @@ def parse_options(argv=None):
         'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl'
     ))
 
+    playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
+    write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
+                               and opts.allow_playlist_files and opts.outtmpl.get('pl_infojson') != '')
+    if not any((
+            opts.extract_flat,
+            opts.dump_single_json,
+            opts.forceprint.get('playlist'),
+            opts.print_to_file.get('playlist'),
+            write_playlist_infojson,
+    )):
+        if not playlist_pps:
+            opts.extract_flat = 'discard'
+        elif playlist_pps == [{'key': 'FFmpegConcat', 'only_multi_video': True, 'when': 'playlist'}]:
+            opts.extract_flat = 'discard_in_playlist'
+
     final_ext = (
         opts.recodevideo if opts.recodevideo in FFmpegVideoConvertorPP.SUPPORTED_EXTS
         else opts.remuxvideo if opts.remuxvideo in FFmpegVideoRemuxerPP.SUPPORTED_EXTS
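
For reference, a self-contained sketch of the decision this hunk adds (the helper name and the fake opts object are hypothetical, not yt-dlp API): when nothing consumes the full playlist result, entries can be discarded everywhere; when the only playlist postprocessor is FFmpegConcat for multi_video, they only need to be kept for multi_video results.

    from argparse import Namespace

    def choose_extract_flat(opts, postprocessors):
        playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
        write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
                                   and opts.allow_playlist_files
                                   and opts.outtmpl.get('pl_infojson') != '')
        if any((opts.extract_flat, opts.dump_single_json,
                opts.forceprint.get('playlist'), opts.print_to_file.get('playlist'),
                write_playlist_infojson)):
            return opts.extract_flat  # the playlist result is needed; keep entries
        if not playlist_pps:
            return 'discard'          # nothing consumes the playlist result at all
        if playlist_pps == [{'key': 'FFmpegConcat', 'only_multi_video': True, 'when': 'playlist'}]:
            return 'discard_in_playlist'  # entries only needed for multi_video
        return opts.extract_flat

    opts = Namespace(extract_flat=False, dump_single_json=False,
                     forceprint={}, print_to_file={}, writeinfojson=False,
                     clean_infojson=True, allow_playlist_files=True,
                     outtmpl={'default': '%(title)s.%(ext)s'})
    print(choose_extract_flat(opts, []))  # -> 'discard'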