from .fragment import FragmentFD
from ..networking import Request
from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor


class Features(enum.Enum):
    TO_STDOUT = enum.auto()
    MULTIPLE_FORMATS = enum.auto()


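# Base class for external downloaders. Subclasses either implement _make_cmd()
# to build the command line, or override _call_downloader() entirely (as FFmpegFD does).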
class ExternalFD(FragmentFD):
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps')
    SUPPORTED_FEATURES = ()
    _CAPTURE_STDERR = True
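    # _CAPTURE_STDERR controls whether _call_process() pipes the tool's stderr so it can be
    # reported on failure; subclasses whose tool prints progress to stderr (e.g. curl) disable it.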

    def real_download(self, filename, info_dict):
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        self._cookies_tempfile = None

        try:
            started = time.time()
            retval = self._call_downloader(tmpfilename, info_dict)
        except KeyboardInterrupt:
            if not info_dict.get('is_live'):
                raise
            # Live stream downloading cancellation should be considered as
            # correct and expected termination thus all postprocessing
            # should take place
            retval = 0
            self.to_screen(f'[{self.get_basename()}] Interrupted by user')
        finally:
            if self._cookies_tempfile:
                self.try_remove(self._cookies_tempfile)

        if retval == 0:
            status = {
                'filename': filename,
                'status': 'finished',
                'elapsed': time.time() - started,
            }
            if filename != '-':
                fsize = os.path.getsize(tmpfilename)
                self.try_rename(tmpfilename, filename)
                status.update({
                    'downloaded_bytes': fsize,
                    'total_bytes': fsize,
                })
            self._hook_progress(status, info_dict)
            return True
        else:
            self.report_error('%s exited with code %d' % (
                self.get_basename(), retval))
            return False

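    # Derives the downloader name from the class name, e.g. CurlFD -> 'curl', Aria2cFD -> 'aria2c'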
    @classmethod
    def get_basename(cls):
        return cls.__name__[:-2].lower()

    @classproperty
    def EXE_NAME(cls):
        return cls.get_basename()

    @functools.cached_property
    def exe(self):
        return self.EXE_NAME

    @classmethod
    def available(cls, path=None):
        path = check_executable(
            cls.EXE_NAME if path in (None, cls.get_basename()) else path,
            [cls.AVAILABLE_OPT])
        if not path:
            return False
        cls.exe = path
        return path

    @classmethod
    def supports(cls, info_dict):
        return all((
            not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES,
            '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES,
            not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url', 'extra_param_to_key_url'),
            all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')),
        ))

    @classmethod
    def can_download(cls, info_dict, path=None):
        return cls.available(path) and cls.supports(info_dict)

    def _option(self, command_option, param):
        return cli_option(self.params, command_option, param)

    def _bool_option(self, command_option, param, true_value='true', false_value='false', separator=None):
        return cli_bool_option(self.params, command_option, param, true_value, false_value, separator)

    def _valueless_option(self, command_option, param, expected_value=True):
        return cli_valueless_option(self.params, command_option, param, expected_value)

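    # These helpers map yt-dlp params onto the external tool's flags; e.g. with
    # {'ratelimit': 1000} in self.params, self._option('--limit-rate', 'ratelimit')
    # is expected to yield ['--limit-rate', '1000'], and [] when the param is unset.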
    def _configuration_args(self, keys=None, *args, **kwargs):
        return _configuration_args(
            self.get_basename(), self.params.get('external_downloader_args'), self.EXE_NAME,
            keys, *args, **kwargs)

    def _write_cookies(self):
        if not self.ydl.cookiejar.filename:
            tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False)
            tmp_cookies.close()
            self._cookies_tempfile = tmp_cookies.name
            self.to_screen(f'[download] Writing temporary cookies file to "{self._cookies_tempfile}"')
        # real_download resets _cookies_tempfile; if it's None then save() will write to cookiejar.filename
        self.ydl.cookiejar.save(self._cookies_tempfile)
        return self.ydl.cookiejar.filename or self._cookies_tempfile

    def _call_downloader(self, tmpfilename, info_dict):
        """ Either overwrite this or implement _make_cmd """
        cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        if 'fragments' not in info_dict:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if returncode and stderr:
                self.to_stderr(stderr)
            return returncode

        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry,
                                     frag_index=None, fatal=not skip_unavailable_fragments)
        for retry in retry_manager:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if not returncode:
                break
            # TODO: Decide whether to retry based on error code
            # https://aria2.github.io/manual/en/html/aria2c.html#exit-status
            if stderr:
                self.to_stderr(stderr)
            retry.error = Exception()
            continue

        if not skip_unavailable_fragments and retry_manager.error:
            return -1

        decrypt_fragment = self.decrypter(info_dict)
        dest, _ = self.sanitize_open(tmpfilename, 'wb')
        for frag_index, fragment in enumerate(info_dict['fragments']):
            fragment_filename = f'{tmpfilename}-Frag{frag_index}'
            try:
                src, _ = self.sanitize_open(fragment_filename, 'rb')
            except OSError as err:
                if skip_unavailable_fragments and frag_index > 1:
                    self.report_skip_fragment(frag_index, err)
                    continue
                self.report_error(f'Unable to open fragment {frag_index}; {err}')
                return -1
            dest.write(decrypt_fragment(fragment, src.read()))
            src.close()
            if not self.params.get('keep_fragments', False):
                self.try_remove(fragment_filename)
        dest.close()
        self.try_remove(f'{tmpfilename}.frag.urls')
        return 0

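    # Runs the assembled command; returns (stdout, stderr, returncode), which callers
    # above unpack as `_, stderr, returncode`.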
    def _call_process(self, cmd, info_dict):
        return Popen.run(cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None)


class CurlFD(ExternalFD):
    _CAPTURE_STDERR = False  # curl writes the progress to stderr

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '--location', '-o', tmpfilename, '--compressed']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += ['--cookie', cookie_header]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']

        cmd += self._bool_option('--continue-at', 'continuedl', '-', '0')
        cmd += self._valueless_option('--silent', 'noprogress')
        cmd += self._valueless_option('--verbose', 'verbose')
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--retry', 'retries')
        if len(retry) == 2:
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '2147483647'
            cmd += retry
        cmd += self._option('--max-filesize', 'max_filesize')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--proxy', 'proxy')
        cmd += self._valueless_option('--insecure', 'nocheckcertificate')
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class AxelFD(ExternalFD):
    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-o', tmpfilename]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['-H', f'{key}: {val}']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += ['-H', f'Cookie: {cookie_header}', '--max-redirect=0']
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class WgetFD(ExternalFD):
    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto']
        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += ['--load-cookies', self._write_cookies()]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--tries', 'retries')
        if len(retry) == 2:
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '0'
            cmd += retry
        cmd += self._option('--bind-address', 'source_address')
        proxy = self.params.get('proxy')
        if proxy:
            for var in ('http_proxy', 'https_proxy'):
                cmd += ['--execute', f'{var}={proxy}']
        cmd += self._valueless_option('--no-check-certificate', 'nocheckcertificate')
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class Aria2cFD(ExternalFD):
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'dash_frag_urls', 'm3u8_frag_urls')

    @staticmethod
    def supports_manifest(manifest):
        UNSUPPORTED_FEATURES = [
            r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [1]
            # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
        ]
        check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
        return all(check_results)

    @staticmethod
    def _aria2c_filename(fn):
        return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}'

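    # e.g. 'clip.mp4' becomes './clip.mp4' while absolute paths pass through unchanged;
    # see the whitespace-stripping workaround described in _make_cmd() below.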
    def _call_downloader(self, tmpfilename, info_dict):
        # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931
        if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []):
            info_dict['__rpc'] = {
                'port': find_available_port() or 19190,
                'secret': str(uuid.uuid4()),
            }
        return super()._call_downloader(tmpfilename, info_dict)

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-c', '--no-conf',
               '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
               '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16']
        if 'fragments' in info_dict:
            cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true']
        else:
            cmd += ['--min-split-size', '1M']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += [f'--load-cookies={self._write_cookies()}']
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--max-overall-download-limit', 'ratelimit')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--all-proxy', 'proxy')
        cmd += self._bool_option('--check-certificate', 'nocheckcertificate', 'false', 'true', '=')
        cmd += self._bool_option('--remote-time', 'updatetime', 'true', 'false', '=')
        cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
        cmd += self._configuration_args()

        if '__rpc' in info_dict:
            cmd += [
                '--enable-rpc',
                f'--rpc-listen-port={info_dict["__rpc"]["port"]}',
                f'--rpc-secret={info_dict["__rpc"]["secret"]}']

        # aria2c strips out spaces from the beginning/end of filenames and paths.
        # We work around this issue by adding a "./" to the beginning of the
        # filename and relative path, and adding a "/" at the end of the path.
        # See: https://github.com/yt-dlp/yt-dlp/issues/276
        # https://github.com/ytdl-org/youtube-dl/issues/20312
        # https://github.com/aria2/aria2/issues/1373
        dn = os.path.dirname(tmpfilename)
        if dn:
            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
        if 'fragments' not in info_dict:
            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
        cmd += ['--auto-file-renaming=false']

        if 'fragments' in info_dict:
            cmd += ['--uri-selector=inorder']
            url_list_file = f'{tmpfilename}.frag.urls'
            url_list = []
            for frag_index, fragment in enumerate(info_dict['fragments']):
                fragment_filename = f'{os.path.basename(tmpfilename)}-Frag{frag_index}'
                url_list.append('{}\n\tout={}'.format(fragment['url'], self._aria2c_filename(fragment_filename)))
            stream, _ = self.sanitize_open(url_list_file, 'wb')
            stream.write('\n'.join(url_list).encode())
            stream.close()
            cmd += ['-i', self._aria2c_filename(url_list_file)]
        else:
            cmd += ['--', info_dict['url']]
        return cmd

    def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
        # Does not actually need to be UUID, just unique
        sanitycheck = str(uuid.uuid4())
        d = json.dumps({
            'jsonrpc': '2.0',
            'id': sanitycheck,
            'method': method,
            'params': [f'token:{rpc_secret}', *params],
        }).encode()
        request = Request(
            f'http://localhost:{rpc_port}/jsonrpc',
            data=d, headers={
                'Content-Type': 'application/json',
                'Content-Length': f'{len(d)}',
            }, proxies={'all': None})
        with self.ydl.urlopen(request) as r:
            resp = json.load(r)
        assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
        return resp['result']

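    # With __rpc enabled in info_dict, _call_process() polls aria2c over its JSON-RPC
    # interface to report progress instead of parsing console output.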
    def _call_process(self, cmd, info_dict):
        if '__rpc' not in info_dict:
            return super()._call_process(cmd, info_dict)

        send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret'])
        started = time.time()

        fragmented = 'fragments' in info_dict
        frag_count = len(info_dict['fragments']) if fragmented else 1
        status = {
            'filename': info_dict.get('_filename'),
            'status': 'downloading',
            'elapsed': 0,
            'downloaded_bytes': 0,
            'fragment_count': frag_count if fragmented else None,
            'fragment_index': 0 if fragmented else None,
        }
        self._hook_progress(status, info_dict)

        def get_stat(key, *obj, average=False):
            val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0]
            return sum(val) / (len(val) if average else 1)

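        # e.g. get_stat('completedLength', active) sums that field across every entry aria2c
        # reports as active; with average=True the mean is returned instead of the sum.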
        with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p:
            # Add a small sleep so that RPC client can receive response,
            # or the connection stalls infinitely
            time.sleep(0.2)
            retval = p.poll()
            while retval is None:
                # We don't use tellStatus as we won't know the GID without reading stdout
                # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive
                active = send_rpc('aria2.tellActive')
                completed = send_rpc('aria2.tellStopped', [0, frag_count])

                downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active)
                speed = get_stat('downloadSpeed', active)
                total = frag_count * get_stat('totalLength', active, completed, average=True)
                if total < downloaded:
                    total = None

                status.update({
                    'downloaded_bytes': int(downloaded),
                    'speed': speed,
                    'total_bytes': None if fragmented else total,
                    'total_bytes_estimate': total,
                    'eta': (total - downloaded) / (speed or 1),
                    'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None,
                    'elapsed': time.time() - started,
                })
                self._hook_progress(status, info_dict)

                if not active and len(completed) >= frag_count:
                    send_rpc('aria2.shutdown')
                    retval = p.wait()
                    break

                time.sleep(0.1)
                retval = p.poll()

            return '', p.stderr.read(), retval


class HttpieFD(ExternalFD):
    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = ['http', '--download', '--output', tmpfilename, info_dict['url']]

        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += [f'{key}:{val}']

        # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1]
        # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2]
        # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq
        # 2: https://httpie.io/docs/cli/sessions
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += [f'Cookie:{cookie_header}']
        return cmd


class FFmpegFD(ExternalFD):
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'm3u8', 'm3u8_native', 'rtsp', 'rtmp', 'rtmp_ffmpeg', 'mms', 'http_dash_segments')
    SUPPORTED_FEATURES = (Features.TO_STDOUT, Features.MULTIPLE_FORMATS)

    @classmethod
    def available(cls, path=None):
        # TODO: Fix path for ffmpeg
        # Fixme: This may be wrong when --ffmpeg-location is used
        return FFmpegPostProcessor().available

    def on_process_started(self, proc, stdin):
        """ Override this in subclasses """
        pass

    @classmethod
    def can_merge_formats(cls, info_dict, params):
        return (
            info_dict.get('requested_formats')
            and info_dict.get('protocol')
            and not params.get('allow_unplayable_formats')
            and 'no-direct-merge' not in params.get('compat_opts', [])
            and cls.can_download(info_dict))

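    # Builds a single ffmpeg invocation that takes every selected format as an input and
    # copies/muxes them into tmpfilename (or stdout); cookies, headers, proxy settings and
    # section cuts are passed through as ffmpeg options below.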
    def _call_downloader(self, tmpfilename, info_dict):
        ffpp = FFmpegPostProcessor(downloader=self)
        if not ffpp.available:
            self.report_error('m3u8 download detected but ffmpeg could not be found. Please install')
            return False

        args = [ffpp.executable, '-y']

        for log_level in ('quiet', 'verbose'):
            if self.params.get(log_level, False):
                args += ['-loglevel', log_level]
                break
        if not self.params.get('verbose'):
            args += ['-hide_banner']

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))

        # These exist only for compatibility. Extractors should use
        # info_dict['downloader_options']['ffmpeg_args'] instead
        args += info_dict.get('_ffmpeg_args') or []
        seekable = info_dict.get('_seekable')
        if seekable is not None:
            # setting -seekable prevents ffmpeg from guessing if the server
            # supports seeking (by adding the header `Range: bytes=0-`), which
            # can cause problems in some cases
            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
            # http://trac.ffmpeg.org/ticket/6125#comment:10
            args += ['-seekable', '1' if seekable else '0']

        env = None
        proxy = self.params.get('proxy')
        if proxy:
            if not re.match(r'[\da-zA-Z]+://', proxy):
                proxy = f'http://{proxy}'

            if proxy.startswith('socks'):
                self.report_warning(
                    f'{self.get_basename()} does not support SOCKS proxies. Downloading is likely to fail. '
                    'Consider adding --hls-prefer-native to your command.')

            # Since December 2015 ffmpeg supports -http_proxy option (see
            # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
            # We could switch to the following code if we are able to detect version properly
            # args += ['-http_proxy', proxy]
            env = os.environ.copy()
            env['HTTP_PROXY'] = proxy
            env['http_proxy'] = proxy

        protocol = info_dict.get('protocol')

        if protocol == 'rtmp':
            player_url = info_dict.get('player_url')
            page_url = info_dict.get('page_url')
            app = info_dict.get('app')
            play_path = info_dict.get('play_path')
            tc_url = info_dict.get('tc_url')
            flash_version = info_dict.get('flash_version')
            live = info_dict.get('rtmp_live', False)
            conn = info_dict.get('rtmp_conn')
            if player_url is not None:
                args += ['-rtmp_swfverify', player_url]
            if page_url is not None:
                args += ['-rtmp_pageurl', page_url]
            if app is not None:
                args += ['-rtmp_app', app]
            if play_path is not None:
                args += ['-rtmp_playpath', play_path]
            if tc_url is not None:
                args += ['-rtmp_tcurl', tc_url]
            if flash_version is not None:
                args += ['-rtmp_flashver', flash_version]
            if live:
                args += ['-rtmp_live', 'live']
            if isinstance(conn, list):
                for entry in conn:
                    args += ['-rtmp_conn', entry]
            elif isinstance(conn, str):
                args += ['-rtmp_conn', conn]

        start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')

        selected_formats = info_dict.get('requested_formats') or [info_dict]
        for i, fmt in enumerate(selected_formats):
            is_http = re.match(r'https?://', fmt['url'])
            cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else []
            if cookies:
                args.extend(['-cookies', ''.join(
                    f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                    for cookie in cookies)])
            if fmt.get('http_headers') and is_http:
                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])

            if start_time:
                args += ['-ss', str(start_time)]
            if end_time:
                args += ['-t', str(end_time - start_time)]

            args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', fmt['url']]

        if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
            args += ['-c', 'copy']

        if info_dict.get('requested_formats') or protocol == 'http_dash_segments':
            for i, fmt in enumerate(selected_formats):
                stream_number = fmt.get('manifest_stream_number', 0)
                args.extend(['-map', f'{i}:{stream_number}'])

        if self.params.get('test', False):
            args += ['-fs', str(self._TEST_FILE_SIZE)]

        ext = info_dict['ext']
        if protocol in ('m3u8', 'm3u8_native'):
            use_mpegts = (tmpfilename == '-') or self.params.get('hls_use_mpegts')
            if use_mpegts is None:
                use_mpegts = info_dict.get('is_live')
            if use_mpegts:
                args += ['-f', 'mpegts']
            else:
                args += ['-f', 'mp4']
                if (ffpp.basename == 'ffmpeg' and ffpp._features.get('needs_adtstoasc')) and (not info_dict.get('acodec') or info_dict['acodec'].split('.')[0] in ('aac', 'mp4a')):
                    args += ['-bsf:a', 'aac_adtstoasc']
        elif protocol == 'rtmp':
            args += ['-f', 'flv']
        elif ext == 'mp4' and tmpfilename == '-':
            args += ['-f', 'mpegts']
        elif ext == 'unknown_video':
            ext = determine_ext(remove_end(tmpfilename, '.part'))
            if ext == 'unknown_video':
                self.report_warning(
                    'The video format is unknown and cannot be downloaded by ffmpeg. '
                    'Explicitly set the extension in the filename to attempt download in that format')
            else:
                self.report_warning(f'The video format is unknown. Trying to download as {ext} according to the filename')
                args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]
        else:
            args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args_out', ...))

        args += self._configuration_args(('_o1', '_o', ''))

        args = [encodeArgument(opt) for opt in args]
        args.append(ffpp._ffmpeg_filename_argument(tmpfilename))
        self._debug_cmd(args)

        piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
        with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
            if piped:
                self.on_process_started(proc, proc.stdin)
            try:
                retval = proc.wait()
            except BaseException as e:
                # subprocess.run would send the SIGKILL signal to ffmpeg and the
                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
                # produces a file that is playable (this is mostly useful for live
                # streams). Note that Windows is not affected and produces playable
                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
                    proc.communicate_or_kill(b'q')
                else:
                    proc.kill(timeout=None)
                raise
            return retval


class AVconvFD(FFmpegFD):
    pass


_BY_NAME = {
    klass.get_basename(): klass
    for name, klass in globals().items()
    if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD')
}


def list_external_downloaders():
    return sorted(_BY_NAME.keys())


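# e.g. get_external_downloader('aria2c') -> Aria2cFD; a full path such as '/usr/bin/wget' is
# reduced to its basename before the lookup, and the fallback matches EXE_NAME as a substring.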
def get_external_downloader(external_downloader):
    """ Given the name of the executable, see whether we support the given downloader """
    bn = os.path.splitext(os.path.basename(external_downloader))[0]
    return _BY_NAME.get(bn) or next((
        klass for klass in _BY_NAME.values() if klass.EXE_NAME in bn
    ), None)