from .fragment import FragmentFD
from ..networking import Request
from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor

class Features(enum.Enum):
    TO_STDOUT = enum.auto()
    MULTIPLE_FORMATS = enum.auto()

class ExternalFD(FragmentFD):
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps')
    SUPPORTED_FEATURES = ()
    _CAPTURE_STDERR = True
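    # NOTE: Subclasses below override SUPPORTED_PROTOCOLS/SUPPORTED_FEATURES to
    # declare what they can handle (checked in supports()), and _CAPTURE_STDERR
    # to control whether _call_process() pipes the external program's stderr.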

    def real_download(self, filename, info_dict):
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        self._cookies_tempfile = None

        try:
            started = time.time()
            retval = self._call_downloader(tmpfilename, info_dict)
        except KeyboardInterrupt:
            if not info_dict.get('is_live'):
                raise
            # Live stream downloading cancellation should be considered as
            # correct and expected termination thus all postprocessing
            # should take place
            retval = 0
            self.to_screen(f'[{self.get_basename()}] Interrupted by user')
        finally:
            if self._cookies_tempfile:
                self.try_remove(self._cookies_tempfile)

        if retval == 0:
            status = {
                'filename': filename,
                'status': 'finished',
                'elapsed': time.time() - started,
            }
            if filename != '-':
                fsize = os.path.getsize(encodeFilename(tmpfilename))
                self.try_rename(tmpfilename, filename)
                status.update({
                    'downloaded_bytes': fsize,
                    'total_bytes': fsize,
                })
            self._hook_progress(status, info_dict)
            return True
        else:
            self.to_stderr('\n')
            self.report_error('%s exited with code %d' % (
                self.get_basename(), retval))
            return False

    @classmethod
    def get_basename(cls):
        return cls.__name__[:-2].lower()

    @classproperty
    def EXE_NAME(cls):
        return cls.get_basename()
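    # The downloader name is derived from the class name ("CurlFD" -> "curl"),
    # so get_basename()/EXE_NAME also decide which executable is looked up.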

    @functools.cached_property
    def exe(self):
        return self.EXE_NAME

    @classmethod
    def available(cls, path=None):
        path = check_executable(
            cls.EXE_NAME if path in (None, cls.get_basename()) else path,
            [cls.AVAILABLE_OPT])
        if not path:
            return False
        cls.exe = path
        return path

    @classmethod
    def supports(cls, info_dict):
        return all((
            not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES,
            '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES,
            not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url', 'extra_param_to_key_url'),
            all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')),
        ))

    @classmethod
    def can_download(cls, info_dict, path=None):
        return cls.available(path) and cls.supports(info_dict)
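    # The helpers below are thin wrappers around cli_option/cli_bool_option/
    # cli_valueless_option: they translate a yt-dlp option from self.params into
    # the external program's command-line flags (an empty list when it is unset).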

    def _option(self, command_option, param):
        return cli_option(self.params, command_option, param)

    def _bool_option(self, command_option, param, true_value='true', false_value='false', separator=None):
        return cli_bool_option(self.params, command_option, param, true_value, false_value, separator)

    def _valueless_option(self, command_option, param, expected_value=True):
        return cli_valueless_option(self.params, command_option, param, expected_value)

    def _configuration_args(self, keys=None, *args, **kwargs):
        return _configuration_args(
            self.get_basename(), self.params.get('external_downloader_args'), self.EXE_NAME,
            keys, *args, **kwargs)

    def _write_cookies(self):
        if not self.ydl.cookiejar.filename:
            tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False)
            tmp_cookies.close()
            self._cookies_tempfile = tmp_cookies.name
            self.to_screen(f'[download] Writing temporary cookies file to "{self._cookies_tempfile}"')
        # real_download resets _cookies_tempfile; if it's None then save() will write to cookiejar.filename
        self.ydl.cookiejar.save(self._cookies_tempfile)
        return self.ydl.cookiejar.filename or self._cookies_tempfile
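    # When info_dict contains 'fragments', the external program is expected to
    # write each piece to '<tmpfilename>-Frag<N>'; the pieces are then decrypted
    # and concatenated into tmpfilename here before the usual rename/cleanup.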

    def _call_downloader(self, tmpfilename, info_dict):
        """ Either overwrite this or implement _make_cmd """
        cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        if 'fragments' not in info_dict:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if returncode and stderr:
                self.to_stderr(stderr)
            return returncode

        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry,
                                     frag_index=None, fatal=not skip_unavailable_fragments)
        for retry in retry_manager:
            _, stderr, returncode = self._call_process(cmd, info_dict)
            if not returncode:
                break
            # TODO: Decide whether to retry based on error code
            # https://aria2.github.io/manual/en/html/aria2c.html#exit-status
            if stderr:
                self.to_stderr(stderr)
            retry.error = Exception()
            continue

        if not skip_unavailable_fragments and retry_manager.error:
            return -1

        decrypt_fragment = self.decrypter(info_dict)
        dest, _ = self.sanitize_open(tmpfilename, 'wb')
        for frag_index, fragment in enumerate(info_dict['fragments']):
            fragment_filename = f'{tmpfilename}-Frag{frag_index}'
            try:
                src, _ = self.sanitize_open(fragment_filename, 'rb')
            except OSError as err:
                if skip_unavailable_fragments and frag_index > 1:
                    self.report_skip_fragment(frag_index, err)
                    continue
                self.report_error(f'Unable to open fragment {frag_index}; {err}')
                return -1
            dest.write(decrypt_fragment(fragment, src.read()))
            src.close()
            if not self.params.get('keep_fragments', False):
                self.try_remove(encodeFilename(fragment_filename))
        dest.close()
        self.try_remove(encodeFilename(f'{tmpfilename}.frag.urls'))
        return 0

    def _call_process(self, cmd, info_dict):
        return Popen.run(cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None)
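
# A concrete downloader usually only needs AVAILABLE_OPT (the flag used to probe
# the executable) and _make_cmd(); availability checks, cookies, fragments and
# retries are inherited from ExternalFD. This is an illustrative sketch only;
# "ExampleFD" and its flags are hypothetical, not a real supported downloader:
#
#     class ExampleFD(ExternalFD):
#         AVAILABLE_OPT = '--version'
#
#         def _make_cmd(self, tmpfilename, info_dict):
#             cmd = [self.exe, '-o', tmpfilename]
#             for key, val in (info_dict.get('http_headers') or {}).items():
#                 cmd += ['--header', f'{key}: {val}']
#             cmd += self._configuration_args()
#             return [*cmd, '--', info_dict['url']]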


class CurlFD(ExternalFD):
    AVAILABLE_OPT = '-V'
    _CAPTURE_STDERR = False  # curl writes the progress to stderr

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '--location', '-o', tmpfilename, '--compressed']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += ['--cookie', cookie_header]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']

        cmd += self._bool_option('--continue-at', 'continuedl', '-', '0')
        cmd += self._valueless_option('--silent', 'noprogress')
        cmd += self._valueless_option('--verbose', 'verbose')
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--retry', 'retries')
        if len(retry) == 2:
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '2147483647'
            cmd += retry
        cmd += self._option('--max-filesize', 'max_filesize')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--proxy', 'proxy')
        cmd += self._valueless_option('--insecure', 'nocheckcertificate')
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class AxelFD(ExternalFD):
    AVAILABLE_OPT = '-V'

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-o', tmpfilename]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['-H', f'{key}: {val}']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += ['-H', f'Cookie: {cookie_header}', '--max-redirect=0']
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class WgetFD(ExternalFD):
    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto']
        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += ['--load-cookies', self._write_cookies()]
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--limit-rate', 'ratelimit')
        retry = self._option('--tries', 'retries')
        if len(retry) == 2:
            if retry[1] in ('inf', 'infinite'):
                retry[1] = '0'
            cmd += retry
        cmd += self._option('--bind-address', 'source_address')
        proxy = self.params.get('proxy')
        if proxy:
            for var in ('http_proxy', 'https_proxy'):
                cmd += ['--execute', f'{var}={proxy}']
        cmd += self._valueless_option('--no-check-certificate', 'nocheckcertificate')
        cmd += self._configuration_args()
        cmd += ['--', info_dict['url']]
        return cmd


class Aria2cFD(ExternalFD):
    AVAILABLE_OPT = '-v'
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'dash_frag_urls', 'm3u8_frag_urls')

    @staticmethod
    def supports_manifest(manifest):
        UNSUPPORTED_FEATURES = [
            r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [1]
            # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
        ]
        check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
        return all(check_results)

    @staticmethod
    def _aria2c_filename(fn):
        return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}'

    def _call_downloader(self, tmpfilename, info_dict):
        # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931
        if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []):
            info_dict['__rpc'] = {
                'port': find_available_port() or 19190,
                'secret': str(uuid.uuid4()),
            }
        return super()._call_downloader(tmpfilename, info_dict)

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = [self.exe, '-c', '--no-conf',
               '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
               '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16']
        if 'fragments' in info_dict:
            cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true']
        else:
            cmd += ['--min-split-size', '1M']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += [f'--load-cookies={self._write_cookies()}']
        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += ['--header', f'{key}: {val}']
        cmd += self._option('--max-overall-download-limit', 'ratelimit')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--all-proxy', 'proxy')
        cmd += self._bool_option('--check-certificate', 'nocheckcertificate', 'false', 'true', '=')
        cmd += self._bool_option('--remote-time', 'updatetime', 'true', 'false', '=')
        cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
        cmd += self._configuration_args()

        if '__rpc' in info_dict:
            cmd += [
                '--enable-rpc',
                f'--rpc-listen-port={info_dict["__rpc"]["port"]}',
                f'--rpc-secret={info_dict["__rpc"]["secret"]}']

        # aria2c strips out spaces from the beginning/end of filenames and paths.
        # We work around this issue by adding a "./" to the beginning of the
        # filename and relative path, and adding a "/" at the end of the path.
        # See: https://github.com/yt-dlp/yt-dlp/issues/276
        # https://github.com/ytdl-org/youtube-dl/issues/20312
        # https://github.com/aria2/aria2/issues/1373
        dn = os.path.dirname(tmpfilename)
        if dn:
            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
        if 'fragments' not in info_dict:
            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
        cmd += ['--auto-file-renaming=false']

        if 'fragments' in info_dict:
            cmd += ['--uri-selector=inorder']
            url_list_file = f'{tmpfilename}.frag.urls'
            url_list = []
            for frag_index, fragment in enumerate(info_dict['fragments']):
                fragment_filename = f'{os.path.basename(tmpfilename)}-Frag{frag_index}'
                url_list.append('{}\n\tout={}'.format(fragment['url'], self._aria2c_filename(fragment_filename)))
            stream, _ = self.sanitize_open(url_list_file, 'wb')
            stream.write('\n'.join(url_list).encode())
            stream.close()
            cmd += ['-i', self._aria2c_filename(url_list_file)]
        else:
            cmd += ['--', info_dict['url']]
        return cmd

    def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
        # Does not actually need to be UUID, just unique
        sanitycheck = str(uuid.uuid4())
        d = json.dumps({
            'jsonrpc': '2.0',
            'id': sanitycheck,
            'method': method,
            'params': [f'token:{rpc_secret}', *params],
        }).encode()
        request = Request(
            f'http://localhost:{rpc_port}/jsonrpc',
            data=d, headers={
                'Content-Type': 'application/json',
                'Content-Length': f'{len(d)}',
            }, proxies={'all': None})
        with self.ydl.urlopen(request) as r:
            resp = json.load(r)
        assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
        return resp['result']
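    # _call_process below polls the aria2c RPC server (aria2.tellActive /
    # aria2.tellStopped) while the process runs and feeds the aggregated byte
    # counts and speeds into the regular progress hooks.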

    def _call_process(self, cmd, info_dict):
        if '__rpc' not in info_dict:
            return super()._call_process(cmd, info_dict)

        send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret'])
        started = time.time()

        fragmented = 'fragments' in info_dict
        frag_count = len(info_dict['fragments']) if fragmented else 1
        status = {
            'filename': info_dict.get('_filename'),
            'status': 'downloading',
            'elapsed': 0,
            'downloaded_bytes': 0,
            'fragment_count': frag_count if fragmented else None,
            'fragment_index': 0 if fragmented else None,
        }
        self._hook_progress(status, info_dict)

        def get_stat(key, *obj, average=False):
            val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0]
            return sum(val) / (len(val) if average else 1)

        with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p:
            # Add a small sleep so that RPC client can receive response,
            # or the connection stalls infinitely
            time.sleep(0.2)
            retval = p.poll()
            while retval is None:
                # We don't use tellStatus as we won't know the GID without reading stdout
                # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive
                active = send_rpc('aria2.tellActive')
                completed = send_rpc('aria2.tellStopped', [0, frag_count])

                downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active)
                speed = get_stat('downloadSpeed', active)
                total = frag_count * get_stat('totalLength', active, completed, average=True)
                if total < downloaded:
                    total = None

                status.update({
                    'downloaded_bytes': int(downloaded),
                    'speed': speed,
                    'total_bytes': None if fragmented else total,
                    'total_bytes_estimate': total,
                    'eta': (total - downloaded) / (speed or 1),
                    'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None,
                    'elapsed': time.time() - started,
                })
                self._hook_progress(status, info_dict)

                if not active and len(completed) >= frag_count:
                    send_rpc('aria2.shutdown')
                    retval = p.wait()
                    break

                time.sleep(0.1)
                retval = p.poll()

            return '', p.stderr.read(), retval


class HttpieFD(ExternalFD):
    AVAILABLE_OPT = '--version'
    EXE_NAME = 'http'

    def _make_cmd(self, tmpfilename, info_dict):
        cmd = ['http', '--download', '--output', tmpfilename, info_dict['url']]

        if info_dict.get('http_headers') is not None:
            for key, val in info_dict['http_headers'].items():
                cmd += [f'{key}:{val}']

        # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1]
        # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2]
        # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq
        # 2: https://httpie.io/docs/cli/sessions
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            cmd += [f'Cookie:{cookie_header}']
        return cmd


class FFmpegFD(ExternalFD):
    SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'm3u8', 'm3u8_native', 'rtsp', 'rtmp', 'rtmp_ffmpeg', 'mms', 'http_dash_segments')
    SUPPORTED_FEATURES = (Features.TO_STDOUT, Features.MULTIPLE_FORMATS)

    @classmethod
    def available(cls, path=None):
        # TODO: Fix path for ffmpeg
        # Fixme: This may be wrong when --ffmpeg-location is used
        return FFmpegPostProcessor().available

    def on_process_started(self, proc, stdin):
        """ Override this in subclasses """
        pass

    @classmethod
    def can_merge_formats(cls, info_dict, params):
        return (
            info_dict.get('requested_formats')
            and info_dict.get('protocol')
            and not params.get('allow_unplayable_formats')
            and 'no-direct-merge' not in params.get('compat_opts', [])
            and cls.can_download(info_dict))
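    # Unlike the other downloaders, ffmpeg is handed every selected format as a
    # separate -i input (with per-input headers/cookies) and writes the muxed
    # result itself, so no separate merge step is needed afterwards.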

    def _call_downloader(self, tmpfilename, info_dict):
        ffpp = FFmpegPostProcessor(downloader=self)
        if not ffpp.available:
            self.report_error('m3u8 download detected but ffmpeg could not be found. Please install')
            return False
        ffpp.check_version()

        args = [ffpp.executable, '-y']

        for log_level in ('quiet', 'verbose'):
            if self.params.get(log_level, False):
                args += ['-loglevel', log_level]
                break
        if not self.params.get('verbose'):
            args += ['-hide_banner']

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...))

        # These exist only for compatibility. Extractors should use
        # info_dict['downloader_options']['ffmpeg_args'] instead
        args += info_dict.get('_ffmpeg_args') or []
        seekable = info_dict.get('_seekable')
        if seekable is not None:
            # setting -seekable prevents ffmpeg from guessing if the server
            # supports seeking (by adding the header `Range: bytes=0-`), which
            # can cause problems in some cases
            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
            # http://trac.ffmpeg.org/ticket/6125#comment:10
            args += ['-seekable', '1' if seekable else '0']

        env = None
        proxy = self.params.get('proxy')
        if proxy:
            if not re.match(r'^[\da-zA-Z]+://', proxy):
                proxy = f'http://{proxy}'

            if proxy.startswith('socks'):
                self.report_warning(
                    f'{self.get_basename()} does not support SOCKS proxies. Downloading is likely to fail. '
                    'Consider adding --hls-prefer-native to your command.')

            # Since December 2015 ffmpeg supports -http_proxy option (see
            # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
            # We could switch to the following code if we are able to detect version properly
            # args += ['-http_proxy', proxy]
            env = os.environ.copy()
            env['HTTP_PROXY'] = proxy
            env['http_proxy'] = proxy

        protocol = info_dict.get('protocol')

        if protocol == 'rtmp':
            player_url = info_dict.get('player_url')
            page_url = info_dict.get('page_url')
            app = info_dict.get('app')
            play_path = info_dict.get('play_path')
            tc_url = info_dict.get('tc_url')
            flash_version = info_dict.get('flash_version')
            live = info_dict.get('rtmp_live', False)
            conn = info_dict.get('rtmp_conn')
            if player_url is not None:
                args += ['-rtmp_swfverify', player_url]
            if page_url is not None:
                args += ['-rtmp_pageurl', page_url]
            if app is not None:
                args += ['-rtmp_app', app]
            if play_path is not None:
                args += ['-rtmp_playpath', play_path]
            if tc_url is not None:
                args += ['-rtmp_tcurl', tc_url]
            if flash_version is not None:
                args += ['-rtmp_flashver', flash_version]
            if live:
                args += ['-rtmp_live', 'live']
            if isinstance(conn, list):
                for entry in conn:
                    args += ['-rtmp_conn', entry]
            elif isinstance(conn, str):
                args += ['-rtmp_conn', conn]

        start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')

        selected_formats = info_dict.get('requested_formats') or [info_dict]
        for i, fmt in enumerate(selected_formats):
            is_http = re.match(r'^https?://', fmt['url'])
            cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else []
            if cookies:
                args.extend(['-cookies', ''.join(
                    f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                    for cookie in cookies)])
            if fmt.get('http_headers') and is_http:
                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])

            if start_time:
                args += ['-ss', str(start_time)]
            if end_time:
                args += ['-t', str(end_time - start_time)]

            args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', fmt['url']]

        if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
            args += ['-c', 'copy']

        if info_dict.get('requested_formats') or protocol == 'http_dash_segments':
            for i, fmt in enumerate(selected_formats):
                stream_number = fmt.get('manifest_stream_number', 0)
                args.extend(['-map', f'{i}:{stream_number}'])

        if self.params.get('test', False):
            args += ['-fs', str(self._TEST_FILE_SIZE)]

        ext = info_dict['ext']
        if protocol in ('m3u8', 'm3u8_native'):
            use_mpegts = (tmpfilename == '-') or self.params.get('hls_use_mpegts')
            if use_mpegts is None:
                use_mpegts = info_dict.get('is_live')
            if use_mpegts:
                args += ['-f', 'mpegts']
            else:
                args += ['-f', 'mp4']
                if (ffpp.basename == 'ffmpeg' and ffpp._features.get('needs_adtstoasc')) and (not info_dict.get('acodec') or info_dict['acodec'].split('.')[0] in ('aac', 'mp4a')):
                    args += ['-bsf:a', 'aac_adtstoasc']
        elif protocol == 'rtmp':
            args += ['-f', 'flv']
        elif ext == 'mp4' and tmpfilename == '-':
            args += ['-f', 'mpegts']
        elif ext == 'unknown_video':
            ext = determine_ext(remove_end(tmpfilename, '.part'))
            if ext == 'unknown_video':
                self.report_warning(
                    'The video format is unknown and cannot be downloaded by ffmpeg. '
                    'Explicitly set the extension in the filename to attempt download in that format')
            else:
                self.report_warning(f'The video format is unknown. Trying to download as {ext} according to the filename')
                args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]
        else:
            args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)]

        args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args_out', ...))

        args += self._configuration_args(('_o1', '_o', ''))

        args = [encodeArgument(opt) for opt in args]
        args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))
        self._debug_cmd(args)

        piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
        with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
            if piped:
                self.on_process_started(proc, proc.stdin)
            try:
                retval = proc.wait()
            except BaseException as e:
                # subprocess.run would send the SIGKILL signal to ffmpeg and the
                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
                # produces a file that is playable (this is mostly useful for live
                # streams). Note that Windows is not affected and produces playable
                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
                    proc.communicate_or_kill(b'q')
                else:
                    proc.kill(timeout=None)
                raise
        return retval


class AVconvFD(FFmpegFD):
    pass


_BY_NAME = {
    klass.get_basename(): klass
    for name, klass in globals().items()
    if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD')
}


def list_external_downloaders():
    return sorted(_BY_NAME.keys())


def get_external_downloader(external_downloader):
    """ Given the name of the executable, see whether we support the given downloader """
    bn = os.path.splitext(os.path.basename(external_downloader))[0]
    return _BY_NAME.get(bn) or next((
        klass for klass in _BY_NAME.values() if klass.EXE_NAME in bn
    ), None)
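
# For example, get_external_downloader('aria2c') (or a full path such as
# '/usr/local/bin/aria2c') resolves to Aria2cFD via the basename lookup above,
# while other executables fall back to an EXE_NAME substring match or None.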