#29 New option -P/--paths to give different paths for different types of files

Syntax: `-P "type:path" -P "type:path"`
Types: home, temp, description, annotation, subtitle, infojson, thumbnail
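
As a rough illustration of the syntax above (not code from this commit), each `-P "type:path"` pair can be read as one entry of a type-to-path mapping; the helper below is a hypothetical sketch of that behaviour:

```python
# Hypothetical sketch: how repeated "-P TYPE:PATH" values collapse into a dict.
# The allowed types mirror the commit message; parse_paths itself is illustrative.
ALLOWED_TYPES = {'home', 'temp', 'description', 'annotation',
                 'subtitle', 'infojson', 'thumbnail'}

def parse_paths(values):
    paths = {}
    for value in values:
        dir_type, _, path = value.partition(':')
        if dir_type not in ALLOWED_TYPES or not path:
            raise ValueError('invalid -P value: %r' % value)
        paths[dir_type] = path.strip()
    return paths

print(parse_paths(['home:~/videos', 'temp:tmp', 'subtitle:subs']))
# {'home': '~/videos', 'temp': 'tmp', 'subtitle': 'subs'}
```
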
This commit is contained in: parent b8f6bbe68a, commit 0202b52a0c
README.md (22 lines changed)
@@ -150,9 +150,9 @@ ## General Options:
                                      compatibility) if this option is found
                                      inside the system configuration file, the
                                      user configuration is not loaded
-    --config-location PATH          Location of the configuration file; either
-                                     the path to the config or its containing
-                                     directory
+    --config-location PATH          Location of the main configuration file;
+                                     either the path to the config or its
+                                     containing directory
     --flat-playlist                 Do not extract the videos of a playlist,
                                      only list them
     --flat-videos                   Do not resolve the video urls
@@ -316,6 +316,17 @@ ## Filesystem Options:
                                      stdin), one URL per line. Lines starting
                                      with '#', ';' or ']' are considered as
                                      comments and ignored
+    -P, --paths TYPE:PATH           The paths where the files should be
+                                     downloaded. Specify the type of file and
+                                     the path separated by a colon ":"
+                                     (supported: description|annotation|subtitle
+                                     |infojson|thumbnail). Additionally, you can
+                                     also provide "home" and "temp" paths. All
+                                     intermediary files are first downloaded to
+                                     the temp path and then the final files are
+                                     moved over to the home path after download
+                                     is finished. Note that this option is
+                                     ignored if --output is an absolute path
     -o, --output TEMPLATE           Output filename template, see "OUTPUT
                                      TEMPLATE" for details
     --autonumber-start NUMBER       Specify the start value for %(autonumber)s
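
To make the new help text concrete, a small hedged sketch with invented paths — relative type paths are resolved under the home path:

```python
# Invented example values for the help text above: type -> directory.
paths = {'home': '/srv/media', 'temp': 'tmp', 'subtitle': 'subs'}
# clip.mkv     -> /srv/media/tmp/clip.mkv      while downloading
#              -> /srv/media/clip.mkv          after the final move
# clip.en.vtt  -> /srv/media/subs/clip.en.vtt
# With an absolute --output template, --paths is ignored entirely.
```
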
@@ -651,8 +662,9 @@ # CONFIGURATION

 You can configure youtube-dlc by placing any supported command line option to a configuration file. The configuration is loaded from the following locations:

-1. The file given by `--config-location`
+1. **Main Configuration**: The file given by `--config-location`
 1. **Portable Configuration**: `yt-dlp.conf` or `youtube-dlc.conf` in the same directory as the bundled binary. If you are running from source-code (`<root dir>/youtube_dlc/__main__.py`), the root directory is used instead.
+1. **Home Configuration**: `yt-dlp.conf` or `youtube-dlc.conf` in the home path given by `-P "home:<path>"`, or in the current directory if no such path is given
 1. **User Configuration**:
     * `%XDG_CONFIG_HOME%/yt-dlp/config` (recommended on Linux/macOS)
     * `%XDG_CONFIG_HOME%/yt-dlp.conf`
@@ -710,7 +722,7 @@ ### Authentication with `.netrc` file

 # OUTPUT TEMPLATE

-The `-o` option allows users to indicate a template for the output file names.
+The `-o` option is used to indicate a template for the output file names while `-P` option is used to specify the path each type of file should be saved to.

 **tl;dr:** [navigate me to examples](#output-template-examples).


youtube_dlc/YoutubeDL.py

@@ -69,6 +69,7 @@
     iri_to_uri,
     ISO3166Utils,
     locked_file,
+    make_dir,
     make_HTTPS_handler,
     MaxDownloadsReached,
     orderedSet,
@@ -114,8 +115,9 @@
     FFmpegFixupStretchedPP,
     FFmpegMergerPP,
     FFmpegPostProcessor,
-    FFmpegSubtitlesConvertorPP,
+    # FFmpegSubtitlesConvertorPP,
     get_postprocessor,
+    MoveFilesAfterDownloadPP,
 )
 from .version import __version__

@@ -257,6 +259,8 @@ class YoutubeDL(object):
     postprocessors:    A list of dictionaries, each with an entry
                        * key:  The name of the postprocessor. See
                                youtube_dlc/postprocessor/__init__.py for a list.
+                       * _after_move: Optional. If True, run this post_processor
+                               after 'MoveFilesAfterDownload'
                        as well as any further keyword arguments for the
                        postprocessor.
     post_hooks:        A list of functions that get called as the final step
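
A hedged sketch of what such an entry looks like in the `postprocessors` parameter; the `ExecAfterDownload` key and the `_after_move` flag come from this diff, while the command itself is an invented example:

```python
# Illustrative 'postprocessors' entry using the new '_after_move' flag.
postprocessors = [{
    'key': 'ExecAfterDownload',
    'exec_cmd': 'echo {}',   # invented command; {} stands for the file path
    '_after_move': True,     # run only after MoveFilesAfterDownload has moved the file home
}]
```
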
@@ -369,6 +373,8 @@ class YoutubeDL(object):
     params = None
     _ies = []
     _pps = []
+    _pps_end = []
+    __prepare_filename_warned = False
     _download_retcode = None
     _num_downloads = None
     _playlist_level = 0
@@ -382,6 +388,8 @@ def __init__(self, params=None, auto_init=True):
         self._ies = []
         self._ies_instances = {}
         self._pps = []
+        self._pps_end = []
+        self.__prepare_filename_warned = False
         self._post_hooks = []
         self._progress_hooks = []
         self._download_retcode = 0
@@ -483,8 +491,11 @@ def check_deprecated(param, option, suggestion):
             pp_class = get_postprocessor(pp_def_raw['key'])
             pp_def = dict(pp_def_raw)
             del pp_def['key']
+            after_move = pp_def.get('_after_move', False)
+            if '_after_move' in pp_def:
+                del pp_def['_after_move']
             pp = pp_class(self, **compat_kwargs(pp_def))
-            self.add_post_processor(pp)
+            self.add_post_processor(pp, after_move=after_move)

         for ph in self.params.get('post_hooks', []):
             self.add_post_hook(ph)
@@ -536,8 +547,11 @@ def add_default_info_extractors(self):
         for ie in gen_extractor_classes():
             self.add_info_extractor(ie)

-    def add_post_processor(self, pp):
+    def add_post_processor(self, pp, after_move=False):
         """Add a PostProcessor object to the end of the chain."""
+        if after_move:
+            self._pps_end.append(pp)
+        else:
             self._pps.append(pp)
         pp.set_downloader(self)

@@ -702,7 +716,7 @@ def report_file_delete(self, file_name):
         except UnicodeEncodeError:
             self.to_screen('Deleting already existent file')

-    def prepare_filename(self, info_dict):
+    def prepare_filename(self, info_dict, warn=False):
         """Generate the output filename."""
         try:
             template_dict = dict(info_dict)
@@ -796,11 +810,33 @@ def prepare_filename(self, info_dict):
             # to workaround encoding issues with subprocess on python2 @ Windows
             if sys.version_info < (3, 0) and sys.platform == 'win32':
                 filename = encodeFilename(filename, True).decode(preferredencoding())
-            return sanitize_path(filename)
+            filename = sanitize_path(filename)
+
+            if warn and not self.__prepare_filename_warned:
+                if not self.params.get('paths'):
+                    pass
+                elif filename == '-':
+                    self.report_warning('--paths is ignored when an outputting to stdout')
+                elif os.path.isabs(filename):
+                    self.report_warning('--paths is ignored since an absolute path is given in output template')
+                self.__prepare_filename_warned = True
+
+            return filename
         except ValueError as err:
             self.report_error('Error in output template: ' + str(err) + ' (encoding: ' + repr(preferredencoding()) + ')')
             return None

+    def prepare_filepath(self, filename, dir_type=''):
+        if filename == '-':
+            return filename
+        paths = self.params.get('paths', {})
+        assert isinstance(paths, dict)
+        homepath = expand_path(paths.get('home', '').strip())
+        assert isinstance(homepath, compat_str)
+        subdir = expand_path(paths.get(dir_type, '').strip()) if dir_type else ''
+        assert isinstance(subdir, compat_str)
+        return sanitize_path(os.path.join(homepath, subdir, filename))
+
     def _match_entry(self, info_dict, incomplete):
         """ Returns None if the file should be downloaded """

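
A hedged illustration of what the new `prepare_filepath` computes for different `dir_type` values, following the `os.path.join(homepath, subdir, filename)` logic above (the example paths are invented):

```python
import os

# Invented example inputs; mirrors os.path.join(homepath, subdir, filename) above.
paths = {'home': '/srv/media', 'temp': 'tmp', 'subtitle': 'subs'}
filename = 'clip.mkv'

os.path.join(paths['home'], '', filename)                      # '/srv/media/clip.mkv'      (final file)
os.path.join(paths['home'], paths['temp'], filename)           # '/srv/media/tmp/clip.mkv'  (intermediate)
os.path.join(paths['home'], paths['subtitle'], 'clip.en.vtt')  # '/srv/media/subs/clip.en.vtt'
```

Because `os.path.join` discards earlier components when a later one is absolute, an absolute type path such as `-P "temp:/tmp"` places those files outside the home directory.
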
@@ -972,7 +1008,8 @@ def process_ie_result(self, ie_result, download=True, extra_info={}):
         if ((extract_flat == 'in_playlist' and 'playlist' in extra_info)
                 or extract_flat is True):
             self.__forced_printings(
-                ie_result, self.prepare_filename(ie_result),
+                ie_result,
+                self.prepare_filepath(self.prepare_filename(ie_result)),
                 incomplete=True)
             return ie_result

@@ -1890,6 +1927,8 @@ def process_info(self, info_dict):

         assert info_dict.get('_type', 'video') == 'video'

+        info_dict.setdefault('__postprocessors', [])
+
         max_downloads = self.params.get('max_downloads')
         if max_downloads is not None:
             if self._num_downloads >= int(max_downloads):
@@ -1906,10 +1945,13 @@ def process_info(self, info_dict):

         self._num_downloads += 1

-        info_dict['_filename'] = filename = self.prepare_filename(info_dict)
+        filename = self.prepare_filename(info_dict, warn=True)
+        info_dict['_filename'] = full_filename = self.prepare_filepath(filename)
+        temp_filename = self.prepare_filepath(filename, 'temp')
+        files_to_move = {}

         # Forced printings
-        self.__forced_printings(info_dict, filename, incomplete=False)
+        self.__forced_printings(info_dict, full_filename, incomplete=False)

         if self.params.get('simulate', False):
             if self.params.get('force_write_download_archive', False):
@@ -1922,20 +1964,19 @@ def process_info(self, info_dict):
                 return

         def ensure_dir_exists(path):
-            try:
-                dn = os.path.dirname(path)
-                if dn and not os.path.exists(dn):
-                    os.makedirs(dn)
-                return True
-            except (OSError, IOError) as err:
-                self.report_error('unable to create directory ' + error_to_compat_str(err))
-                return False
+            return make_dir(path, self.report_error)

-        if not ensure_dir_exists(sanitize_path(encodeFilename(filename))):
+        if not ensure_dir_exists(encodeFilename(full_filename)):
+            return
+        if not ensure_dir_exists(encodeFilename(temp_filename)):
             return

         if self.params.get('writedescription', False):
-            descfn = replace_extension(filename, 'description', info_dict.get('ext'))
+            descfn = replace_extension(
+                self.prepare_filepath(filename, 'description'),
+                'description', info_dict.get('ext'))
+            if not ensure_dir_exists(encodeFilename(descfn)):
+                return
             if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(descfn)):
                 self.to_screen('[info] Video description is already present')
             elif info_dict.get('description') is None:
@@ -1950,7 +1991,11 @@ def ensure_dir_exists(path):
                 return

         if self.params.get('writeannotations', False):
-            annofn = replace_extension(filename, 'annotations.xml', info_dict.get('ext'))
+            annofn = replace_extension(
+                self.prepare_filepath(filename, 'annotation'),
+                'annotations.xml', info_dict.get('ext'))
+            if not ensure_dir_exists(encodeFilename(annofn)):
+                return
             if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(annofn)):
                 self.to_screen('[info] Video annotations are already present')
             elif not info_dict.get('annotations'):
@@ -1984,9 +2029,13 @@ def dl(name, info, subtitle=False):
             # ie = self.get_info_extractor(info_dict['extractor_key'])
             for sub_lang, sub_info in subtitles.items():
                 sub_format = sub_info['ext']
-                sub_filename = subtitles_filename(filename, sub_lang, sub_format, info_dict.get('ext'))
+                sub_filename = subtitles_filename(temp_filename, sub_lang, sub_format, info_dict.get('ext'))
+                sub_filename_final = subtitles_filename(
+                    self.prepare_filepath(filename, 'subtitle'),
+                    sub_lang, sub_format, info_dict.get('ext'))
                 if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(sub_filename)):
                     self.to_screen('[info] Video subtitle %s.%s is already present' % (sub_lang, sub_format))
+                    files_to_move[sub_filename] = sub_filename_final
                 else:
                     self.to_screen('[info] Writing video subtitles to: ' + sub_filename)
                     if sub_info.get('data') is not None:
@@ -1995,6 +2044,7 @@ def dl(name, info, subtitle=False):
                             # See https://github.com/ytdl-org/youtube-dl/issues/10268
                             with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
                                 subfile.write(sub_info['data'])
+                            files_to_move[sub_filename] = sub_filename_final
                         except (OSError, IOError):
                             self.report_error('Cannot write subtitles file ' + sub_filename)
                             return
@@ -2010,6 +2060,7 @@ def dl(name, info, subtitle=False):
                            with io.open(encodeFilename(sub_filename), 'wb') as subfile:
                                subfile.write(sub_data)
                            '''
+                            files_to_move[sub_filename] = sub_filename_final
                         except (ExtractorError, IOError, OSError, ValueError, compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                             self.report_warning('Unable to download subtitle for "%s": %s' %
                                                 (sub_lang, error_to_compat_str(err)))
@@ -2017,15 +2068,14 @@ def dl(name, info, subtitle=False):

         if self.params.get('skip_download', False):
             if self.params.get('convertsubtitles', False):
-                subconv = FFmpegSubtitlesConvertorPP(self, format=self.params.get('convertsubtitles'))
+                # subconv = FFmpegSubtitlesConvertorPP(self, format=self.params.get('convertsubtitles'))
                 filename_real_ext = os.path.splitext(filename)[1][1:]
                 filename_wo_ext = (
-                    os.path.splitext(filename)[0]
+                    os.path.splitext(full_filename)[0]
                     if filename_real_ext == info_dict['ext']
-                    else filename)
+                    else full_filename)
                 afilename = '%s.%s' % (filename_wo_ext, self.params.get('convertsubtitles'))
-                if subconv.available:
-                    info_dict.setdefault('__postprocessors', [])
+                # if subconv.available:
                     # info_dict['__postprocessors'].append(subconv)
                 if os.path.exists(encodeFilename(afilename)):
                     self.to_screen(
@@ -2033,13 +2083,17 @@ def dl(name, info, subtitle=False):
                         'converted' % afilename)
                 else:
                     try:
-                        self.post_process(filename, info_dict)
+                        self.post_process(full_filename, info_dict, files_to_move)
                     except (PostProcessingError) as err:
                         self.report_error('postprocessing: %s' % str(err))
                         return

         if self.params.get('writeinfojson', False):
-            infofn = replace_extension(filename, 'info.json', info_dict.get('ext'))
+            infofn = replace_extension(
+                self.prepare_filepath(filename, 'infojson'),
+                'info.json', info_dict.get('ext'))
+            if not ensure_dir_exists(encodeFilename(infofn)):
+                return
             if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(infofn)):
                 self.to_screen('[info] Video description metadata is already present')
             else:
@@ -2050,7 +2104,9 @@ def dl(name, info, subtitle=False):
                     self.report_error('Cannot write metadata to JSON file ' + infofn)
                     return

-        self._write_thumbnails(info_dict, filename)
+        thumbdir = os.path.dirname(self.prepare_filepath(filename, 'thumbnail'))
+        for thumbfn in self._write_thumbnails(info_dict, temp_filename):
+            files_to_move[thumbfn] = os.path.join(thumbdir, os.path.basename(thumbfn))

         # Write internet shortcut files
         url_link = webloc_link = desktop_link = False
@@ -2075,7 +2131,7 @@ def dl(name, info, subtitle=False):
             ascii_url = iri_to_uri(info_dict['webpage_url'])

         def _write_link_file(extension, template, newline, embed_filename):
-            linkfn = replace_extension(filename, extension, info_dict.get('ext'))
+            linkfn = replace_extension(full_filename, extension, info_dict.get('ext'))
             if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(linkfn)):
                 self.to_screen('[info] Internet shortcut is already present')
             else:
@@ -2105,9 +2161,27 @@ def _write_link_file(extension, template, newline, embed_filename):
         must_record_download_archive = False
         if not self.params.get('skip_download', False):
             try:
+
+                def existing_file(filename, temp_filename):
+                    file_exists = os.path.exists(encodeFilename(filename))
+                    tempfile_exists = (
+                        False if temp_filename == filename
+                        else os.path.exists(encodeFilename(temp_filename)))
+                    if not self.params.get('overwrites', False) and (file_exists or tempfile_exists):
+                        existing_filename = temp_filename if tempfile_exists else filename
+                        self.to_screen('[download] %s has already been downloaded and merged' % existing_filename)
+                        return existing_filename
+                    if tempfile_exists:
+                        self.report_file_delete(temp_filename)
+                        os.remove(encodeFilename(temp_filename))
+                    if file_exists:
+                        self.report_file_delete(filename)
+                        os.remove(encodeFilename(filename))
+                    return None
+
+                success = True
                 if info_dict.get('requested_formats') is not None:
                     downloaded = []
-                    success = True
                     merger = FFmpegMergerPP(self)
                     if not merger.available:
                         postprocessors = []
@@ -2136,32 +2210,31 @@ def compatible_formats(formats):
                             # TODO: Check acodec/vcodec
                             return False

-                    filename_real_ext = os.path.splitext(filename)[1][1:]
-                    filename_wo_ext = (
-                        os.path.splitext(filename)[0]
-                        if filename_real_ext == info_dict['ext']
-                        else filename)
                     requested_formats = info_dict['requested_formats']
+                    old_ext = info_dict['ext']
                     if self.params.get('merge_output_format') is None and not compatible_formats(requested_formats):
                         info_dict['ext'] = 'mkv'
                         self.report_warning(
                             'Requested formats are incompatible for merge and will be merged into mkv.')
+
+                    def correct_ext(filename):
+                        filename_real_ext = os.path.splitext(filename)[1][1:]
+                        filename_wo_ext = (
+                            os.path.splitext(filename)[0]
+                            if filename_real_ext == old_ext
+                            else filename)
+                        return '%s.%s' % (filename_wo_ext, info_dict['ext'])
+
                     # Ensure filename always has a correct extension for successful merge
-                    filename = '%s.%s' % (filename_wo_ext, info_dict['ext'])
-                    file_exists = os.path.exists(encodeFilename(filename))
-                    if not self.params.get('overwrites', False) and file_exists:
-                        self.to_screen(
-                            '[download] %s has already been downloaded and '
-                            'merged' % filename)
-                    else:
-                        if file_exists:
-                            self.report_file_delete(filename)
-                            os.remove(encodeFilename(filename))
+                    full_filename = correct_ext(full_filename)
+                    temp_filename = correct_ext(temp_filename)
+                    dl_filename = existing_file(full_filename, temp_filename)
+                    if dl_filename is None:
                         for f in requested_formats:
                             new_info = dict(info_dict)
                             new_info.update(f)
                             fname = prepend_extension(
-                                self.prepare_filename(new_info),
+                                self.prepare_filepath(self.prepare_filename(new_info), 'temp'),
                                 'f%s' % f['format_id'], new_info['ext'])
                             if not ensure_dir_exists(fname):
                                 return
@@ -2173,14 +2246,17 @@ def compatible_formats(formats):
                         # Even if there were no downloads, it is being merged only now
                         info_dict['__real_download'] = True
                 else:
-                    # Delete existing file with --yes-overwrites
-                    if self.params.get('overwrites', False):
-                        if os.path.exists(encodeFilename(filename)):
-                            self.report_file_delete(filename)
-                            os.remove(encodeFilename(filename))
                     # Just a single file
-                    success, real_download = dl(filename, info_dict)
+                    dl_filename = existing_file(full_filename, temp_filename)
+                    if dl_filename is None:
+                        success, real_download = dl(temp_filename, info_dict)
                         info_dict['__real_download'] = real_download

+                # info_dict['__temp_filename'] = temp_filename
+                dl_filename = dl_filename or temp_filename
+                info_dict['__dl_filename'] = dl_filename
+                info_dict['__final_filename'] = full_filename
+
             except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                 self.report_error('unable to download video data: %s' % error_to_compat_str(err))
                 return
@@ -2206,7 +2282,6 @@ def compatible_formats(formats):
                 elif fixup_policy == 'detect_or_warn':
                     stretched_pp = FFmpegFixupStretchedPP(self)
                     if stretched_pp.available:
-                        info_dict.setdefault('__postprocessors', [])
                         info_dict['__postprocessors'].append(stretched_pp)
                     else:
                         self.report_warning(
@@ -2225,7 +2300,6 @@
                 elif fixup_policy == 'detect_or_warn':
                     fixup_pp = FFmpegFixupM4aPP(self)
                     if fixup_pp.available:
-                        info_dict.setdefault('__postprocessors', [])
                         info_dict['__postprocessors'].append(fixup_pp)
                     else:
                         self.report_warning(
@@ -2244,7 +2318,6 @@
                 elif fixup_policy == 'detect_or_warn':
                     fixup_pp = FFmpegFixupM3u8PP(self)
                     if fixup_pp.available:
-                        info_dict.setdefault('__postprocessors', [])
                         info_dict['__postprocessors'].append(fixup_pp)
                     else:
                         self.report_warning(
@@ -2254,13 +2327,13 @@
                 assert fixup_policy in ('ignore', 'never')

             try:
-                self.post_process(filename, info_dict)
+                self.post_process(dl_filename, info_dict, files_to_move)
             except (PostProcessingError) as err:
                 self.report_error('postprocessing: %s' % str(err))
                 return
             try:
                 for ph in self._post_hooks:
-                    ph(filename)
+                    ph(full_filename)
             except Exception as err:
                 self.report_error('post hooks: %s' % str(err))
                 return
@@ -2326,27 +2399,41 @@ def filter_requested_info(info_dict):
             (k, v) for k, v in info_dict.items()
             if k not in ['requested_formats', 'requested_subtitles'])

-    def post_process(self, filename, ie_info):
+    def post_process(self, filename, ie_info, files_to_move={}):
         """Run all the postprocessors on the given file."""
         info = dict(ie_info)
         info['filepath'] = filename
-        pps_chain = []
-        if ie_info.get('__postprocessors') is not None:
-            pps_chain.extend(ie_info['__postprocessors'])
-        pps_chain.extend(self._pps)
-        for pp in pps_chain:
+
+        def run_pp(pp):
             files_to_delete = []
+            infodict = info
             try:
-                files_to_delete, info = pp.run(info)
+                files_to_delete, infodict = pp.run(infodict)
             except PostProcessingError as e:
                 self.report_error(e.msg)
-            if files_to_delete and not self.params.get('keepvideo', False):
+            if not files_to_delete:
+                return infodict
+
+            if self.params.get('keepvideo', False):
+                for f in files_to_delete:
+                    files_to_move.setdefault(f, '')
+            else:
                 for old_filename in set(files_to_delete):
                     self.to_screen('Deleting original file %s (pass -k to keep)' % old_filename)
                     try:
                         os.remove(encodeFilename(old_filename))
                     except (IOError, OSError):
                         self.report_warning('Unable to remove downloaded original file')
+                    if old_filename in files_to_move:
+                        del files_to_move[old_filename]
+            return infodict
+
+        for pp in ie_info.get('__postprocessors', []) + self._pps:
+            info = run_pp(pp)
+        info = run_pp(MoveFilesAfterDownloadPP(self, files_to_move))
+        files_to_move = {}
+        for pp in self._pps_end:
+            info = run_pp(pp)

     def _make_archive_id(self, info_dict):
         video_id = info_dict.get('id')
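
In effect, the rewritten `post_process` walks the postprocessors in three stages; a hedged, simplified sketch of that ordering (the helper function is hypothetical, the attribute names come from the diff):

```python
from youtube_dlc.postprocessor import MoveFilesAfterDownloadPP

def ordered_postprocessors(ydl, ie_pps, files_to_move):
    # 1. per-video PPs from info_dict['__postprocessors'], then the normal chain in ydl._pps
    # 2. MoveFilesAfterDownloadPP, which moves intermediate files to their final paths
    # 3. ydl._pps_end, i.e. postprocessors registered with '_after_move': True
    return ie_pps + ydl._pps + [MoveFilesAfterDownloadPP(ydl, files_to_move)] + ydl._pps_end
```
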
@@ -2700,14 +2787,11 @@ def _write_thumbnails(self, info_dict, filename):
         if thumbnails:
             thumbnails = [thumbnails[-1]]
         elif self.params.get('write_all_thumbnails', False):
-            thumbnails = info_dict.get('thumbnails')
+            thumbnails = info_dict.get('thumbnails') or []
         else:
-            return
+            thumbnails = []

-        if not thumbnails:
-            # No thumbnails present, so return immediately
-            return
-
+        ret = []
         for t in thumbnails:
             thumb_ext = determine_ext(t['url'], 'jpg')
             suffix = '_%s' % t['id'] if len(thumbnails) > 1 else ''
@@ -2715,6 +2799,7 @@ def _write_thumbnails(self, info_dict, filename):
             t['filename'] = thumb_filename = replace_extension(filename + suffix, thumb_ext, info_dict.get('ext'))

             if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(thumb_filename)):
+                ret.append(thumb_filename)
                 self.to_screen('[%s] %s: Thumbnail %sis already present' %
                                (info_dict['extractor'], info_dict['id'], thumb_display_id))
             else:
@@ -2724,8 +2809,10 @@ def _write_thumbnails(self, info_dict, filename):
                     uf = self.urlopen(t['url'])
                     with open(encodeFilename(thumb_filename), 'wb') as thumbf:
                         shutil.copyfileobj(uf, thumbf)
+                    ret.append(thumb_filename)
                     self.to_screen('[%s] %s: Writing thumbnail %sto: %s' %
                                    (info_dict['extractor'], info_dict['id'], thumb_display_id, thumb_filename))
                 except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
                     self.report_warning('Unable to download thumbnail "%s": %s' %
                                         (t['url'], error_to_compat_str(err)))
+        return ret

youtube_dlc/__init__.py

@@ -244,6 +244,7 @@ def parse_retries(retries):
         parser.error('Cannot download a video and extract audio into the same'
                      ' file! Use "{0}.%(ext)s" instead of "{0}" as the output'
                      ' template'.format(outtmpl))

     for f in opts.format_sort:
         if re.match(InfoExtractor.FormatSort.regex, f) is None:
             parser.error('invalid format sort string "%s" specified' % f)
@@ -318,12 +319,12 @@ def parse_retries(retries):
             'force': opts.sponskrub_force,
             'ignoreerror': opts.sponskrub is None,
         })
-    # Please keep ExecAfterDownload towards the bottom as it allows the user to modify the final file in any way.
-    # So if the user is able to remove the file before your postprocessor runs it might cause a few problems.
+    # ExecAfterDownload must be the last PP
     if opts.exec_cmd:
         postprocessors.append({
             'key': 'ExecAfterDownload',
             'exec_cmd': opts.exec_cmd,
+            '_after_move': True
         })

     _args_compat_warning = 'WARNING: %s given without specifying name. The arguments will be given to all %s\n'
@@ -372,6 +373,7 @@ def parse_retries(retries):
         'listformats': opts.listformats,
         'listformats_table': opts.listformats_table,
         'outtmpl': outtmpl,
+        'paths': opts.paths,
         'autonumber_size': opts.autonumber_size,
         'autonumber_start': opts.autonumber_start,
         'restrictfilenames': opts.restrictfilenames,
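
Because `opts.paths` is passed straight through to `YoutubeDL`, embedding users can in principle hand over the same mapping directly; a hedged sketch, with invented directories and an example URL:

```python
from youtube_dlc import YoutubeDL

ydl_opts = {
    'outtmpl': '%(title)s.%(ext)s',
    'paths': {
        'home': '/srv/media',  # final files end up here
        'temp': 'tmp',         # intermediate files, relative to home
        'thumbnail': 'thumbs',
    },
}
with YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])
```
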

youtube_dlc/options.py

@@ -14,6 +14,7 @@
     compat_shlex_split,
 )
 from .utils import (
+    expand_path,
     preferredencoding,
     write_string,
 )
@@ -62,7 +63,7 @@ def _readUserConf(package_name, default=[]):
         userConfFile = os.path.join(xdg_config_home, '%s.conf' % package_name)
         userConf = _readOptions(userConfFile, default=None)
         if userConf is not None:
-            return userConf
+            return userConf, userConfFile

         # appdata
         appdata_dir = compat_getenv('appdata')
@@ -70,19 +71,21 @@ def _readUserConf(package_name, default=[]):
             userConfFile = os.path.join(appdata_dir, package_name, 'config')
             userConf = _readOptions(userConfFile, default=None)
             if userConf is None:
-                userConf = _readOptions('%s.txt' % userConfFile, default=None)
+                userConfFile += '.txt'
+                userConf = _readOptions(userConfFile, default=None)
             if userConf is not None:
-                return userConf
+                return userConf, userConfFile

         # home
         userConfFile = os.path.join(compat_expanduser('~'), '%s.conf' % package_name)
         userConf = _readOptions(userConfFile, default=None)
         if userConf is None:
-            userConf = _readOptions('%s.txt' % userConfFile, default=None)
+            userConfFile += '.txt'
+            userConf = _readOptions(userConfFile, default=None)
         if userConf is not None:
-            return userConf
+            return userConf, userConfFile

-        return default
+        return default, None

     def _format_option_string(option):
         ''' ('-o', '--option') -> -o, --format METAVAR'''
@@ -187,7 +190,7 @@ def _dict_from_multiple_values_options_callback(
     general.add_option(
         '--config-location',
         dest='config_location', metavar='PATH',
-        help='Location of the configuration file; either the path to the config or its containing directory')
+        help='Location of the main configuration file; either the path to the config or its containing directory')
     general.add_option(
         '--flat-playlist',
         action='store_const', dest='extract_flat', const='in_playlist', default=False,
@@ -819,6 +822,21 @@ def _dict_from_multiple_values_options_callback(
     filesystem.add_option(
         '--id', default=False,
         action='store_true', dest='useid', help=optparse.SUPPRESS_HELP)
+    filesystem.add_option(
+        '-P', '--paths',
+        metavar='TYPE:PATH', dest='paths', default={}, type='str',
+        action='callback', callback=_dict_from_multiple_values_options_callback,
+        callback_kwargs={
+            'allowed_keys': 'home|temp|config|description|annotation|subtitle|infojson|thumbnail',
+            'process': lambda x: x.strip()},
+        help=(
+            'The paths where the files should be downloaded. '
+            'Specify the type of file and the path separated by a colon ":" '
+            '(supported: description|annotation|subtitle|infojson|thumbnail). '
+            'Additionally, you can also provide "home" and "temp" paths. '
+            'All intermediary files are first downloaded to the temp path and '
+            'then the final files are moved over to the home path after download is finished. '
+            'Note that this option is ignored if --output is an absolute path'))
     filesystem.add_option(
         '-o', '--output',
         dest='outtmpl', metavar='TEMPLATE',
@@ -1171,59 +1189,79 @@ def compat_conf(conf):
         return conf

     configs = {
-        'command_line': compat_conf(sys.argv[1:]),
-        'custom': [], 'portable': [], 'user': [], 'system': []}
-    opts, args = parser.parse_args(configs['command_line'])
+        'command-line': compat_conf(sys.argv[1:]),
+        'custom': [], 'home': [], 'portable': [], 'user': [], 'system': []}
+    paths = {'command-line': False}
+    opts, args = parser.parse_args(configs['command-line'])

     def get_configs():
-        if '--config-location' in configs['command_line']:
+        if '--config-location' in configs['command-line']:
             location = compat_expanduser(opts.config_location)
             if os.path.isdir(location):
                 location = os.path.join(location, 'youtube-dlc.conf')
             if not os.path.exists(location):
                 parser.error('config-location %s does not exist.' % location)
-            configs['custom'] = _readOptions(location)
-        if '--ignore-config' in configs['command_line']:
+            configs['custom'] = _readOptions(location, default=None)
+            if configs['custom'] is None:
+                configs['custom'] = []
+            else:
+                paths['custom'] = location
+        if '--ignore-config' in configs['command-line']:
             return
         if '--ignore-config' in configs['custom']:
             return

+        def read_options(path, user=False):
+            func = _readUserConf if user else _readOptions
+            current_path = os.path.join(path, 'yt-dlp.conf')
+            config = func(current_path, default=None)
+            if user:
+                config, current_path = config
+            if config is None:
+                current_path = os.path.join(path, 'youtube-dlc.conf')
+                config = func(current_path, default=None)
+                if user:
+                    config, current_path = config
+            if config is None:
+                return [], None
+            return config, current_path
+
         def get_portable_path():
             path = os.path.dirname(sys.argv[0])
             if os.path.abspath(sys.argv[0]) != os.path.abspath(sys.executable):  # Not packaged
                 path = os.path.join(path, '..')
             return os.path.abspath(path)

-        run_path = get_portable_path()
-        configs['portable'] = _readOptions(os.path.join(run_path, 'yt-dlp.conf'), default=None)
-        if configs['portable'] is None:
-            configs['portable'] = _readOptions(os.path.join(run_path, 'youtube-dlc.conf'))
-
+        configs['portable'], paths['portable'] = read_options(get_portable_path())
         if '--ignore-config' in configs['portable']:
             return
-        configs['system'] = _readOptions('/etc/yt-dlp.conf', default=None)
-        if configs['system'] is None:
-            configs['system'] = _readOptions('/etc/youtube-dlc.conf')
+
+        def get_home_path():
+            opts = parser.parse_args(configs['portable'] + configs['custom'] + configs['command-line'])[0]
+            return expand_path(opts.paths.get('home', '')).strip()
+
+        configs['home'], paths['home'] = read_options(get_home_path())
+        if '--ignore-config' in configs['home']:
+            return
+
+        configs['system'], paths['system'] = read_options('/etc')
         if '--ignore-config' in configs['system']:
             return
-        configs['user'] = _readUserConf('yt-dlp', default=None)
-        if configs['user'] is None:
-            configs['user'] = _readUserConf('youtube-dlc')
+
+        configs['user'], paths['user'] = read_options('', True)
         if '--ignore-config' in configs['user']:
-            configs['system'] = []
+            configs['system'], paths['system'] = [], None

     get_configs()
-    argv = configs['system'] + configs['user'] + configs['portable'] + configs['custom'] + configs['command_line']
+    argv = configs['system'] + configs['user'] + configs['home'] + configs['portable'] + configs['custom'] + configs['command-line']
     opts, args = parser.parse_args(argv)
     if opts.verbose:
-        for conf_label, conf in (
-                ('System config', configs['system']),
-                ('User config', configs['user']),
-                ('Portable config', configs['portable']),
-                ('Custom config', configs['custom']),
-                ('Command-line args', configs['command_line'])):
-            write_string('[debug] %s: %s\n' % (conf_label, repr(_hide_login_info(conf))))
+        for label in ('System', 'User', 'Portable', 'Home', 'Custom', 'Command-line'):
+            key = label.lower()
+            if paths.get(key) is None:
+                continue
+            if paths[key]:
+                write_string('[debug] %s config file: %s\n' % (label, paths[key]))
+            write_string('[debug] %s config: %s\n' % (label, repr(_hide_login_info(configs[key]))))

     return parser, opts, args
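
For reference, a hedged summary of the precedence that the `argv` assembly above establishes (entries parsed later override earlier ones):

```python
# Illustrative only: concatenation order used above, from lowest to highest precedence.
precedence = [
    'system',        # /etc/yt-dlp.conf or /etc/youtube-dlc.conf
    'user',          # XDG / appdata / ~ user configuration
    'home',          # yt-dlp.conf in the -P "home:<path>" directory (new in this commit)
    'portable',      # config next to the binary or the source checkout
    'custom',        # file given by --config-location
    'command-line',  # arguments on the command line itself
]
```
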

youtube_dlc/postprocessor/__init__.py

@@ -17,6 +17,7 @@
 from .xattrpp import XAttrMetadataPP
 from .execafterdownload import ExecAfterDownloadPP
 from .metadatafromtitle import MetadataFromTitlePP
+from .movefilesafterdownload import MoveFilesAfterDownloadPP
 from .sponskrub import SponSkrubPP

@@ -39,6 +40,7 @@ def get_postprocessor(key):
     'FFmpegVideoConvertorPP',
     'FFmpegVideoRemuxerPP',
     'MetadataFromTitlePP',
+    'MoveFilesAfterDownloadPP',
     'SponSkrubPP',
     'XAttrMetadataPP',
 ]

youtube_dlc/postprocessor/movefilesafterdownload.py (new file, 52 lines)

@@ -0,0 +1,52 @@
+from __future__ import unicode_literals
+import os
+import shutil
+
+from .common import PostProcessor
+from ..utils import (
+    encodeFilename,
+    make_dir,
+    PostProcessingError,
+)
+from ..compat import compat_str
+
+
+class MoveFilesAfterDownloadPP(PostProcessor):
+
+    def __init__(self, downloader, files_to_move):
+        PostProcessor.__init__(self, downloader)
+        self.files_to_move = files_to_move
+
+    @classmethod
+    def pp_key(cls):
+        return 'MoveFiles'
+
+    def run(self, info):
+        if info.get('__dl_filename') is None:
+            return [], info
+        self.files_to_move.setdefault(info['__dl_filename'], '')
+        outdir = os.path.dirname(os.path.abspath(encodeFilename(info['__final_filename'])))
+
+        for oldfile, newfile in self.files_to_move.items():
+            if not os.path.exists(encodeFilename(oldfile)):
+                self.report_warning('File "%s" cannot be found' % oldfile)
+                continue
+            if not newfile:
+                newfile = compat_str(os.path.join(outdir, os.path.basename(encodeFilename(oldfile))))
+            if os.path.abspath(encodeFilename(oldfile)) == os.path.abspath(encodeFilename(newfile)):
+                continue
+            if os.path.exists(encodeFilename(newfile)):
+                if self.get_param('overwrites', True):
+                    self.report_warning('Replacing existing file "%s"' % newfile)
+                    os.path.remove(encodeFilename(newfile))
+                else:
+                    self.report_warning(
+                        'Cannot move file "%s" out of temporary directory since "%s" already exists. '
+                        % (oldfile, newfile))
+                    continue
+            make_dir(newfile, PostProcessingError)
+            self.to_screen('Moving file "%s" to "%s"' % (oldfile, newfile))
+            shutil.move(oldfile, newfile)  # os.rename cannot move between volumes
+
+        info['filepath'] = info['__final_filename']
+        return [], info
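
A hedged sketch of how this postprocessor is driven from `post_process`: the downloader accumulates a `files_to_move` mapping while writing intermediate files, and the PP moves whatever is left in it (the filenames below are invented):

```python
# Illustrative only; real invocations come from YoutubeDL.post_process with the real info dict.
from youtube_dlc import YoutubeDL
from youtube_dlc.postprocessor import MoveFilesAfterDownloadPP

ydl = YoutubeDL({'paths': {'home': '/srv/media', 'temp': 'tmp'}})
files_to_move = {
    '/srv/media/tmp/clip.en.vtt': '/srv/media/subs/clip.en.vtt',  # explicit destination
    '/srv/media/tmp/clip.jpg': '',                                # '' -> directory of the final video
}
pp = MoveFilesAfterDownloadPP(ydl, files_to_move)
info = {'__dl_filename': '/srv/media/tmp/clip.mkv',
        '__final_filename': '/srv/media/clip.mkv'}
files_to_delete, info = pp.run(info)  # moves the files; info['filepath'] then points at the final file
```
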

youtube_dlc/utils.py

@@ -5893,3 +5893,15 @@ def clean_podcast_url(url):

 def random_uuidv4():
     return re.sub(r'[xy]', lambda x: _HEX_TABLE[random.randint(0, 15)], 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx')
+
+
+def make_dir(path, to_screen=None):
+    try:
+        dn = os.path.dirname(path)
+        if dn and not os.path.exists(dn):
+            os.makedirs(dn)
+        return True
+    except (OSError, IOError) as err:
+        if callable(to_screen) is not None:
+            to_screen('unable to create directory ' + error_to_compat_str(err))
+        return False