From 19a0394044bfad36cd665450271b8eb048a41c02 Mon Sep 17 00:00:00 2001 From: pukkandan Date: Mon, 18 Apr 2022 02:28:28 +0530 Subject: [PATCH] [cleanup] Misc cleanup and refactor (#2173) --- devscripts/make_readme.py | 25 +++--- setup.cfg | 4 +- supportedsites.md | 4 +- test/test_compat.py | 10 ++- test/test_execution.py | 11 ++- test/test_utils.py | 22 +++-- test/test_verbose_output.py | 12 ++- test/test_write_annotations.py.disabled | 1 - test/test_youtube_signature.py | 5 +- yt_dlp/YoutubeDL.py | 34 ++++---- yt_dlp/__init__.py | 3 +- yt_dlp/aes.py | 2 +- yt_dlp/cache.py | 5 +- yt_dlp/compat.py | 23 +++--- yt_dlp/cookies.py | 35 ++++---- yt_dlp/downloader/common.py | 98 ++++++++++------------ yt_dlp/downloader/fragment.py | 8 +- yt_dlp/downloader/websocket.py | 5 +- yt_dlp/extractor/__init__.py | 13 ++- yt_dlp/extractor/cpac.py | 7 -- yt_dlp/extractor/extractors.py | 3 +- yt_dlp/extractor/openload.py | 9 +-- yt_dlp/extractor/rtve.py | 7 +- yt_dlp/extractor/spotify.py | 2 + yt_dlp/extractor/youtube.py | 10 +-- yt_dlp/jsinterp.py | 41 +++------- yt_dlp/options.py | 72 +++++++++-------- yt_dlp/postprocessor/common.py | 2 +- yt_dlp/postprocessor/metadataparser.py | 22 +++-- yt_dlp/utils.py | 103 +++++++++--------------- yt_dlp/webvtt.py | 10 +-- 31 files changed, 263 insertions(+), 345 deletions(-) diff --git a/devscripts/make_readme.py b/devscripts/make_readme.py index 1719ac8e43..1401c2e5a7 100755 --- a/devscripts/make_readme.py +++ b/devscripts/make_readme.py @@ -6,22 +6,25 @@ import re import sys README_FILE = 'README.md' -helptext = sys.stdin.read() +OPTIONS_START = 'General Options:' +OPTIONS_END = 'CONFIGURATION' +EPILOG_START = 'See full documentation' + + +helptext = sys.stdin.read() if isinstance(helptext, bytes): helptext = helptext.decode('utf-8') +start, end = helptext.index(f'\n {OPTIONS_START}'), helptext.index(f'\n{EPILOG_START}') +options = re.sub(r'(?m)^ (\w.+)$', r'## \1', helptext[start + 1: end + 1]) + with open(README_FILE, encoding='utf-8') as f: - oldreadme = f.read() + readme = f.read() -header = oldreadme[:oldreadme.index('## General Options:')] -footer = oldreadme[oldreadme.index('# CONFIGURATION'):] - -options = helptext[helptext.index(' General Options:'):] -options = re.sub(r'(?m)^ (\w.+)$', r'## \1', options) -options = options + '\n' +header = readme[:readme.index(f'## {OPTIONS_START}')] +footer = readme[readme.index(f'# {OPTIONS_END}'):] with open(README_FILE, 'w', encoding='utf-8') as f: - f.write(header) - f.write(options) - f.write(footer) + for part in (header, options, footer): + f.write(part) diff --git a/setup.cfg b/setup.cfg index 59372d93a5..5fe95226ad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,5 +2,5 @@ universal = True [flake8] -exclude = yt_dlp/extractor/__init__.py,devscripts/buildserver.py,devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv,devscripts/create-github-release.py,devscripts/release.sh,devscripts/show-downloads-statistics.py -ignore = E402,E501,E731,E741,W503 \ No newline at end of file +exclude = devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv +ignore = E402,E501,E731,E741,W503 diff --git a/supportedsites.md b/supportedsites.md index eac7842a3d..746a93de62 100644 --- a/supportedsites.md +++ b/supportedsites.md @@ -1147,8 +1147,8 @@ - **Sport5** - **SportBox** - **SportDeutschland** - - **spotify** - - **spotify:show** + - **spotify**: Spotify episodes + - **spotify:show**: Spotify shows - **Spreaker** - **SpreakerPage** - **SpreakerShow** diff --git 
a/test/test_compat.py b/test/test_compat.py index 20dab9573f..29e7384f04 100644 --- a/test/test_compat.py +++ b/test/test_compat.py @@ -35,10 +35,12 @@ class TestCompat(unittest.TestCase): def test_compat_expanduser(self): old_home = os.environ.get('HOME') - test_str = r'C:\Documents and Settings\тест\Application Data' - compat_setenv('HOME', test_str) - self.assertEqual(compat_expanduser('~'), test_str) - compat_setenv('HOME', old_home or '') + test_str = R'C:\Documents and Settings\тест\Application Data' + try: + compat_setenv('HOME', test_str) + self.assertEqual(compat_expanduser('~'), test_str) + finally: + compat_setenv('HOME', old_home or '') def test_all_present(self): import yt_dlp.compat diff --git a/test/test_execution.py b/test/test_execution.py index 6a3e9944bf..6efd432e97 100644 --- a/test/test_execution.py +++ b/test/test_execution.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +import contextlib import os import subprocess import sys @@ -22,14 +23,14 @@ class TestExecution(unittest.TestCase): subprocess.check_call([sys.executable, '-c', 'import yt_dlp'], cwd=rootDir) def test_module_exec(self): - subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--version'], cwd=rootDir, stdout=_DEV_NULL) + subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL) def test_main_exec(self): - subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--version'], cwd=rootDir, stdout=_DEV_NULL) + subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL) def test_cmdline_umlauts(self): p = subprocess.Popen( - [sys.executable, 'yt_dlp/__main__.py', encodeArgument('ä'), '--version'], + [sys.executable, 'yt_dlp/__main__.py', '--ignore-config', encodeArgument('ä'), '--version'], cwd=rootDir, stdout=_DEV_NULL, stderr=subprocess.PIPE) _, stderr = p.communicate() self.assertFalse(stderr) @@ -39,10 +40,8 @@ class TestExecution(unittest.TestCase): subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'], cwd=rootDir, stdout=_DEV_NULL) subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=_DEV_NULL) finally: - try: + with contextlib.suppress(OSError): os.remove('yt_dlp/extractor/lazy_extractors.py') - except OSError: - pass if __name__ == '__main__': diff --git a/test/test_utils.py b/test/test_utils.py index 7909dc61c0..5e220087b5 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 # Allow direct execution +import contextlib import os import sys import unittest @@ -267,11 +268,18 @@ class TestUtil(unittest.TestCase): compat_setenv('yt_dlp_EXPATH_PATH', 'expanded') self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded') - self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME')) - self.assertEqual(expand_path('~'), compat_getenv('HOME')) - self.assertEqual( - expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')), - '%s/expanded' % compat_getenv('HOME')) + + old_home = os.environ.get('HOME') + test_str = R'C:\Documents and Settings\тест\Application Data' + try: + compat_setenv('HOME', test_str) + self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME')) + self.assertEqual(expand_path('~'), compat_getenv('HOME')) + self.assertEqual( + expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')), + '%s/expanded' % compat_getenv('HOME')) + finally: + compat_setenv('HOME', old_home or '') def test_prepend_extension(self): 
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext') @@ -1814,10 +1822,8 @@ Line 1 else: self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}') finally: - try: + with contextlib.suppress(OSError): os.remove(FILE) - except Exception: - pass if __name__ == '__main__': diff --git a/test/test_verbose_output.py b/test/test_verbose_output.py index 1213a97266..6579940740 100644 --- a/test/test_verbose_output.py +++ b/test/test_verbose_output.py @@ -13,7 +13,8 @@ class TestVerboseOutput(unittest.TestCase): def test_private_info_arg(self): outp = subprocess.Popen( [ - sys.executable, 'yt_dlp/__main__.py', '-v', + sys.executable, 'yt_dlp/__main__.py', + '-v', '--ignore-config', '--username', 'johnsmith@gmail.com', '--password', 'my_secret_password', ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -26,7 +27,8 @@ class TestVerboseOutput(unittest.TestCase): def test_private_info_shortarg(self): outp = subprocess.Popen( [ - sys.executable, 'yt_dlp/__main__.py', '-v', + sys.executable, 'yt_dlp/__main__.py', + '-v', '--ignore-config', '-u', 'johnsmith@gmail.com', '-p', 'my_secret_password', ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -39,7 +41,8 @@ class TestVerboseOutput(unittest.TestCase): def test_private_info_eq(self): outp = subprocess.Popen( [ - sys.executable, 'yt_dlp/__main__.py', '-v', + sys.executable, 'yt_dlp/__main__.py', + '-v', '--ignore-config', '--username=johnsmith@gmail.com', '--password=my_secret_password', ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -52,7 +55,8 @@ class TestVerboseOutput(unittest.TestCase): def test_private_info_shortarg_eq(self): outp = subprocess.Popen( [ - sys.executable, 'yt_dlp/__main__.py', '-v', + sys.executable, 'yt_dlp/__main__.py', + '-v', '--ignore-config', '-u=johnsmith@gmail.com', '-p=my_secret_password', ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/test/test_write_annotations.py.disabled b/test/test_write_annotations.py.disabled index bf13efe2c3..cca60561f5 100644 --- a/test/test_write_annotations.py.disabled +++ b/test/test_write_annotations.py.disabled @@ -6,7 +6,6 @@ import unittest sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -import io import xml.etree.ElementTree from test.helper import get_params, is_download_test, try_rm diff --git a/test/test_youtube_signature.py b/test/test_youtube_signature.py index ca23c910d5..2c2013295c 100644 --- a/test/test_youtube_signature.py +++ b/test/test_youtube_signature.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 # Allow direct execution +import contextlib import os import sys import unittest @@ -127,11 +128,9 @@ class TestSignature(unittest.TestCase): os.mkdir(self.TESTDATA_DIR) def tearDown(self): - try: + with contextlib.suppress(OSError): for f in os.listdir(self.TESTDATA_DIR): os.remove(f) - except OSError: - pass def t_factory(name, sig_func, url_pattern): diff --git a/yt_dlp/YoutubeDL.py b/yt_dlp/YoutubeDL.py index eaf2d92167..155b5a063f 100644 --- a/yt_dlp/YoutubeDL.py +++ b/yt_dlp/YoutubeDL.py @@ -23,7 +23,6 @@ import tokenize import traceback import unicodedata import urllib.request -from enum import Enum from string import ascii_letters from .cache import Cache @@ -82,6 +81,7 @@ from .utils import ( ISO3166Utils, LazyList, MaxDownloadsReached, + Namespace, PagedList, PerRequestProxyHandler, Popen, @@ -878,14 +878,15 @@ class YoutubeDL: raise DownloadError(message, exc_info) self._download_retcode = 1 - class Styles(Enum): - HEADERS = 
'yellow' - EMPHASIS = 'light blue' - ID = 'green' - DELIM = 'blue' - ERROR = 'red' - WARNING = 'yellow' - SUPPRESS = 'light black' + Styles = Namespace( + HEADERS='yellow', + EMPHASIS='light blue', + ID='green', + DELIM='blue', + ERROR='red', + WARNING='yellow', + SUPPRESS='light black', + ) def _format_text(self, handle, allow_colors, text, f, fallback=None, *, test_encoding=False): text = str(text) @@ -896,8 +897,6 @@ class YoutubeDL: text = text.encode(encoding, 'ignore').decode(encoding) if fallback is not None and text != original_text: text = fallback - if isinstance(f, Enum): - f = f.value return format_text(text, f) if allow_colors else text if fallback is None else fallback def _format_screen(self, *args, **kwargs): @@ -1760,7 +1759,8 @@ class YoutubeDL: playlist_index, entry = entry_tuple if 'playlist-index' in self.params.get('compat_opts', []): playlist_index = playlistitems[i - 1] if playlistitems else i + playliststart - 1 - self.to_screen(f'[download] Downloading video {i} of {n_entries}') + self.to_screen('[download] Downloading video %s of %s' % ( + self._format_screen(i, self.Styles.ID), self._format_screen(n_entries, self.Styles.EMPHASIS))) # This __x_forwarded_for_ip thing is a bit ugly but requires # minimal changes if x_forwarded_for: @@ -2337,11 +2337,9 @@ class YoutubeDL: if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None: # Working around out-of-range timestamp values (e.g. negative ones on Windows, # see http://bugs.python.org/issue1646728) - try: + with contextlib.suppress(ValueError, OverflowError, OSError): upload_date = datetime.datetime.utcfromtimestamp(info_dict[ts_key]) info_dict[date_key] = upload_date.strftime('%Y%m%d') - except (ValueError, OverflowError, OSError): - pass live_keys = ('is_live', 'was_live') live_status = info_dict.get('live_status') @@ -3631,10 +3629,8 @@ class YoutubeDL: if re.match('[0-9a-f]+', out): write_debug('Git HEAD: %s' % out) except Exception: - try: + with contextlib.suppress(Exception): sys.exc_clear() - except Exception: - pass def python_implementation(): impl_name = platform.python_implementation() @@ -3651,7 +3647,7 @@ class YoutubeDL: exe_versions, ffmpeg_features = FFmpegPostProcessor.get_versions_and_features(self) ffmpeg_features = {key for key, val in ffmpeg_features.items() if val} if ffmpeg_features: - exe_versions['ffmpeg'] += ' (%s)' % ','.join(ffmpeg_features) + exe_versions['ffmpeg'] += ' (%s)' % ','.join(sorted(ffmpeg_features)) exe_versions['rtmpdump'] = rtmpdump_version() exe_versions['phantomjs'] = PhantomJSwrapper._version() diff --git a/yt_dlp/__init__.py b/yt_dlp/__init__.py index 24991e19b4..9ea13ad37a 100644 --- a/yt_dlp/__init__.py +++ b/yt_dlp/__init__.py @@ -404,7 +404,8 @@ def validate_options(opts): report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters') report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark') report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove') - report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters', val1=opts.sponskrub and opts.sponskrub_cut) + report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters', + val1=opts.sponskrub and opts.sponskrub_cut) # Conflicts with --allow-unplayable-formats report_conflict('--add-metadata', 'addmetadata') diff --git a/yt_dlp/aes.py b/yt_dlp/aes.py index 01818df61c..603f3d1875 100644 --- a/yt_dlp/aes.py +++ b/yt_dlp/aes.py @@ -493,7 +493,7 @@ def ghash(subkey, 
data): last_y = [0] * BLOCK_SIZE_BYTES for i in range(0, len(data), BLOCK_SIZE_BYTES): - block = data[i : i + BLOCK_SIZE_BYTES] # noqa: E203 + block = data[i: i + BLOCK_SIZE_BYTES] last_y = block_product(xor(last_y, block), subkey) return last_y diff --git a/yt_dlp/cache.py b/yt_dlp/cache.py index 0cac3ee888..e3f8a7dab2 100644 --- a/yt_dlp/cache.py +++ b/yt_dlp/cache.py @@ -1,3 +1,4 @@ +import contextlib import errno import json import os @@ -57,7 +58,7 @@ class Cache: return default cache_fn = self._get_cache_fn(section, key, dtype) - try: + with contextlib.suppress(OSError): try: with open(cache_fn, encoding='utf-8') as cachef: self._ydl.write_debug(f'Loading {section}.{key} from cache') @@ -68,8 +69,6 @@ class Cache: except OSError as oe: file_size = str(oe) self._ydl.report_warning(f'Cache retrieval from {cache_fn} failed ({file_size})') - except OSError: - pass # No cache available return default diff --git a/yt_dlp/compat.py b/yt_dlp/compat.py index df0c54606e..f18c6cce28 100644 --- a/yt_dlp/compat.py +++ b/yt_dlp/compat.py @@ -1,6 +1,7 @@ import asyncio import base64 import collections +import contextlib import ctypes import getpass import html @@ -54,14 +55,11 @@ if compat_os_name == 'nt': def compat_shlex_quote(s): return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"') else: - from shlex import quote as compat_shlex_quote + from shlex import quote as compat_shlex_quote # noqa: F401 def compat_ord(c): - if type(c) is int: - return c - else: - return ord(c) + return c if isinstance(c, int) else ord(c) def compat_setenv(key, value, env=os.environ): @@ -118,16 +116,17 @@ except ImportError: # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl # See https://github.com/yt-dlp/yt-dlp/issues/792 # https://docs.python.org/3/library/os.path.html#os.path.expanduser -if compat_os_name in ('nt', 'ce') and 'HOME' in os.environ: - _userhome = os.environ['HOME'] - +if compat_os_name in ('nt', 'ce'): def compat_expanduser(path): - if not path.startswith('~'): + HOME = os.environ.get('HOME') + if not HOME: + return os.path.expanduser(path) + elif not path.startswith('~'): return path i = path.replace('\\', '/', 1).find('/') # ~user if i < 0: i = len(path) - userhome = os.path.join(os.path.dirname(_userhome), path[1:i]) if i > 1 else _userhome + userhome = os.path.join(os.path.dirname(HOME), path[1:i]) if i > 1 else HOME return userhome + path[i:] else: compat_expanduser = os.path.expanduser @@ -158,11 +157,9 @@ def windows_enable_vt_mode(): # TODO: Do this the proper way https://bugs.pytho global WINDOWS_VT_MODE startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - try: + with contextlib.suppress(Exception): subprocess.Popen('', shell=True, startupinfo=startupinfo).wait() WINDOWS_VT_MODE = True - except Exception: - pass # Deprecated diff --git a/yt_dlp/cookies.py b/yt_dlp/cookies.py index 6ff9f6f2d2..8a4baa5bb9 100644 --- a/yt_dlp/cookies.py +++ b/yt_dlp/cookies.py @@ -167,7 +167,7 @@ def _firefox_browser_dir(): if sys.platform in ('linux', 'linux2'): return os.path.expanduser('~/.mozilla/firefox') elif sys.platform == 'win32': - return os.path.expandvars(r'%APPDATA%\Mozilla\Firefox\Profiles') + return os.path.expandvars(R'%APPDATA%\Mozilla\Firefox\Profiles') elif sys.platform == 'darwin': return os.path.expanduser('~/Library/Application Support/Firefox') else: @@ -191,12 +191,12 @@ def _get_chromium_based_browser_settings(browser_name): appdata_local = os.path.expandvars('%LOCALAPPDATA%') 
appdata_roaming = os.path.expandvars('%APPDATA%') browser_dir = { - 'brave': os.path.join(appdata_local, r'BraveSoftware\Brave-Browser\User Data'), - 'chrome': os.path.join(appdata_local, r'Google\Chrome\User Data'), - 'chromium': os.path.join(appdata_local, r'Chromium\User Data'), - 'edge': os.path.join(appdata_local, r'Microsoft\Edge\User Data'), - 'opera': os.path.join(appdata_roaming, r'Opera Software\Opera Stable'), - 'vivaldi': os.path.join(appdata_local, r'Vivaldi\User Data'), + 'brave': os.path.join(appdata_local, R'BraveSoftware\Brave-Browser\User Data'), + 'chrome': os.path.join(appdata_local, R'Google\Chrome\User Data'), + 'chromium': os.path.join(appdata_local, R'Chromium\User Data'), + 'edge': os.path.join(appdata_local, R'Microsoft\Edge\User Data'), + 'opera': os.path.join(appdata_roaming, R'Opera Software\Opera Stable'), + 'vivaldi': os.path.join(appdata_local, R'Vivaldi\User Data'), }[browser_name] elif sys.platform == 'darwin': @@ -237,8 +237,8 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger): logger.info(f'Extracting cookies from {browser_name}') if not SQLITE_AVAILABLE: - logger.warning(('Cannot extract cookies from {} without sqlite3 support. ' - 'Please use a python interpreter compiled with sqlite3 support').format(browser_name)) + logger.warning(f'Cannot extract cookies from {browser_name} without sqlite3 support. ' + 'Please use a python interpreter compiled with sqlite3 support') return YoutubeDLCookieJar() config = _get_chromium_based_browser_settings(browser_name) @@ -269,8 +269,7 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger): cursor.connection.text_factory = bytes column_names = _get_column_names(cursor, 'cookies') secure_column = 'is_secure' if 'is_secure' in column_names else 'secure' - cursor.execute('SELECT host_key, name, value, encrypted_value, path, ' - 'expires_utc, {} FROM cookies'.format(secure_column)) + cursor.execute(f'SELECT host_key, name, value, encrypted_value, path, expires_utc, {secure_column} FROM cookies') jar = YoutubeDLCookieJar() failed_cookies = 0 unencrypted_cookies = 0 @@ -346,11 +345,11 @@ class ChromeCookieDecryptor: """ def decrypt(self, encrypted_value): - raise NotImplementedError + raise NotImplementedError('Must be implemented by sub classes') @property def cookie_counts(self): - raise NotImplementedError + raise NotImplementedError('Must be implemented by sub classes') def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=None): @@ -361,8 +360,7 @@ def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring= elif sys.platform == 'win32': return WindowsChromeCookieDecryptor(browser_root, logger) else: - raise NotImplementedError('Chrome cookie decryption is not supported ' - 'on this platform: {}'.format(sys.platform)) + raise NotImplementedError(f'Chrome cookie decryption is not supported on this platform: {sys.platform}') class LinuxChromeCookieDecryptor(ChromeCookieDecryptor): @@ -546,8 +544,7 @@ class DataParser: def skip(self, num_bytes, description='unknown'): if num_bytes > 0: - self._logger.debug('skipping {} bytes ({}): {}'.format( - num_bytes, description, self.read_bytes(num_bytes))) + self._logger.debug(f'skipping {num_bytes} bytes ({description}): {self.read_bytes(num_bytes)!r}') elif num_bytes < 0: raise ParserError(f'invalid skip of {num_bytes} bytes') @@ -784,8 +781,8 @@ def _get_kwallet_password(browser_keyring_name, logger): stdout, stderr = proc.communicate_or_kill() if proc.returncode != 0: - 
logger.error('kwallet-query failed with return code {}. Please consult ' - 'the kwallet-query man page for details'.format(proc.returncode)) + logger.error(f'kwallet-query failed with return code {proc.returncode}. Please consult ' + 'the kwallet-query man page for details') return b'' else: if stdout.lower().startswith(b'failed to read'): diff --git a/yt_dlp/downloader/common.py b/yt_dlp/downloader/common.py index 3033926ae1..3e53969886 100644 --- a/yt_dlp/downloader/common.py +++ b/yt_dlp/downloader/common.py @@ -1,3 +1,4 @@ +import contextlib import errno import os import random @@ -12,6 +13,7 @@ from ..minicurses import ( ) from ..utils import ( LockingUnsupportedError, + Namespace, decodeArgument, encodeFilename, error_to_compat_str, @@ -70,12 +72,30 @@ class FileDownloader: def __init__(self, ydl, params): """Create a FileDownloader object with the given options.""" - self.ydl = ydl + self._set_ydl(ydl) self._progress_hooks = [] self.params = params self._prepare_multiline_status() self.add_progress_hook(self.report_progress) + def _set_ydl(self, ydl): + self.ydl = ydl + + for func in ( + 'deprecation_warning', + 'report_error', + 'report_file_already_downloaded', + 'report_warning', + 'to_console_title', + 'to_stderr', + 'trouble', + 'write_debug', + ): + setattr(self, func, getattr(ydl, func)) + + def to_screen(self, *args, **kargs): + self.ydl.to_screen(*args, quiet=self.params.get('quiet'), **kargs) + @staticmethod def format_seconds(seconds): time = timetuple_from_msec(seconds * 1000) @@ -157,27 +177,6 @@ class FileDownloader: multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower()) return int(round(number * multiplier)) - def to_screen(self, *args, **kargs): - self.ydl.to_screen(*args, quiet=self.params.get('quiet'), **kargs) - - def to_stderr(self, message): - self.ydl.to_stderr(message) - - def to_console_title(self, message): - self.ydl.to_console_title(message) - - def trouble(self, *args, **kargs): - self.ydl.trouble(*args, **kargs) - - def report_warning(self, *args, **kargs): - self.ydl.report_warning(*args, **kargs) - - def report_error(self, *args, **kargs): - self.ydl.report_error(*args, **kargs) - - def write_debug(self, *args, **kargs): - self.ydl.write_debug(*args, **kargs) - def slow_down(self, start_time, now, byte_counter): """Sleep if the download speed is over the rate limit.""" rate_limit = self.params.get('ratelimit') @@ -263,10 +262,8 @@ class FileDownloader: # Ignore obviously invalid dates if filetime == 0: return - try: + with contextlib.suppress(Exception): os.utime(filename, (time.time(), filetime)) - except Exception: - pass return filetime def report_destination(self, filename): @@ -287,18 +284,18 @@ class FileDownloader: def _finish_multiline_status(self): self._multiline.end() - _progress_styles = { - 'downloaded_bytes': 'light blue', - 'percent': 'light blue', - 'eta': 'yellow', - 'speed': 'green', - 'elapsed': 'bold white', - 'total_bytes': '', - 'total_bytes_estimate': '', - } + ProgressStyles = Namespace( + downloaded_bytes='light blue', + percent='light blue', + eta='yellow', + speed='green', + elapsed='bold white', + total_bytes='', + total_bytes_estimate='', + ) def _report_progress_status(self, s, default_template): - for name, style in self._progress_styles.items(): + for name, style in self.ProgressStyles._asdict().items(): name = f'_{name}_str' if name not in s: continue @@ -391,10 +388,6 @@ class FileDownloader: '[download] Got server HTTP error: %s. Retrying (attempt %d of %s) ...' 
% (error_to_compat_str(err), count, self.format_retries(retries))) - def report_file_already_downloaded(self, *args, **kwargs): - """Report file has already been fully downloaded.""" - return self.ydl.report_file_already_downloaded(*args, **kwargs) - def report_unable_to_resume(self): """Report it was impossible to resume download.""" self.to_screen('[download] Unable to resume') @@ -433,25 +426,16 @@ class FileDownloader: self._finish_multiline_status() return True, False - if subtitle is False: - min_sleep_interval = self.params.get('sleep_interval') - if min_sleep_interval: - max_sleep_interval = self.params.get('max_sleep_interval', min_sleep_interval) - sleep_interval = random.uniform(min_sleep_interval, max_sleep_interval) - self.to_screen( - '[download] Sleeping %s seconds ...' % ( - int(sleep_interval) if sleep_interval.is_integer() - else '%.2f' % sleep_interval)) - time.sleep(sleep_interval) + if subtitle: + sleep_interval = self.params.get('sleep_interval_subtitles') or 0 else: - sleep_interval_sub = 0 - if type(self.params.get('sleep_interval_subtitles')) is int: - sleep_interval_sub = self.params.get('sleep_interval_subtitles') - if sleep_interval_sub > 0: - self.to_screen( - '[download] Sleeping %s seconds ...' % ( - sleep_interval_sub)) - time.sleep(sleep_interval_sub) + min_sleep_interval = self.params.get('sleep_interval') or 0 + sleep_interval = random.uniform( + min_sleep_interval, self.params.get('max_sleep_interval', min_sleep_interval)) + if sleep_interval > 0: + self.to_screen(f'[download] Sleeping {sleep_interval:.2f} seconds ...') + time.sleep(sleep_interval) + ret = self.real_download(filename, info_dict) self._finish_multiline_status() return ret, True diff --git a/yt_dlp/downloader/fragment.py b/yt_dlp/downloader/fragment.py index 2a97cfd161..390c840bbd 100644 --- a/yt_dlp/downloader/fragment.py +++ b/yt_dlp/downloader/fragment.py @@ -1,3 +1,4 @@ +import contextlib import http.client import json import math @@ -310,10 +311,8 @@ class FragmentFD(FileDownloader): if self.params.get('updatetime', True): filetime = ctx.get('fragment_filetime') if filetime: - try: + with contextlib.suppress(Exception): os.utime(ctx['filename'], (time.time(), filetime)) - except Exception: - pass downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename'])) self._hook_progress({ @@ -523,7 +522,8 @@ class FragmentFD(FileDownloader): break try: download_fragment(fragment, ctx) - result = append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx) + result = append_fragment( + decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx) except KeyboardInterrupt: if info_dict.get('is_live'): break diff --git a/yt_dlp/downloader/websocket.py b/yt_dlp/downloader/websocket.py index 96d1138469..6b190cd90a 100644 --- a/yt_dlp/downloader/websocket.py +++ b/yt_dlp/downloader/websocket.py @@ -1,4 +1,5 @@ import asyncio +import contextlib import os import signal import threading @@ -29,11 +30,9 @@ class FFmpegSinkFD(FileDownloader): except (BrokenPipeError, OSError): pass finally: - try: + with contextlib.suppress(OSError): stdin.flush() stdin.close() - except OSError: - pass os.kill(os.getpid(), signal.SIGINT) class FFmpegStdinFD(FFmpegFD): diff --git a/yt_dlp/extractor/__init__.py b/yt_dlp/extractor/__init__.py index b35484246a..6288c5c6bb 100644 --- a/yt_dlp/extractor/__init__.py +++ b/yt_dlp/extractor/__init__.py @@ -1,24 +1,23 @@ +import contextlib import os from ..utils import load_plugins _LAZY_LOADER = False if not 
os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'): - try: - from .lazy_extractors import * + with contextlib.suppress(ImportError): + from .lazy_extractors import * # noqa: F403 from .lazy_extractors import _ALL_CLASSES _LAZY_LOADER = True - except ImportError: - pass if not _LAZY_LOADER: - from .extractors import * - _ALL_CLASSES = [ + from .extractors import * # noqa: F403 + _ALL_CLASSES = [ # noqa: F811 klass for name, klass in globals().items() if name.endswith('IE') and name != 'GenericIE' ] - _ALL_CLASSES.append(GenericIE) + _ALL_CLASSES.append(GenericIE) # noqa: F405 _PLUGIN_CLASSES = load_plugins('extractor', 'IE', globals()) _ALL_CLASSES = list(_PLUGIN_CLASSES.values()) + _ALL_CLASSES diff --git a/yt_dlp/extractor/cpac.py b/yt_dlp/extractor/cpac.py index e8975e5e2b..65ac2497f4 100644 --- a/yt_dlp/extractor/cpac.py +++ b/yt_dlp/extractor/cpac.py @@ -9,13 +9,6 @@ from ..utils import ( urljoin, ) -# compat_range -try: - if callable(xrange): - range = xrange -except (NameError, TypeError): - pass - class CPACIE(InfoExtractor): IE_NAME = 'cpac' diff --git a/yt_dlp/extractor/extractors.py b/yt_dlp/extractor/extractors.py index cd3934a70d..d67b2eeecc 100644 --- a/yt_dlp/extractor/extractors.py +++ b/yt_dlp/extractor/extractors.py @@ -1,4 +1,5 @@ -# flake8: noqa +# flake8: noqa: F401 + from .abc import ( ABCIE, ABCIViewIE, diff --git a/yt_dlp/extractor/openload.py b/yt_dlp/extractor/openload.py index f2600aaa4e..61e3a8b861 100644 --- a/yt_dlp/extractor/openload.py +++ b/yt_dlp/extractor/openload.py @@ -1,3 +1,4 @@ +import contextlib import json import os import subprocess @@ -31,13 +32,11 @@ def cookie_to_dict(cookie): cookie_dict['secure'] = cookie.secure if cookie.discard is not None: cookie_dict['discard'] = cookie.discard - try: + with contextlib.suppress(TypeError): if (cookie.has_nonstandard_attr('httpOnly') or cookie.has_nonstandard_attr('httponly') or cookie.has_nonstandard_attr('HttpOnly')): cookie_dict['httponly'] = True - except TypeError: - pass return cookie_dict @@ -129,10 +128,8 @@ class PhantomJSwrapper: def __del__(self): for name in self._TMP_FILE_NAMES: - try: + with contextlib.suppress(OSError, KeyError): os.remove(self._TMP_FILES[name].name) - except (OSError, KeyError): - pass def _save_cookies(self, url): cookies = cookie_jar_to_list(self.extractor._downloader.cookiejar) diff --git a/yt_dlp/extractor/rtve.py b/yt_dlp/extractor/rtve.py index e5837e8c87..42a6029688 100644 --- a/yt_dlp/extractor/rtve.py +++ b/yt_dlp/extractor/rtve.py @@ -1,6 +1,5 @@ import base64 import io -import sys from .common import InfoExtractor from ..compat import ( @@ -17,8 +16,6 @@ from ..utils import ( try_get, ) -_bytes_to_chr = (lambda x: x) if sys.version_info[0] == 2 else (lambda x: map(chr, x)) - class RTVEALaCartaIE(InfoExtractor): IE_NAME = 'rtve.es:alacarta' @@ -87,7 +84,7 @@ class RTVEALaCartaIE(InfoExtractor): alphabet = [] e = 0 d = 0 - for l in _bytes_to_chr(alphabet_data): + for l in alphabet_data.decode('iso-8859-1'): if d == 0: alphabet.append(l) d = e = (e + 1) % 4 @@ -97,7 +94,7 @@ class RTVEALaCartaIE(InfoExtractor): f = 0 e = 3 b = 1 - for letter in _bytes_to_chr(url_data): + for letter in url_data.decode('iso-8859-1'): if f == 0: l = int(letter) * 10 f = 1 diff --git a/yt_dlp/extractor/spotify.py b/yt_dlp/extractor/spotify.py index 3128825e5d..a2068a1b6e 100644 --- a/yt_dlp/extractor/spotify.py +++ b/yt_dlp/extractor/spotify.py @@ -102,6 +102,7 @@ class SpotifyBaseIE(InfoExtractor): class SpotifyIE(SpotifyBaseIE): IE_NAME = 'spotify' + IE_DESC = 'Spotify episodes' _VALID_URL = 
SpotifyBaseIE._VALID_URL_TEMPL % 'episode' _TESTS = [{ 'url': 'https://open.spotify.com/episode/4Z7GAJ50bgctf6uclHlWKo', @@ -131,6 +132,7 @@ class SpotifyIE(SpotifyBaseIE): class SpotifyShowIE(SpotifyBaseIE): IE_NAME = 'spotify:show' + IE_DESC = 'Spotify shows' _VALID_URL = SpotifyBaseIE._VALID_URL_TEMPL % 'show' _TEST = { 'url': 'https://open.spotify.com/show/4PM9Ke6l66IRNpottHKV9M', diff --git a/yt_dlp/extractor/youtube.py b/yt_dlp/extractor/youtube.py index 4312309480..7da54e0886 100644 --- a/yt_dlp/extractor/youtube.py +++ b/yt_dlp/extractor/youtube.py @@ -3586,17 +3586,17 @@ class YoutubeIE(YoutubeBaseInfoExtractor): headers=self.generate_api_headers(ytcfg=master_ytcfg), note='Downloading initial data API JSON') - try: - # This will error if there is no livechat + try: # This will error if there is no livechat initial_data['contents']['twoColumnWatchNextResults']['conversationBar']['liveChatRenderer']['continuations'][0]['reloadContinuationData']['continuation'] + except (KeyError, IndexError, TypeError): + pass + else: info.setdefault('subtitles', {})['live_chat'] = [{ - 'url': 'https://www.youtube.com/watch?v=%s' % video_id, # url is needed to set cookies + 'url': f'https://www.youtube.com/watch?v={video_id}', # url is needed to set cookies 'video_id': video_id, 'ext': 'json', 'protocol': 'youtube_live_chat' if is_live or is_upcoming else 'youtube_live_chat_replay', }] - except (KeyError, IndexError, TypeError): - pass if initial_data: info['chapters'] = ( diff --git a/yt_dlp/jsinterp.py b/yt_dlp/jsinterp.py index 001836887a..70857b7981 100644 --- a/yt_dlp/jsinterp.py +++ b/yt_dlp/jsinterp.py @@ -1,7 +1,8 @@ +import collections +import contextlib import json import operator import re -from collections.abc import MutableMapping from .utils import ExtractorError, remove_quotes @@ -35,38 +36,17 @@ class JS_Continue(ExtractorError): ExtractorError.__init__(self, 'Invalid continue') -class LocalNameSpace(MutableMapping): - def __init__(self, *stack): - self.stack = tuple(stack) - - def __getitem__(self, key): - for scope in self.stack: - if key in scope: - return scope[key] - raise KeyError(key) - +class LocalNameSpace(collections.ChainMap): def __setitem__(self, key, value): - for scope in self.stack: + for scope in self.maps: if key in scope: scope[key] = value - break - else: - self.stack[0][key] = value - return value + return + self.maps[0][key] = value def __delitem__(self, key): raise NotImplementedError('Deleting is not supported') - def __iter__(self): - for scope in self.stack: - yield from scope - - def __len__(self, key): - return len(iter(self)) - - def __repr__(self): - return f'LocalNameSpace{self.stack}' - class JSInterpreter: def __init__(self, code, objects=None): @@ -302,10 +282,8 @@ class JSInterpreter: if var_m: return local_vars[var_m.group('name')] - try: + with contextlib.suppress(ValueError): return json.loads(expr) - except ValueError: - pass m = re.match( r'(?P%s)\[(?P.+)\]$' % _NAME_RE, expr) @@ -521,14 +499,13 @@ class JSInterpreter: def build_function(self, argnames, code, *global_stack): global_stack = list(global_stack) or [{}] - local_vars = global_stack.pop(0) def resf(args, **kwargs): - local_vars.update({ + global_stack[0].update({ **dict(zip(argnames, args)), **kwargs }) - var_stack = LocalNameSpace(local_vars, *global_stack) + var_stack = LocalNameSpace(*global_stack) for stmt in self._separate(code.replace('\n', ''), ';'): ret, should_abort = self.interpret_statement(stmt, var_stack) if should_abort: diff --git a/yt_dlp/options.py 
b/yt_dlp/options.py index 243beab4d1..0c042caf4b 100644 --- a/yt_dlp/options.py +++ b/yt_dlp/options.py @@ -21,6 +21,7 @@ from .utils import ( Config, expand_path, get_executable_path, + join_nonempty, remove_end, write_string, ) @@ -109,9 +110,43 @@ def parseOpts(overrideArguments=None, ignore_config_files='if_override'): return parser, opts, args +class _YoutubeDLHelpFormatter(optparse.IndentedHelpFormatter): + def __init__(self): + # No need to wrap help messages if we're on a wide console + max_width = compat_get_terminal_size().columns or 80 + # 47% is chosen because that is how README.md is currently formatted + # and moving help text even further to the right is undesirable. + # This can be reduced in the future to get a prettier output + super().__init__(width=max_width, max_help_position=int(0.47 * max_width)) + + @staticmethod + def format_option_strings(option): + """ ('-o', '--option') -> -o, --format METAVAR """ + opts = join_nonempty( + option._short_opts and option._short_opts[0], + option._long_opts and option._long_opts[0], + delim=', ') + if option.takes_value(): + opts += f' {option.metavar}' + return opts + + class _YoutubeDLOptionParser(optparse.OptionParser): # optparse is deprecated since python 3.2. So assume a stable interface even for private methods + def __init__(self): + super().__init__( + prog='yt-dlp', + version=__version__, + usage='%prog [OPTIONS] URL [URL...]', + epilog='See full documentation at https://github.com/yt-dlp/yt-dlp#readme', + formatter=_YoutubeDLHelpFormatter(), + conflict_handler='resolve', + ) + + def _get_args(self, args): + return sys.argv[1:] if args is None else list(args) + def _match_long_opt(self, opt): """Improve ambigious argument resolution by comparing option objects instead of argument strings""" try: @@ -123,23 +158,6 @@ class _YoutubeDLOptionParser(optparse.OptionParser): def create_parser(): - def _format_option_string(option): - ''' ('-o', '--option') -> -o, --format METAVAR''' - - opts = [] - - if option._short_opts: - opts.append(option._short_opts[0]) - if option._long_opts: - opts.append(option._long_opts[0]) - if len(opts) > 1: - opts.insert(1, ', ') - - if option.takes_value(): - opts.append(' %s' % option.metavar) - - return ''.join(opts) - def _list_from_options_callback(option, opt_str, value, parser, append=True, delim=',', process=str.strip): # append can be True, False or -1 (prepend) current = list(getattr(parser.values, option.dest)) if append else [] @@ -204,23 +222,7 @@ def create_parser(): out_dict[key] = out_dict.get(key, []) + [val] if append else val setattr(parser.values, option.dest, out_dict) - # No need to wrap help messages if we're on a wide console - columns = compat_get_terminal_size().columns - max_width = columns if columns else 80 - # 47% is chosen because that is how README.md is currently formatted - # and moving help text even further to the right is undesirable. 
- # This can be reduced in the future to get a prettier output - max_help_position = int(0.47 * max_width) - - fmt = optparse.IndentedHelpFormatter(width=max_width, max_help_position=max_help_position) - fmt.format_option_strings = _format_option_string - - parser = _YoutubeDLOptionParser( - version=__version__, - formatter=fmt, - usage='%prog [OPTIONS] URL [URL...]', - conflict_handler='resolve' - ) + parser = _YoutubeDLOptionParser() general = optparse.OptionGroup(parser, 'General Options') general.add_option( @@ -1048,7 +1050,7 @@ def create_parser(): verbosity.add_option( '-C', '--call-home', dest='call_home', action='store_true', default=False, - # help='[Broken] Contact the yt-dlp server for debugging') + # help='Contact the yt-dlp server for debugging') help=optparse.SUPPRESS_HELP) verbosity.add_option( '--no-call-home', diff --git a/yt_dlp/postprocessor/common.py b/yt_dlp/postprocessor/common.py index fdea3a7ea1..519d061383 100644 --- a/yt_dlp/postprocessor/common.py +++ b/yt_dlp/postprocessor/common.py @@ -69,8 +69,8 @@ class PostProcessor(metaclass=PostProcessorMetaClass): return name[6:] if name[:6].lower() == 'ffmpeg' else name def to_screen(self, text, prefix=True, *args, **kwargs): - tag = '[%s] ' % self.PP_NAME if prefix else '' if self._downloader: + tag = '[%s] ' % self.PP_NAME if prefix else '' return self._downloader.to_screen(f'{tag}{text}', *args, **kwargs) def report_warning(self, text, *args, **kwargs): diff --git a/yt_dlp/postprocessor/metadataparser.py b/yt_dlp/postprocessor/metadataparser.py index 5bc435da36..98885bd194 100644 --- a/yt_dlp/postprocessor/metadataparser.py +++ b/yt_dlp/postprocessor/metadataparser.py @@ -1,29 +1,25 @@ import re -from enum import Enum from .common import PostProcessor +from ..utils import Namespace class MetadataParserPP(PostProcessor): - class Actions(Enum): - INTERPRET = 'interpretter' - REPLACE = 'replacer' - def __init__(self, downloader, actions): - PostProcessor.__init__(self, downloader) + super().__init__(self, downloader) self._actions = [] for f in actions: - action = f[0] - assert isinstance(action, self.Actions) - self._actions.append(getattr(self, action.value)(*f[1:])) + action, *args = f + assert action in self.Actions + self._actions.append(action(*args)) @classmethod def validate_action(cls, action, *data): - ''' Each action can be: + """Each action can be: (Actions.INTERPRET, from, to) OR (Actions.REPLACE, field, search, replace) - ''' - if not isinstance(action, cls.Actions): + """ + if action not in cls.Actions: raise ValueError(f'{action!r} is not a valid action') getattr(cls, action.value)(cls, *data) # So this can raise error to validate @@ -99,6 +95,8 @@ class MetadataParserPP(PostProcessor): search_re = re.compile(search) return f + Actions = Namespace(INTERPRET=interpretter, REPLACE=replacer) + class MetadataFromFieldPP(MetadataParserPP): @classmethod diff --git a/yt_dlp/utils.py b/yt_dlp/utils.py index 34a9383629..cf52fb2b63 100644 --- a/yt_dlp/utils.py +++ b/yt_dlp/utils.py @@ -70,6 +70,7 @@ from .socks import ProxyType, sockssocket try: import certifi + # The certificate may not be bundled in executable has_certifi = os.path.exists(certifi.where()) except ImportError: @@ -282,22 +283,16 @@ def write_json_file(obj, fn): if sys.platform == 'win32': # Need to remove existing file on Windows, else os.rename raises # WindowsError or FileExistsError. 
- try: + with contextlib.suppress(OSError): os.unlink(fn) - except OSError: - pass - try: + with contextlib.suppress(OSError): mask = os.umask(0) os.umask(mask) os.chmod(tf.name, 0o666 & ~mask) - except OSError: - pass os.rename(tf.name, fn) except Exception: - try: + with contextlib.suppress(OSError): os.remove(tf.name) - except OSError: - pass raise @@ -575,12 +570,9 @@ def extract_attributes(html_element): }. """ parser = HTMLAttributeParser() - try: + with contextlib.suppress(compat_HTMLParseError): parser.feed(html_element) parser.close() - # Older Python may throw HTMLParseError in case of malformed HTML - except compat_HTMLParseError: - pass return parser.attrs @@ -800,10 +792,8 @@ def _htmlentity_transform(entity_with_semicolon): else: base = 10 # See https://github.com/ytdl-org/youtube-dl/issues/7518 - try: + with contextlib.suppress(ValueError): return compat_chr(int(numstr, base)) - except ValueError: - pass # Unknown entity in name, return its literal representation return '&%s;' % entity @@ -812,7 +802,7 @@ def _htmlentity_transform(entity_with_semicolon): def unescapeHTML(s): if s is None: return None - assert type(s) == compat_str + assert isinstance(s, str) return re.sub( r'&([^&;]+;)', lambda m: _htmlentity_transform(m.group(1)), s) @@ -865,7 +855,7 @@ def get_subprocess_encoding(): def encodeFilename(s, for_subprocess=False): - assert type(s) == str + assert isinstance(s, str) return s @@ -924,10 +914,8 @@ def _ssl_load_windows_store_certs(ssl_context, storename): except PermissionError: return for cert in certs: - try: + with contextlib.suppress(ssl.SSLError): ssl_context.load_verify_locations(cadata=cert) - except ssl.SSLError: - pass def make_HTTPS_handler(params, **kwargs): @@ -1391,7 +1379,7 @@ def make_socks_conn_class(base_class, socks_proxy): def connect(self): self.sock = sockssocket() self.sock.setproxy(*proxy_args) - if type(self.timeout) in (int, float): + if isinstance(self.timeout, (int, float)): self.sock.settimeout(self.timeout) self.sock.connect((self.host, self.port)) @@ -1526,9 +1514,7 @@ class YoutubeDLCookieJar(compat_cookiejar.MozillaCookieJar): try: cf.write(prepare_line(line)) except compat_cookiejar.LoadError as e: - write_string( - 'WARNING: skipping cookie file entry due to %s: %r\n' - % (e, line), sys.stderr) + write_string(f'WARNING: skipping cookie file entry due to {e}: {line!r}\n') continue cf.seek(0) self._really_load(cf, filename, ignore_discard, ignore_expires) @@ -1646,12 +1632,10 @@ def parse_iso8601(date_str, delimiter='T', timezone=None): if timezone is None: timezone, date_str = extract_timezone(date_str) - try: + with contextlib.suppress(ValueError): date_format = f'%Y-%m-%d{delimiter}%H:%M:%S' dt = datetime.datetime.strptime(date_str, date_format) - timezone return calendar.timegm(dt.timetuple()) - except ValueError: - pass def date_formats(day_first=True): @@ -1671,17 +1655,13 @@ def unified_strdate(date_str, day_first=True): _, date_str = extract_timezone(date_str) for expression in date_formats(day_first): - try: + with contextlib.suppress(ValueError): upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d') - except ValueError: - pass if upload_date is None: timetuple = email.utils.parsedate_tz(date_str) if timetuple: - try: + with contextlib.suppress(ValueError): upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d') - except ValueError: - pass if upload_date is not None: return compat_str(upload_date) @@ -1709,11 +1689,9 @@ def unified_timestamp(date_str, day_first=True): date_str = 
m.group(1) for expression in date_formats(day_first): - try: + with contextlib.suppress(ValueError): dt = datetime.datetime.strptime(date_str, expression) - timezone + datetime.timedelta(hours=pm_delta) return calendar.timegm(dt.timetuple()) - except ValueError: - pass timetuple = email.utils.parsedate_tz(date_str) if timetuple: return calendar.timegm(timetuple) + pm_delta * 3600 @@ -1879,9 +1857,8 @@ def get_windows_version(): def write_string(s, out=None, encoding=None): - if out is None: - out = sys.stderr - assert type(s) == compat_str + assert isinstance(s, str) + out = out or sys.stderr if 'b' in getattr(out, 'mode', ''): byt = s.encode(encoding or preferredencoding(), 'ignore') @@ -2483,18 +2460,10 @@ def parse_duration(s): else: return None - duration = 0 - if secs: - duration += float(secs) - if mins: - duration += float(mins) * 60 - if hours: - duration += float(hours) * 60 * 60 - if days: - duration += float(days) * 24 * 60 * 60 if ms: - duration += float(ms.replace(':', '.')) - return duration + ms = ms.replace(':', '.') + return sum(float(part or 0) * mult for part, mult in ( + (days, 86400), (hours, 3600), (mins, 60), (secs, 1), (ms, 1))) def prepend_extension(filename, ext, expected_real_ext=None): @@ -2957,9 +2926,10 @@ TV_PARENTAL_GUIDELINES = { def parse_age_limit(s): - if type(s) == int: + # isinstance(False, int) is True. So type() must be used instead + if type(s) is int: return s if 0 <= s <= 21 else None - if not isinstance(s, str): + elif not isinstance(s, str): return None m = re.match(r'^(?P\d{1,2})\+?$', s) if m: @@ -3227,7 +3197,7 @@ def parse_codecs(codecs_str): if not tcodec: tcodec = full_codec else: - write_string('WARNING: Unknown codec %s\n' % full_codec, sys.stderr) + write_string(f'WARNING: Unknown codec {full_codec}\n') if vcodec or acodec or tcodec: return { 'vcodec': vcodec or 'none', @@ -4934,7 +4904,7 @@ def get_executable_path(): def load_plugins(name, suffix, namespace): classes = {} - try: + with contextlib.suppress(FileNotFoundError): plugins_spec = importlib.util.spec_from_file_location( name, os.path.join(get_executable_path(), 'ytdlp_plugins', name, '__init__.py')) plugins = importlib.util.module_from_spec(plugins_spec) @@ -4947,8 +4917,6 @@ def load_plugins(name, suffix, namespace): continue klass = getattr(plugins, name) classes[name] = namespace[name] = klass - except FileNotFoundError: - pass return classes @@ -4957,13 +4925,14 @@ def traverse_obj( casesense=True, is_user_input=False, traverse_string=False): ''' Traverse nested list/dict/tuple @param path_list A list of paths which are checked one by one. - Each path is a list of keys where each key is a string, - a function, a tuple of strings/None or "...". - When a fuction is given, it takes the key and value as arguments - and returns whether the key matches or not. When a tuple is given, - all the keys given in the tuple are traversed, and - "..." 
traverses all the keys in the object - "None" returns the object without traversal + Each path is a list of keys where each key is a: + - None: Do nothing + - string: A dictionary key + - int: An index into a list + - tuple: A list of keys all of which will be traversed + - Ellipsis: Fetch all values in the object + - Function: Takes the key and value as arguments + and returns whether the key matches or not @param default Default value to return @param expected_type Only accept final value of this type (Can also be any callable) @param get_all Return all the values obtained from a path or only the first one @@ -5253,7 +5222,7 @@ class Config: yield from self.own_args or [] def parse_args(self): - return self._parser.parse_args(list(self.all_args)) + return self._parser.parse_args(self.all_args) class WebSocketsWrapper(): @@ -5339,3 +5308,7 @@ class classproperty: def __get__(self, _, cls): return self.f(cls) + + +def Namespace(**kwargs): + return collections.namedtuple('Namespace', kwargs)(**kwargs) diff --git a/yt_dlp/webvtt.py b/yt_dlp/webvtt.py index 3180eafde9..741622b25b 100644 --- a/yt_dlp/webvtt.py +++ b/yt_dlp/webvtt.py @@ -103,14 +103,8 @@ def _parse_ts(ts): Convert a parsed WebVTT timestamp (a re.Match obtained from _REGEX_TS) into an MPEG PES timestamp: a tick counter at 90 kHz resolution. """ - - h, min, s, ms = ts.groups() - return 90 * ( - int(h or 0) * 3600000 + # noqa: W504,E221,E222 - int(min) * 60000 + # noqa: W504,E221,E222 - int(s) * 1000 + # noqa: W504,E221,E222 - int(ms) # noqa: W504,E221,E222 - ) + return 90 * sum( + int(part or 0) * mult for part, mult in zip(ts.groups(), (3600_000, 60_000, 1000, 1))) def _format_ts(ts):
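
Most of the try/except/pass blocks touched in this patch collapse into contextlib.suppress, which is an exact replacement only when the handler body is a bare pass. A minimal before/after sketch of the idiom (the temp-file name is made up for illustration):

import contextlib
import os

# Before: the pattern removed throughout this patch
try:
    os.remove('nonexistent.tmp')
except OSError:
    pass

# After: equivalent, and reads as a single statement
with contextlib.suppress(OSError):
    os.remove('nonexistent.tmp')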
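
FileDownloader._set_ydl above replaces the individual to_stderr/to_console_title/trouble/report_warning/... forwarding methods with a loop that binds the corresponding YoutubeDL methods straight onto the downloader instance. A stripped-down sketch of that setattr/getattr delegation with stand-in classes (Logger, Downloader and the shortened method list are illustrative, not from the patch):

class Logger:
    def report_warning(self, msg):
        print(f'WARNING: {msg}')

    def to_stderr(self, msg):
        print(msg)


class Downloader:
    def __init__(self, logger):
        self._set_logger(logger)

    def _set_logger(self, logger):
        self.logger = logger
        # Bind the parent's bound methods onto this instance instead of
        # writing a one-line forwarding method for each of them
        for func in ('report_warning', 'to_stderr'):
            setattr(self, func, getattr(logger, func))


Downloader(Logger()).report_warning('unable to resume')  # -> WARNING: unable to resume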
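
jsinterp.LocalNameSpace now derives from collections.ChainMap instead of hand-rolling a MutableMapping: lookups fall through the scope stack for free, and only __setitem__ is overridden so that assignment updates the first scope that already defines the name, creating the variable in the local (first) scope otherwise. A self-contained sketch of that scoping behaviour, using the same overrides as the patch, with two plain dicts standing in for the interpreter's local and global stacks:

import collections


class LocalNameSpace(collections.ChainMap):
    def __setitem__(self, key, value):
        # Update the first scope that already defines the name ...
        for scope in self.maps:
            if key in scope:
                scope[key] = value
                return
        # ... otherwise create it in the local (innermost) scope
        self.maps[0][key] = value

    def __delitem__(self, key):
        raise NotImplementedError('Deleting is not supported')


global_scope = {'x': 1}
local_scope = LocalNameSpace({}, global_scope)

local_scope['x'] = 2  # existing name: the enclosing scope is updated
local_scope['y'] = 3  # new name: lands in the local scope

assert global_scope == {'x': 2}
assert local_scope.maps[0] == {'y': 3}
assert local_scope['x'] == 2 and local_scope['y'] == 3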
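
The Namespace helper added at the bottom of yt_dlp/utils.py is an ad-hoc collections.namedtuple instance, and it is what YoutubeDL.Styles, FileDownloader.ProgressStyles and MetadataParserPP.Actions are built from above. Because the result is a tuple, attribute access returns the stored value directly (no .value unwrapping as with the old Enum members) and the "in" operator tests membership against the values, which is what the "action in self.Actions" assertion relies on. A short sketch reusing a few of the Styles entries from YoutubeDL.py:

import collections


def Namespace(**kwargs):
    # Same one-liner as in yt_dlp/utils.py: an ad-hoc namedtuple instance
    return collections.namedtuple('Namespace', kwargs)(**kwargs)


Styles = Namespace(HEADERS='yellow', EMPHASIS='light blue', ID='green')

assert Styles.ID == 'green'  # attribute access yields the value itself
assert 'green' in Styles     # membership is tested against the values
# _asdict() is what FileDownloader._report_progress_status iterates over
assert Styles._asdict() == {'HEADERS': 'yellow', 'EMPHASIS': 'light blue', 'ID': 'green'}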
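
parse_duration in utils.py and _parse_ts in webvtt.py are both rewritten above as a single sum over (component, multiplier) pairs; for WebVTT the result is an MPEG PES tick count at 90 kHz, i.e. 90 * (hours*3600000 + minutes*60000 + seconds*1000 + milliseconds). A quick worked check of that arithmetic, with a plain tuple standing in for the re.Match groups the real function receives (parse_ts_components is just a local stand-in name):

def parse_ts_components(h, m, s, ms):
    # Mirrors the new _parse_ts: 90 kHz ticks from hours/minutes/seconds/milliseconds
    return 90 * sum(
        int(part or 0) * mult
        for part, mult in zip((h, m, s, ms), (3600_000, 60_000, 1000, 1)))


# 00:01:02.500 is 62,500 ms, i.e. 5,625,000 ticks at 90 kHz
assert parse_ts_components('00', '01', '02', '500') == 90 * 62_500 == 5_625_000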