From b88af539798328f5fddf8c290402617ed2076e78 Mon Sep 17 00:00:00 2001
From: Andrew Udvare
Date: Tue, 3 Jun 2025 19:42:31 -0400
Subject: [PATCH 01/13] Add yt-dlp stubs

---
 stubs/yt-dlp/@tests/stubtest_allowlist.txt | 26 +
 stubs/yt-dlp/METADATA.toml | 2 +
 stubs/yt-dlp/yt_dlp/YoutubeDL.pyi | 124 +++
 stubs/yt-dlp/yt_dlp/__init__.pyi | 258 +++++++
 stubs/yt-dlp/yt_dlp/aes.pyi | 40 +
 stubs/yt-dlp/yt_dlp/cache.pyi | 19 +
 stubs/yt-dlp/yt_dlp/compat/__init__.pyi | 12 +
 stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi | 21 +
 stubs/yt-dlp/yt_dlp/compat/imghdr.pyi | 3 +
 stubs/yt-dlp/yt_dlp/cookies.pyi | 104 +++
 stubs/yt-dlp/yt_dlp/downloader/__init__.pyi | 31 +
 stubs/yt-dlp/yt_dlp/downloader/bunnycdn.pyi | 17 +
 stubs/yt-dlp/yt_dlp/downloader/common.pyi | 87 +++
 stubs/yt-dlp/yt_dlp/downloader/dash.pyi | 8 +
 stubs/yt-dlp/yt_dlp/downloader/external.pyi | 62 ++
 stubs/yt-dlp/yt_dlp/downloader/f4m.pyi | 31 +
 stubs/yt-dlp/yt_dlp/downloader/fc2.pyi | 3 +
 stubs/yt-dlp/yt_dlp/downloader/fragment.pyi | 28 +
 stubs/yt-dlp/yt_dlp/downloader/hls.pyi | 7 +
 stubs/yt-dlp/yt_dlp/downloader/http.pyi | 3 +
 stubs/yt-dlp/yt_dlp/downloader/ism.pyi | 28 +
 stubs/yt-dlp/yt_dlp/downloader/mhtml.pyi | 3 +
 stubs/yt-dlp/yt_dlp/downloader/niconico.pyi | 3 +
 stubs/yt-dlp/yt_dlp/downloader/rtmp.pyi | 5 +
 stubs/yt-dlp/yt_dlp/downloader/rtsp.pyi | 5 +
 stubs/yt-dlp/yt_dlp/downloader/websocket.pyi | 11 +
 .../yt_dlp/downloader/youtube_live_chat.pyi | 7 +
 stubs/yt-dlp/yt_dlp/extractor/__init__.pyi | 8 +
 stubs/yt-dlp/yt_dlp/extractor/common.pyi | 717 ++++++++++++++++++
 .../yt_dlp/extractor/commonmistakes.pyi | 10 +
 .../yt_dlp/extractor/commonprotocols.pyi | 10 +
 stubs/yt-dlp/yt_dlp/globals.pyi | 19 +
 stubs/yt-dlp/yt_dlp/jsinterp.pyi | 55 ++
 stubs/yt-dlp/yt_dlp/minicurses.pyi | 23 +
 stubs/yt-dlp/yt_dlp/networking/__init__.pyi | 9 +
 stubs/yt-dlp/yt_dlp/networking/_helper.pyi | 51 ++
 stubs/yt-dlp/yt_dlp/networking/common.pyi | 161 ++++
 stubs/yt-dlp/yt_dlp/networking/exceptions.pyi | 36 +
 .../yt-dlp/yt_dlp/networking/impersonate.pyi | 27 +
 stubs/yt-dlp/yt_dlp/networking/websocket.pyi | 9 +
 stubs/yt-dlp/yt_dlp/options.pyi | 15 +
 stubs/yt-dlp/yt_dlp/plugins.pyi | 44 ++
 .../yt-dlp/yt_dlp/postprocessor/__init__.pyi | 7 +
 stubs/yt-dlp/yt_dlp/postprocessor/common.pyi | 28 +
 stubs/yt-dlp/yt_dlp/socks.pyi | 72 ++
 stubs/yt-dlp/yt_dlp/update.pyi | 34 +
 stubs/yt-dlp/yt_dlp/utils/__init__.pyi | 263 +++++++
 stubs/yt-dlp/yt_dlp/utils/_legacy.pyi | 57 ++
 stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 686 +++++++++++++++++
 stubs/yt-dlp/yt_dlp/utils/networking.pyi | 42 +
 stubs/yt-dlp/yt_dlp/utils/progress.pyi | 22 +
 stubs/yt-dlp/yt_dlp/utils/traversal.pyi | 77 ++
 stubs/yt-dlp/yt_dlp/version.pyi | 6 +
 stubs/yt-dlp/yt_dlp/webvtt.pyi | 49 ++
 54 files changed, 3485 insertions(+)
 create mode 100644 stubs/yt-dlp/@tests/stubtest_allowlist.txt
 create mode 100644 stubs/yt-dlp/METADATA.toml
 create mode 100644 stubs/yt-dlp/yt_dlp/YoutubeDL.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/aes.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/cache.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/compat/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/compat/imghdr.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/cookies.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/bunnycdn.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/common.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/dash.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/external.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/f4m.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/fc2.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/fragment.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/hls.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/http.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/ism.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/mhtml.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/niconico.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/rtmp.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/rtsp.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/websocket.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/extractor/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/extractor/common.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/extractor/commonmistakes.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/extractor/commonprotocols.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/globals.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/jsinterp.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/minicurses.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/_helper.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/common.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/exceptions.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/impersonate.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/networking/websocket.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/options.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/plugins.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/postprocessor/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/postprocessor/common.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/socks.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/update.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/__init__.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/_legacy.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/_utils.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/networking.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/progress.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/utils/traversal.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/version.pyi
 create mode 100644 stubs/yt-dlp/yt_dlp/webvtt.pyi

diff --git a/stubs/yt-dlp/@tests/stubtest_allowlist.txt b/stubs/yt-dlp/@tests/stubtest_allowlist.txt
new file mode 100644
index 000000000000..c4222fabcefd
--- /dev/null
+++ b/stubs/yt-dlp/@tests/stubtest_allowlist.txt
@@ -0,0 +1,26 @@
+# Extractors will not be stubbed at this time.
+yt_dlp.extractor.*
+# Postprocessors will not be stubbed at this time.
+yt_dlp.postprocessor.*
+# Won't be covered.
+yt_dlp.__main__
+yt_dlp.__pyinstaller.*
+yt_dlp.compat.(shutil|types|urllib).*
+yt_dlp.dependencies.*
+yt_dlp.jsinterp.Debugger.sys
+yt_dlp.networking.impersonate.ImpersonateTarget._DT
+yt_dlp.utils.xattr
+# Deprecated
+yt_dlp.YoutubeDL.(YoutubeDL.)?parse_outtmpl
+yt_dlp.downloader.(common.)?FileDownloader.parse_bytes
+yt_dlp.networking.(common.)?Response.(code|info|get(code|url|header))
+yt_dlp.utils._legacy.decode_png
+# ``except IndexError`` is sufficient.
+yt_dlp.utils.(_utils.)?(PlaylistEntries|(Lazy|Paged)List).IndexError
+# Reports 'not a function'.
+yt_dlp.networking.(common.)?(HEAD|PATCH|PUT)Request
+# This is partially typed.
+yt_dlp.utils.(_utils.)?is_iterable_like
+# Generated with functools.partial.
+yt_dlp.utils.(_utils.)?prepend_extension +yt_dlp.utils.(_utils.)?replace_extension diff --git a/stubs/yt-dlp/METADATA.toml b/stubs/yt-dlp/METADATA.toml new file mode 100644 index 000000000000..c971b23e8191 --- /dev/null +++ b/stubs/yt-dlp/METADATA.toml @@ -0,0 +1,2 @@ +version = "2025.05.*" +upstream_repository = "https://github.com/yt-dlp/yt-dlp" diff --git a/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi b/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi new file mode 100644 index 000000000000..f22e938aeda3 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi @@ -0,0 +1,124 @@ +from collections.abc import Callable, Collection, Iterable, Iterator, Mapping +from functools import cached_property +from types import TracebackType +from typing_extensions import Self, TypeAlias +from urllib.request import Request + +from yt_dlp.cache import Cache +from yt_dlp.cookies import YoutubeDLCookieJar +from yt_dlp.networking import RequestDirector, RequestHandler, Response +from yt_dlp.utils import Namespace + +from . import _Params +from .extractor.common import InfoExtractor, _InfoDict +from .postprocessor.common import PostProcessor +from .utils._utils import _ProgressState + +_FormatSelector: TypeAlias = Callable[[Mapping[str, object]], Iterator[object]] + +class YoutubeDL: + params: _Params + cache: Cache + format_selector: _FormatSelector + archive: set[str] + def __init__(self, params: _Params | None = ..., auto_init: bool = ...) -> None: ... + def warn_if_short_id(self, argv: list[str]) -> None: ... + def add_info_extractor(self, ie: InfoExtractor) -> None: ... + def get_info_extractor(self, ie_key: str) -> InfoExtractor: ... + def add_default_info_extractors(self) -> None: ... + def add_post_processor(self, pp: PostProcessor, when: str = "post_process") -> None: ... + def add_post_hook(self, ph: Callable[..., object]) -> None: ... + def add_close_hook(self, ch: Callable[..., object]) -> None: ... + def add_progress_hook(self, ph: Callable[..., object]) -> None: ... + def add_postprocessor_hook(self, ph: Callable[..., object]) -> None: ... + def to_stdout(self, message: str, skip_eol: bool = False, quiet: bool | None = None) -> None: ... + def to_screen(self, message: str, skip_eol: bool = False, quiet: bool | None = None, only_once: bool = False) -> None: ... + def to_stderr(self, message: str, only_once: bool = False) -> None: ... + def to_console_title( + self, message: str | None = None, progress_state: _ProgressState | None = None, percent: int | None = None + ) -> None: ... + def save_console_title(self) -> None: ... + def restore_console_title(self) -> None: ... + def __enter__(self) -> Self: ... + def save_cookies(self) -> None: ... + def __exit__(self, *args: object) -> None: ... + def close(self) -> None: ... + def trouble(self, message: str | None = None, tb: TracebackType | None = None, is_error: bool = True) -> None: ... + Styles: Namespace + def report_warning(self, message: str, only_once: bool = False) -> None: ... + def deprecation_warning(self, message: str, *, stacklevel: int = 0) -> None: ... + def deprecated_feature(self, message: str) -> None: ... + def report_error(self, message: str, *args: object, **kwargs: object) -> None: ... + def write_debug(self, message: str, only_once: bool = False) -> None: ... + def report_file_already_downloaded(self, file_name: str) -> None: ... + def report_file_delete(self, file_name: str) -> None: ... + def raise_no_formats(self, info: str, forced: bool = False, *, msg: str | None = None) -> None: ... 
+ def get_output_path(self, dir_type: str = "", filename: str | None = None) -> str: ... + @staticmethod + def escape_outtmpl(outtmpl: str) -> str: ... + @classmethod + def validate_outtmpl(cls, outtmpl: str) -> ValueError | None: ... + def prepare_outtmpl(self, outtmpl: str, info_dict: _InfoDict, sanitize: bool = False) -> tuple[str, dict[str, object]]: ... + def evaluate_outtmpl(self, outtmpl: str, info_dict: _InfoDict, *args: object, **kwargs: object) -> str: ... + def prepare_filename( + self, info_dict: _InfoDict, dir_type: str = "", *, outtmpl: str | None = None, warn: bool = False + ) -> str: ... + @staticmethod + def add_extra_info(info_dict: _InfoDict, extra_info: Mapping[str, object]) -> None: ... + def extract_info( + self, + url: str, + download: bool = True, + ie_key: str | None = None, + extra_info: object | None = None, + process: bool = True, + force_generic_extractor: bool = False, + ) -> _InfoDict: ... + def add_default_extra_info(self, ie_result: _InfoDict, ie: InfoExtractor, url: str) -> None: ... + def process_ie_result( + self, ie_result: _InfoDict, download: bool = True, extra_info: Mapping[str, object] | None = None + ) -> _InfoDict: ... + def build_format_selector(self, format_spec: str) -> _FormatSelector: ... + def sort_formats(self, info_dict: _InfoDict) -> None: ... + def process_video_result(self, info_dict: _InfoDict, download: bool = True) -> _InfoDict: ... + def process_subtitles( + self, video_id: str, normal_subtitles: Mapping[str, object], automatic_captions: Mapping[str, object] + ) -> dict[str, object] | None: ... + def dl(self, name: str, info: _InfoDict, subtitle: bool = False, test: bool = False) -> bool: ... + def existing_file(self, filepaths: Iterable[str], *, default_overwrite: bool = True) -> object | None: ... + def process_info(self, info_dict: _InfoDict) -> None: ... + def download(self, url_list: Collection[str]) -> None: ... + def download_with_info_file(self, info_filename: str) -> int: ... + @staticmethod + def sanitize_info(info_dict: _InfoDict, remove_private_keys: bool = False) -> _InfoDict | None: ... + @staticmethod + def filter_requested_info(info_dict: _InfoDict, actually_filter: bool = True) -> _InfoDict | None: ... + @staticmethod + def post_extract(info_dict: _InfoDict) -> None: ... + def run_pp(self, pp: PostProcessor, infodict: _InfoDict) -> _InfoDict: ... + def run_all_pps(self, key: str, info: _InfoDict, *, additional_pps: Collection[PostProcessor] | None = None) -> _InfoDict: ... + def pre_process( + self, ie_info: _InfoDict, key: str = "pre_process", files_to_move: Mapping[str, object] | None = None + ) -> tuple[_InfoDict, list[str] | None]: ... + def post_process(self, filename: str, info: _InfoDict, files_to_move: Mapping[str, object] | None = None) -> _InfoDict: ... + def in_download_archive(self, info_dict: _InfoDict) -> bool: ... + def record_download_archive(self, info_dict: _InfoDict) -> None: ... + @staticmethod + def format_resolution(format: Mapping[str, object], default: str = "unknown") -> str: ... + def render_formats_table(self, info_dict: _InfoDict) -> str | None: ... + def render_thumbnails_table(self, info_dict: _InfoDict) -> str | None: ... + def render_subtitles_table(self, video_id: str, subtitles: Iterable[Mapping[str, object]]) -> str | None: ... + def list_formats(self, info_dict: _InfoDict) -> None: ... + def list_thumbnails(self, info_dict: _InfoDict) -> None: ... + def list_subtitles(self, video_id: str, subtitles: Iterable[Mapping[str, object]], name: str = "subtitles") -> None: ... 
+ def print_debug_header(self) -> None: ... + @cached_property + def proxies(self) -> dict[str, object]: ... + @cached_property + def cookiejar(self) -> YoutubeDLCookieJar: ... + def urlopen(self, req: Request | str) -> Response: ... + def build_request_director( + self, handlers: Collection[RequestHandler], preferences: Collection[object] | None = None + ) -> RequestDirector: ... + def encode(self, s: str) -> bytes: ... + def get_encoding(self) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/__init__.pyi b/stubs/yt-dlp/yt_dlp/__init__.pyi new file mode 100644 index 000000000000..46879ff0713c --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/__init__.pyi @@ -0,0 +1,258 @@ +import optparse +from collections.abc import Callable, Collection, Iterator, Mapping +from typing import Literal, NamedTuple, Protocol, TypedDict, type_check_only +from typing_extensions import NotRequired + +from .extractor import gen_extractors, list_extractors +from .networking.impersonate import ImpersonateTarget +from .YoutubeDL import YoutubeDL + +__all__ = ["YoutubeDL", "gen_extractors", "list_extractors", "main", "parse_options"] + +@type_check_only +class _LoggerProtocol(Protocol): # noqa: Y046 + def __init__(self, ydl: YoutubeDL | None = None) -> None: ... + def debug(self, message: str) -> None: ... + def info(self, message: str) -> None: ... + def warning(self, message: str, *, once: bool = ..., only_once: bool = ...) -> None: ... + def error(self, message: str) -> None: ... + def stdout(self, message: str) -> None: ... + def stderr(self, message: str) -> None: ... + +@type_check_only +class _RetrySleepFunctions(TypedDict): + default: NotRequired[Callable[[int], int]] + file_access: NotRequired[Callable[[int], int]] + fragment: NotRequired[Callable[[int], int]] + +@type_check_only +class _ProgressTemplateValue(TypedDict): # noqa: Y049 + info: NotRequired[str] + progress: NotRequired[str] + +@type_check_only +class _ExternalDownloader(TypedDict): + dash: NotRequired[str] + default: NotRequired[str] + ftp: NotRequired[str] + http: NotRequired[str] + m3u8: NotRequired[str] + mms: NotRequired[str] + rtmp: NotRequired[str] + rtsp: NotRequired[str] + +@type_check_only +class _DownloadRange(TypedDict): + end_time: int + index: NotRequired[int] + start_time: int + title: NotRequired[str] + +@type_check_only +class _Color(TypedDict): + stderr: NotRequired[Literal["always", "auto", "no_color", "never"]] + stdout: NotRequired[Literal["always", "auto", "no_color", "never"]] + +_ProgressTemplate = TypedDict( + "_ProgressTemplate", + { + "download": _ProgressTemplateValue, + "download-title": _ProgressTemplateValue, + "postprocess": _ProgressTemplateValue, + "postprocess-title": _ProgressTemplateValue, + }, +) + +@type_check_only +class _Params(TypedDict): + usenetrc: NotRequired[bool | None] + netrc_location: NotRequired[str | None] + netrc_cmd: NotRequired[str | None] + username: NotRequired[str | None] + password: NotRequired[str | None] + twofactor: NotRequired[str | None] + videopassword: NotRequired[str | None] + ap_mso: NotRequired[str | None] + ap_username: NotRequired[str | None] + ap_password: NotRequired[str | None] + client_certificate: NotRequired[str | None] + client_certificate_key: NotRequired[str | None] + client_certificate_password: NotRequired[str | None] + quiet: NotRequired[bool | None] + no_warnings: NotRequired[bool | None] + forceurl: NotRequired[bool | None] + forcetitle: NotRequired[str | None] + forceid: NotRequired[bool | None] + forcethumbnail: NotRequired[bool | None] + forcedescription: NotRequired[bool 
| None] + forceduration: NotRequired[str | None] + forcefilename: NotRequired[bool | None] + forceprint: NotRequired[Mapping[str, Collection[str]] | Collection[str] | None] + print_to_file: NotRequired[Mapping[str, tuple[str, str]] | None] + forcejson: NotRequired[bool | None] + dump_single_json: NotRequired[bool | None] + force_write_download_archive: NotRequired[str | None] + simulate: NotRequired[str | None] + skip_download: NotRequired[str | None] + format: NotRequired[str | Callable[[Mapping[str, object]], Mapping[str, object]] | None] + allow_unplayable_formats: NotRequired[bool | None] + ignore_no_formats_error: NotRequired[bool | None] + format_sort: NotRequired[Collection[str] | None] + format_sort_force: NotRequired[str | None] + allow_multiple_video_streams: NotRequired[bool | None] + allow_multiple_audio_streams: NotRequired[bool | None] + check_formats: NotRequired[bool | Literal["selected"] | None] + listformats: NotRequired[bool | None] + outtmpl: NotRequired[str | Mapping[str, str] | None] + outtmpl_na_placeholder: NotRequired[str | None] + paths: NotRequired[str | None] + restrictfilenames: NotRequired[bool | None] + windowsfilenames: NotRequired[bool | None] + ignoreerrors: NotRequired[bool | Literal["only_download"] | None] + force_generic_extractor: NotRequired[bool | None] + allowed_extractors: NotRequired[Collection[str] | None] + ratelimit: NotRequired[int | None] + throttledratelimit: NotRequired[int | None] + overwrites: NotRequired[bool | None] + retries: NotRequired[int | None] + file_access_retries: NotRequired[int | None] + fragment_retries: NotRequired[int | None] + extractor_retries: NotRequired[int | None] + retry_sleep_functions: NotRequired[_RetrySleepFunctions | None] + skip_unavailable_fragments: NotRequired[bool | None] + keep_fragments: NotRequired[bool | None] + concurrent_fragment_downloads: NotRequired[int | None] + buffersize: NotRequired[int | None] + noresizebuffer: NotRequired[bool | None] + http_chunk_size: NotRequired[int | None] + continuedl: NotRequired[bool | None] + noprogress: NotRequired[bool | None] + progress_with_newline: NotRequired[bool | None] + progress_template: NotRequired[_ProgressTemplate | None] + playliststart: NotRequired[int | None] + playlistend: NotRequired[int | None] + playlistreverse: NotRequired[bool | None] + playlistrandom: NotRequired[bool | None] + lazy_playlist: NotRequired[bool | None] + noplaylist: NotRequired[bool | None] + logtostderr: NotRequired[bool | None] + consoletitle: NotRequired[str | None] + nopart: NotRequired[bool | None] + updatetime: NotRequired[bool | None] + writedescription: NotRequired[bool | None] + writeannotations: NotRequired[bool | None] + writeinfojson: NotRequired[bool | None] + allow_playlist_files: NotRequired[bool | None] + clean_infojson: NotRequired[bool | None] + getcomments: NotRequired[bool | None] + writethumbnail: NotRequired[bool | None] + write_all_thumbnails: NotRequired[bool | None] + writelink: NotRequired[bool | None] + writeurllink: NotRequired[bool | None] + writewebloclink: NotRequired[bool | None] + writedesktoplink: NotRequired[bool | None] + writesubtitles: NotRequired[bool | None] + writeautomaticsub: NotRequired[bool | None] + allsubtitles: NotRequired[bool | None] + listsubtitles: NotRequired[bool | None] + subtitlesformat: NotRequired[str | None] + subtitleslangs: NotRequired[Collection[str] | None] + matchtitle: NotRequired[bool | None] + rejecttitle: NotRequired[bool | None] + prefer_free_formats: NotRequired[bool | None] + trim_file_name: NotRequired[int 
| None] + verbose: NotRequired[bool | None] + test: NotRequired[bool | None] + keepvideo: NotRequired[str | None] + min_filesize: NotRequired[int | None] + max_filesize: NotRequired[int | None] + min_views: NotRequired[str | None] + max_views: NotRequired[str | None] + daterange: NotRequired[str | None] + cachedir: NotRequired[str | None] + age_limit: NotRequired[str | None] + download_archive: NotRequired[str | None] + break_on_existing: NotRequired[str | None] + break_on_reject: NotRequired[bool | None] + break_per_url: NotRequired[bool | None] + skip_playlist_after_errors: NotRequired[bool | None] + cookiefile: NotRequired[str | None] + cookiesfrombrowser: NotRequired[tuple[str, ...] | None] + legacyserverconnect: NotRequired[bool | None] + nocheckcertificate: NotRequired[bool | None] + prefer_insecure: NotRequired[str | None] + enable_file_urls: NotRequired[str | None] + http_headers: NotRequired[Mapping[str, str] | None] + proxy: NotRequired[str | None] + socket_timeout: NotRequired[int | None] + bidi_workaround: NotRequired[bool | None] + debug_printtraffic: NotRequired[bool | None] + prefer_ffmpeg: NotRequired[bool | None] + include_ads: NotRequired[bool | None] + default_search: NotRequired[str | None] + dynamic_mpd: NotRequired[bool | None] + extractor_args: NotRequired[Mapping[str, Mapping[str, object]] | None] + youtube_include_dash_manifest: NotRequired[bool | None] + youtube_include_hls_manifest: NotRequired[bool | None] + encoding: NotRequired[str | None] + extract_flat: NotRequired[bool | Literal["in_playlist", "discard", "discard_in_playlist"] | None] + live_from_start: NotRequired[bool | None] + wait_for_video: NotRequired[tuple[int, int] | None] + mark_watched: NotRequired[bool | None] + merge_output_format: NotRequired[str | None] + final_ext: NotRequired[str | None] + postprocessors: NotRequired[Collection[Mapping[str, object]]] + fixup: NotRequired[Literal["never", "warn", "detect_or_warn"] | None] + source_address: NotRequired[str | None] + call_home: NotRequired[bool | None] + sleep_interval_requests: NotRequired[int | None] + sleep_interval: NotRequired[int | None] + max_sleep_interval: NotRequired[int | None] + sleep_interval_subtitles: NotRequired[int | None] + external_downloader: NotRequired[_ExternalDownloader | None] + download_ranges: NotRequired[Callable[[object, YoutubeDL], Iterator[_DownloadRange]] | None] + force_keyframes_at_cuts: NotRequired[bool | None] + list_thumbnails: NotRequired[str | None] + playlist_items: NotRequired[Collection[int] | None] + xattr_set_filesize: NotRequired[bool | None] + match_filter: NotRequired[ + Callable[[Mapping[str, object], bool], str | None] | Callable[[Mapping[str, object]], str | None] | None + ] + color: NotRequired[_Color | None] + ffmpeg_location: NotRequired[str | None] + hls_prefer_native: NotRequired[bool | None] + hls_use_mpegts: NotRequired[bool | None] + hls_split_discontinuity: NotRequired[bool | None] + max_downloads: NotRequired[int | None] + dump_intermediate_pages: NotRequired[bool | None] + listformats_table: NotRequired[bool | None] + write_pages: NotRequired[bool | None] + external_downloader_args: NotRequired[Literal["default"] | Mapping[str, Collection[str]] | Collection[str] | None] + postprocessor_args: NotRequired[Mapping[str, Collection[str]] | Collection[str] | None] + geo_verification_proxy: NotRequired[str | None] + geo_bypass: NotRequired[bool | None] + geo_bypass_country: NotRequired[str | None] + geo_bypass_ip_block: NotRequired[str | None] + compat_opts: NotRequired[dict[str, object] | 
None] + # Undocumented fields below. + _deprecation_warnings: NotRequired[Collection[str] | None] + _warnings: NotRequired[Collection[str] | None] + autonumber_size: NotRequired[int | None] + autonumber_start: NotRequired[int | None] + cn_verification_proxy: NotRequired[str | None] + forceformat: NotRequired[object] + load_pages: NotRequired[bool | None] + logger: NotRequired[_LoggerProtocol] + youtube_print_sig_code: NotRequired[bool | None] + progress_hooks: NotRequired[list[Callable[[object], object]]] + impersonate: NotRequired[ImpersonateTarget] + +@type_check_only +class _ParsedOptions(NamedTuple): + parser: optparse.OptionParser + options: optparse.Values + urls: Collection[str] + ydl_opts: _Params + +def parse_options(argv: Collection[str] | None = ...) -> _ParsedOptions: ... +def main(argv: list[str] | None = ...) -> int: ... diff --git a/stubs/yt-dlp/yt_dlp/aes.pyi b/stubs/yt-dlp/yt_dlp/aes.pyi new file mode 100644 index 000000000000..64a844f383ce --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/aes.pyi @@ -0,0 +1,40 @@ +from _typeshed import ReadableBuffer +from typing import Literal + +__all__ = [ + "aes_cbc_decrypt", + "aes_cbc_decrypt_bytes", + "aes_cbc_encrypt", + "aes_cbc_encrypt_bytes", + "aes_ctr_decrypt", + "aes_ctr_encrypt", + "aes_decrypt", + "aes_decrypt_text", + "aes_ecb_decrypt", + "aes_ecb_encrypt", + "aes_encrypt", + "aes_gcm_decrypt_and_verify", + "aes_gcm_decrypt_and_verify_bytes", + "key_expansion", + "pad_block", + "pkcs7_padding", + "unpad_pkcs7", +] + +def aes_cbc_decrypt_bytes(data: bytes, key: bytes, iv: bytes) -> bytes: ... +def aes_gcm_decrypt_and_verify_bytes(data: bytes, key: bytes, tag: bytes, nonce: bytes) -> bytes: ... +def aes_cbc_encrypt_bytes(data: bytes, key: bytes, iv: bytes, **kwargs: object) -> bytes: ... +def unpad_pkcs7(data: list[int]) -> list[int]: ... +def pkcs7_padding(data: list[int]) -> list[int]: ... +def pad_block(block: list[int], padding_mode: Literal["pkcs7", "iso7816", "whitespace", "zero"]) -> list[int]: ... +def aes_ecb_encrypt(data: list[int], key: list[int], iv: list[int] | None = None) -> list[int]: ... +def aes_ecb_decrypt(data: list[int], key: list[int], iv: list[int] | None = None) -> list[int]: ... +def aes_ctr_decrypt(data: list[int], key: list[int], iv: list[int]) -> list[int]: ... +def aes_ctr_encrypt(data: list[int], key: list[int], iv: list[int]) -> list[int]: ... +def aes_cbc_decrypt(data: list[int], key: list[int], iv: list[int]) -> list[int]: ... +def aes_cbc_encrypt(data: list[int], key: list[int], iv: list[int], *, padding_mode: str = "pkcs7") -> list[int]: ... +def aes_gcm_decrypt_and_verify(data: list[int], key: list[int], tag: list[int], nonce: list[int]) -> list[int]: ... +def aes_encrypt(data: list[int], expanded_key: list[int]) -> list[int]: ... +def aes_decrypt(data: list[int], expanded_key: list[int]) -> list[int]: ... +def aes_decrypt_text(data: str | ReadableBuffer, password: str, key_size_bytes: int) -> str: ... +def key_expansion(data: list[int]) -> list[int]: ... diff --git a/stubs/yt-dlp/yt_dlp/cache.pyi b/stubs/yt-dlp/yt_dlp/cache.pyi new file mode 100644 index 000000000000..ebea38468205 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/cache.pyi @@ -0,0 +1,19 @@ +from typing import Literal + +from .YoutubeDL import YoutubeDL + +class Cache: + def __init__(self, ydl: YoutubeDL) -> None: ... + @property + def enabled(self) -> bool: ... + def store(self, section: str, key: str, data: object, dtype: Literal["json"] = "json") -> None: ... 
+ def load( + self, + section: str, + key: str, + dtype: Literal["json"] = "json", + default: object | None = None, + *, + min_ver: str | None = None, + ) -> object: ... + def remove(self) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/compat/__init__.pyi b/stubs/yt-dlp/yt_dlp/compat/__init__.pyi new file mode 100644 index 000000000000..cfa7f4a18eb6 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/compat/__init__.pyi @@ -0,0 +1,12 @@ +import urllib +import urllib.request +import xml.etree.ElementTree as etree + +from yt_dlp.networking import Request + +class compat_HTMLParseError(ValueError): ... + +def compat_etree_fromstring(text: str) -> etree.Element[str]: ... +def compat_ord(c: str) -> int: ... +def compat_expanduser(path: str) -> str: ... +def urllib_req_to_req(urllib_request: urllib.request.Request) -> Request: ... diff --git a/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi b/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi new file mode 100644 index 000000000000..7a234e98c774 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi @@ -0,0 +1,21 @@ +import types +from collections.abc import Callable, Collection +from typing import NamedTuple + +class _Package(NamedTuple): + name: str + version: str + +def get_package_info(module: types.ModuleType) -> _Package: ... + +class EnhancedModule(types.ModuleType): + def __bool__(self) -> bool: ... + def __getattribute__(self, attr: str) -> object: ... + +def passthrough_module( + parent: types.ModuleType, + child: str | types.ModuleType, + allowed_attributes: Collection[str] = ..., + *, + callback: Callable[[object], object] = ..., +) -> types.ModuleType: ... diff --git a/stubs/yt-dlp/yt_dlp/compat/imghdr.pyi b/stubs/yt-dlp/yt_dlp/compat/imghdr.pyi new file mode 100644 index 000000000000..703b7e450dc1 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/compat/imghdr.pyi @@ -0,0 +1,3 @@ +from _typeshed import FileDescriptorOrPath + +def what(file: FileDescriptorOrPath | None = None, h: bytes | None = None) -> str | None: ... diff --git a/stubs/yt-dlp/yt_dlp/cookies.pyi b/stubs/yt-dlp/yt_dlp/cookies.pyi new file mode 100644 index 000000000000..769f1301343f --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/cookies.pyi @@ -0,0 +1,104 @@ +from collections.abc import Collection, Iterator, KeysView +from enum import Enum +from http.cookiejar import Cookie, MozillaCookieJar +from http.cookies import SimpleCookie +from typing import TextIO, TypeVar + +from . import _LoggerProtocol +from .minicurses import MultilinePrinter +from .utils._utils import YoutubeDLError +from .YoutubeDL import YoutubeDL + +CHROMIUM_BASED_BROWSERS: set[str] = ... +SUPPORTED_BROWSERS: set[str] = ... + +class _LinuxKeyring(Enum): + BASICTEXT = 5 + GNOMEKEYRING = 4 + KWALLET = 1 + KWALLET5 = 2 + KWALLET6 = 3 + +SUPPORTED_KEYRINGS: KeysView[str] + +class YDLLogger(_LoggerProtocol): + def warning(self, message: str, only_once: bool = False) -> None: ... # type: ignore[override] + + class ProgressBar(MultilinePrinter): + def print(self, message: str) -> None: ... + + def progress_bar(self) -> ProgressBar: ... + +class CookieLoadError(YoutubeDLError): ... + +class YoutubeDLCookieJar(MozillaCookieJar): + def __init__(self, filename: str | None = ..., *args: object, **kwargs: object) -> None: ... + def open(self, file: str, *, write: bool = ...) -> Iterator[TextIO]: ... + def get_cookie_header(self, url: str) -> str: ... + def get_cookies_for_url(self, url: str) -> list[Cookie]: ... + def load(self, filename: str | None = None, ignore_discard: bool = True, ignore_expires: bool = True) -> None: ... 
+ def save(self, filename: str | None = None, ignore_discard: bool = True, ignore_expires: bool = True) -> None: ... + +def load_cookies(cookie_file: str, browser_specification: str | None, ydl: YoutubeDL) -> YoutubeDLCookieJar: ... +def extract_cookies_from_browser( + browser_name: str, + profile: str | None = ..., + logger: _LoggerProtocol = ..., + *, + keyring: _LinuxKeyring | None = ..., + container: str | None = ..., +) -> YoutubeDLCookieJar: ... + +_T = TypeVar("_T", bound=MozillaCookieJar) + +def parse_safari_cookies(data: bytes, jar: _T | None = None, logger: _LoggerProtocol = ...) -> _T: ... + +class ChromeCookieDecryptor: + def decrypt(self, encrypted_value: bytes) -> str: ... + +class LinuxChromeCookieDecryptor(ChromeCookieDecryptor): + def __init__( + self, + browser_keyring_name: str, + logger: _LoggerProtocol, + *, + keyring: _LinuxKeyring | None = ..., + meta_version: int | None = ..., + ) -> None: ... + @staticmethod + def derive_key(password: bytes) -> bytes: ... + +class MacChromeCookieDecryptor(ChromeCookieDecryptor): + def __init__(self, browser_keyring_name: str, logger: YDLLogger, meta_version: int | None = None) -> None: ... + @staticmethod + def derive_key(password: bytes) -> bytes: ... + +class WindowsChromeCookieDecryptor(ChromeCookieDecryptor): + def __init__(self, browser_root: str, logger: YDLLogger, meta_version: int | None = None) -> None: ... + +def get_cookie_decryptor( + browser_root: object, + browser_keyring_name: str, + logger: _LoggerProtocol, + *, + keyring: _LinuxKeyring | None = ..., + meta_version: int | None = ..., +) -> ChromeCookieDecryptor: ... + +class ParserError(Exception): ... + +class DataParser: + def __init__(self, data: bytes, logger: YDLLogger) -> None: ... + def read_bytes(self, num_bytes: int) -> bytes: ... + def expect_bytes(self, expected_value: bytes, message: str) -> None: ... + def read_uint(self, big_endian: bool = False) -> int: ... + def read_double(self, big_endian: bool = False) -> float: ... + def read_cstring(self) -> bytes: ... + def skip(self, num_bytes: int, description: str = "unknown") -> None: ... + def skip_to(self, offset: int, description: str = "unknown") -> None: ... + def skip_to_end(self, description: str = "unknown") -> None: ... + +def pbkdf2_sha1(password: bytes, salt: bytes, iterations: int, key_length: int) -> bytes: ... + +class LenientSimpleCookie(SimpleCookie): + def load(self, data: str | Collection[str]) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi b/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi new file mode 100644 index 000000000000..03ff26bf70b1 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi @@ -0,0 +1,31 @@ +from collections.abc import Mapping +from typing import Literal +from typing_extensions import TypeAlias + +from ..extractor.common import _InfoDict +from ..utils._utils import NO_DEFAULT +from .common import FileDownloader + +__all__ = ["FileDownloader", "get_suitable_downloader", "shorten_protocol_name"] + +_Protocol: TypeAlias = Literal[ + "dash_frag_urls", + "ftps", + "http_dash_segments", + "http_dash_segments_generator", + "https", + "m3u8", + "m3u8_frag_urls", + "m3u8_native", + "rtmp_ffmpeg", + "websocket_frag", +] + +def get_suitable_downloader( + info_dict: _InfoDict, + params: Mapping[str, object] = ..., + default: FileDownloader | type[NO_DEFAULT] = ..., + protocol: _Protocol | None = None, + to_stdout: bool = False, +) -> FileDownloader: ... +def shorten_protocol_name(proto: _Protocol, simplify: bool = False) -> str: ... 
diff --git a/stubs/yt-dlp/yt_dlp/downloader/bunnycdn.pyi b/stubs/yt-dlp/yt_dlp/downloader/bunnycdn.pyi new file mode 100644 index 000000000000..086c8646065c --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/bunnycdn.pyi @@ -0,0 +1,17 @@ +import threading +from collections.abc import Mapping + +from ..extractor.common import _InfoDict +from ..utils.networking import HTTPHeaderDict +from .common import FileDownloader + +class BunnyCdnFD(FileDownloader): + def real_download(self, filename: str, info_dict: _InfoDict) -> bool | None: ... + def ping_thread( + self, + stop_event: threading.Event, + url: str, + headers: HTTPHeaderDict | Mapping[str, str] | None, + secret: str, + context_id: str, + ) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/common.pyi b/stubs/yt-dlp/yt_dlp/downloader/common.pyi new file mode 100644 index 000000000000..fc687306a03e --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/common.pyi @@ -0,0 +1,87 @@ +from _typeshed import OpenBinaryMode, OpenTextMode +from collections.abc import Callable, Mapping +from typing import IO, Any, AnyStr, TypedDict, type_check_only + +from ..extractor.common import _InfoDict +from ..utils._utils import NO_DEFAULT, Namespace +from ..YoutubeDL import YoutubeDL + +@type_check_only +class _FileDownloaderParams(TypedDict): + buffersize: int + continuedl: bool + external_downloader_args: dict[str, list[str]] | list[str] | None + file_access_retries: int + hls_use_mpegts: bool + http_chunk_size: int | None + max_filesize: int | None + min_filesize: int | None + nopart: bool + noprogress: bool + noresizebuffer: bool + progress_delta: float | None + progress_template: dict[str, str] + quiet: bool + ratelimit: int | None + retries: int + retry_sleep_functions: dict[str, Callable[..., object]] + test: bool + throttledratelimit: int | None + updatetime: bool + verbose: bool + xattr_set_filesize: bool + +class FileDownloader: + params: _FileDownloaderParams | None + def __init__(self, ydl: YoutubeDL, params: _FileDownloaderParams) -> None: ... + def to_screen(self, *args: object, **kargs: object) -> None: ... + @property + def FD_NAME(cls) -> str: ... + @staticmethod + def format_seconds(seconds: int | None) -> str: ... + @classmethod + def format_eta(cls, seconds: int | None) -> str: ... + @staticmethod + def calc_percent(byte_counter: float, data_len: float | None) -> float: ... + @staticmethod + def format_percent(percent: float | None) -> str: ... + @classmethod + def calc_eta( + cls, + start_or_rate: int | None, + now_or_remaining: float | None, + total: int | type[NO_DEFAULT] = ..., + current: int | type[NO_DEFAULT] = ..., + ) -> float | int | None: ... + @staticmethod + def calc_speed(start: float, now: float, bytes: int) -> float | None: ... + @staticmethod + def format_speed(speed: int | None) -> str: ... + @staticmethod + def format_retries(retries: int) -> float | int: ... + @staticmethod + def filesize_or_none(unencoded_filename: AnyStr) -> int | None: ... + @staticmethod + def best_block_size(elapsed_time: float, bytes: int) -> int: ... + def slow_down(self, start_time: float, now: float, byte_counter: int) -> None: ... + def temp_name(self, filename: str) -> str: ... + def undo_temp_name(self, filename: str) -> str: ... + def ytdl_filename(self, filename: str) -> str: ... + def wrap_file_access(action: str, *, fatal: bool = False) -> object: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def sanitize_open(self, filename: str, open_mode: OpenTextMode | OpenBinaryMode) -> tuple[IO[Any], str]: ... 
+ def try_remove(self, filename: str) -> None: ... + def try_rename(self, old_filename: str, new_filename: str) -> None: ... + def try_utime(self, filename: str, last_modified_hdr: str | None) -> int | None: ... + def report_destination(self, filename: str) -> None: ... + ProgressStyles: Namespace + def report_progress(self, s: Mapping[str, object]) -> None: ... + def report_resuming_byte(self, resume_len: int) -> None: ... + def report_retry( + self, err: str, count: int, retries: int, frag_index: int | type[NO_DEFAULT] = ..., fatal: bool = True + ) -> None: ... + def report_unable_to_resume(self) -> None: ... + @staticmethod + def supports_manifest(manifest: str) -> bool: ... + def download(self, filename: str, info_dict: _InfoDict, subtitle: bool = False) -> bool: ... + def real_download(self, filename: str, info_dict: _InfoDict) -> bool | None: ... + def add_progress_hook(self, ph: Callable[[str], object]) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/dash.pyi b/stubs/yt-dlp/yt_dlp/downloader/dash.pyi new file mode 100644 index 000000000000..d5870c348919 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/dash.pyi @@ -0,0 +1,8 @@ +from typing import Literal + +from ..extractor.common import _InfoDict +from .fragment import FragmentFD + +class DashSegmentsFD(FragmentFD): + FD_NAME: Literal["dashsegments"] + def real_download(self, filename: str, info_dict: _InfoDict) -> bool: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/external.pyi b/stubs/yt-dlp/yt_dlp/downloader/external.pyi new file mode 100644 index 000000000000..a44f7e857ab9 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/external.pyi @@ -0,0 +1,62 @@ +import enum +import functools +from _typeshed import Unused +from collections.abc import Iterable, Mapping +from typing import Literal + +from ..extractor.common import _InfoDict +from .fragment import FragmentFD + +class Features(enum.Enum): + TO_STDOUT = 1 + MULTIPLE_FORMATS = 2 + +class ExternalFD(FragmentFD): + SUPPORTED_PROTOCOLS: tuple[str, ...] + SUPPORTED_FEATURES: tuple[Features, ...] + @classmethod + def get_basename(cls) -> str: ... + @property + def EXE_NAME(cls) -> str: ... + @functools.cached_property + def exe(self) -> str: ... + @classmethod + def available(cls, path: str | None = None) -> str | Literal[False]: ... + @classmethod + def supports(cls, info_dict: _InfoDict) -> bool: ... + @classmethod + def can_download(cls, info_dict: _InfoDict, path: str | None = None) -> bool: ... + +class CurlFD(ExternalFD): + AVAILABLE_OPT: str + +class AxelFD(ExternalFD): + AVAILABLE_OPT: str + +class WgetFD(ExternalFD): + AVAILABLE_OPT: str + +class Aria2cFD(ExternalFD): + AVAILABLE_OPT: str + SUPPORTED_PROTOCOLS: tuple[str, ...] + @staticmethod + def supports_manifest(manifest: str) -> bool: ... + def aria2c_rpc(self, rpc_port: int, rpc_secret: str, method: str, params: Iterable[str] = ()) -> object: ... + +class HttpieFD(ExternalFD): + AVAILABLE_OPT: str + EXE_NAME: str + +class FFmpegFD(ExternalFD): + SUPPORTED_PROTOCOLS: tuple[str, ...] + SUPPORTED_FEATURES: tuple[Features, ...] + @classmethod + def available(cls, path: str | None = None) -> bool: ... # type: ignore[override] + def on_process_started(self, proc: Unused, stdin: Unused) -> None: ... + @classmethod + def can_merge_formats(cls, info_dict: _InfoDict, params: Mapping[str, object]) -> bool: ... + +class AVconvFD(FFmpegFD): ... + +def list_external_downloaders() -> list[str]: ... +def get_external_downloader(external_downloader: str) -> type[ExternalFD]: ... 
diff --git a/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi b/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi new file mode 100644 index 000000000000..5a3c4cee1ab5 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi @@ -0,0 +1,31 @@ +import io +from _typeshed import SupportsWrite +from collections.abc import Iterable, Mapping +from xml.etree.ElementTree import Element + +from .fragment import FragmentFD + +class DataTruncatedError(Exception): ... + +class FlvReader(io.BytesIO): + def read_bytes(self, n: int) -> bytes: ... + def read_unsigned_long_long(self) -> int: ... + def read_unsigned_int(self) -> int: ... + def read_unsigned_char(self) -> str: ... + def read_string(self) -> bytes: ... + def read_box_info(self) -> tuple[int, bytes, bytes]: ... + def read_asrt(self) -> dict[str, object]: ... + def read_afrt(self) -> dict[str, object]: ... + def read_abst(self) -> dict[str, object]: ... + def read_bootstrap_info(self) -> dict[str, object]: ... + +def read_bootstrap_info(bootstrap_bytes: bytes) -> dict[str, object]: ... +def build_fragments_list(boot_info: Mapping[str, object]) -> list[tuple[object, int]]: ... +def write_unsigned_int(stream: SupportsWrite[bytes], val: int) -> None: ... +def write_unsigned_int_24(stream: SupportsWrite[bytes], val: int) -> None: ... +def write_flv_header(stream: SupportsWrite[bytes]) -> None: ... +def write_metadata_tag(stream: SupportsWrite[bytes], metadata: bytes) -> None: ... +def remove_encrypted_media(media: Iterable[Element]) -> list[object]: ... +def get_base_url(manifest: str) -> str | None: ... + +class F4mFD(FragmentFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/fc2.pyi b/stubs/yt-dlp/yt_dlp/downloader/fc2.pyi new file mode 100644 index 000000000000..714890013741 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/fc2.pyi @@ -0,0 +1,3 @@ +from .common import FileDownloader + +class FC2LiveFD(FileDownloader): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi b/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi new file mode 100644 index 000000000000..33a18419f527 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi @@ -0,0 +1,28 @@ +from collections.abc import Callable, Collection, Mapping, Sequence +from concurrent.futures.thread import ThreadPoolExecutor + +from ..extractor.common import _InfoDict +from .common import FileDownloader +from .http import HttpFD + +class HttpQuietDownloader(HttpFD): + def to_screen(self, *args: object, **kargs: object) -> None: ... + to_console_title = to_screen + +class FragmentFD(FileDownloader): + def report_retry_fragment(self, err: str, frag_index: int, count: int, retries: int) -> None: ... + def report_skip_fragment(self, frag_index: int, err: str | None = None) -> None: ... + def decrypter(self, info_dict: _InfoDict) -> Callable[[Mapping[str, object], bytes], bytes]: ... + def download_and_append_fragments_multiple(self, *args: object, **kwargs: object) -> bool: ... + def download_and_append_fragments( + self, + ctx: Mapping[str, object], + fragments: Collection[Mapping[str, object]], + info_dict: _InfoDict, + *, + is_fatal: Callable[[int], bool] = ..., + pack_func: Callable[[str, int], bytes] = ..., + finish_func: Callable[[], object] | None = None, + tpe: ThreadPoolExecutor | None = None, + interrupt_trigger: Sequence[bool] = (True,), + ) -> bool: ... 
diff --git a/stubs/yt-dlp/yt_dlp/downloader/hls.pyi b/stubs/yt-dlp/yt_dlp/downloader/hls.pyi new file mode 100644 index 000000000000..7382bfd510ec --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/hls.pyi @@ -0,0 +1,7 @@ +from ..extractor.common import _InfoDict +from .fragment import FragmentFD + +class HlsFD(FragmentFD): + FD_NAME: str + @classmethod + def can_download(cls, manifest: str, info_dict: _InfoDict, allow_unplayable_formats: bool = False) -> bool: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/http.pyi b/stubs/yt-dlp/yt_dlp/downloader/http.pyi new file mode 100644 index 000000000000..7447e07f208f --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/http.pyi @@ -0,0 +1,3 @@ +from .common import FileDownloader + +class HttpFD(FileDownloader): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/ism.pyi b/stubs/yt-dlp/yt_dlp/downloader/ism.pyi new file mode 100644 index 000000000000..c69117729a10 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/ism.pyi @@ -0,0 +1,28 @@ +import struct +from _typeshed import SupportsWrite +from collections.abc import Collection, Mapping + +from .fragment import FragmentFD + +u8: struct.Struct +u88: struct.Struct +u16: struct.Struct +u1616: struct.Struct +u32: struct.Struct +u64: struct.Struct +s88: struct.Struct +s16: struct.Struct +s1616: struct.Struct +s32: struct.Struct +unity_matrix: bytes +TRACK_ENABLED: int +TRACK_IN_MOVIE: int +TRACK_IN_PREVIEW: int +SELF_CONTAINED: int + +def box(box_type: bytes, payload: bytes) -> bytes: ... +def full_box(box_type: bytes, version: int, flags: int, payload: bytes) -> bytes: ... +def write_piff_header(stream: SupportsWrite[bytes], params: Mapping[str, object]) -> None: ... +def extract_box_data(data: bytes, box_sequence: Collection[bytes]) -> bytes | None: ... + +class IsmFD(FragmentFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/mhtml.pyi b/stubs/yt-dlp/yt_dlp/downloader/mhtml.pyi new file mode 100644 index 000000000000..0651d95a3a63 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/mhtml.pyi @@ -0,0 +1,3 @@ +from .fragment import FragmentFD + +class MhtmlFD(FragmentFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/niconico.pyi b/stubs/yt-dlp/yt_dlp/downloader/niconico.pyi new file mode 100644 index 000000000000..2aceb6d1d735 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/niconico.pyi @@ -0,0 +1,3 @@ +from .common import FileDownloader + +class NiconicoLiveFD(FileDownloader): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/rtmp.pyi b/stubs/yt-dlp/yt_dlp/downloader/rtmp.pyi new file mode 100644 index 000000000000..3c7c4ced4efe --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/rtmp.pyi @@ -0,0 +1,5 @@ +from .common import FileDownloader + +def rtmpdump_version() -> str: ... + +class RtmpFD(FileDownloader): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/rtsp.pyi b/stubs/yt-dlp/yt_dlp/downloader/rtsp.pyi new file mode 100644 index 000000000000..313d7c09fcd6 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/rtsp.pyi @@ -0,0 +1,5 @@ +from ..extractor.common import _InfoDict +from .common import FileDownloader + +class RtspFD(FileDownloader): + def real_download(self, filename: str, info_dict: _InfoDict) -> bool: ... 
diff --git a/stubs/yt-dlp/yt_dlp/downloader/websocket.pyi b/stubs/yt-dlp/yt_dlp/downloader/websocket.pyi new file mode 100644 index 000000000000..1b0780b24805 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/websocket.pyi @@ -0,0 +1,11 @@ +from _typeshed import SupportsWrite + +from ..extractor.common import _InfoDict +from .common import FileDownloader + +class FFmpegSinkFD(FileDownloader): + def real_download(self, filename: str, info_dict: _InfoDict) -> bool: ... + async def real_connection(self, sink: SupportsWrite[bytes], info_dict: _InfoDict) -> None: ... + +class WebSocketFragmentFD(FFmpegSinkFD): + async def real_connection(self, sink: SupportsWrite[bytes], info_dict: _InfoDict) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi b/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi new file mode 100644 index 000000000000..475e01eb4c0a --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi @@ -0,0 +1,7 @@ +from ..extractor.common import _InfoDict +from .fragment import FragmentFD + +class YoutubeLiveChatFD(FragmentFD): + def real_download(self, filename: str, info_dict: _InfoDict) -> bool: ... + @staticmethod + def parse_live_timestamp(action: dict[str, object]) -> int | None: ... diff --git a/stubs/yt-dlp/yt_dlp/extractor/__init__.pyi b/stubs/yt-dlp/yt_dlp/extractor/__init__.pyi new file mode 100644 index 000000000000..2e610f6cb134 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/extractor/__init__.pyi @@ -0,0 +1,8 @@ +from .common import InfoExtractor + +def gen_extractor_classes() -> list[type[InfoExtractor]]: ... +def gen_extractors() -> list[InfoExtractor]: ... +def list_extractor_classes(age_limit: int | None = None) -> list[type[InfoExtractor]]: ... +def list_extractors(age_limit: int | None = None) -> list[InfoExtractor]: ... +def get_info_extractor(ie_name: str) -> InfoExtractor: ... +def import_extractors() -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/extractor/common.pyi b/stubs/yt-dlp/yt_dlp/extractor/common.pyi new file mode 100644 index 000000000000..5aa2770799be --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/extractor/common.pyi @@ -0,0 +1,717 @@ +import re +from collections.abc import Callable, Collection, Iterable, Iterator, Mapping, Sequence +from functools import cached_property +from typing import Literal, TypedDict, TypeVar, type_check_only +from typing_extensions import Required, TypeAlias +from urllib.request import Request +from xml.etree import ElementTree as ET + +from ..cache import Cache +from ..cookies import LenientSimpleCookie, YoutubeDLCookieJar +from ..networking.common import Response, _RequestData +from ..networking.impersonate import ImpersonateTarget +from ..utils._utils import NO_DEFAULT, RetryManager as _RetryManager +from ..YoutubeDL import YoutubeDL + +@type_check_only +class _InfoDict(TypedDict, total=False): + age_limit: int + availability: Literal["private", "premium_only", "subscriber_only", "needs_auth", "unlisted", "public"] | None + creator: str | None + comment_count: int | None + duration: int | None + formats: list[object] | None + id: Required[str] + like_count: int | None + tags: list[str] | None + thumbnail: str | None + timestamp: int | float | None + title: str | None + uploader: str | None + url: str | None + +_StrNoDefaultOrNone: TypeAlias = str | None | type[NO_DEFAULT] +_T = TypeVar("_T") + +class InfoExtractor: + IE_DESC: str | bool + SEARCH_KEY: str + def _login_hint(self, method: _StrNoDefaultOrNone, netrc: str | None = None) -> dict[str, str]: ... 
+ def __init__(self, downloader: YoutubeDL | None = None) -> None: ... + @classmethod + def _match_valid_url(cls, url: str) -> re.Match[str] | None: ... + @classmethod + def suitable(cls, url: str) -> re.Match[str] | None: ... + @classmethod + def get_temp_id(cls, url: str) -> str | None: ... + @classmethod + def working(cls) -> bool: ... + @classmethod + def supports_login(cls) -> bool: ... + def initialize(self) -> None: ... + def extract(self, url: str) -> Iterator[_InfoDict]: ... + def set_downloader(self, downloader: YoutubeDL) -> None: ... + @property + def cache(self) -> Cache: ... + @property + def cookiejar(self) -> YoutubeDLCookieJar: ... + def _initialize_pre_login(self) -> None: ... + def _perform_login(self, username: str, password: str) -> None: ... + def _real_initialize(self) -> None: ... + @classmethod + def ie_key(cls) -> str: ... + @property + def IE_NAME(cls) -> str: ... + def _create_request( + self, + url_or_request: str | Request, + data: object | None = None, + headers: Mapping[str, str] | None = None, + query: str | Mapping[str, str] | None = None, + extensions: Mapping[str, object] | None = None, + ) -> Request: ... + def _download_webpage_handle( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + encoding: str | None = None, + data: object | None = None, + headers: Mapping[str, str] | None = None, + query: str | Mapping[str, str] | None = None, + expected_status: int | None = None, + impersonate: ImpersonateTarget | str | bool | Collection[str | ImpersonateTarget] | None = None, + require_impersonation: bool = False, + ) -> tuple[str, Response] | Literal[False]: ... + @staticmethod + def _guess_encoding_from_content(content_type: str, webpage_bytes: bytes) -> str: ... + def _webpage_read_content( + self, + urlh: Response, + url_or_request: str | Request, + video_id: str, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + prefix: str | None = None, + encoding: str | None = None, + data: _RequestData | None = None, + ) -> str | Literal[False]: ... + def _parse_json( + self, + json_string: str, + video_id: str, + transform_source: Callable[..., str] | None = None, + fatal: bool = True, + errnote: str | None = None, + **parser_kwargs: object, + ) -> object: ... + def _parse_socket_response_as_json(self, data: str, *args: object, **kwargs: object) -> object: ... + def report_warning( + self, msg: str, video_id: str | None = None, *args: object, only_once: bool = False, **kwargs: object + ) -> None: ... + def to_screen(self, msg: str, *args: object, **kwargs: object) -> None: ... + def write_debug(self, msg: str, *args: object, **kwargs: object) -> None: ... + def get_param(self, name: str, default: object | None = None, *args: object, **kwargs: object) -> object: ... + def report_drm(self, video_id: str) -> None: ... + def report_extraction(self, id_or_name: str) -> None: ... + def report_download_webpage(self, video_id: str) -> None: ... + def report_age_confirmation(self) -> None: ... + def report_login(self) -> None: ... + def raise_login_required( + self, + msg: str = "This video is only available for registered users", + metadata_available: bool = False, + method: str | type[NO_DEFAULT] = ..., + ) -> None: ... + def raise_geo_restricted( + self, msg: str = ..., countries: Collection[str] | None = None, metadata_available: bool = False + ) -> None: ... 
+ def raise_no_formats(self, msg: str, expected: bool = False, video_id: str | None = None) -> None: ... + @staticmethod + def url_result( + url: str, + ie: InfoExtractor | None = None, + video_id: str | None = None, + video_title: str | None = None, + *, + url_transparent: bool = False, + **kwargs: object, + ) -> dict[str, object]: ... + @classmethod + def playlist_from_matches( + cls, + matches: object, + playlist_id: str | None = None, + playlist_title: str | None = None, + getter: Callable[..., object] = ..., + ie: InfoExtractor | None = None, + video_kwargs: Mapping[str, object] | None = None, + **kwargs: object, + ) -> dict[str, object]: ... + @staticmethod + def playlist_result( + entries: Iterable[_InfoDict], + playlist_id: str | None = ..., + playlist_title: str | None = ..., + playlist_description: str | None = ..., + *, + multi_video: bool = ..., + **kwargs: object, + ) -> _InfoDict: ... + def http_scheme(self) -> str: ... + @classmethod + def get_testcases(cls, include_onlymatching: bool = False) -> Iterator[dict[str, object]]: ... + @classmethod + def get_webpage_testcases(cls) -> Iterator[dict[str, object]]: ... + @property + def age_limit(cls) -> int: ... + @classmethod + def is_single_video(cls, url: str) -> bool: ... + @classmethod + def is_suitable(cls, age_limit: int) -> bool: ... + @classmethod + def description(cls, *, markdown: bool = True, search_examples: Sequence[str] | None = None) -> str: ... + def extract_subtitles(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _configuration_arg( + self, key: str, default: object = ..., *, ie_key: str | None = ..., casesense: bool = ... + ) -> object: ... + # These are dynamically created. + def _download_xml_handle( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> tuple[ET.ElementTree, Response]: ... + def _download_xml( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> ET.ElementTree: ... + def _download_socket_json_handle( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> tuple[dict[str, object], Response]: ... 
+ def _download_socket_json( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> dict[str, object]: ... + def _download_json_handle( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> tuple[dict[str, object], Response]: ... + def _download_json( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> dict[str, object]: ... + def _download_webpage( + self, + url_or_request: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + encoding: str | None = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + impersonate: str | None = ..., + require_impersonation: bool = ..., + ) -> str: ... + def _parse_xml( + self, + xml_string: str, + video_id: str, + transform_source: Callable[..., str] | None = ..., + fatal: bool = ..., + errnote: str | None = ..., + ) -> ET.Element: ... + def _parse_mpd_formats( + self, mpd_doc: ET.Element, mpd_id: str | None = ..., mpd_base_url: str = ..., mpd_url: str | None = ... + ) -> list[object]: ... + def _real_extract(self, url: str) -> _InfoDict: ... + @staticmethod + def _availability( + is_private: bool | None = ..., + needs_premium: bool | None = ..., + needs_subscription: bool | None = ..., + needs_auth: bool | None = ..., + is_unlisted: bool | None = ..., + ) -> Literal["needs_auth", "premium_only", "private", "public", "subscriber_only", "unlisted"] | None: ... + def _request_webpage( + self, + url_or_req: str | Request, + video_id: str, + note: str | None = ..., + errnote: str | None = ..., + fatal: bool = ..., + data: object = ..., + headers: Mapping[str, str] = ..., + query: Mapping[str, str] = ..., + expected_status: int | None = ..., + ) -> Response | Literal[False]: ... + @classmethod + def _match_id(cls, url: str) -> str: ... + def _search_regex( + self, + pattern: str | re.Pattern[str], + string: str | None, + name: str, + default: _StrNoDefaultOrNone = ..., + fatal: bool = True, + flags: int = 0, + group: tuple[int, ...] | list[int] | None = None, + ) -> str: ... 
+ def _search_json( + self, + start_pattern: str | re.Pattern[str], + string: str | None, + name: str, + video_id: str, + *, + end_pattern: str | re.Pattern[str] = "", + contains_pattern: str | re.Pattern[str] = r"{(?s:.+)}", + fatal: bool = True, + default: _StrNoDefaultOrNone = ..., + **kwargs: object, + ) -> object | None: ... + def _html_search_regex( + self, + pattern: str | re.Pattern[str], + string: str, + name: str, + default: _StrNoDefaultOrNone = ..., + fatal: bool = True, + flags: int = 0, + group: int | None = None, + ) -> str | tuple[str, ...]: ... + def _get_netrc_login_info(self, netrc_machine: str | None = None) -> tuple[str | None, str | None]: ... + def _get_login_info( + self, username_option: str = "username", password_option: str = "password", netrc_machine: str | None = None + ) -> tuple[str | None, str | None]: ... + def _get_tfa_info(self, note: str = "two-factor verification code") -> str: ... + @staticmethod + def _og_regexes(prop: str) -> list[str]: ... + @staticmethod + def _meta_regex(prop: str) -> str: ... + def _og_search_property(self, prop: str, html: str, name: str | None = None, **kargs: object) -> str | None: ... + def _og_search_thumbnail(self, html: str, **kargs: object) -> str | None: ... + def _og_search_description(self, html: str, **kargs: object) -> str | None: ... + def _og_search_title(self, html: str, *, fatal: bool = False, **kargs: object) -> str | None: ... + def _og_search_video_url(self, html: str, name: str = "video url", secure: bool = True, **kargs: object) -> str | None: ... + def _og_search_url(self, html: str, **kargs: object) -> str | None: ... + def _html_extract_title(self, html: str, name: str = "title", *, fatal: bool = False, **kwargs: object) -> str | None: ... + def _html_search_meta( + self, name: str, html: str, display_name: str | None = None, fatal: bool = False, **kwargs: object + ) -> str | None: ... + def _dc_search_uploader(self, html: str) -> str | None: ... + @staticmethod + def _rta_search(html: str) -> int: ... + def _media_rating_search(self, html: str) -> int: ... + def _family_friendly_search(self, html: str) -> int: ... + def _twitter_search_player(self, html: str) -> str | None: ... + def _yield_json_ld( + self, html: str, video_id: str, *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ... + ) -> Iterator[dict[str, object]]: ... + def _search_json_ld( + self, + html: str, + video_id: str, + expected_type: Iterable[str] | str | None = None, + *, + fatal: bool = True, + default: type[NO_DEFAULT] | bool = ..., + ) -> dict[str, object]: ... + def _json_ld( + self, json_ld: object, video_id: str, fatal: bool = True, expected_type: Iterable[str] | str | None = None + ) -> dict[str, object]: ... + def _search_nextjs_data( + self, webpage: str, video_id: str, *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ..., **kw: object + ) -> object: ... + def _search_nuxt_data( + self, + webpage: str, + video_id: str, + context_name: str = "__NUXT__", + *, + fatal: bool = True, + traverse: tuple[str, int] = ("data", 0), + ) -> object: ... + @staticmethod + def _hidden_inputs(html: str) -> dict[str, object]: ... + def _form_hidden_inputs(self, form_id: str, html: str) -> dict[str, object]: ... + def _check_formats(self, formats: list[dict[str, object]], video_id: str) -> None: ... + @staticmethod + def _remove_duplicate_formats(formats: list[dict[str, object]]) -> None: ... + def _is_valid_url(self, url: str, video_id: str, item: str = "video", headers: Mapping[str, object] = ...) -> bool: ... 
+ def _proto_relative_url(self, url: str, scheme: str | None = None) -> str: ... + def _sleep(self, timeout: float, video_id: str, msg_template: str | None = None) -> None: ... + def _extract_f4m_formats( + self, + manifest_url: str, + video_id: str, + preference: object | None = None, + quality: object | None = None, + f4m_id: str | None = None, + transform_source: Callable[..., str] = ..., + fatal: bool = True, + m3u8_id: str | None = None, + data: str | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> list[dict[str, object]]: ... + def _parse_f4m_formats( + self, + manifest: str, + manifest_url: str, + video_id: str, + preference: object | None = None, + quality: object | None = None, + f4m_id: str | None = None, + transform_source: Callable[..., str] = ..., + fatal: bool = True, + m3u8_id: str | None = None, + ) -> list[dict[str, object]]: ... + def _m3u8_meta_format( + self, + m3u8_url: str, + ext: str | None = None, + preference: object | None = None, + quality: object | None = None, + m3u8_id: str | None = None, + ) -> dict[str, object]: ... + def _report_ignoring_subs(self, name: str) -> None: ... + def _extract_m3u8_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _extract_m3u8_formats_and_subtitles( + self, + m3u8_url: str, + video_id: str, + ext: str | None = None, + entry_protocol: str = "m3u8_native", + preference: object | None = None, + quality: object | None = None, + m3u8_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + live: bool = False, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _parse_m3u8_formats_and_subtitles( + self, + m3u8_doc: str, + m3u8_url: str | None = None, + ext: str | None = None, + entry_protocol: str = "m3u8_native", + preference: object | None = None, + quality: object | None = None, + m3u8_id: str | None = None, + live: bool = False, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + video_id: str | None = None, + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _extract_m3u8_vod_duration( + self, + m3u8_vod_url: str, + video_id: str, + note: str | None = None, + errnote: str | None = None, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> int | None: ... + def _parse_m3u8_vod_duration(self, m3u8_vod: str, video_id: str) -> int: ... + def _extract_mpd_vod_duration( + self, + mpd_url: str, + video_id: str, + note: str | None = None, + errnote: str | None = None, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> int | None: ... + @staticmethod + def _xpath_ns(path: str, namespace: str | None = None) -> str: ... + def _extract_smil_formats_and_subtitles( + self, + smil_url: str, + video_id: str, + fatal: bool = True, + f4m_params: Mapping[str, object] | None = None, + transform_source: Callable[..., str] | None = None, + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _extract_smil_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... 
+ def _extract_smil_info( + self, smil_url: str, video_id: str, fatal: bool = True, f4m_params: Mapping[str, object] | None = None + ) -> dict[str, object]: ... + def _download_smil( + self, smil_url: str, video_id: str, fatal: bool = True, transform_source: Callable[..., str] | None = None + ) -> ET.Element: ... + def _parse_smil( + self, smil: ET.Element, smil_url: str, video_id: str, f4m_params: Mapping[str, object] | None = None + ) -> dict[str, object]: ... + def _parse_smil_namespace(self, smil: str) -> str | None: ... + def _parse_smil_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _parse_smil_formats_and_subtitles( + self, + smil: ET.Element, + smil_url: str, + video_id: str, + namespace: str | None = None, + f4m_params: Mapping[str, object] | None = None, + transform_rtmp_url: Callable[[str, str], tuple[str, str]] | None = None, + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _parse_smil_subtitles( + self, smil: ET.Element, namespace: str | None = None, subtitles_lang: str = "en" + ) -> list[dict[str, object]]: ... + def _extract_xspf_playlist(self, xspf_url: str, playlist_id: str, fatal: bool = True) -> list[dict[str, object]]: ... + def _parse_xspf( + self, xspf_doc: ET.Element, playlist_id: str, xspf_url: str | None = None, xspf_base_url: str | None = None + ) -> list[dict[str, object]]: ... + def _extract_mpd_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _extract_mpd_formats_and_subtitles( + self, *args: object, **kwargs: object + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _extract_mpd_periods( + self, + mpd_url: str, + video_id: str, + mpd_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> tuple[list[object], dict[str, object]]: ... + def _parse_mpd_formats_and_subtitles( + self, *args: object, **kwargs: object + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _merge_mpd_periods(self, periods: Iterable[Mapping[str, object]]) -> tuple[list[object], dict[str, object]]: ... + def _parse_mpd_periods( + self, mpd_doc: ET.Element, mpd_id: str | None = None, mpd_base_url: str = "", mpd_url: str | None = None + ) -> tuple[list[object], dict[str, object]]: ... + def _extract_ism_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _extract_ism_formats_and_subtitles( + self, + ism_url: str, + video_id: str, + ism_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: object | None = None, + headers: Mapping[str, object] = ..., + query: Mapping[str, object] = ..., + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _parse_ism_formats_and_subtitles( + self, ism_doc: str, ism_url: str, ism_id: str | None = None + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _parse_html5_media_entries( + self, + base_url: str, + webpage: str, + video_id: str, + m3u8_id: str | None = None, + m3u8_entry_protocol: str = "m3u8_native", + mpd_id: str | None = None, + preference: object | None = None, + quality: object | None = None, + _headers: Mapping[str, object] | None = None, + ) -> list[dict[str, object]]: ... + def _extract_akamai_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... 
+ def _extract_akamai_formats_and_subtitles( + self, manifest_url: str, video_id: str, hosts: Mapping[str, object] = ... + ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + def _extract_wowza_formats( + self, url: str, video_id: str, m3u8_entry_protocol: str = "m3u8_native", skip_protocols: Collection[str] = ... + ) -> list[dict[str, object]]: ... + def _find_jwplayer_data( + self, webpage: str, video_id: str | None = None, transform_source: Callable[..., object] = ... + ) -> object | None: ... + def _extract_jwplayer_data( + self, webpage: str, video_id: str, *args: object, transform_source: Callable[..., object] = ..., **kwargs: object + ) -> list[dict[str, object]]: ... + def _parse_jwplayer_data( + self, + jwplayer_data: Mapping[str, object], + video_id: str | None = None, + require_title: bool = True, + m3u8_id: str | None = None, + mpd_id: str | None = None, + rtmp_params: Mapping[str, object] | None = None, + base_url: str | None = None, + ) -> list[dict[str, object]]: ... + def _parse_jwplayer_formats( + self, + jwplayer_sources_data: Iterable[Mapping[str, object]], + video_id: str | None = None, + m3u8_id: str | None = None, + mpd_id: str | None = None, + rtmp_params: Mapping[str, object] | None = None, + base_url: str | None = None, + ) -> list[dict[str, object]]: ... + def _int(self, v: object, name: str, fatal: bool = False, **kwargs: object) -> int | None: ... + def _float(self, v: object, name: str, fatal: bool = False, **kwargs: object) -> float | None: ... + def _set_cookie( + self, + domain: str, + name: str, + value: str, + expire_time: int | None = None, + port: int | None = None, + path: str = "/", + secure: bool = False, + discard: bool = False, + rest: dict[str, object] = ..., + **kwargs: object, + ) -> None: ... + def _live_title(self, name: _T) -> _T: ... + def _get_cookies(self, url: str) -> LenientSimpleCookie: ... + def _apply_first_set_cookie_header(self, url_handle: Response, cookie: str) -> None: ... + @property + def _RETURN_TYPE(cls) -> str: ... + def _get_subtitles(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def extract_comments(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _get_comments(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + @staticmethod + def _merge_subtitle_items( + subtitle_list1: Iterable[Mapping[str, object]], subtitle_list2: Iterable[Mapping[str, object]] + ) -> list[dict[str, object]]: ... + @classmethod + def _merge_subtitles(cls, *dicts: dict[str, object], target: object | None = None) -> object: ... + def extract_automatic_captions(self, *args: object, **kwargs: object) -> dict[str, object]: ... + @cached_property + def _cookies_passed(self) -> bool: ... + def _mark_watched(self, *args: object, **kwargs: object) -> object: ... + @staticmethod + def _generic_id(url: str) -> str: ... + def _generic_title(self, url: str = "", webpage: str = "", *, default: str | None = None) -> str | None: ... + def _extract_chapters_helper( + self, + chapter_list: Iterable[Mapping[str, object]], + start_function: Callable[..., object], + title_function: Callable[..., object], + duration: float, + strict: bool = True, + ) -> list[dict[str, int]] | None: ... + def _extract_chapters_from_description( + self, description: str | None, duration: str | None + ) -> list[dict[str, int]] | None: ... + def mark_watched(self, *args: object, **kwargs: object) -> None: ... + def geo_verification_headers(self) -> dict[str, str]: ... 
+ def RetryManager(self, **kwargs: object) -> _RetryManager: ... + @classmethod + def extract_from_webpage(cls, ydl: YoutubeDL, url: str, webpage: str) -> Iterator[_InfoDict]: ... + def _yes_playlist( + self, + playlist_id: str, + video_id: str, + smuggled_data: object | None = None, + *, + playlist_label: str = "playlist", + video_label: str = "video", + ) -> bool: ... + def _error_or_warning(self, err: str, _count: int | None = None, _retries: int = 0, *, fatal: bool = True) -> None: ... + def _extract_generic_embeds( + self, url: str, *args: object, info_dict: _InfoDict = ..., note: str = "Extracting generic embeds", **kwargs: object + ) -> list[dict[str, object]]: ... + @classmethod + def _extract_from_webpage(cls, url: str, webpage: str) -> Iterator[_InfoDict]: ... + @classmethod + def _extract_embed_urls(cls, url: str, webpage: str) -> Iterator[str]: ... + @classmethod + def _extract_url(cls, webpage: str) -> str | None: ... + @classmethod + def __init_subclass__(cls, *, plugin_name: str | None = None, **kwargs: object) -> None: ... + + class StopExtraction(Exception): ... + class CommentsDisabled(Exception): ... + +class SearchInfoExtractor(InfoExtractor): + def _real_extract(self, query: str) -> _InfoDict: ... + def _get_n_results(self, query: str, n: int) -> list[_InfoDict]: ... + def _search_results(self, query: str) -> list[_InfoDict]: ... + +class UnsupportedURLIE(InfoExtractor): ... diff --git a/stubs/yt-dlp/yt_dlp/extractor/commonmistakes.pyi b/stubs/yt-dlp/yt_dlp/extractor/commonmistakes.pyi new file mode 100644 index 000000000000..0b33ccd54acb --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/extractor/commonmistakes.pyi @@ -0,0 +1,10 @@ +from .common import InfoExtractor + +class CommonMistakesIE(InfoExtractor): + IE_DESC: bool + +class UnicodeBOMIE(InfoExtractor): + IE_DESC: bool + +class BlobIE(InfoExtractor): + IE_DESC: bool diff --git a/stubs/yt-dlp/yt_dlp/extractor/commonprotocols.pyi b/stubs/yt-dlp/yt_dlp/extractor/commonprotocols.pyi new file mode 100644 index 000000000000..495cdd49646d --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/extractor/commonprotocols.pyi @@ -0,0 +1,10 @@ +from .common import InfoExtractor + +class RtmpIE(InfoExtractor): + IE_DESC: bool + +class MmsIE(InfoExtractor): + IE_DESC: bool + +class ViewSourceIE(InfoExtractor): + IE_DESC: bool diff --git a/stubs/yt-dlp/yt_dlp/globals.pyi b/stubs/yt-dlp/yt_dlp/globals.pyi new file mode 100644 index 000000000000..981bbb4e1745 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/globals.pyi @@ -0,0 +1,19 @@ +from collections import defaultdict +from typing import Generic, TypeVar + +_T = TypeVar("_T") + +class Indirect(Generic[_T]): + value: _T + def __init__(self, initial: _T, /) -> None: ... 
+ +postprocessors: Indirect[dict[str, object]] +extractors: Indirect[dict[str, object]] +all_plugins_loaded: Indirect[bool] +plugin_specs: Indirect[dict[str, object]] +plugin_dirs: Indirect[list[str]] +plugin_ies: Indirect[dict[str, object]] +plugin_pps: Indirect[dict[str, object]] +plugin_ies_overrides: Indirect[defaultdict[str, object]] +IN_CLI: Indirect[bool] +LAZY_EXTRACTORS: Indirect[None | bool] diff --git a/stubs/yt-dlp/yt_dlp/jsinterp.pyi b/stubs/yt-dlp/yt_dlp/jsinterp.pyi new file mode 100644 index 000000000000..6edb2107bd84 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/jsinterp.pyi @@ -0,0 +1,55 @@ +import collections +from collections.abc import Callable, Collection, Mapping +from typing import NoReturn +from typing_extensions import TypeAlias + +from yt_dlp.utils._utils import function_with_repr + +from .utils import ExtractorError + +def js_number_to_string(val: float, radix: int = 10) -> str: ... + +class JS_Undefined: ... + +class JS_Break(ExtractorError): + def __init__(self) -> None: ... + +class JS_Continue(ExtractorError): + def __init__(self) -> None: ... + +class JS_Throw(ExtractorError): + error: BaseException + def __init__(self, e: BaseException) -> None: ... + +class LocalNameSpace(collections.ChainMap[str, object]): + def __setitem__(self, key: str, value: object) -> None: ... + def __delitem__(self, key: str) -> NoReturn: ... + +class Debugger: + ENABLED: bool + @staticmethod + def write(*args: str, level: int = 100) -> None: ... + @classmethod + # Callable[[Debugger, str, object, int, ...], tuple[object, bool]] but it also accepts *args, **kwargs. + def wrap_interpreter(cls, f: Callable[..., tuple[object, bool]]) -> Callable[..., tuple[object, bool]]: ... + +_BuildFunctionReturnType: TypeAlias = Callable[[Collection[object], Mapping[str, object], int], object | None] + +class JSInterpreter: + def __init__(self, code: str, objects: Mapping[str, object] | None = None) -> None: ... + + class Exception(ExtractorError): + def __init__(self, msg: str, expr: str | None = None, *args: object, **kwargs: object) -> None: ... + + def interpret_statement( + self, stmt: str, local_vars: Mapping[str, object], allow_recursion: int, *args: object, **kwargs: object + ) -> tuple[object, bool]: ... + def interpret_expression(self, expr: str, local_vars: Mapping[str, object], allow_recursion: int) -> object: ... + def extract_object(self, objname: str, *global_stack: object) -> object: ... + def extract_function_code(self, funcname: str) -> tuple[list[str], tuple[str, str]]: ... + def extract_function(self, funcname: str, *global_stack: object) -> function_with_repr[object]: ... + def extract_function_from_code( + self, argnames: Collection[str], code: str, *global_stack: object + ) -> _BuildFunctionReturnType: ... + def call_function(self, funcname: str, *args: object) -> function_with_repr[object]: ... + def build_function(self, argnames: Collection[str], code: str, *global_stack: object) -> _BuildFunctionReturnType: ... diff --git a/stubs/yt-dlp/yt_dlp/minicurses.pyi b/stubs/yt-dlp/yt_dlp/minicurses.pyi new file mode 100644 index 000000000000..96e46ee302e3 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/minicurses.pyi @@ -0,0 +1,23 @@ +from collections.abc import Callable +from typing import TextIO +from typing_extensions import Self + +CONTROL_SEQUENCES: dict[str, str] = ... + +def format_text(text: str, f: str) -> str: ... + +class MultilinePrinterBase: + def __init__(self, stream: TextIO | None = None, lines: int = ...) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__(self, *args: object) -> None: ... + def print_at_line(self, text: str, pos: int) -> None: ... + def end(self) -> None: ... + def write(self, *text: str) -> None: ... + +class QuietMultilinePrinter(MultilinePrinterBase): ... +class MultilineLogger(MultilinePrinterBase): ... +class BreaklineStatusPrinter(MultilinePrinterBase): ... + +class MultilinePrinter(MultilinePrinterBase): + def __init__(self, stream: TextIO | None = None, lines: int = ..., preserve_output: bool = ...) -> None: ... + def lock(func: Callable[..., object]) -> Callable[..., object]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] diff --git a/stubs/yt-dlp/yt_dlp/networking/__init__.pyi b/stubs/yt-dlp/yt_dlp/networking/__init__.pyi new file mode 100644 index 000000000000..274b53643736 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/__init__.pyi @@ -0,0 +1,9 @@ +from .common import ( + HEADRequest as HEADRequest, + PATCHRequest as PATCHRequest, + PUTRequest as PUTRequest, + Request as Request, + RequestDirector as RequestDirector, + RequestHandler as RequestHandler, + Response as Response, +) diff --git a/stubs/yt-dlp/yt_dlp/networking/_helper.pyi b/stubs/yt-dlp/yt_dlp/networking/_helper.pyi new file mode 100644 index 000000000000..be90056d322c --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/_helper.pyi @@ -0,0 +1,51 @@ +import ssl +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable, Iterable, Mapping +from socket import AddressFamily, SocketKind +from typing import Any +from typing_extensions import TypeAlias + +from ..socks import sockssocket +from ..utils.networking import HTTPHeaderDict +from .common import Request, RequestHandler, Response + +def ssl_load_certs(context: ssl.SSLContext, use_certifi: bool = True) -> None: ... +def ssl_load_windows_store_certs(ssl_context: ssl.SSLContext, storename: str) -> None: ... +def make_socks_proxy_opts(socks_proxy: str) -> dict[str, object]: ... +def get_redirect_method(method: str, status: int) -> str: ... +def make_ssl_context( + verify: bool = True, + client_certificate: StrOrBytesPath | None = None, + client_certificate_key: StrOrBytesPath | None = None, + client_certificate_password: Callable[[], str | bytes | bytearray] | str | bytes | bytearray | None = None, + legacy_support: bool = False, + use_certifi: bool = True, +) -> ssl.SSLContext: ... + +class InstanceStoreMixin: + def __init__(self, **kwargs: object) -> None: ... + +def add_accept_encoding_header(headers: HTTPHeaderDict, supported_encodings: Iterable[str]) -> None: ... +def wrap_request_errors( + func: Callable[[RequestHandler, Request], Response | None], +) -> Callable[[RequestHandler, Request], None]: ... + +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer +_IPAddress: TypeAlias = tuple[ + AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes] +] + +def create_socks_proxy_socket( + dest_addr: object, + proxy_args: Mapping[str, object], + proxy_ip_addr: _IPAddress, + timeout: float | None, + source_address: _Address, +) -> sockssocket: ... +def create_connection( + address: tuple[str, int], + timeout: int = ..., + source_address: _Address | None = None, + *, + _create_socket_func: Callable[[_IPAddress, int, object], sockssocket] = ..., +) -> sockssocket: ... 
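A minimal, illustrative sketch of a typed caller for the networking/_helper stub above; the wrapper function name is an assumption of this example, and calling these private helpers directly is not something the patch prescribes.

import ssl

from yt_dlp.networking._helper import make_ssl_context

# Hypothetical helper (not from the patch): build a verifying context using
# the defaults declared in the stub (verify=True, use_certifi=True).
def strict_ssl_context() -> ssl.SSLContext:
    return make_ssl_context(verify=True, use_certifi=True)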
diff --git a/stubs/yt-dlp/yt_dlp/networking/common.pyi b/stubs/yt-dlp/yt_dlp/networking/common.pyi new file mode 100644 index 000000000000..aa7d386de4c9 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/common.pyi @@ -0,0 +1,161 @@ +import abc +import enum +import io +from collections.abc import Callable, Iterable, Mapping +from email.message import Message +from logging import Logger +from typing import IO, Any +from typing_extensions import Self, TypeAlias + +from ..cookies import YoutubeDLCookieJar +from ..utils._utils import _YDLLogger +from ..utils.networking import HTTPHeaderDict + +DEFAULT_TIMEOUT: int +_RequestData: TypeAlias = bytes | Iterable[bytes] | IO[Any] | None +_Preference: TypeAlias = Callable[[RequestHandler, Request], int] + +def register_preference(*handlers: type[RequestHandler]) -> Callable[..., object]: ... + +class RequestDirector: + handlers: dict[str, RequestHandler] + preferences: set[_Preference] + logger: Logger + verbose: bool + def __init__(self, logger: Logger, verbose: bool = False) -> None: ... + def close(self) -> None: ... + def add_handler(self, handler: RequestHandler) -> None: ... + def send(self, request: Request) -> Response: ... + +def register_rh(handler: RequestHandler) -> RequestHandler: ... + +class Features(enum.Enum): + ALL_PROXY = 1 + NO_PROXY = 2 + +class RequestHandler(abc.ABC, metaclass=abc.ABCMeta): + headers: HTTPHeaderDict | dict[str, str] + cookiejar: YoutubeDLCookieJar | None + timeout: float | int + proxies: Mapping[str, object] | dict[str, object] + source_address: str | None + verbose: bool + prefer_system_certs: bool + verify: bool + legacy_ssl_support: bool + def __init__( + self, + *, + logger: _YDLLogger, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + cookiejar: YoutubeDLCookieJar | None = None, + timeout: float | None = None, + proxies: Mapping[str, object] | None = None, + source_address: str | None = None, + verbose: bool = False, + prefer_system_certs: bool = False, + client_cert: dict[str, str | None] | None = None, + verify: bool = True, + legacy_ssl_support: bool = False, + **_, + ) -> None: ... + def validate(self, request: Request) -> None: ... + def send(self, request: Request) -> Response: ... + def close(self) -> None: ... + @property + def RH_NAME(cls) -> str: ... + @property + def RH_KEY(cls) -> str: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + +class Request: + proxies: Mapping[str, object] | dict[str, object] + extensions: Mapping[str, object] | dict[str, object] + def __init__( + self, + url: str, + data: _RequestData | None = None, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + proxies: Mapping[str, object] | None = None, + query: Mapping[str, str] | None = None, + method: str | None = None, + extensions: Mapping[str, object] | None = None, + ) -> None: ... + @property + def url(self) -> str: ... + @url.setter + def url(self, url: str) -> None: ... + @property + def method(self) -> str: ... + @method.setter + def method(self, method: str) -> None: ... + @property + def data(self) -> _RequestData | io.IOBase: ... + @data.setter + def data(self, data: _RequestData) -> None: ... + @property + def headers(self) -> HTTPHeaderDict | dict[str, str]: ... + @headers.setter + def headers(self, new_headers: Mapping[str, str] | HTTPHeaderDict) -> None: ... 
+ def update( + self, + url: str | None = None, + data: str | None = None, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + query: Mapping[str, str] | None = None, + extensions: Mapping[str, object] | None = None, + ) -> None: ... + def copy(self) -> Self: ... + +def HEADRequest( + url: str, + data: _RequestData | None = None, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + proxies: Mapping[str, object] | None = None, + query: Mapping[str, str] | None = None, + *, + method: str = "HEAD", + extensions: Mapping[str, object] | None = None, +) -> Request: ... +def PATCHRequest( + url: str, + data: _RequestData | None = None, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + proxies: Mapping[str, object] | None = None, + query: Mapping[str, str] | None = None, + *, + method: str = "PATCH", + extensions: Mapping[str, object] | None = None, +) -> Request: ... +def PUTRequest( + url: str, + data: _RequestData | None = None, + headers: HTTPHeaderDict | Mapping[str, str] | None = None, + proxies: Mapping[str, object] | None = None, + query: Mapping[str, str] | None = None, + *, + method: str = "PUT", + extensions: Mapping[str, object] | None = None, +) -> Request: ... + +class Response(io.IOBase): + fp: io.IOBase + headers: Message + status: int + url: str + reason: str | None + extensions: Mapping[str, object] | dict[str, object] + def __init__( + self, + fp: io.IOBase, + url: str, + headers: Mapping[str, str], + status: int = 200, + reason: str | None = None, + extensions: Mapping[str, object] | dict[str, object] | None = None, + ) -> None: ... + def readable(self) -> bool: ... + def read(self, amt: int | None = None) -> bytes: ... + def close(self) -> None: ... + def get_header(self, name: str, default: str | None = None) -> str | None: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi b/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi new file mode 100644 index 000000000000..e9f7eeaa703f --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi @@ -0,0 +1,36 @@ +from ..utils import YoutubeDLError +from .common import RequestHandler, Response + +class RequestError(YoutubeDLError): + handler: RequestHandler | None + cause: Exception | str | None + def __init__( + self, msg: str | None = None, cause: Exception | str | None = None, handler: RequestHandler | None = None + ) -> None: ... + +class UnsupportedRequest(RequestError): ... + +class NoSupportingHandlers(RequestError): + unsupported_errors: list[UnsupportedRequest] + unexpected_errors: list[UnsupportedRequest] + def __init__(self, unsupported_errors: list[UnsupportedRequest], unexpected_errors: list[Exception]) -> None: ... + +class TransportError(RequestError): ... + +class HTTPError(RequestError): + response: Response + status: int + reason: str | None + redirect_loop: bool + + def __init__(self, response: Response, redirect_loop: bool = False) -> None: ... + def close(self) -> None: ... + +class IncompleteRead(TransportError): + def __init__(self, partial: int, expected: int | None = None, **kwargs: object) -> None: ... + +class SSLError(TransportError): ... +class CertificateVerifyError(SSLError): ... +class ProxyError(TransportError): ... + +network_exceptions: tuple[type[RequestError], ...] 
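A rough sketch of how code typed against the exception stubs above could narrow networking errors; the describe_failure helper is hypothetical and only exercises attributes declared in the stubs.

from yt_dlp.networking.exceptions import HTTPError, RequestError

def describe_failure(err: RequestError) -> str:
    # HTTPError exposes `status` and `reason` per the stub; a plain RequestError
    # only guarantees the optional `msg` inherited from YoutubeDLError.
    if isinstance(err, HTTPError):
        return f"HTTP {err.status}: {err.reason or 'unknown reason'}"
    return err.msg or "request failed"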
diff --git a/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi b/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi new file mode 100644 index 000000000000..475be6d14817 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi @@ -0,0 +1,27 @@ +from abc import ABC +from dataclasses import dataclass +from typing_extensions import Self + +from .common import Request, RequestHandler + +@dataclass(order=True, frozen=True) +class ImpersonateTarget: + client: str | None = ... + version: str | None = ... + os: str | None = ... + os_version: str | None = ... + + def __post_init__(self) -> None: ... + def __contains__(self, target: Self) -> bool: ... + @classmethod + def from_str(cls, target: str) -> Self: ... + +class ImpersonateRequestHandler(RequestHandler, ABC): + _SUPPORTED_IMPERSONATE_TARGET_MAP: dict[ImpersonateTarget, object] = ... + + def __init__(self, *, impersonate: ImpersonateTarget | None = None, **kwargs: object) -> None: ... + @property + def supported_targets(cls) -> tuple[ImpersonateTarget, ...]: ... + def is_supported_target(self, target: ImpersonateTarget) -> bool: ... + +def impersonate_preference(rh: RequestHandler, request: Request) -> int: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/websocket.pyi b/stubs/yt-dlp/yt_dlp/networking/websocket.pyi new file mode 100644 index 000000000000..61f896c90a82 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/networking/websocket.pyi @@ -0,0 +1,9 @@ +import abc + +from .common import RequestHandler, Response + +class WebSocketResponse(Response): + def send(self, message: bytes | str) -> object: ... + def recv(self) -> object: ... + +class WebSocketRequestHandler(RequestHandler, abc.ABC, metaclass=abc.ABCMeta): ... diff --git a/stubs/yt-dlp/yt_dlp/options.pyi b/stubs/yt-dlp/yt_dlp/options.pyi new file mode 100644 index 000000000000..059795c4caa7 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/options.pyi @@ -0,0 +1,15 @@ +import optparse + +def parseOpts( + overrideArguments: object | None = None, ignore_config_files: str = "if_override" +) -> tuple[_YoutubeDLOptionParser, optparse.Values, list[str]]: ... + +class _YoutubeDLOptionParser(optparse.OptionParser): + ALIAS_DEST: str + ALIAS_TRIGGER_LIMIT: int + def __init__(self) -> None: ... + def parse_known_args( + self, args: list[str] | None = None, values: optparse.Values | None = None, strict: bool = True + ) -> tuple[optparse.Values, list[str]]: ... + +def create_parser() -> _YoutubeDLOptionParser: ... diff --git a/stubs/yt-dlp/yt_dlp/plugins.pyi b/stubs/yt-dlp/yt_dlp/plugins.pyi new file mode 100644 index 000000000000..52ae55de1120 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/plugins.pyi @@ -0,0 +1,44 @@ +import dataclasses +import importlib.abc +from _typeshed import Unused +from collections.abc import Iterator +from importlib.machinery import ModuleSpec +from pathlib import Path +from types import ModuleType + +from .globals import Indirect + +__all__ = [ + "COMPAT_PACKAGE_NAME", + "PACKAGE_NAME", + "PluginSpec", + "directories", + "load_all_plugins", + "load_plugins", + "register_plugin_spec", +] + +PACKAGE_NAME: str = "yt_dlp_plugins" +COMPAT_PACKAGE_NAME: str = "ytdlp_plugins" + +@dataclasses.dataclass +class PluginSpec: + module_name: str + suffix: str + destination: Indirect[object] + plugin_destination: Indirect[object] + +class PluginLoader(importlib.abc.Loader): + def exec_module(self, module: ModuleType) -> None: ... + +class PluginFinder(importlib.abc.MetaPathFinder): + packages: set[str] + def __init__(self, *packages: str) -> None: ... 
+ def search_locations(self, fullname: str) -> Iterator[Path]: ... + def find_spec(self, fullname: str, path: Unused | None = None, target: Unused | None = None) -> ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... + +def directories() -> list[str]: ... +def load_plugins(plugin_spec: PluginSpec) -> dict[str, type[object]]: ... +def load_all_plugins() -> None: ... +def register_plugin_spec(plugin_spec: PluginSpec) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/postprocessor/__init__.pyi b/stubs/yt-dlp/yt_dlp/postprocessor/__init__.pyi new file mode 100644 index 000000000000..a94f3c35484f --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/postprocessor/__init__.pyi @@ -0,0 +1,7 @@ +from collections.abc import Mapping + +from .common import PostProcessor + +_default_pps: Mapping[str, type[PostProcessor]] + +def get_postprocessor(key: str) -> type[PostProcessor]: ... diff --git a/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi b/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi new file mode 100644 index 000000000000..b8e75ec7d917 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi @@ -0,0 +1,28 @@ +from _typeshed import StrPath +from collections.abc import Callable +from typing import Any + +from ..extractor.common import _InfoDict +from ..YoutubeDL import YoutubeDL + +class PostProcessorMetaClass(type): + @staticmethod + def run_wrapper(func: Callable[..., object]) -> Callable[..., object]: ... + def __new__(cls, name: str, bases: tuple[type[Any], ...], attrs: dict[str, object]) -> type[Any]: ... + +class PostProcessor(metaclass=PostProcessorMetaClass): + PP_NAME: str + def __init__(self, downloader: YoutubeDL | None = None) -> None: ... + @classmethod + def pp_key(cls) -> str: ... + def to_screen(self, text: str, prefix: bool = True, *args: object, **kwargs: object) -> None: ... + def report_warning(self, text: str, *args: object, **kwargs: object) -> None: ... + def deprecation_warning(self, msg: str) -> None: ... + def deprecated_feature(self, msg: str) -> None: ... + def write_debug(self, text: str, *args: object, **kwargs: object) -> None: ... + def get_param(self, name: str, default: object | None = None, *args: object, **kwargs: object) -> object: ... + def set_downloader(self, downloader: YoutubeDL) -> None: ... + def run(self, information: _InfoDict) -> tuple[list[str], _InfoDict]: ... + def try_utime(self, path: StrPath, atime: int, mtime: int, errnote: str = "Cannot update utime of file") -> None: ... + def add_progress_hook(self, ph: Callable[[str], object]) -> None: ... + def report_progress(self, s: str) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/socks.pyi b/stubs/yt-dlp/yt_dlp/socks.pyi new file mode 100644 index 000000000000..14fe447d41af --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/socks.pyi @@ -0,0 +1,72 @@ +import socket +from _typeshed import ReadableBuffer +from collections.abc import Mapping +from typing import NamedTuple + +SOCKS4_VERSION: int +SOCKS4_REPLY_VERSION: int +SOCKS4_DEFAULT_DSTIP: bytes +SOCKS5_VERSION: int +SOCKS5_USER_AUTH_VERSION: int +SOCKS5_USER_AUTH_SUCCESS: int + +class Socks4Command: + CMD_CONNECT: int + CMD_BIND: int + +class Socks5Command(Socks4Command): + CMD_UDP_ASSOCIATE: int + +class Socks5Auth: + AUTH_NONE: int + AUTH_GSSAPI: int + AUTH_USER_PASS: int + AUTH_NO_ACCEPTABLE: int + +class Socks5AddressType: + ATYP_IPV4: int + ATYP_DOMAINNAME: int + ATYP_IPV6: int + +class ProxyError(OSError): + ERR_SUCCESS: int + def __init__(self, code: int | None = None, msg: str | None = None) -> None: ... 
+ +class InvalidVersionError(ProxyError): + def __init__(self, expected_version: int, got_version: int) -> None: ... + +class Socks4Error(ProxyError): + ERR_SUCCESS: int + CODES: Mapping[int, str] + +class Socks5Error(ProxyError): + ERR_GENERAL_FAILURE: int + CODES: Mapping[int, str] + +class ProxyType: + SOCKS4: int + SOCKS4A: int + SOCKS5: int + +class Proxy(NamedTuple): + type: ProxyType + host: str + port: int + username: str + password: str + remote_dns: bool + +class sockssocket(socket.socket): + def __init__(self, *args: object, **kwargs: object) -> None: ... + def setproxy( + self, + proxytype: ProxyType, + addr: str, + port: int, + rdns: bool = True, + username: str | None = None, + password: str | None = None, + ) -> None: ... + def recvall(self, cnt: int) -> bytes: ... + def connect(self, address: tuple[object, ...] | str | ReadableBuffer) -> None: ... + def connect_ex(self, address: tuple[object, ...] | str | ReadableBuffer) -> int: ... diff --git a/stubs/yt-dlp/yt_dlp/update.pyi b/stubs/yt-dlp/yt_dlp/update.pyi new file mode 100644 index 000000000000..bd287ac6212e --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/update.pyi @@ -0,0 +1,34 @@ +import functools +from dataclasses import dataclass + +from yt_dlp.utils._utils import NO_DEFAULT +from yt_dlp.YoutubeDL import YoutubeDL + +__all__ = ["Updater"] + +@dataclass +class UpdateInfo: + tag: str + version: str | None = ... + requested_version: str | None = ... + commit: str | None = ... + binary_name: str | None = ... + checksum: str | None = ... + +class Updater: + ydl: YoutubeDL + requested_channel: str + requested_tag: str | None + requested_repo: str | None + def __init__(self, ydl: YoutubeDL, target: str | None = None) -> None: ... + @property + def current_version(self) -> str: ... + @property + def current_commit(self) -> str: ... + def query_update(self, *, _output: bool = False) -> UpdateInfo | None: ... + def update(self, update_info: type[NO_DEFAULT] | None | UpdateInfo = ...) -> bool | None: ... + @functools.cached_property + def filename(self) -> str: ... + @functools.cached_property + def cmd(self) -> list[str]: ... + def restart(self) -> int: ... 
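A hedged sketch of driving the Updater stub above from typed code; passing an empty options dict to YoutubeDL and using it as a context manager are assumptions of this example, not something the patch prescribes.

from yt_dlp import YoutubeDL
from yt_dlp.update import Updater

def latest_available_tag() -> str | None:
    # query_update() returns UpdateInfo | None per the stub; UpdateInfo.tag is a str.
    with YoutubeDL({}) as ydl:
        info = Updater(ydl).query_update()
        return None if info is None else info.tag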
diff --git a/stubs/yt-dlp/yt_dlp/utils/__init__.pyi b/stubs/yt-dlp/yt_dlp/utils/__init__.pyi new file mode 100644 index 000000000000..ddeaddbd388a --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/__init__.pyi @@ -0,0 +1,263 @@ +from ._utils import ( + ACCENT_CHARS as ACCENT_CHARS, + BOMS as BOMS, + DATE_FORMATS as DATE_FORMATS, + DATE_FORMATS_DAY_FIRST as DATE_FORMATS_DAY_FIRST, + DATE_FORMATS_MONTH_FIRST as DATE_FORMATS_MONTH_FIRST, + DEFAULT_OUTTMPL as DEFAULT_OUTTMPL, + DOT_DESKTOP_LINK_TEMPLATE as DOT_DESKTOP_LINK_TEMPLATE, + DOT_URL_LINK_TEMPLATE as DOT_URL_LINK_TEMPLATE, + DOT_WEBLOC_LINK_TEMPLATE as DOT_WEBLOC_LINK_TEMPLATE, + ENGLISH_MONTH_NAMES as ENGLISH_MONTH_NAMES, + IDENTITY as IDENTITY, + JSON_LD_RE as JSON_LD_RE, + KNOWN_EXTENSIONS as KNOWN_EXTENSIONS, + LINK_TEMPLATES as LINK_TEMPLATES, + MEDIA_EXTENSIONS as MEDIA_EXTENSIONS, + MONTH_NAMES as MONTH_NAMES, + NO_DEFAULT as NO_DEFAULT, + NUMBER_RE as NUMBER_RE, + OUTTMPL_TYPES as OUTTMPL_TYPES, + PACKED_CODES_RE as PACKED_CODES_RE, + POSTPROCESS_WHEN as POSTPROCESS_WHEN, + STR_FORMAT_RE_TMPL as STR_FORMAT_RE_TMPL, + STR_FORMAT_TYPES as STR_FORMAT_TYPES, + TIMEZONE_NAMES as TIMEZONE_NAMES, + TV_PARENTAL_GUIDELINES as TV_PARENTAL_GUIDELINES, + US_RATINGS as US_RATINGS, + WINDOWS_VT_MODE as WINDOWS_VT_MODE, + Config as Config, + ContentTooShortError as ContentTooShortError, + DateRange as DateRange, + DownloadCancelled as DownloadCancelled, + DownloadError as DownloadError, + EntryNotInPlaylist as EntryNotInPlaylist, + ExistingVideoReached as ExistingVideoReached, + ExtractorError as ExtractorError, + FormatSorter as FormatSorter, + GeoRestrictedError as GeoRestrictedError, + GeoUtils as GeoUtils, + HTMLAttributeParser as HTMLAttributeParser, + HTMLBreakOnClosingTagParser as HTMLBreakOnClosingTagParser, + HTMLListAttrsParser as HTMLListAttrsParser, + InAdvancePagedList as InAdvancePagedList, + ISO639Utils as ISO639Utils, + ISO3166Utils as ISO3166Utils, + LazyList as LazyList, + LenientJSONDecoder as LenientJSONDecoder, + LockingUnsupportedError as LockingUnsupportedError, + MaxDownloadsReached as MaxDownloadsReached, + Namespace as Namespace, + OnDemandPagedList as OnDemandPagedList, + PagedList as PagedList, + PlaylistEntries as PlaylistEntries, + Popen as Popen, + PostProcessingError as PostProcessingError, + ReExtractInfo as ReExtractInfo, + RegexNotFoundError as RegexNotFoundError, + RejectedVideoReached as RejectedVideoReached, + RetryManager as RetryManager, + SameFileError as SameFileError, + ThrottledDownload as ThrottledDownload, + UnavailableVideoError as UnavailableVideoError, + UnsupportedError as UnsupportedError, + UserNotLive as UserNotLive, + XAttrMetadataError as XAttrMetadataError, + XAttrUnavailableError as XAttrUnavailableError, + YoutubeDLError as YoutubeDLError, + age_restricted as age_restricted, + args_to_str as args_to_str, + ass_subtitles_timecode as ass_subtitles_timecode, + base_url as base_url, + bool_or_none as bool_or_none, + bug_reports_message as bug_reports_message, + bytes_to_long as bytes_to_long, + cached_method as cached_method, + caesar as caesar, + check_executable as check_executable, + classproperty as classproperty, + clean_html as clean_html, + clean_podcast_url as clean_podcast_url, + cli_bool_option as cli_bool_option, + cli_configuration_args as cli_configuration_args, + cli_option as cli_option, + cli_valueless_option as cli_valueless_option, + date_formats as date_formats, + date_from_str as date_from_str, + datetime_add_months as datetime_add_months, + datetime_from_str as 
datetime_from_str, + datetime_round as datetime_round, + decode_base_n as decode_base_n, + decode_packed_codes as decode_packed_codes, + deprecation_warning as deprecation_warning, + detect_exe_version as detect_exe_version, + determine_ext as determine_ext, + determine_file_encoding as determine_file_encoding, + determine_protocol as determine_protocol, + dfxp2srt as dfxp2srt, + download_range_func as download_range_func, + encode_base_n as encode_base_n, + encode_compat_str as encode_compat_str, + encode_data_uri as encode_data_uri, + encodeArgument as encodeArgument, + error_to_str as error_to_str, + escapeHTML as escapeHTML, + expand_path as expand_path, + ext2mimetype as ext2mimetype, + extract_attributes as extract_attributes, + extract_basic_auth as extract_basic_auth, + extract_timezone as extract_timezone, + filesize_from_tbr as filesize_from_tbr, + filter_dict as filter_dict, + find_available_port as find_available_port, + find_xpath_attr as find_xpath_attr, + fix_xml_ampersands as fix_xml_ampersands, + float_or_none as float_or_none, + format_bytes as format_bytes, + format_decimal_suffix as format_decimal_suffix, + format_field as format_field, + formatSeconds as formatSeconds, + frange as frange, + function_with_repr as function_with_repr, + get_compatible_ext as get_compatible_ext, + get_domain as get_domain, + get_element_by_attribute as get_element_by_attribute, + get_element_by_class as get_element_by_class, + get_element_by_id as get_element_by_id, + get_element_html_by_attribute as get_element_html_by_attribute, + get_element_html_by_class as get_element_html_by_class, + get_element_html_by_id as get_element_html_by_id, + get_element_text_and_html_by_tag as get_element_text_and_html_by_tag, + get_elements_by_attribute as get_elements_by_attribute, + get_elements_by_class as get_elements_by_class, + get_elements_html_by_attribute as get_elements_html_by_attribute, + get_elements_html_by_class as get_elements_html_by_class, + get_elements_text_and_html_by_attribute as get_elements_text_and_html_by_attribute, + get_exe_version as get_exe_version, + get_executable_path as get_executable_path, + get_filesystem_encoding as get_filesystem_encoding, + get_system_config_dirs as get_system_config_dirs, + get_user_config_dirs as get_user_config_dirs, + get_windows_version as get_windows_version, + hyphenate_date as hyphenate_date, + int_or_none as int_or_none, + iri_to_uri as iri_to_uri, + is_html as is_html, + is_iterable_like as is_iterable_like, + is_outdated_version as is_outdated_version, + is_path_like as is_path_like, + join_nonempty as join_nonempty, + js_to_json as js_to_json, + jwt_decode_hs256 as jwt_decode_hs256, + jwt_encode_hs256 as jwt_encode_hs256, + limit_length as limit_length, + locked_file as locked_file, + long_to_bytes as long_to_bytes, + lookup_unit_table as lookup_unit_table, + lowercase_escape as lowercase_escape, + make_archive_id as make_archive_id, + make_dir as make_dir, + match_filter_func as match_filter_func, + match_str as match_str, + merge_dicts as merge_dicts, + merge_headers as merge_headers, + mimetype2ext as mimetype2ext, + month_by_abbreviation as month_by_abbreviation, + month_by_name as month_by_name, + multipart_encode as multipart_encode, + netrc_from_content as netrc_from_content, + number_of_digits as number_of_digits, + ohdave_rsa_encrypt as ohdave_rsa_encrypt, + orderedSet as orderedSet, + orderedSet_from_options as orderedSet_from_options, + parse_age_limit as parse_age_limit, + parse_bitrate as parse_bitrate, + parse_bytes as 
parse_bytes, + parse_codecs as parse_codecs, + parse_count as parse_count, + parse_dfxp_time_expr as parse_dfxp_time_expr, + parse_duration as parse_duration, + parse_filesize as parse_filesize, + parse_http_range as parse_http_range, + parse_iso8601 as parse_iso8601, + parse_list as parse_list, + parse_m3u8_attributes as parse_m3u8_attributes, + parse_qs as parse_qs, + parse_resolution as parse_resolution, + partial_application as partial_application, + pkcs1pad as pkcs1pad, + preferredencoding as preferredencoding, + prepend_extension as prepend_extension, + qualities as qualities, + random_birthday as random_birthday, + random_uuidv4 as random_uuidv4, + read_batch_urls as read_batch_urls, + read_stdin as read_stdin, + remove_end as remove_end, + remove_quotes as remove_quotes, + remove_start as remove_start, + remove_terminal_sequences as remove_terminal_sequences, + render_table as render_table, + replace_extension as replace_extension, + rot47 as rot47, + sanitize_filename as sanitize_filename, + sanitize_open as sanitize_open, + sanitize_path as sanitize_path, + sanitize_url as sanitize_url, + scale_thumbnails_to_max_format_width as scale_thumbnails_to_max_format_width, + setproctitle as setproctitle, + shell_quote as shell_quote, + smuggle_url as smuggle_url, + srt_subtitles_timecode as srt_subtitles_timecode, + str_or_none as str_or_none, + str_to_int as str_to_int, + strftime_or_none as strftime_or_none, + strip_jsonp as strip_jsonp, + strip_or_none as strip_or_none, + subtitles_filename as subtitles_filename, + supports_terminal_sequences as supports_terminal_sequences, + system_identifier as system_identifier, + time_seconds as time_seconds, + timeconvert as timeconvert, + timetuple_from_msec as timetuple_from_msec, + to_high_limit_path as to_high_limit_path, + truncate_string as truncate_string, + try_call as try_call, + try_get as try_get, + unescapeHTML as unescapeHTML, + unified_strdate as unified_strdate, + unified_timestamp as unified_timestamp, + unsmuggle_url as unsmuggle_url, + update_url as update_url, + update_url_query as update_url_query, + uppercase_escape as uppercase_escape, + url_basename as url_basename, + url_or_none as url_or_none, + urlencode_postdata as urlencode_postdata, + urlhandle_detect_ext as urlhandle_detect_ext, + urljoin as urljoin, + urshift as urshift, + variadic as variadic, + version_tuple as version_tuple, + windows_enable_vt_mode as windows_enable_vt_mode, + write_json_file as write_json_file, + write_string as write_string, + write_xattr as write_xattr, + xpath_attr as xpath_attr, + xpath_element as xpath_element, + xpath_text as xpath_text, + xpath_with_ns as xpath_with_ns, + ytdl_is_updateable as ytdl_is_updateable, +) +from .traversal import ( + dict_get as dict_get, + find_element as find_element, + find_elements as find_elements, + get_first as get_first, + require as require, + subs_list_to_dict as subs_list_to_dict, + traverse_obj as traverse_obj, + trim_str as trim_str, + unpack as unpack, + value as value, +) diff --git a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi new file mode 100644 index 000000000000..3bf98c6ed792 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi @@ -0,0 +1,57 @@ +import types +import urllib.request +from _typeshed import Unused +from asyncio.events import AbstractEventLoop +from collections.abc import Awaitable, Callable, Collection, Mapping +from http.client import HTTPResponse +from http.cookiejar import CookieJar +from subprocess import Popen +from typing import Any, 
AnyStr, Generic, TypeVar +from typing_extensions import Self + +has_certifi: bool +has_websockets: bool +_T = TypeVar("_T") + +class WebSocketsWrapper(Generic[_T]): + pool: _T | None + loop: AbstractEventLoop + conn: object + def __init__(self, url: str, headers: Mapping[str, str] | None = None, connect: bool = True, **ws_kwargs: object) -> None: ... + def __enter__(self) -> Self: ... + def send(self, *args: object) -> None: ... + def recv(self, *args: object) -> bytes: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None + ) -> None: ... + @staticmethod + def run_with_loop(main: Awaitable[_T], loop: AbstractEventLoop) -> _T: ... + +def load_plugins(name: str, suffix: str, namespace: dict[str, object]) -> dict[str, type[object]]: ... +def traverse_dict(dictn: Mapping[str, object], keys: Collection[str], casesense: bool = True) -> object: ... +def decode_base(value: str, digits: str) -> int: ... +def platform_name() -> str: ... +def get_subprocess_encoding() -> str: ... +def register_socks_protocols() -> None: ... +def handle_youtubedl_headers(headers: dict[str, object]) -> dict[str, object]: ... +def request_to_url(req: urllib.request.Request | str) -> str: ... +def sanitized_Request(url: str, *args: object, **kwargs: object) -> urllib.request.Request: ... + +class YoutubeDLHandler(urllib.request.AbstractHTTPHandler): + def __init__(self, params: Mapping[str, object], *args: object, **kwargs: object) -> None: ... + +YoutubeDLHTTPSHandler = YoutubeDLHandler + +class YoutubeDLCookieProcessor(urllib.request.HTTPCookieProcessor): + def __init__(self, cookiejar: CookieJar | None = None) -> None: ... + def http_response(self, request: urllib.request.Request, response: HTTPResponse) -> HTTPResponse: ... + https_request: Callable[[urllib.request.HTTPCookieProcessor, urllib.request.Request], HTTPResponse] # type: ignore[assignment] + https_response = http_response + +def make_HTTPS_handler(params: Mapping[str, object], **kwargs: object) -> YoutubeDLHTTPSHandler: ... +def process_communicate_or_kill(p: Popen[Any], *args: object, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... +def encodeFilename(s: str, for_subprocess: Unused = False) -> bytes: ... +def decodeFilename(b: bytes, for_subprocess: Unused = False) -> str: ... +def decodeArgument(b: _T) -> _T: ... +def decodeOption(optval: AnyStr) -> str: ... +def error_to_compat_str(err: object) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi new file mode 100644 index 000000000000..d47294f88170 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -0,0 +1,686 @@ +import enum +import html.parser +import json +import netrc +import subprocess +import sys +import types +from _typeshed import ( + ExcInfo, + FileDescriptorLike, + FileDescriptorOrPath, + OpenBinaryMode, + OpenTextMode, + ReadableBuffer, + StrOrBytesPath, + Unused, +) +from collections import deque +from collections.abc import Callable, Collection, Hashable, Iterable, Iterator, Mapping, Sequence +from datetime import date, datetime, timedelta +from functools import cache +from optparse import Values +from os import PathLike +from re import Pattern +from typing import IO, Any, AnyStr, BinaryIO, Generic, NamedTuple, TextIO, TypeVar, overload +from typing_extensions import Self, TypeAlias +from xml.etree import ElementTree as ET + +from yt_dlp.networking import Response + +from .. 
import _Params +from ..extractor.common import InfoExtractor, _InfoDict +from ..options import _YoutubeDLOptionParser +from ..YoutubeDL import YoutubeDL + +_T = TypeVar("_T") + +class NO_DEFAULT: ... + +def IDENTITY(x: _T) -> _T: ... + +ENGLISH_MONTH_NAMES: Sequence[str] +MONTH_NAMES: Mapping[str, Sequence[str]] +TIMEZONE_NAMES: Mapping[str, str] +ACCENT_CHARS: Mapping[str, str] +DATE_FORMATS: Sequence[str] +DATE_FORMATS_DAY_FIRST: Sequence[str] +DATE_FORMATS_MONTH_FIRST: Sequence[str] +PACKED_CODES_RE: str +JSON_LD_RE: str +NUMBER_RE: str + +@cache +def preferredencoding() -> str: ... +def write_json_file(obj: Any, fn: str) -> None: ... +def partial_application(func: Callable[..., object]) -> Callable[..., object]: ... +def find_xpath_attr(node: ET.ElementTree, xpath: str, key: str, val: str | None = None) -> ET.Element | None: ... +def xpath_with_ns(path: str, ns_map: Mapping[str, str]) -> str: ... +def xpath_element( + node: ET.ElementTree, xpath: str, name: str | None = None, fatal: bool = False, default: ET.Element | type[NO_DEFAULT] = ... +) -> ET.Element | None: ... +def xpath_text( + node: ET.ElementTree, xpath: str, name: str | None = None, fatal: bool = False, default: str | type[NO_DEFAULT] = ... +) -> str | None: ... +def xpath_attr( + node: ET.ElementTree, + xpath: str, + key: str, + name: str | None = None, + fatal: bool = False, + default: str | type[NO_DEFAULT] = ..., +) -> str | None: ... +def get_element_by_id(id: str, html: str, **kwargs: object) -> str | None: ... +def get_element_html_by_id(id: str, html: str, **kwargs: object) -> str | None: ... +def get_element_by_class(class_name: str, html: str) -> str: ... +def get_element_html_by_class(class_name: str, html: str) -> str: ... +def get_element_by_attribute(attribute: str, value: str, html: str, **kwargs: object) -> str: ... +def get_element_html_by_attribute(attribute: str, value: str, html: str, **kargs: object) -> list[str]: ... +def get_elements_by_class(class_name: str, html: str, **kargs: object) -> list[str]: ... +def get_elements_html_by_class(class_name: str, html: str) -> list[str]: ... +def get_elements_by_attribute(*args: object, **kwargs: object) -> list[str]: ... +def get_elements_html_by_attribute(*args: object, **kwargs: object) -> list[str]: ... +def get_elements_text_and_html_by_attribute( + attribute: str, value: str, html: str, *, tag: str = "[\\w:.-]+", escape_value: bool = True +) -> Iterator[str]: ... + +class HTMLBreakOnClosingTagParser(html.parser.HTMLParser): + class HTMLBreakOnClosingTagException(Exception): ... + tagstack: deque[object] + def __init__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *_: object) -> None: ... + def close(self) -> None: ... + def handle_starttag(self, tag: str, _: object) -> None: ... + def handle_endtag(self, tag: str) -> None: ... + +def get_element_text_and_html_by_tag(tag: str, html: str) -> str: ... + +class HTMLAttributeParser(html.parser.HTMLParser): + attrs: dict[str, str | None] + def __init__(self) -> None: ... + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + +class HTMLListAttrsParser(html.parser.HTMLParser): + items: list[dict[str, str | None]] + def __init__(self) -> None: ... + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + def handle_endtag(self, tag: str) -> None: ... + +def extract_attributes(html_element: str) -> dict[str, str]: ... +def parse_list(webpage: str) -> list[dict[str, str | None]]: ... 
+def clean_html(html: str | None) -> str | None: ... + +class LenientJSONDecoder(json.JSONDecoder): + def __init__( + self, + *args: object, + transform_source: Callable[[str], str] | None = None, + ignore_extra: bool = False, + close_objects: int = 0, + **kwargs: object, + ) -> None: ... + def decode(self, s: str) -> Any: ... # type: ignore[override] + +@overload +def sanitize_open(filename: FileDescriptorOrPath, open_mode: OpenBinaryMode) -> BinaryIO: ... +@overload +def sanitize_open(filename: FileDescriptorOrPath, open_mode: OpenTextMode) -> TextIO: ... +def timeconvert(timestr: str) -> str: ... +def sanitize_filename(s: str, restricted: bool = False, is_id: bool | type[NO_DEFAULT] = ...) -> str: ... +def sanitize_path(s: str, force: bool = False) -> str: ... +def sanitize_url(url: str, *, scheme: str = "http") -> str: ... +def extract_basic_auth(url: str) -> tuple[str, str | None]: ... +def expand_path(s: str) -> str: ... +def orderedSet(iterable: Iterable[_T], *, lazy: bool = False) -> Iterator[_T]: ... +def unescapeHTML(s: str | None) -> str | None: ... +def escapeHTML(text: str) -> str: ... + +class netrc_from_content(netrc.netrc): + def __init__(self, content: str) -> None: ... + +def encodeArgument(s: str) -> str: ... + +class _timetuple(NamedTuple): + hours: tuple[int, int] + minutes: tuple[int, int] + seconds: tuple[int, int] + milliseconds: tuple[int, int] + +def timetuple_from_msec(msec: int) -> _timetuple: ... +def formatSeconds(secs: int, delim: str = ":", msec: bool = False) -> str: ... +def bug_reports_message(before: str = ";") -> None: ... + +class YoutubeDLError(Exception): + msg: str | None + def __init__(self, msg: str | None = None) -> None: ... + +class ExtractorError(YoutubeDLError): + orig_msg: object + traceback: types.TracebackType | None + expected: object + cause: object + video_id: str + ie: InfoExtractor + exc_info: ExcInfo + def __init__( + self, + msg: str, + tb: types.TracebackType | None = None, + expected: bool = False, + cause: object | None = None, + video_id: str | None = None, + ie: InfoExtractor | None = None, + ) -> None: ... + def format_traceback(self) -> str: ... + msg: str | None + args: tuple[object, ...] + def __setattr__(self, name: str, value: object) -> None: ... + +class UnsupportedError(ExtractorError): + url: str + def __init__(self, url: str) -> None: ... + +class RegexNotFoundError(ExtractorError): ... + +class GeoRestrictedError(ExtractorError): + countries: str | None + def __init__(self, msg: str, countries: str | None = None, **kwargs: object) -> None: ... + +class UserNotLive(ExtractorError): + def __init__(self, msg: str | None = None, **kwargs: object) -> None: ... + +class DownloadError(YoutubeDLError): + exc_info: ExcInfo + def __init__(self, msg: str, exc_info: ExcInfo | None = None) -> None: ... + +class EntryNotInPlaylist(YoutubeDLError): + msg: str + +class SameFileError(YoutubeDLError): + msg: str + def __init__(self, filename: str | None = None) -> None: ... + +class PostProcessingError(YoutubeDLError): ... + +class DownloadCancelled(YoutubeDLError): + msg: str + +class ExistingVideoReached(DownloadCancelled): + msg: str + +class RejectedVideoReached(DownloadCancelled): + msg: str + +class MaxDownloadsReached(DownloadCancelled): + msg: str + +class ReExtractInfo(YoutubeDLError): + expected: bool + def __init__(self, msg: str, expected: bool = False) -> None: ... + +class ThrottledDownload(ReExtractInfo): + msg: str + def __init__(self) -> None: ... 
+
+class UnavailableVideoError(YoutubeDLError):
+    msg: str
+    def __init__(self, err: str | None = None) -> None: ...
+
+class ContentTooShortError(YoutubeDLError):
+    downloaded: int
+    expected: int
+    def __init__(self, downloaded: int, expected: int) -> None: ...
+
+class XAttrMetadataError(YoutubeDLError):
+    code: int | None
+    msg: str | None
+    reason: str
+    def __init__(self, code: int | None = None, msg: str = "Unknown error") -> None: ...
+
+class XAttrUnavailableError(YoutubeDLError): ...
+
+def is_path_like(f: object) -> bool: ...
+def extract_timezone(date_str: str, default: type[NO_DEFAULT] | object | None = None) -> tuple[timedelta, str]: ...
+def parse_iso8601(date_str: str, delimiter: str = "T", timezone: type[NO_DEFAULT] | object | None = None) -> int | None: ...
+def date_formats(day_first: bool = True) -> list[str]: ...
+def unified_strdate(date_str: str, day_first: bool = True) -> str | None: ...
+def unified_timestamp(date_str: str, day_first: bool = True) -> int | None: ...
+def determine_ext(url: str, default_ext: str = "unknown_video") -> str: ...
+def subtitles_filename(filename: str, sub_lang: str, sub_format: str, expected_real_ext: str | None = None) -> str: ...
+def datetime_from_str(date_str: str, precision: str = "auto", format: str = "%Y%m%d") -> datetime: ...
+def date_from_str(date_str: str, format: str = "%Y%m%d", strict: bool = False) -> date: ...
+def datetime_add_months(dt_: datetime, months: int) -> datetime: ...
+def datetime_round(dt_: datetime, precision: str = "day") -> datetime: ...
+def hyphenate_date(date_str: str) -> str: ...
+
+class DateRange:
+    start: date
+    end: date
+    def __init__(self, start: date | None = None, end: date | None = None) -> None: ...
+    @classmethod
+    def day(cls, day: date) -> Self: ...
+    def __contains__(self, date: date) -> bool: ...
+    def __eq__(self, other: object) -> bool: ...
+
+def system_identifier() -> str: ...
+def get_windows_version() -> tuple[str, ...]: ...
+def write_string(s: str, out: TextIO | None = None, encoding: str | None = None) -> None: ...
+def deprecation_warning(
+    msg: str, *, printer: Callable[..., object] | None = None, stacklevel: int = 0, **kwargs: object
+) -> None: ...
+
+class LockingUnsupportedError(OSError):
+    msg: str
+    def __init__(self) -> None: ...
+
+class locked_file:
+    locked: bool
+    f: TextIO
+    def __init__(
+        self, filename: AnyStr, mode: OpenTextMode | OpenBinaryMode, block: bool = True, encoding: str | None = None
+    ) -> None: ...
+    def __enter__(self) -> Self: ...
+    def unlock(self) -> None: ...
+    def __exit__(self, *_: object) -> None: ...
+    open = __enter__
+    close = __exit__
+    def __getattr__(self, attr: str) -> object: ...
+    def __iter__(self) -> Iterator[str]: ...
+
+def get_filesystem_encoding() -> str: ...
+def shell_quote(args: str | Collection[str], *, shell: bool = False) -> str: ...
+def smuggle_url(url: str, data: object) -> str: ...
+def unsmuggle_url(smug_url: str, default: object | None = None) -> tuple[str, object]: ...
+def format_decimal_suffix(num: float, fmt: str = "%d%s", *, factor: int = 1000) -> str: ...
+def format_bytes(bytes: int) -> str: ...
+def lookup_unit_table(unit_table: Mapping[str, int], s: str, strict: bool = False) -> float: ...
+def parse_bytes(s: str) -> int: ...
+def parse_filesize(s: str | None) -> int | None: ...
+def parse_count(s: str | None) -> int | None: ...
+def parse_resolution(s: str, *, lenient: bool = False) -> dict[str, int]: ...
+def parse_bitrate(s: str) -> int: ...
+def month_by_name(name: str, lang: str = "en") -> int | None: ...
+def month_by_abbreviation(abbrev: str) -> int | None: ...
+def fix_xml_ampersands(xml_str: str) -> str: ...
+def setproctitle(title: str) -> None: ...
+def remove_start(s: str, start: str) -> str: ...
+def remove_end(s: str, end: str) -> str: ...
+def remove_quotes(s: str) -> str: ...
+def get_domain(url: str) -> str | None: ...
+def url_basename(url: str) -> str: ...
+def base_url(url: str) -> str: ...
+def urljoin(base: str, path: str) -> str: ...
+def int_or_none(
+    v: object, scale: int = 1, default: int | None = None, get_attr: str | None = None, invscale: int = 1, base: int | None = None
+) -> int | None: ...
+def str_or_none(v: object, default: str | None = None) -> str | None: ...
+def str_to_int(int_str: str) -> int | None: ...
+def float_or_none(v: object, scale: int = 1, invscale: int = 1, default: float | None = None) -> float | None: ...
+def bool_or_none(v: object, default: bool | None = None) -> bool | None: ...
+def strip_or_none(v: object, default: str | None = None) -> str | None: ...
+def url_or_none(url: object) -> str | None: ...
+def strftime_or_none(timestamp: int, date_format: str = "%Y%m%d", default: str | None = None) -> str | None: ...
+def parse_duration(s: str | None) -> float | None: ...
+def prepend_extension(filename: str, ext: str, expected_real_ext: str | None = None) -> str: ...
+def replace_extension(filename: str, ext: str, expected_real_ext: str | None = None) -> str: ...
+def check_executable(exe: str, args: Iterable[str] = ...) -> str | None: ...
+def detect_exe_version(output: str, version_re: str | Pattern[str] | None = None, unrecognized: str = "present") -> str: ...
+def get_exe_version(
+    exe: str,
+    args: Iterable[str] = ["--version"],
+    version_re: str | None = None,
+    unrecognized: Iterable[str] = ("present", "broken"),
+) -> str: ...
+def frange(start: int = 0, stop: int | None = None, step: int = 1) -> Iterator[float]: ...
+
+class LazyList(Sequence[_T]):
+    def __init__(self, iterable: Iterable[_T], *, reverse: bool = False, _cache: list[object] | None = None) -> None: ...
+    def __iter__(self) -> Iterator[_T]: ...
+    def exhaust(self) -> list[_T]: ...
+    @overload
+    def __getitem__(self, idx: int, /) -> _T: ...
+    @overload
+    def __getitem__(self, idx: slice, /) -> list[_T]: ...
+    def __bool__(self) -> bool: ...
+    def __len__(self) -> int: ...
+    def __reversed__(self) -> Iterator[_T]: ...
+    def __copy__(self) -> Self: ...
+
+class PagedList:
+    def __len__(self) -> int: ...
+    def __init__(self, pagefunc: Callable[[int], Iterator[object]], pagesize: int, use_cache: bool = True) -> None: ...
+    def getpage(self, pagenum: int) -> list[object]: ...
+    def getslice(self, start: int = 0, end: int | None = None) -> list[object]: ...
+    @overload
+    def __getitem__(self, idx: int, /) -> object: ...
+    @overload
+    def __getitem__(self, idx: slice, /) -> list[object]: ...
+    def __bool__(self) -> bool: ...
+
+class OnDemandPagedList(PagedList): ...
+
+class InAdvancePagedList(PagedList):
+    def __init__(self, pagefunc: Callable[[int], Iterator[object]], pagecount: int, pagesize: int) -> None: ...
+
+class PlaylistEntries:
+    MissingEntry: object
+    is_exhausted: bool
+    ydl: YoutubeDL
+    is_incomplete: bool
+    def __init__(self, ydl: YoutubeDL, info_dict: _InfoDict) -> None: ...
+    PLAYLIST_ITEMS_RE: Pattern[str]
+    @classmethod
+    def parse_playlist_items(cls, string: str) -> slice | int: ...
+    def get_requested_items(self) -> Iterator[tuple[int, object]]: ...
+    def get_full_count(self) -> int | None: ...
+    def __getitem__(self, idx: int) -> Iterator[tuple[int, object]]: ...
+ def __len__(self) -> int: ... + +_K = TypeVar("_K") +_V = TypeVar("_V") + +def uppercase_escape(s: str) -> str: ... +def lowercase_escape(s: str) -> str: ... +def parse_qs(url: str, **kwargs: object) -> dict[AnyStr, list[AnyStr]]: ... +def read_batch_urls(batch_fd: FileDescriptorLike) -> list[str]: ... +def urlencode_postdata(*args: object, **kargs: object) -> bytes: ... +def update_url(url: str, *, query_update: Mapping[str, str] | None = None, **kwargs: object) -> str: ... +def update_url_query(url: str, query: Mapping[str, str]) -> str: ... +def multipart_encode(data: Mapping[AnyStr, AnyStr], boundary: str | None = None) -> tuple[bytes, str]: ... +def is_iterable_like( + x: object, allowed_types: Collection[type[Any]] = ..., blocked_types: Collection[type[Any]] | type[NO_DEFAULT] = ... +) -> bool: ... +def variadic(x: _T, allowed_types: Collection[type[Any]] | type[NO_DEFAULT] = ...) -> _T | tuple[_T]: ... +def try_call( + *funcs: Callable[..., _T], + expected_type: type[_T] | None = None, + args: Iterable[object] = ..., + kwargs: Mapping[Hashable, object] = ..., +) -> _T | None: ... +def try_get( + src: object, getter: Callable[..., _T] | Collection[Callable[..., _T]], expected_type: type[_T] | None = None +) -> _T: ... +def filter_dict(dct: Mapping[_K, _V], cndn: Callable[[_K, _V], bool] = ...) -> dict[_K, _V]: ... +def merge_dicts(*dicts: Mapping[Hashable, object]) -> dict[Hashable, object]: ... +def encode_compat_str(string: str, encoding: str = ..., errors: str = "strict") -> str: ... + +US_RATINGS: Mapping[str, int] +TV_PARENTAL_GUIDELINES: Mapping[str, int] + +def parse_age_limit(s: int) -> int | None: ... +def strip_jsonp(code: str) -> str: ... +def js_to_json(code: str, vars: Mapping[str, object] = ..., *, strict: bool = False) -> str: ... +def qualities(quality_ids: Sequence[int]) -> Callable[[int], int]: ... + +POSTPROCESS_WHEN: tuple[str, ...] +DEFAULT_OUTTMPL: Mapping[str, str] +OUTTMPL_TYPES: Mapping[str, str | None] +STR_FORMAT_RE_TMPL: str +STR_FORMAT_TYPES: str + +def limit_length(s: str, length: int) -> str: ... +def version_tuple(v: str) -> tuple[int, ...]: ... +def is_outdated_version(version: str, limit: str, assume_new: bool = True) -> bool: ... +def ytdl_is_updateable() -> bool: ... +def args_to_str(args: str | Collection[str]) -> str: ... +def error_to_str(err: BaseException) -> str: ... +def mimetype2ext(mt: str, default: str | type[NO_DEFAULT] = ...) -> str: ... +def ext2mimetype(ext_or_url: str | None) -> str: ... +def parse_codecs(codecs_str: str) -> dict[str, str]: ... +def get_compatible_ext( + *, + vcodecs: Collection[str], + acodecs: Collection[str], + vexts: Collection[str], + aexts: Collection[str], + preferences: Sequence[str] | None = None, +) -> str: ... +def urlhandle_detect_ext(url_handle: Response, default: str | type[NO_DEFAULT] = ...) -> str | None: ... +def encode_data_uri(data: ReadableBuffer, mime_type: str) -> str: ... +def age_restricted(content_limit: int | None, age_limit: int | None) -> bool: ... + +BOMS: Collection[tuple[bytes, str]] + +def is_html(first_bytes: bytes) -> bool: ... +def determine_protocol(info_dict: _InfoDict) -> str: ... +def render_table( + header_row: Iterable[str], data: Iterable[str], delim: bool = False, extra_gap: int = 0, hide_empty: bool = False +) -> str: ... +def match_str(filter_str: str, dct: Mapping[str, object], incomplete: bool = False) -> bool: ... 
+def match_filter_func( + filters: Collection[str] | str, breaking_filters: Collection[str] | str | None = None +) -> Callable[..., str | type[NO_DEFAULT] | None]: ... + +class download_range_func: + def __init__( + self, chapters: Iterable[str | Pattern[str]], ranges: Iterable[tuple[int, int]], from_info: bool = False + ) -> None: ... + def __call__(self, info_dict: _InfoDict, ydl: YoutubeDL) -> Iterator[dict[str, object]]: ... + def __eq__(self, other: object) -> bool: ... + +def parse_dfxp_time_expr(time_expr: str | None) -> int | None: ... +def srt_subtitles_timecode(seconds: float) -> str: ... +def ass_subtitles_timecode(seconds: float) -> str: ... +def dfxp2srt(dfxp_data: bytes) -> str: ... +def cli_option(params: _Params, command_option: str, param: str, separator: str | None = None) -> object: ... +def cli_bool_option( + params: _Params, + command_option: str, + param: bool | None, + true_value: str = "true", + false_value: str = "false", + separator: str | None = None, +) -> object: ... +def cli_valueless_option(params: _Params, command_option: str, param: str, expected_value: bool = True) -> object: ... +def cli_configuration_args( + argdict: dict[str, object], keys: Iterable[str], default: object = ..., use_compat: bool = True +) -> object: ... + +class ISO639Utils: + @classmethod + def short2long(cls, code: str) -> str | None: ... + @classmethod + def long2short(cls, code: str) -> str | None: ... + +class ISO3166Utils: + @classmethod + def short2full(cls, code: str) -> str | None: ... + +class GeoUtils: + @classmethod + def random_ipv4(cls, code_or_block: str) -> str | None: ... + +def long_to_bytes(n: int, blocksize: int = 0) -> bytes: ... +def bytes_to_long(s: bytes) -> int: ... +def ohdave_rsa_encrypt(data: ReadableBuffer, exponent: float, modulus: float | None) -> str: ... +def pkcs1pad(data: Sequence[int], length: int) -> list[int]: ... +def encode_base_n(num: int, n: int | None = None, table: str | None = None) -> str: ... +def decode_base_n(string: str, n: int | None = None, table: str | None = None) -> int: ... +def decode_packed_codes(code: str) -> str: ... +def caesar(s: str, alphabet: str, shift: int) -> str: ... +def rot47(s: str) -> str: ... +def parse_m3u8_attributes(attrib: str) -> dict[str, str]: ... +def urshift(val: int, n: int) -> int: ... +def write_xattr(path: FileDescriptorOrPath, key: str, value: str) -> None: ... +def random_birthday(year_field: Hashable, month_field: Hashable, day_field: Hashable) -> dict[Hashable, str]: ... +def find_available_port(interface: str = "") -> object | None: ... + +DOT_URL_LINK_TEMPLATE: str +DOT_WEBLOC_LINK_TEMPLATE: str +DOT_DESKTOP_LINK_TEMPLATE: str +LINK_TEMPLATES: Mapping[str, str] + +def iri_to_uri(iri: str) -> str: ... +def to_high_limit_path(path: PathLike[AnyStr]) -> str: ... +def format_field( + obj: object, + field: str | Collection[str] | None = None, + template: str = "%s", + ignore: type[NO_DEFAULT] | str | Collection[str] = ..., + default: str = "", + func: Callable[[object], object] = ..., +) -> str: ... +def clean_podcast_url(url: str) -> str: ... +def random_uuidv4() -> str: ... +def make_dir(path: PathLike[AnyStr], to_screen: Callable[[str], object] | None = None) -> bool: ... +def get_executable_path() -> str: ... +def get_user_config_dirs(package_name: str) -> Iterator[str]: ... +def get_system_config_dirs(package_name: str) -> Iterator[str]: ... +def time_seconds(**kwargs: float) -> int: ... +def jwt_encode_hs256(payload_data: object, key: str, headers: Mapping[str, object] = ...) -> bytes: ... 
+def jwt_decode_hs256(jwt: str) -> object: ... + +WINDOWS_VT_MODE: bool | None + +def supports_terminal_sequences(stream: IO[Any]) -> bool: ... +def windows_enable_vt_mode() -> None: ... +def remove_terminal_sequences(string: str) -> str: ... +def number_of_digits(number: int) -> int: ... +def join_nonempty(*values: str, delim: str = "-", from_dict: Mapping[str, object] | None = None) -> str: ... +def scale_thumbnails_to_max_format_width( + formats: Iterable[Mapping[str, object]], thumbnails: Iterable[Mapping[str, object]], url_width_re: str | Pattern[str] +) -> list[dict[str, object]]: ... +def parse_http_range(range: str | None) -> tuple[int | None, int | None, int | None]: ... +def read_stdin(what: str) -> TextIO | object: ... +def determine_file_encoding(data: bytes) -> tuple[str | None, int]: ... + +class Config: + own_args: object | None + parsed_args: tuple[Values, list[str]] | None + filename: str | None + def __init__(self, parser: _YoutubeDLOptionParser, label: str | None = None) -> None: ... + def init(self, args: object | None = None, filename: str | None = None) -> bool: ... + def load_configs(self) -> bool: ... + @staticmethod + def read_file(filename: FileDescriptorOrPath, default: list[str] = []) -> list[str]: ... + @staticmethod + def hide_login_info(opts: Iterable[str]) -> list[str]: ... + def append_config(self, *args: object, label: str | None = None) -> None: ... + @property + def all_args(self) -> Iterator[str]: ... + def parse_known_args(self, **kwargs: object) -> tuple[Values, list[str]]: ... + def parse_args(self) -> tuple[Values, list[str]]: ... + +def merge_headers(*dicts: dict[str, object]) -> dict[str, object]: ... +def cached_method(f: Callable[..., object]) -> Callable[..., object]: ... + +class function_with_repr(Generic[_T]): + def __init__(self, func: Callable[..., _T], repr_: str | None = None) -> None: ... + def __call__(self, *args: object, **kwargs: object) -> _T: ... + @classmethod + def set_repr(cls, repr_: str) -> Callable[..., object]: ... + +class Namespace(types.SimpleNamespace): + def __iter__(self) -> Iterator[object]: ... + @property + def items_(self) -> dict[str, object]: ... + +MEDIA_EXTENSIONS: Namespace +KNOWN_EXTENSIONS: tuple[str, ...] + +class _UnsafeExtensionError(Exception): + ALLOWED_EXTENSIONS: frozenset[str] + extension: str + def __init__(self, extension: str, /) -> None: ... + @classmethod + def sanitize_extension(cls, extension: str, /, *, prepend: bool = False) -> str: ... + +class RetryManager: + attempt: int + retries: int + error_callback: Callable[[BaseException, int, int], object] + def __init__(self, _retries: int | None, _error_callback: Callable[..., object], **kwargs: object) -> None: ... + @property + def error(self) -> None: ... + @error.setter + def error(self, value: type[NO_DEFAULT] | BaseException) -> None: ... + def __iter__(self) -> Self: ... + @staticmethod + def report_retry( + e: BaseException, + count: int, + retries: int, + *, + sleep_func: Callable[..., float | None], + info: Callable[[str], object], + warn: Callable[[str], object], + error: Callable[[str], object] | None = None, + suffix: str | None = None, + ) -> None: ... + +def make_archive_id(ie: InfoExtractor, video_id: str) -> str: ... +def truncate_string(s: str, left: int, right: int = 0) -> str: ... +def orderedSet_from_options( + options: Sequence[str], + alias_dict: dict[str, Sequence[str]], + *, + use_regex: bool = False, + start: Iterable[object] | None = None, +) -> Iterator[object]: ... 
+ +class FormatSorter: + regex: str + default: tuple[str, ...] + ytdl_default: tuple[str, ...] + settings: dict[str, object] + ydl: YoutubeDL + def __init__(self, ydl: YoutubeDL, field_preference: _Params) -> None: ... + def evaluate_params(self, params: _Params, sort_extractor: Collection[str]) -> None: ... + def print_verbose_info(self, write_debug: Callable[..., None]) -> None: ... + def calculate_preference(self, format: dict[str, object]) -> tuple[int, ...]: ... + +@overload +def filesize_from_tbr(tbr: None, duration: None) -> None: ... +@overload +def filesize_from_tbr(tbr: int, duration: None) -> None: ... +@overload +def filesize_from_tbr(tbr: None, duration: int) -> None: ... +@overload +def filesize_from_tbr(tbr: int | None, duration: int | None) -> int | None: ... + +class _YDLLogger: + def __init__(self, ydl: YoutubeDL | None = None) -> None: ... + def debug(self, message: str) -> None: ... + def info(self, message: str) -> None: ... + def warning(self, message: str, *, once: bool = False) -> None: ... + def error(self, message: str, *, is_error: bool = True) -> None: ... + def stdout(self, message: str) -> None: ... + def stderr(self, message: str) -> None: ... + +class _ProgressState(enum.Enum): + HIDDEN = 0 + INDETERMINATE = 3 + VISIBLE = 1 + WARNING = 4 + ERROR = 2 + @classmethod + def from_dict(cls, s: dict[str, object], /) -> _ProgressState: ... + def get_ansi_escape(self, /, percent: int | None = None) -> str: ... + +if sys.platform == "win32": + _ENV: TypeAlias = Mapping[str, str] +else: + _ENV: TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] + +class Popen(subprocess.Popen[AnyStr]): + def __init__( + self, + args: StrOrBytesPath | Sequence[StrOrBytesPath], + *remaining: object, + env: _ENV | None = None, + text: bool = False, + shell: bool = False, + **kwargs: object, + ) -> None: ... + def communicate_or_kill(self, *args: object, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... + def kill(self, *, timeout: int = 0) -> None: ... + @classmethod + def run(cls, *args: object, timeout: int | None = None, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... + +class classproperty: + def __new__(cls, func: Callable[..., object] | None = None, *args: object, **kwargs: object) -> Self: ... + def __init__( # pyright: ignore[reportInconsistentConstructor] + self, func: Callable[..., object], *, cache: bool = False + ) -> None: ... + def __get__(self, _: Unused, cls: type[object]) -> object: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/networking.pyi b/stubs/yt-dlp/yt_dlp/utils/networking.pyi new file mode 100644 index 000000000000..4ad81ea1b99e --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/networking.pyi @@ -0,0 +1,42 @@ +from collections.abc import Mapping +from typing import Any, TypeVar, overload +from typing_extensions import Self + +from ._utils import NO_DEFAULT + +def random_user_agent() -> str: ... + +_T = TypeVar("_T") + +class HTTPHeaderDict(dict[str, str]): + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... + def __init__(self, /, *args: object, **kwargs: object) -> None: ... + def sensitive(self, /) -> dict[str, str]: ... + @overload # type: ignore[override] + def get(self, key: str, /) -> str | None: ... + @overload + def get(self, key: str, /, default: _T) -> str | _T: ... + @overload + def get(self, key: str, /, default: type[NO_DEFAULT] | _T = ...) -> str | _T | type[NO_DEFAULT]: ... + @overload + def pop(self, key: str, /) -> str: ... + @overload + def pop(self, key: str, /, default: _T) -> str | _T: ... 
+ @overload + def pop(self, key: str, /, default: type[NO_DEFAULT] | _T | str = ...) -> str | _T | type[NO_DEFAULT]: ... + @overload + def setdefault(self, key: str, /) -> str: ... + @overload + def setdefault(self, key: str, /, default: str) -> str: ... + @overload + def setdefault(self, key: str, /, default: str | None = None) -> str: ... + def update(self, other: Mapping[str, str], /, **kwargs: str) -> None: ... # type: ignore[override] + +std_headers: HTTPHeaderDict + +def clean_proxies(proxies: dict[str, object], headers: HTTPHeaderDict) -> None: ... +def clean_headers(headers: HTTPHeaderDict) -> None: ... +def remove_dot_segments(path: str) -> str: ... +def escape_rfc3986(s: str) -> str: ... +def normalize_url(url: str) -> str: ... +def select_proxy(url: str, proxies: Mapping[str, object]) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/progress.pyi b/stubs/yt-dlp/yt_dlp/utils/progress.pyi new file mode 100644 index 000000000000..bb213136def3 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/progress.pyi @@ -0,0 +1,22 @@ +class ProgressCalculator: + SAMPLING_WINDOW: int + SAMPLING_RATE: float + GRACE_PERIOD: int + downloaded: int + elapsed: float + speed: SmoothValue + eta: SmoothValue + def __init__(self, initial: int) -> None: ... + @property + def total(self) -> int | None: ... + @total.setter + def total(self, value: int | None) -> None: ... + def thread_reset(self) -> None: ... + def update(self, size: int | None) -> None: ... + +class SmoothValue: + value: float | None + def __init__(self, initial: float | None, smoothing: float) -> None: ... + smooth: float | None + def set(self, value: float) -> None: ... + def reset(self) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/traversal.pyi b/stubs/yt-dlp/yt_dlp/utils/traversal.pyi new file mode 100644 index 000000000000..592426fea562 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/traversal.pyi @@ -0,0 +1,77 @@ +from collections.abc import Callable, Collection, Iterable, Mapping +from typing import TypeVar, overload +from typing_extensions import TypeAlias + +from ._utils import NO_DEFAULT, ExtractorError + +_Traversable: TypeAlias = Mapping[str, object] | Iterable[object] +_PathArg: TypeAlias = str | int + +def traverse_obj( + obj: _Traversable, + *paths: _PathArg, + default: object | None | type[NO_DEFAULT] = ..., + expected_type: type[object] | None = None, + get_all: bool = True, + casesense: bool = True, + is_user_input: bool | type[NO_DEFAULT] = ..., + traverse_string: bool = False, +) -> object: ... + +_T = TypeVar("_T") + +def value(value: _T, /) -> _T: ... +def require(name: str, /, *, expected: bool = False) -> Callable[[_T], _T]: ... + +class _RequiredError(ExtractorError): ... + +@overload +def subs_list_to_dict( + *, lang: str | None = "und", ext: str | None = None +) -> Callable[[list[dict[str, object]]], dict[str, list[dict[str, object]]]]: ... +@overload +def subs_list_to_dict( + subs: list[dict[str, object]] | None, /, *, lang: str | None = "und", ext: str | None = None +) -> dict[str, list[dict[str, object]]]: ... +@overload +def find_element(*, attr: str, value: str, tag: str | None = None, html: bool = False, regex: bool = False) -> str: ... +@overload +def find_element(*, cls: str, html: bool = False) -> str: ... +@overload +def find_element(*, id: str, tag: str | None = None, html: bool = False, regex: bool = False) -> str: ... +@overload +def find_element(*, tag: str, html: bool = False, regex: bool = False) -> str: ... 
+@overload +def find_element( + *, + tag: str | None = None, + id: str | None = None, + cls: str | None = None, + attr: str | None = None, + value: str | None = None, + html: bool = False, + regex: bool = False, +) -> str: ... +@overload +def find_elements(*, cls: str, html: bool = False) -> list[str]: ... +@overload +def find_elements(*, attr: str, value: str, tag: str | None = None, html: bool = False, regex: bool = False) -> list[str]: ... +@overload +def find_elements( + *, + tag: str | None = None, + cls: str | None = None, + attr: str | None = None, + value: str | None = None, + html: bool = False, + regex: bool = False, +) -> list[str]: ... +def trim_str(*, start: str | None = None, end: str | None = None) -> Callable[[str], str]: ... +def unpack(func: Callable[..., object], **kwargs: object) -> Callable[[object], object]: ... +def get_first(obj: _Traversable, *paths: _PathArg, **kwargs: object) -> object: ... +@overload +def dict_get(d: str, key_or_keys: str | Collection[str]) -> object | None: ... +@overload +def dict_get( + d: str, key_or_keys: str | Collection[str], default: object | None = None, skip_false_values: bool = True +) -> object | None: ... diff --git a/stubs/yt-dlp/yt_dlp/version.pyi b/stubs/yt-dlp/yt_dlp/version.pyi new file mode 100644 index 000000000000..e4e6cf246ae1 --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/version.pyi @@ -0,0 +1,6 @@ +__version__: str +RELEASE_GIT_HEAD: str +VARIANT: str +UPDATE_HINT: str +CHANNEL: str +ORIGIN: str diff --git a/stubs/yt-dlp/yt_dlp/webvtt.pyi b/stubs/yt-dlp/yt_dlp/webvtt.pyi new file mode 100644 index 000000000000..2dad26273adf --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/webvtt.pyi @@ -0,0 +1,49 @@ +import re +from collections.abc import Generator, Mapping +from typing import TextIO, TypeVar + +_AT = TypeVar("_AT", int, str, re.Match[str], None) + +class _MatchParser: + def __init__(self, string: str) -> None: ... + def match(self, r: re.Pattern[str]) -> re.Match[str] | int | None: ... + def advance(self, by: _AT) -> _AT: ... + def consume(self, r: re.Pattern[str]) -> re.Match[str]: ... + def child(self) -> _MatchChildParser: ... + +class _MatchChildParser(_MatchParser): + def __init__(self, parent: _MatchParser) -> None: ... + def commit(self) -> _MatchParser: ... + +class ParseError(Exception): + def __init__(self, parser: _MatchParser) -> None: ... + +class Block: + def __init__(self, **kwargs: object) -> None: ... + @classmethod + def parse(cls, parser: _MatchParser) -> Block: ... + def write_into(self, stream: TextIO) -> None: ... + +class HeaderBlock(Block): ... + +class Magic(HeaderBlock): + @classmethod + def parse(cls, parser: _MatchParser) -> Magic: ... + def write_into(self, stream: TextIO) -> None: ... + +class StyleBlock(HeaderBlock): ... +class RegionBlock(HeaderBlock): ... +class CommentBlock(Block): ... + +class CueBlock(Block): + @classmethod + def parse(cls, parser: _MatchParser) -> CueBlock: ... + def write_into(self, stream: TextIO) -> None: ... + @property + def as_json(self) -> dict[str, object]: ... + def __eq__(self, other: object) -> bool: ... + @classmethod + def from_json(cls, json: Mapping[str, object]) -> CueBlock: ... + def hinges(self, other: Block) -> bool: ... + +def parse_fragment(frag_content: bytes) -> Generator[Block]: ... 
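
As an illustration of what these stubs are meant to type-check, here is a minimal, hypothetical caller sketch (not part of any patch); the URL and option values are placeholders, and only the public YoutubeDL API shown in the stubs is used.

    from yt_dlp import YoutubeDL

    # "quiet" and "outtmpl" are ordinary yt-dlp options; download=False means only
    # metadata is extracted, so nothing is written to disk.
    opts = {"quiet": True, "outtmpl": "%(title)s.%(ext)s"}
    with YoutubeDL(opts) as ydl:
        info = ydl.extract_info("https://example.com/watch?v=placeholder", download=False)
        ydl.list_formats(info)

Under the stubs, extract_info returns an _InfoDict and list_formats accepts it, so a checker can follow the metadata through a typical extract-then-inspect flow.
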
From 3fc8e6705e9ce844111954b1f3efc237c6562db2 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 19:57:43 -0400 Subject: [PATCH 02/13] Fix argument types; replace object with Any where appropriate --- stubs/yt-dlp/yt_dlp/YoutubeDL.pyi | 36 ++++++++++++----------- stubs/yt-dlp/yt_dlp/aes.pyi | 4 ++- stubs/yt-dlp/yt_dlp/cache.pyi | 8 ++--- stubs/yt-dlp/yt_dlp/cookies.pyi | 6 ++-- stubs/yt-dlp/yt_dlp/utils/networking.pyi | 7 +++-- stubs/yt-dlp/yt_dlp/utils/traversal.pyi | 37 +++++++++++++++--------- 6 files changed, 57 insertions(+), 41 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi b/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi index f22e938aeda3..9173e6cc8830 100644 --- a/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi +++ b/stubs/yt-dlp/yt_dlp/YoutubeDL.pyi @@ -1,6 +1,8 @@ +import types from collections.abc import Callable, Collection, Iterable, Iterator, Mapping from functools import cached_property from types import TracebackType +from typing import Any from typing_extensions import Self, TypeAlias from urllib.request import Request @@ -14,7 +16,7 @@ from .extractor.common import InfoExtractor, _InfoDict from .postprocessor.common import PostProcessor from .utils._utils import _ProgressState -_FormatSelector: TypeAlias = Callable[[Mapping[str, object]], Iterator[object]] +_FormatSelector: TypeAlias = Callable[[Mapping[str, Any]], Iterator[Any]] class YoutubeDL: params: _Params @@ -48,7 +50,7 @@ class YoutubeDL: def report_warning(self, message: str, only_once: bool = False) -> None: ... def deprecation_warning(self, message: str, *, stacklevel: int = 0) -> None: ... def deprecated_feature(self, message: str) -> None: ... - def report_error(self, message: str, *args: object, **kwargs: object) -> None: ... + def report_error(self, message: str, tb: types.TracebackType | None = None, is_error: bool = True) -> None: ... def write_debug(self, message: str, only_once: bool = False) -> None: ... def report_file_already_downloaded(self, file_name: str) -> None: ... def report_file_delete(self, file_name: str) -> None: ... @@ -58,34 +60,34 @@ class YoutubeDL: def escape_outtmpl(outtmpl: str) -> str: ... @classmethod def validate_outtmpl(cls, outtmpl: str) -> ValueError | None: ... - def prepare_outtmpl(self, outtmpl: str, info_dict: _InfoDict, sanitize: bool = False) -> tuple[str, dict[str, object]]: ... - def evaluate_outtmpl(self, outtmpl: str, info_dict: _InfoDict, *args: object, **kwargs: object) -> str: ... + def prepare_outtmpl(self, outtmpl: str, info_dict: _InfoDict, sanitize: bool = False) -> tuple[str, dict[str, Any]]: ... + def evaluate_outtmpl(self, outtmpl: str, info_dict: _InfoDict, sanitize: bool = False) -> str: ... def prepare_filename( self, info_dict: _InfoDict, dir_type: str = "", *, outtmpl: str | None = None, warn: bool = False ) -> str: ... @staticmethod - def add_extra_info(info_dict: _InfoDict, extra_info: Mapping[str, object]) -> None: ... + def add_extra_info(info_dict: _InfoDict, extra_info: Mapping[str, Any]) -> None: ... def extract_info( self, url: str, download: bool = True, ie_key: str | None = None, - extra_info: object | None = None, + extra_info: Any | None = None, process: bool = True, force_generic_extractor: bool = False, ) -> _InfoDict: ... def add_default_extra_info(self, ie_result: _InfoDict, ie: InfoExtractor, url: str) -> None: ... 
def process_ie_result( - self, ie_result: _InfoDict, download: bool = True, extra_info: Mapping[str, object] | None = None + self, ie_result: _InfoDict, download: bool = True, extra_info: Mapping[str, Any] | None = None ) -> _InfoDict: ... def build_format_selector(self, format_spec: str) -> _FormatSelector: ... def sort_formats(self, info_dict: _InfoDict) -> None: ... def process_video_result(self, info_dict: _InfoDict, download: bool = True) -> _InfoDict: ... def process_subtitles( - self, video_id: str, normal_subtitles: Mapping[str, object], automatic_captions: Mapping[str, object] - ) -> dict[str, object] | None: ... + self, video_id: str, normal_subtitles: Mapping[str, Any], automatic_captions: Mapping[str, Any] + ) -> dict[str, Any] | None: ... def dl(self, name: str, info: _InfoDict, subtitle: bool = False, test: bool = False) -> bool: ... - def existing_file(self, filepaths: Iterable[str], *, default_overwrite: bool = True) -> object | None: ... + def existing_file(self, filepaths: Iterable[str], *, default_overwrite: bool = True) -> str | None: ... def process_info(self, info_dict: _InfoDict) -> None: ... def download(self, url_list: Collection[str]) -> None: ... def download_with_info_file(self, info_filename: str) -> int: ... @@ -98,27 +100,27 @@ class YoutubeDL: def run_pp(self, pp: PostProcessor, infodict: _InfoDict) -> _InfoDict: ... def run_all_pps(self, key: str, info: _InfoDict, *, additional_pps: Collection[PostProcessor] | None = None) -> _InfoDict: ... def pre_process( - self, ie_info: _InfoDict, key: str = "pre_process", files_to_move: Mapping[str, object] | None = None + self, ie_info: _InfoDict, key: str = "pre_process", files_to_move: Mapping[str, Any] | None = None ) -> tuple[_InfoDict, list[str] | None]: ... - def post_process(self, filename: str, info: _InfoDict, files_to_move: Mapping[str, object] | None = None) -> _InfoDict: ... + def post_process(self, filename: str, info: _InfoDict, files_to_move: Mapping[str, Any] | None = None) -> _InfoDict: ... def in_download_archive(self, info_dict: _InfoDict) -> bool: ... def record_download_archive(self, info_dict: _InfoDict) -> None: ... @staticmethod - def format_resolution(format: Mapping[str, object], default: str = "unknown") -> str: ... + def format_resolution(format: Mapping[str, Any], default: str = "unknown") -> str: ... def render_formats_table(self, info_dict: _InfoDict) -> str | None: ... def render_thumbnails_table(self, info_dict: _InfoDict) -> str | None: ... - def render_subtitles_table(self, video_id: str, subtitles: Iterable[Mapping[str, object]]) -> str | None: ... + def render_subtitles_table(self, video_id: str, subtitles: Iterable[Mapping[str, Any]]) -> str | None: ... def list_formats(self, info_dict: _InfoDict) -> None: ... def list_thumbnails(self, info_dict: _InfoDict) -> None: ... - def list_subtitles(self, video_id: str, subtitles: Iterable[Mapping[str, object]], name: str = "subtitles") -> None: ... + def list_subtitles(self, video_id: str, subtitles: Iterable[Mapping[str, Any]], name: str = "subtitles") -> None: ... def print_debug_header(self) -> None: ... @cached_property - def proxies(self) -> dict[str, object]: ... + def proxies(self) -> dict[str, Any]: ... @cached_property def cookiejar(self) -> YoutubeDLCookieJar: ... def urlopen(self, req: Request | str) -> Response: ... 
def build_request_director( - self, handlers: Collection[RequestHandler], preferences: Collection[object] | None = None + self, handlers: Collection[RequestHandler], preferences: Collection[Any] | None = None ) -> RequestDirector: ... def encode(self, s: str) -> bytes: ... def get_encoding(self) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/aes.pyi b/stubs/yt-dlp/yt_dlp/aes.pyi index 64a844f383ce..2590ce22aca7 100644 --- a/stubs/yt-dlp/yt_dlp/aes.pyi +++ b/stubs/yt-dlp/yt_dlp/aes.pyi @@ -23,7 +23,9 @@ __all__ = [ def aes_cbc_decrypt_bytes(data: bytes, key: bytes, iv: bytes) -> bytes: ... def aes_gcm_decrypt_and_verify_bytes(data: bytes, key: bytes, tag: bytes, nonce: bytes) -> bytes: ... -def aes_cbc_encrypt_bytes(data: bytes, key: bytes, iv: bytes, **kwargs: object) -> bytes: ... +def aes_cbc_encrypt_bytes( + data: bytes, key: bytes, iv: bytes, padding_mode: Literal["pkcs7", "iso7816", "whitespace", "zero"] +) -> bytes: ... def unpad_pkcs7(data: list[int]) -> list[int]: ... def pkcs7_padding(data: list[int]) -> list[int]: ... def pad_block(block: list[int], padding_mode: Literal["pkcs7", "iso7816", "whitespace", "zero"]) -> list[int]: ... diff --git a/stubs/yt-dlp/yt_dlp/cache.pyi b/stubs/yt-dlp/yt_dlp/cache.pyi index ebea38468205..7b27c3469634 100644 --- a/stubs/yt-dlp/yt_dlp/cache.pyi +++ b/stubs/yt-dlp/yt_dlp/cache.pyi @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal from .YoutubeDL import YoutubeDL @@ -6,14 +6,14 @@ class Cache: def __init__(self, ydl: YoutubeDL) -> None: ... @property def enabled(self) -> bool: ... - def store(self, section: str, key: str, data: object, dtype: Literal["json"] = "json") -> None: ... + def store(self, section: str, key: str, data: Any, dtype: Literal["json"] = "json") -> None: ... def load( self, section: str, key: str, dtype: Literal["json"] = "json", - default: object | None = None, + default: Any = None, # returned if not enabled or if the cache entry is not found *, min_ver: str | None = None, - ) -> object: ... + ) -> Any: ... # Anything JSON serializable def remove(self) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/cookies.pyi b/stubs/yt-dlp/yt_dlp/cookies.pyi index 769f1301343f..b6bea1d90e84 100644 --- a/stubs/yt-dlp/yt_dlp/cookies.pyi +++ b/stubs/yt-dlp/yt_dlp/cookies.pyi @@ -1,6 +1,6 @@ from collections.abc import Collection, Iterator, KeysView from enum import Enum -from http.cookiejar import Cookie, MozillaCookieJar +from http.cookiejar import Cookie, CookiePolicy, MozillaCookieJar from http.cookies import SimpleCookie from typing import TextIO, TypeVar @@ -32,7 +32,7 @@ class YDLLogger(_LoggerProtocol): class CookieLoadError(YoutubeDLError): ... class YoutubeDLCookieJar(MozillaCookieJar): - def __init__(self, filename: str | None = ..., *args: object, **kwargs: object) -> None: ... + def __init__(self, filename: str | None = ..., delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... def open(self, file: str, *, write: bool = ...) -> Iterator[TextIO]: ... def get_cookie_header(self, url: str) -> str: ... def get_cookies_for_url(self, url: str) -> list[Cookie]: ... @@ -77,7 +77,7 @@ class WindowsChromeCookieDecryptor(ChromeCookieDecryptor): def __init__(self, browser_root: str, logger: YDLLogger, meta_version: int | None = None) -> None: ... 
def get_cookie_decryptor( - browser_root: object, + browser_root: str, browser_keyring_name: str, logger: _LoggerProtocol, *, diff --git a/stubs/yt-dlp/yt_dlp/utils/networking.pyi b/stubs/yt-dlp/yt_dlp/utils/networking.pyi index 4ad81ea1b99e..9eb0c1081dde 100644 --- a/stubs/yt-dlp/yt_dlp/utils/networking.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/networking.pyi @@ -10,7 +10,8 @@ _T = TypeVar("_T") class HTTPHeaderDict(dict[str, str]): def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... - def __init__(self, /, *args: object, **kwargs: object) -> None: ... + # *args is passed to filter: filter(None, args) + def __init__(self, /, *args: Any, **kwargs: str) -> None: ... def sensitive(self, /) -> dict[str, str]: ... @overload # type: ignore[override] def get(self, key: str, /) -> str | None: ... @@ -34,9 +35,9 @@ class HTTPHeaderDict(dict[str, str]): std_headers: HTTPHeaderDict -def clean_proxies(proxies: dict[str, object], headers: HTTPHeaderDict) -> None: ... +def clean_proxies(proxies: dict[str, Any], headers: HTTPHeaderDict) -> None: ... def clean_headers(headers: HTTPHeaderDict) -> None: ... def remove_dot_segments(path: str) -> str: ... def escape_rfc3986(s: str) -> str: ... def normalize_url(url: str) -> str: ... -def select_proxy(url: str, proxies: Mapping[str, object]) -> str: ... +def select_proxy(url: str, proxies: Mapping[str, Any]) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/traversal.pyi b/stubs/yt-dlp/yt_dlp/utils/traversal.pyi index 592426fea562..a59073a2b380 100644 --- a/stubs/yt-dlp/yt_dlp/utils/traversal.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/traversal.pyi @@ -1,22 +1,22 @@ from collections.abc import Callable, Collection, Iterable, Mapping -from typing import TypeVar, overload +from typing import Any, TypeVar, overload from typing_extensions import TypeAlias from ._utils import NO_DEFAULT, ExtractorError -_Traversable: TypeAlias = Mapping[str, object] | Iterable[object] +_Traversable: TypeAlias = Mapping[str, Any] | Iterable[Any] _PathArg: TypeAlias = str | int def traverse_obj( obj: _Traversable, *paths: _PathArg, - default: object | None | type[NO_DEFAULT] = ..., - expected_type: type[object] | None = None, + default: Any = ..., # Anything or type[NO_DEFAULT] + expected_type: type[Any] | None = None, get_all: bool = True, casesense: bool = True, is_user_input: bool | type[NO_DEFAULT] = ..., traverse_string: bool = False, -) -> object: ... +) -> Any: ... # Unknown return type _T = TypeVar("_T") @@ -28,11 +28,11 @@ class _RequiredError(ExtractorError): ... @overload def subs_list_to_dict( *, lang: str | None = "und", ext: str | None = None -) -> Callable[[list[dict[str, object]]], dict[str, list[dict[str, object]]]]: ... +) -> Callable[[list[dict[str, Any]]], dict[str, list[dict[str, Any]]]]: ... @overload def subs_list_to_dict( - subs: list[dict[str, object]] | None, /, *, lang: str | None = "und", ext: str | None = None -) -> dict[str, list[dict[str, object]]]: ... + subs: list[dict[str, Any]] | None, /, *, lang: str | None = "und", ext: str | None = None +) -> dict[str, list[dict[str, Any]]]: ... @overload def find_element(*, attr: str, value: str, tag: str | None = None, html: bool = False, regex: bool = False) -> str: ... @overload @@ -67,11 +67,22 @@ def find_elements( regex: bool = False, ) -> list[str]: ... def trim_str(*, start: str | None = None, end: str | None = None) -> Callable[[str], str]: ... -def unpack(func: Callable[..., object], **kwargs: object) -> Callable[[object], object]: ... 
-def get_first(obj: _Traversable, *paths: _PathArg, **kwargs: object) -> object: ... + +# Returns a callable f(items) which calls func(*items, **kwargs). +def unpack(func: Callable[..., Any], **kwargs: Any) -> Callable[..., Any]: ... +def get_first( + obj: _Traversable, + *paths: _PathArg, + default: Any = ..., # Anything or type[NO_DEFAULT] + expected_type: type[Any] | None = None, + get_all: bool = True, + casesense: bool = True, + is_user_input: bool | type[NO_DEFAULT] = ..., + traverse_string: bool = False, +) -> Any: ... @overload -def dict_get(d: str, key_or_keys: str | Collection[str]) -> object | None: ... +def dict_get(d: str, key_or_keys: str | Collection[str]) -> Any | None: ... @overload def dict_get( - d: str, key_or_keys: str | Collection[str], default: object | None = None, skip_false_values: bool = True -) -> object | None: ... + d: str, key_or_keys: str | Collection[str], default: Any | None = None, skip_false_values: bool = True +) -> Any | None: ... From b28f231ed6b4b84f95fb5e1f1b51dc9424fef383 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 19:58:09 -0400 Subject: [PATCH 03/13] Redo typed dicts with total=False --- stubs/yt-dlp/yt_dlp/__init__.pyi | 399 +++++++++++++++---------------- 1 file changed, 198 insertions(+), 201 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/__init__.pyi b/stubs/yt-dlp/yt_dlp/__init__.pyi index 46879ff0713c..b89f59a4e0ed 100644 --- a/stubs/yt-dlp/yt_dlp/__init__.pyi +++ b/stubs/yt-dlp/yt_dlp/__init__.pyi @@ -1,6 +1,6 @@ import optparse from collections.abc import Callable, Collection, Iterator, Mapping -from typing import Literal, NamedTuple, Protocol, TypedDict, type_check_only +from typing import Any, Literal, NamedTuple, Protocol, TypedDict, type_check_only from typing_extensions import NotRequired from .extractor import gen_extractors, list_extractors @@ -20,26 +20,26 @@ class _LoggerProtocol(Protocol): # noqa: Y046 def stderr(self, message: str) -> None: ... 
@type_check_only -class _RetrySleepFunctions(TypedDict): - default: NotRequired[Callable[[int], int]] - file_access: NotRequired[Callable[[int], int]] - fragment: NotRequired[Callable[[int], int]] +class _RetrySleepFunctions(TypedDict, total=False): + default: Callable[[int], int] + file_access: Callable[[int], int] + fragment: Callable[[int], int] @type_check_only -class _ProgressTemplateValue(TypedDict): # noqa: Y049 - info: NotRequired[str] - progress: NotRequired[str] +class _ProgressTemplateValue(TypedDict, total=False): # noqa: Y049 + info: str + progress: str @type_check_only -class _ExternalDownloader(TypedDict): - dash: NotRequired[str] - default: NotRequired[str] - ftp: NotRequired[str] - http: NotRequired[str] - m3u8: NotRequired[str] - mms: NotRequired[str] - rtmp: NotRequired[str] - rtsp: NotRequired[str] +class _ExternalDownloader(TypedDict, total=False): + dash: str + default: str + ftp: str + http: str + m3u8: str + mms: str + rtmp: str + rtsp: str @type_check_only class _DownloadRange(TypedDict): @@ -49,9 +49,9 @@ class _DownloadRange(TypedDict): title: NotRequired[str] @type_check_only -class _Color(TypedDict): - stderr: NotRequired[Literal["always", "auto", "no_color", "never"]] - stdout: NotRequired[Literal["always", "auto", "no_color", "never"]] +class _Color(TypedDict, total=False): + stderr: Literal["always", "auto", "no_color", "never"] + stdout: Literal["always", "auto", "no_color", "never"] _ProgressTemplate = TypedDict( "_ProgressTemplate", @@ -64,188 +64,185 @@ _ProgressTemplate = TypedDict( ) @type_check_only -class _Params(TypedDict): - usenetrc: NotRequired[bool | None] - netrc_location: NotRequired[str | None] - netrc_cmd: NotRequired[str | None] - username: NotRequired[str | None] - password: NotRequired[str | None] - twofactor: NotRequired[str | None] - videopassword: NotRequired[str | None] - ap_mso: NotRequired[str | None] - ap_username: NotRequired[str | None] - ap_password: NotRequired[str | None] - client_certificate: NotRequired[str | None] - client_certificate_key: NotRequired[str | None] - client_certificate_password: NotRequired[str | None] - quiet: NotRequired[bool | None] - no_warnings: NotRequired[bool | None] - forceurl: NotRequired[bool | None] - forcetitle: NotRequired[str | None] - forceid: NotRequired[bool | None] - forcethumbnail: NotRequired[bool | None] - forcedescription: NotRequired[bool | None] - forceduration: NotRequired[str | None] - forcefilename: NotRequired[bool | None] - forceprint: NotRequired[Mapping[str, Collection[str]] | Collection[str] | None] - print_to_file: NotRequired[Mapping[str, tuple[str, str]] | None] - forcejson: NotRequired[bool | None] - dump_single_json: NotRequired[bool | None] - force_write_download_archive: NotRequired[str | None] - simulate: NotRequired[str | None] - skip_download: NotRequired[str | None] - format: NotRequired[str | Callable[[Mapping[str, object]], Mapping[str, object]] | None] - allow_unplayable_formats: NotRequired[bool | None] - ignore_no_formats_error: NotRequired[bool | None] - format_sort: NotRequired[Collection[str] | None] - format_sort_force: NotRequired[str | None] - allow_multiple_video_streams: NotRequired[bool | None] - allow_multiple_audio_streams: NotRequired[bool | None] - check_formats: NotRequired[bool | Literal["selected"] | None] - listformats: NotRequired[bool | None] - outtmpl: NotRequired[str | Mapping[str, str] | None] - outtmpl_na_placeholder: NotRequired[str | None] - paths: NotRequired[str | None] - restrictfilenames: NotRequired[bool | None] - windowsfilenames: 
NotRequired[bool | None] - ignoreerrors: NotRequired[bool | Literal["only_download"] | None] - force_generic_extractor: NotRequired[bool | None] - allowed_extractors: NotRequired[Collection[str] | None] - ratelimit: NotRequired[int | None] - throttledratelimit: NotRequired[int | None] - overwrites: NotRequired[bool | None] - retries: NotRequired[int | None] - file_access_retries: NotRequired[int | None] - fragment_retries: NotRequired[int | None] - extractor_retries: NotRequired[int | None] - retry_sleep_functions: NotRequired[_RetrySleepFunctions | None] - skip_unavailable_fragments: NotRequired[bool | None] - keep_fragments: NotRequired[bool | None] - concurrent_fragment_downloads: NotRequired[int | None] - buffersize: NotRequired[int | None] - noresizebuffer: NotRequired[bool | None] - http_chunk_size: NotRequired[int | None] - continuedl: NotRequired[bool | None] - noprogress: NotRequired[bool | None] - progress_with_newline: NotRequired[bool | None] - progress_template: NotRequired[_ProgressTemplate | None] - playliststart: NotRequired[int | None] - playlistend: NotRequired[int | None] - playlistreverse: NotRequired[bool | None] - playlistrandom: NotRequired[bool | None] - lazy_playlist: NotRequired[bool | None] - noplaylist: NotRequired[bool | None] - logtostderr: NotRequired[bool | None] - consoletitle: NotRequired[str | None] - nopart: NotRequired[bool | None] - updatetime: NotRequired[bool | None] - writedescription: NotRequired[bool | None] - writeannotations: NotRequired[bool | None] - writeinfojson: NotRequired[bool | None] - allow_playlist_files: NotRequired[bool | None] - clean_infojson: NotRequired[bool | None] - getcomments: NotRequired[bool | None] - writethumbnail: NotRequired[bool | None] - write_all_thumbnails: NotRequired[bool | None] - writelink: NotRequired[bool | None] - writeurllink: NotRequired[bool | None] - writewebloclink: NotRequired[bool | None] - writedesktoplink: NotRequired[bool | None] - writesubtitles: NotRequired[bool | None] - writeautomaticsub: NotRequired[bool | None] - allsubtitles: NotRequired[bool | None] - listsubtitles: NotRequired[bool | None] - subtitlesformat: NotRequired[str | None] - subtitleslangs: NotRequired[Collection[str] | None] - matchtitle: NotRequired[bool | None] - rejecttitle: NotRequired[bool | None] - prefer_free_formats: NotRequired[bool | None] - trim_file_name: NotRequired[int | None] - verbose: NotRequired[bool | None] - test: NotRequired[bool | None] - keepvideo: NotRequired[str | None] - min_filesize: NotRequired[int | None] - max_filesize: NotRequired[int | None] - min_views: NotRequired[str | None] - max_views: NotRequired[str | None] - daterange: NotRequired[str | None] - cachedir: NotRequired[str | None] - age_limit: NotRequired[str | None] - download_archive: NotRequired[str | None] - break_on_existing: NotRequired[str | None] - break_on_reject: NotRequired[bool | None] - break_per_url: NotRequired[bool | None] - skip_playlist_after_errors: NotRequired[bool | None] - cookiefile: NotRequired[str | None] - cookiesfrombrowser: NotRequired[tuple[str, ...] 
| None] - legacyserverconnect: NotRequired[bool | None] - nocheckcertificate: NotRequired[bool | None] - prefer_insecure: NotRequired[str | None] - enable_file_urls: NotRequired[str | None] - http_headers: NotRequired[Mapping[str, str] | None] - proxy: NotRequired[str | None] - socket_timeout: NotRequired[int | None] - bidi_workaround: NotRequired[bool | None] - debug_printtraffic: NotRequired[bool | None] - prefer_ffmpeg: NotRequired[bool | None] - include_ads: NotRequired[bool | None] - default_search: NotRequired[str | None] - dynamic_mpd: NotRequired[bool | None] - extractor_args: NotRequired[Mapping[str, Mapping[str, object]] | None] - youtube_include_dash_manifest: NotRequired[bool | None] - youtube_include_hls_manifest: NotRequired[bool | None] - encoding: NotRequired[str | None] - extract_flat: NotRequired[bool | Literal["in_playlist", "discard", "discard_in_playlist"] | None] - live_from_start: NotRequired[bool | None] - wait_for_video: NotRequired[tuple[int, int] | None] - mark_watched: NotRequired[bool | None] - merge_output_format: NotRequired[str | None] - final_ext: NotRequired[str | None] - postprocessors: NotRequired[Collection[Mapping[str, object]]] - fixup: NotRequired[Literal["never", "warn", "detect_or_warn"] | None] - source_address: NotRequired[str | None] - call_home: NotRequired[bool | None] - sleep_interval_requests: NotRequired[int | None] - sleep_interval: NotRequired[int | None] - max_sleep_interval: NotRequired[int | None] - sleep_interval_subtitles: NotRequired[int | None] - external_downloader: NotRequired[_ExternalDownloader | None] - download_ranges: NotRequired[Callable[[object, YoutubeDL], Iterator[_DownloadRange]] | None] - force_keyframes_at_cuts: NotRequired[bool | None] - list_thumbnails: NotRequired[str | None] - playlist_items: NotRequired[Collection[int] | None] - xattr_set_filesize: NotRequired[bool | None] - match_filter: NotRequired[ - Callable[[Mapping[str, object], bool], str | None] | Callable[[Mapping[str, object]], str | None] | None - ] - color: NotRequired[_Color | None] - ffmpeg_location: NotRequired[str | None] - hls_prefer_native: NotRequired[bool | None] - hls_use_mpegts: NotRequired[bool | None] - hls_split_discontinuity: NotRequired[bool | None] - max_downloads: NotRequired[int | None] - dump_intermediate_pages: NotRequired[bool | None] - listformats_table: NotRequired[bool | None] - write_pages: NotRequired[bool | None] - external_downloader_args: NotRequired[Literal["default"] | Mapping[str, Collection[str]] | Collection[str] | None] - postprocessor_args: NotRequired[Mapping[str, Collection[str]] | Collection[str] | None] - geo_verification_proxy: NotRequired[str | None] - geo_bypass: NotRequired[bool | None] - geo_bypass_country: NotRequired[str | None] - geo_bypass_ip_block: NotRequired[str | None] - compat_opts: NotRequired[dict[str, object] | None] +class _Params(TypedDict, total=False): + usenetrc: bool | None + netrc_location: str | None + netrc_cmd: str | None + username: str | None + password: str | None + twofactor: str | None + videopassword: str | None + ap_mso: str | None + ap_username: str | None + ap_password: str | None + client_certificate: str | None + client_certificate_key: str | None + client_certificate_password: str | None + quiet: bool | None + no_warnings: bool | None + forceurl: bool | None + forcetitle: str | None + forceid: bool | None + forcethumbnail: bool | None + forcedescription: bool | None + forceduration: str | None + forcefilename: bool | None + forceprint: Mapping[str, Collection[str]] | 
Collection[str] | None + print_to_file: Mapping[str, tuple[str, str]] | None + forcejson: bool | None + dump_single_json: bool | None + force_write_download_archive: str | None + simulate: str | None + skip_download: str | None + format: str | Callable[[Mapping[str, Any]], Mapping[str, Any]] | None + allow_unplayable_formats: bool | None + ignore_no_formats_error: bool | None + format_sort: Collection[str] | None + format_sort_force: str | None + allow_multiple_video_streams: bool | None + allow_multiple_audio_streams: bool | None + check_formats: bool | Literal["selected"] | None + listformats: bool | None + outtmpl: str | Mapping[str, str] | None + outtmpl_na_placeholder: str | None + paths: str | None + restrictfilenames: bool | None + windowsfilenames: bool | None + ignoreerrors: bool | Literal["only_download"] | None + force_generic_extractor: bool | None + allowed_extractors: Collection[str] | None + ratelimit: int | None + throttledratelimit: int | None + overwrites: bool | None + retries: int | None + file_access_retries: int | None + fragment_retries: int | None + extractor_retries: int | None + retry_sleep_functions: _RetrySleepFunctions | None + skip_unavailable_fragments: bool | None + keep_fragments: bool | None + concurrent_fragment_downloads: int | None + buffersize: int | None + noresizebuffer: bool | None + http_chunk_size: int | None + continuedl: bool | None + noprogress: bool | None + progress_with_newline: bool | None + progress_template: _ProgressTemplate | None + playliststart: int | None + playlistend: int | None + playlistreverse: bool | None + playlistrandom: bool | None + lazy_playlist: bool | None + noplaylist: bool | None + logtostderr: bool | None + consoletitle: str | None + nopart: bool | None + updatetime: bool | None + writedescription: bool | None + writeannotations: bool | None + writeinfojson: bool | None + allow_playlist_files: bool | None + clean_infojson: bool | None + getcomments: bool | None + writethumbnail: bool | None + write_all_thumbnails: bool | None + writelink: bool | None + writeurllink: bool | None + writewebloclink: bool | None + writedesktoplink: bool | None + writesubtitles: bool | None + writeautomaticsub: bool | None + allsubtitles: bool | None + listsubtitles: bool | None + subtitlesformat: str | None + subtitleslangs: Collection[str] | None + matchtitle: bool | None + rejecttitle: bool | None + prefer_free_formats: bool | None + trim_file_name: int | None + verbose: bool | None + test: bool | None + keepvideo: str | None + min_filesize: int | None + max_filesize: int | None + min_views: str | None + max_views: str | None + daterange: str | None + cachedir: str | None + age_limit: str | None + download_archive: str | None + break_on_existing: str | None + break_on_reject: bool | None + break_per_url: bool | None + skip_playlist_after_errors: bool | None + cookiefile: str | None + cookiesfrombrowser: tuple[str, ...] 
| None + legacyserverconnect: bool | None + nocheckcertificate: bool | None + prefer_insecure: str | None + enable_file_urls: str | None + http_headers: Mapping[str, str] | None + proxy: str | None + socket_timeout: int | None + bidi_workaround: bool | None + debug_printtraffic: bool | None + prefer_ffmpeg: bool | None + include_ads: bool | None + default_search: str | None + dynamic_mpd: bool | None + extractor_args: Mapping[str, Mapping[str, Any]] | None + youtube_include_dash_manifest: bool | None + youtube_include_hls_manifest: bool | None + encoding: str | None + extract_flat: bool | Literal["in_playlist", "discard", "discard_in_playlist"] | None + live_from_start: bool | None + wait_for_video: tuple[int, int] | None + mark_watched: bool | None + merge_output_format: str | None + final_ext: str | None + postprocessors: Collection[Mapping[str, Any]] + fixup: Literal["never", "warn", "detect_or_warn"] | None + source_address: str | None + call_home: bool | None + sleep_interval_requests: int | None + sleep_interval: int | None + max_sleep_interval: int | None + sleep_interval_subtitles: int | None + external_downloader: _ExternalDownloader | None + download_ranges: Callable[[Any, YoutubeDL], Iterator[_DownloadRange]] | None + force_keyframes_at_cuts: bool | None + list_thumbnails: str | None + playlist_items: Collection[int] | None + xattr_set_filesize: bool | None + match_filter: NotRequired[Callable[[Mapping[str, Any], bool], str | None] | Callable[[Mapping[str, Any]], str | None] | None] + color: _Color | None + ffmpeg_location: str | None + hls_prefer_native: bool | None + hls_use_mpegts: bool | None + hls_split_discontinuity: bool | None + max_downloads: int | None + dump_intermediate_pages: bool | None + listformats_table: bool | None + write_pages: bool | None + external_downloader_args: Literal["default"] | Mapping[str, Collection[str]] | Collection[str] | None + postprocessor_args: Mapping[str, Collection[str]] | Collection[str] | None + geo_verification_proxy: str | None + geo_bypass: bool | None + geo_bypass_country: str | None + geo_bypass_ip_block: str | None + compat_opts: dict[str, Any] | None + logger: _LoggerProtocol # Undocumented fields below. 
- _deprecation_warnings: NotRequired[Collection[str] | None] - _warnings: NotRequired[Collection[str] | None] - autonumber_size: NotRequired[int | None] - autonumber_start: NotRequired[int | None] - cn_verification_proxy: NotRequired[str | None] - forceformat: NotRequired[object] - load_pages: NotRequired[bool | None] - logger: NotRequired[_LoggerProtocol] - youtube_print_sig_code: NotRequired[bool | None] - progress_hooks: NotRequired[list[Callable[[object], object]]] - impersonate: NotRequired[ImpersonateTarget] + _deprecation_warnings: Collection[str] | None + _warnings: Collection[str] | None + autonumber_size: int | None + autonumber_start: int | None + cn_verification_proxy: str | None + load_pages: bool | None + youtube_print_sig_code: bool | None + progress_hooks: list[Callable[[Mapping[str, Any]], object]] + impersonate: ImpersonateTarget @type_check_only class _ParsedOptions(NamedTuple): From ffd575b48f11cd6f91c26cbc85f9c0f661183567 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 20:14:03 -0400 Subject: [PATCH 04/13] Fix aes.aes_cbc_encrypt_bytes --- stubs/yt-dlp/yt_dlp/aes.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/yt-dlp/yt_dlp/aes.pyi b/stubs/yt-dlp/yt_dlp/aes.pyi index 2590ce22aca7..299a0604fd27 100644 --- a/stubs/yt-dlp/yt_dlp/aes.pyi +++ b/stubs/yt-dlp/yt_dlp/aes.pyi @@ -24,7 +24,7 @@ __all__ = [ def aes_cbc_decrypt_bytes(data: bytes, key: bytes, iv: bytes) -> bytes: ... def aes_gcm_decrypt_and_verify_bytes(data: bytes, key: bytes, tag: bytes, nonce: bytes) -> bytes: ... def aes_cbc_encrypt_bytes( - data: bytes, key: bytes, iv: bytes, padding_mode: Literal["pkcs7", "iso7816", "whitespace", "zero"] + data: bytes, key: bytes, iv: bytes, *, padding_mode: Literal["pkcs7", "iso7816", "whitespace", "zero"] ) -> bytes: ... def unpad_pkcs7(data: list[int]) -> list[int]: ... def pkcs7_padding(data: list[int]) -> list[int]: ... From e6c440aedf2645f57faf9f68798e846c1a32e513 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 21:36:39 -0400 Subject: [PATCH 05/13] Fixing more Any types --- stubs/yt-dlp/yt_dlp/downloader/f4m.pyi | 15 +- stubs/yt-dlp/yt_dlp/downloader/ism.pyi | 3 +- .../yt_dlp/downloader/youtube_live_chat.pyi | 4 +- stubs/yt-dlp/yt_dlp/options.pyi | 5 +- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 224 ++++++++++-------- stubs/yt-dlp/yt_dlp/webvtt.pyi | 8 +- 6 files changed, 141 insertions(+), 118 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi b/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi index 5a3c4cee1ab5..edc815a470d1 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/f4m.pyi @@ -1,6 +1,7 @@ import io from _typeshed import SupportsWrite from collections.abc import Iterable, Mapping +from typing import Any from xml.etree.ElementTree import Element from .fragment import FragmentFD @@ -14,18 +15,18 @@ class FlvReader(io.BytesIO): def read_unsigned_char(self) -> str: ... def read_string(self) -> bytes: ... def read_box_info(self) -> tuple[int, bytes, bytes]: ... - def read_asrt(self) -> dict[str, object]: ... - def read_afrt(self) -> dict[str, object]: ... - def read_abst(self) -> dict[str, object]: ... - def read_bootstrap_info(self) -> dict[str, object]: ... + def read_asrt(self) -> dict[str, Any]: ... + def read_afrt(self) -> dict[str, Any]: ... + def read_abst(self) -> dict[str, Any]: ... + def read_bootstrap_info(self) -> dict[str, Any]: ... -def read_bootstrap_info(bootstrap_bytes: bytes) -> dict[str, object]: ... 
-def build_fragments_list(boot_info: Mapping[str, object]) -> list[tuple[object, int]]: ... +def read_bootstrap_info(bootstrap_bytes: bytes) -> dict[str, Any]: ... +def build_fragments_list(boot_info: Mapping[str, Any]) -> list[tuple[Any, int]]: ... def write_unsigned_int(stream: SupportsWrite[bytes], val: int) -> None: ... def write_unsigned_int_24(stream: SupportsWrite[bytes], val: int) -> None: ... def write_flv_header(stream: SupportsWrite[bytes]) -> None: ... def write_metadata_tag(stream: SupportsWrite[bytes], metadata: bytes) -> None: ... -def remove_encrypted_media(media: Iterable[Element]) -> list[object]: ... +def remove_encrypted_media(media: Iterable[Element]) -> list[Any]: ... def get_base_url(manifest: str) -> str | None: ... class F4mFD(FragmentFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/ism.pyi b/stubs/yt-dlp/yt_dlp/downloader/ism.pyi index c69117729a10..f439b30d4efd 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/ism.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/ism.pyi @@ -1,6 +1,7 @@ import struct from _typeshed import SupportsWrite from collections.abc import Collection, Mapping +from typing import Any from .fragment import FragmentFD @@ -22,7 +23,7 @@ SELF_CONTAINED: int def box(box_type: bytes, payload: bytes) -> bytes: ... def full_box(box_type: bytes, version: int, flags: int, payload: bytes) -> bytes: ... -def write_piff_header(stream: SupportsWrite[bytes], params: Mapping[str, object]) -> None: ... +def write_piff_header(stream: SupportsWrite[bytes], params: Mapping[str, Any]) -> None: ... def extract_box_data(data: bytes, box_sequence: Collection[bytes]) -> bytes | None: ... class IsmFD(FragmentFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi b/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi index 475e01eb4c0a..d579c67224bf 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/youtube_live_chat.pyi @@ -1,7 +1,9 @@ +from typing import Any + from ..extractor.common import _InfoDict from .fragment import FragmentFD class YoutubeLiveChatFD(FragmentFD): def real_download(self, filename: str, info_dict: _InfoDict) -> bool: ... @staticmethod - def parse_live_timestamp(action: dict[str, object]) -> int | None: ... + def parse_live_timestamp(action: dict[str, Any]) -> int | None: ... diff --git a/stubs/yt-dlp/yt_dlp/options.pyi b/stubs/yt-dlp/yt_dlp/options.pyi index 059795c4caa7..cce470c8cbdb 100644 --- a/stubs/yt-dlp/yt_dlp/options.pyi +++ b/stubs/yt-dlp/yt_dlp/options.pyi @@ -1,7 +1,8 @@ import optparse +from collections.abc import Sequence def parseOpts( - overrideArguments: object | None = None, ignore_config_files: str = "if_override" + overrideArguments: Sequence[str] | None = None, ignore_config_files: str = "if_override" ) -> tuple[_YoutubeDLOptionParser, optparse.Values, list[str]]: ... class _YoutubeDLOptionParser(optparse.OptionParser): @@ -9,7 +10,7 @@ class _YoutubeDLOptionParser(optparse.OptionParser): ALIAS_TRIGGER_LIMIT: int def __init__(self) -> None: ... def parse_known_args( - self, args: list[str] | None = None, values: optparse.Values | None = None, strict: bool = True + self, args: Sequence[str] | None = None, values: optparse.Values | None = None, strict: bool = True ) -> tuple[optparse.Values, list[str]]: ... def create_parser() -> _YoutubeDLOptionParser: ... 
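A minimal sketch (not part of the patch; the flag and URL are placeholders) of how the parseOpts signature above is exercised, which should type-check against these stubs:

    from yt_dlp.options import parseOpts

    # parseOpts takes a sequence of CLI tokens (or None to fall back to sys.argv)
    # and returns (parser, parsed optparse.Values, leftover positional args).
    parser, opts, leftovers = parseOpts(["--no-progress", "https://example.invalid/video"])
    print(leftovers)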
diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index d47294f88170..f3b68e4405a7 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -2,6 +2,7 @@ import enum import html.parser import json import netrc +import optparse import subprocess import sys import types @@ -53,7 +54,7 @@ NUMBER_RE: str @cache def preferredencoding() -> str: ... def write_json_file(obj: Any, fn: str) -> None: ... -def partial_application(func: Callable[..., object]) -> Callable[..., object]: ... +def partial_application(func: Callable[..., Any]) -> Callable[..., Any]: ... def find_xpath_attr(node: ET.ElementTree, xpath: str, key: str, val: str | None = None) -> ET.Element | None: ... def xpath_with_ns(path: str, ns_map: Mapping[str, str]) -> str: ... def xpath_element( @@ -70,28 +71,28 @@ def xpath_attr( fatal: bool = False, default: str | type[NO_DEFAULT] = ..., ) -> str | None: ... -def get_element_by_id(id: str, html: str, **kwargs: object) -> str | None: ... -def get_element_html_by_id(id: str, html: str, **kwargs: object) -> str | None: ... +def get_element_by_id(id: str, html: str, **kwargs: Any) -> str | None: ... +def get_element_html_by_id(id: str, html: str, **kwargs: Any) -> str | None: ... def get_element_by_class(class_name: str, html: str) -> str: ... def get_element_html_by_class(class_name: str, html: str) -> str: ... -def get_element_by_attribute(attribute: str, value: str, html: str, **kwargs: object) -> str: ... -def get_element_html_by_attribute(attribute: str, value: str, html: str, **kargs: object) -> list[str]: ... -def get_elements_by_class(class_name: str, html: str, **kargs: object) -> list[str]: ... +def get_element_by_attribute(attribute: str, value: str, html: str, **kwargs: Any) -> str: ... +def get_element_html_by_attribute(attribute: str, value: str, html: str, **kargs: Any) -> list[str]: ... +def get_elements_by_class(class_name: str, html: str, **kargs: Any) -> list[str]: ... def get_elements_html_by_class(class_name: str, html: str) -> list[str]: ... -def get_elements_by_attribute(*args: object, **kwargs: object) -> list[str]: ... -def get_elements_html_by_attribute(*args: object, **kwargs: object) -> list[str]: ... +def get_elements_by_attribute(*args: Any, **kwargs: Any) -> list[str]: ... +def get_elements_html_by_attribute(*args: Any, **kwargs: Any) -> list[str]: ... def get_elements_text_and_html_by_attribute( attribute: str, value: str, html: str, *, tag: str = "[\\w:.-]+", escape_value: bool = True ) -> Iterator[str]: ... class HTMLBreakOnClosingTagParser(html.parser.HTMLParser): class HTMLBreakOnClosingTagException(Exception): ... - tagstack: deque[object] + tagstack: deque[Any] def __init__(self) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *_: object) -> None: ... def close(self) -> None: ... - def handle_starttag(self, tag: str, _: object) -> None: ... + def handle_starttag(self, tag: str, _: Any) -> None: ... def handle_endtag(self, tag: str) -> None: ... def get_element_text_and_html_by_tag(tag: str, html: str) -> str: ... @@ -114,11 +115,11 @@ def clean_html(html: str | None) -> str | None: ... class LenientJSONDecoder(json.JSONDecoder): def __init__( self, - *args: object, + *args: Any, transform_source: Callable[[str], str] | None = None, ignore_extra: bool = False, close_objects: int = 0, - **kwargs: object, + **kwargs: Any, ) -> None: ... def decode(self, s: str) -> Any: ... 
# type: ignore[override] @@ -156,10 +157,10 @@ class YoutubeDLError(Exception): def __init__(self, msg: str | None = None) -> None: ... class ExtractorError(YoutubeDLError): - orig_msg: object + orig_msg: Any traceback: types.TracebackType | None - expected: object - cause: object + expected: Any + cause: Any video_id: str ie: InfoExtractor exc_info: ExcInfo @@ -168,14 +169,14 @@ class ExtractorError(YoutubeDLError): msg: str, tb: types.TracebackType | None = None, expected: bool = False, - cause: object | None = None, + cause: Any | None = None, video_id: str | None = None, ie: InfoExtractor | None = None, ) -> None: ... def format_traceback(self) -> str: ... msg: str | None - args: tuple[object, ...] - def __setattr__(self, name: str, value: object) -> None: ... + args: tuple[Any, ...] + def __setattr__(self, name: str, value: Any) -> None: ... class UnsupportedError(ExtractorError): url: str @@ -185,10 +186,29 @@ class RegexNotFoundError(ExtractorError): ... class GeoRestrictedError(ExtractorError): countries: str | None - def __init__(self, msg: str, countries: str | None = None, **kwargs: object) -> None: ... + def __init__( + self, + msg: str, + countries: str | None = None, + *, + tb: types.TracebackType | None = None, + expected: bool = False, + cause: Any | None = None, + video_id: str | None = None, + ie: InfoExtractor | None = None, + ) -> None: ... class UserNotLive(ExtractorError): - def __init__(self, msg: str | None = None, **kwargs: object) -> None: ... + def __init__( + self, + msg: str | None = None, + *, + tb: types.TracebackType | None = None, + expected: bool = False, + cause: Any | None = None, + video_id: str | None = None, + ie: InfoExtractor | None = None, + ) -> None: ... class DownloadError(YoutubeDLError): exc_info: ExcInfo @@ -240,9 +260,9 @@ class XAttrMetadataError(YoutubeDLError): class XAttrUnavailableError(YoutubeDLError): ... -def is_path_like(f: object) -> bool: ... -def extract_timezone(date_str: str, default: type[NO_DEFAULT] | object | None = None) -> tuple[timedelta, str]: ... -def parse_iso8601(date_str: str, delimiter: str = "T", timezone: type[NO_DEFAULT] | object | None = None) -> int: ... +def is_path_like(f: Any) -> bool: ... # Type checker. +def extract_timezone(date_str: str, default: Any = None) -> tuple[timedelta, str]: ... # Any or type[NO_DEFAULT] +def parse_iso8601(date_str: str, delimiter: str = "T", timezone: type[NO_DEFAULT] | Any | None = None) -> int: ... def date_formats(day_first: bool = True) -> list[str]: ... def unified_strdate(date_str: str, day_first: bool = True) -> str: ... def unified_timestamp(date_str: str, day_first: bool = True) -> int: ... @@ -267,7 +287,7 @@ def system_identifier() -> str: ... def get_windows_version() -> tuple[str, ...]: ... def write_string(s: str, out: TextIO | None = None, encoding: str | None = None) -> None: ... def deprecation_warning( - msg: str, *, printer: Callable[..., object] | None = None, stacklevel: int = 0, **kwargs: object + msg: str, *, printer: Callable[..., Any] | None = None, stacklevel: int = 0, **kwargs: Any # kwargs are passed to printer. ) -> None: ... class LockingUnsupportedError(OSError): @@ -285,13 +305,13 @@ class locked_file: def __exit__(self, *_: object) -> None: ... open = __enter__ close = __exit__ - def __getattr__(self, attr: str) -> object: ... + def __getattr__(self, attr: str) -> Any: ... def __iter__(self) -> str: ... def get_filesystem_encoding() -> str: ... def shell_quote(args: str | Collection[str], *, shell: bool = False) -> str: ... 
-def smuggle_url(url: str, data: object) -> str: ... -def unsmuggle_url(smug_url: str, default: object | None = None) -> tuple[str, object]: ... +def smuggle_url(url: str, data: Any) -> str: ... +def unsmuggle_url(smug_url: str, default: Any | None = None) -> tuple[str, Any]: ... def format_decimal_suffix(num: float, fmt: str = "%d%s", *, factor: int = 1000) -> str: ... def format_bytes(bytes: int) -> str: ... def lookup_unit_table(unit_table: Mapping[str, int], s: str, strict: bool = False) -> float: ... @@ -312,14 +332,14 @@ def url_basename(url: str) -> str: ... def base_url(url: str) -> str: ... def urljoin(base: str, path: str) -> str: ... def int_or_none( - v: object, scale: int = 1, default: int | None = None, get_attr: str | None = None, invscale: int = 1, base: int | None = None + v: Any, scale: int = 1, default: int | None = None, get_attr: str | None = None, invscale: int = 1, base: int | None = None ) -> int | None: ... -def str_or_none(v: object, default: str | None = None) -> str: ... +def str_or_none(v: Any, default: str | None = None) -> str: ... def str_to_int(int_str: str) -> int: ... -def float_or_none(v: object, scale: int = 1, invscale: int = 1, default: float | None = None) -> float | None: ... -def bool_or_none(v: object, default: bool | None = None) -> bool | None: ... -def strip_or_none(v: object, default: str | None = None) -> str | None: ... -def url_or_none(url: object) -> str | None: ... +def float_or_none(v: Any, scale: int = 1, invscale: int = 1, default: float | None = None) -> float | None: ... +def bool_or_none(v: Any, default: bool | None = None) -> bool | None: ... +def strip_or_none(v: Any, default: str | None = None) -> str | None: ... +def url_or_none(url: Any) -> str | None: ... def strftime_or_none(timestamp: int, date_format: str = "%Y%m%d", default: str | None = None) -> str | None: ... def parse_duration(s: str | None) -> float: ... def prepend_extension(filename: str, ext: str, expected_real_ext: str | None = None) -> str: ... @@ -335,7 +355,7 @@ def get_exe_version( def frange(start: int = 0, stop: int | None = None, step: int = 1) -> Iterator[float]: ... class LazyList(Sequence[_T]): - def __init__(self, iterable: Iterable[_T], *, reverse: bool = False, _cache: list[object] | None = None) -> None: ... + def __init__(self, iterable: Iterable[_T], *, reverse: bool = False, _cache: list[Any] | None = None) -> None: ... def __iter__(self) -> Iterator[_T]: ... def exhaust(self) -> list[_T]: ... @overload @@ -349,22 +369,22 @@ class LazyList(Sequence[_T]): class PagedList: def __len__(self) -> int: ... - def __init__(self, pagefunc: Callable[[int], Iterator[object]], pagesize: int, use_cache: bool = True) -> None: ... - def getpage(self, pagenum: int) -> list[object]: ... - def getslice(self, start: int = 0, end: int | None = None) -> list[object]: ... + def __init__(self, pagefunc: Callable[[int], Iterator[Any]], pagesize: int, use_cache: bool = True) -> None: ... + def getpage(self, pagenum: int) -> list[Any]: ... + def getslice(self, start: int = 0, end: int | None = None) -> list[Any]: ... @overload - def __getitem__(self, idx: int, /) -> object: ... + def __getitem__(self, idx: int, /) -> Any: ... @overload - def __getitem__(self, idx: slice, /) -> list[object]: ... + def __getitem__(self, idx: slice, /) -> list[Any]: ... def __bool__(self) -> bool: ... class OnDemandPagedList(PagedList): ... class InAdvancePagedList(PagedList): - def __init__(self, pagefunc: Callable[[int], Iterator[object]], pagecount: int, pagesize: int) -> None: ... 
+ def __init__(self, pagefunc: Callable[[int], Iterator[Any]], pagecount: int, pagesize: int) -> None: ... class PlaylistEntries: - MissingEntry: object + MissingEntry: Any is_exhausted: bool ydl: YoutubeDL is_incomplete: bool @@ -372,9 +392,9 @@ class PlaylistEntries: PLAYLIST_ITEMS_RE: Pattern[str] @classmethod def parse_playlist_items(cls, string: str) -> slice | int: ... - def get_requested_items(self) -> Iterator[tuple[int, object]]: ... + def get_requested_items(self) -> Iterator[tuple[int, Any]]: ... def get_full_count(self) -> int | None: ... - def __getitem__(self, idx: int) -> Iterator[tuple[int, object]]: ... + def __getitem__(self, idx: int) -> Iterator[tuple[int, Any]]: ... def __len__(self) -> int: ... _K = TypeVar("_K") @@ -382,27 +402,25 @@ _V = TypeVar("_V") def uppercase_escape(s: str) -> str: ... def lowercase_escape(s: str) -> str: ... -def parse_qs(url: str, **kwargs: object) -> dict[AnyStr, list[AnyStr]]: ... +def parse_qs(url: str, **kwargs: Any) -> dict[AnyStr, list[AnyStr]]: ... def read_batch_urls(batch_fd: FileDescriptorLike) -> list[str]: ... -def urlencode_postdata(*args: object, **kargs: object) -> bytes: ... -def update_url(url: str, *, query_update: Mapping[str, str] | None = None, **kwargs: object) -> str: ... +def urlencode_postdata(*args: Any, **kargs: Any) -> bytes: ... +def update_url(url: str, *, query_update: Mapping[str, str] | None = None, **kwargs: Any) -> str: ... def update_url_query(url: str, query: Mapping[str, str]) -> str: ... def multipart_encode(data: Mapping[AnyStr, AnyStr], boundary: str | None = None) -> tuple[bytes, str]: ... def is_iterable_like( - x: object, allowed_types: Collection[type[Any]] = ..., blocked_types: Collection[type[Any]] | type[NO_DEFAULT] = ... + x: Any, allowed_types: Collection[type[Any]] = ..., blocked_types: Collection[type[Any]] | type[NO_DEFAULT] = ... ) -> bool: ... def variadic(x: _T, allowed_types: Collection[type[Any]] | type[NO_DEFAULT] = ...) -> _T | tuple[_T]: ... def try_call( *funcs: Callable[..., _T], expected_type: type[_T] | None = None, - args: Iterable[object] = ..., - kwargs: Mapping[Hashable, object] = ..., + args: Iterable[Any] = ..., + kwargs: Mapping[Hashable, Any] = ..., ) -> _T | None: ... -def try_get( - src: object, getter: Callable[..., _T] | Collection[Callable[..., _T]], expected_type: type[_T] | None = None -) -> _T: ... +def try_get(src: Any, getter: Callable[..., _T] | Collection[Callable[..., _T]], expected_type: type[_T] | None = None) -> _T: ... def filter_dict(dct: Mapping[_K, _V], cndn: Callable[[_K, _V], bool] = ...) -> dict[_K, _V]: ... -def merge_dicts(*dicts: Mapping[Hashable, object]) -> dict[Hashable, object]: ... +def merge_dicts(*dicts: Mapping[Hashable, Any]) -> dict[Hashable, Any]: ... def encode_compat_str(string: str, encoding: str = ..., errors: str = "strict") -> str: ... US_RATINGS: Mapping[str, int] @@ -410,7 +428,7 @@ TV_PARENTAL_GUIDELINES: Mapping[str, int] def parse_age_limit(s: int) -> int | None: ... def strip_jsonp(code: str) -> str: ... -def js_to_json(code: str, vars: Mapping[str, object] = ..., *, strict: bool = False) -> str: ... +def js_to_json(code: str, vars: Mapping[str, Any] = ..., *, strict: bool = False) -> str: ... def qualities(quality_ids: Sequence[int]) -> Callable[[int], int]: ... POSTPROCESS_WHEN: tuple[str, ...] @@ -447,7 +465,7 @@ def determine_protocol(info_dict: _InfoDict) -> str: ... def render_table( header_row: Iterable[str], data: Iterable[str], delim: bool = False, extra_gap: int = 0, hide_empty: bool = False ) -> str: ... 
-def match_str(filter_str: str, dct: Mapping[str, object], incomplete: bool = False) -> bool: ... +def match_str(filter_str: str, dct: Mapping[str, Any], incomplete: bool = False) -> bool: ... def match_filter_func( filters: Collection[str] | str, breaking_filters: Collection[str] | str | None = None ) -> Callable[..., str | type[NO_DEFAULT] | None]: ... @@ -456,14 +474,14 @@ class download_range_func: def __init__( self, chapters: Iterable[str | Pattern[str]], ranges: Iterable[tuple[int, int]], from_info: bool = False ) -> None: ... - def __call__(self, info_dict: _InfoDict, ydl: YoutubeDL) -> Iterator[dict[str, object]]: ... + def __call__(self, info_dict: _InfoDict, ydl: YoutubeDL) -> Iterator[dict[str, Any]]: ... def __eq__(self, other: object) -> bool: ... def parse_dfxp_time_expr(time_expr: str | None) -> int | None: ... def srt_subtitles_timecode(seconds: float) -> str: ... def ass_subtitles_timecode(seconds: float) -> str: ... def dfxp2srt(dfxp_data: bytes) -> str: ... -def cli_option(params: _Params, command_option: str, param: str, separator: str | None = None) -> object: ... +def cli_option(params: _Params, command_option: str, param: str, separator: str | None = None) -> Any: ... def cli_bool_option( params: _Params, command_option: str, @@ -471,11 +489,9 @@ def cli_bool_option( true_value: str = "true", false_value: str = "false", separator: str | None = None, -) -> object: ... -def cli_valueless_option(params: _Params, command_option: str, param: str, expected_value: bool = True) -> object: ... -def cli_configuration_args( - argdict: dict[str, object], keys: Iterable[str], default: object = ..., use_compat: bool = True -) -> object: ... +) -> Any: ... +def cli_valueless_option(params: _Params, command_option: str, param: str, expected_value: bool = True) -> Any: ... +def cli_configuration_args(argdict: dict[str, Any], keys: Iterable[str], default: Any = ..., use_compat: bool = True) -> Any: ... class ISO639Utils: @classmethod @@ -504,7 +520,7 @@ def parse_m3u8_attributes(attrib: str) -> dict[str, str]: ... def urshift(val: int, n: int) -> int: ... def write_xattr(path: FileDescriptorOrPath, key: str, value: str) -> None: ... def random_birthday(year_field: Hashable, month_field: Hashable, day_field: Hashable) -> dict[Hashable, str]: ... -def find_available_port(interface: str = "") -> object | None: ... +def find_available_port(interface: str = "") -> Any | None: ... DOT_URL_LINK_TEMPLATE: str DOT_WEBLOC_LINK_TEMPLATE: str @@ -514,22 +530,22 @@ LINK_TEMPLATES: Mapping[str, str] def iri_to_uri(iri: str) -> str: ... def to_high_limit_path(path: PathLike[AnyStr]) -> str: ... def format_field( - obj: object, + obj: Any, field: str | Collection[str] | None = None, template: str = "%s", ignore: type[NO_DEFAULT] | str | Collection[str] = ..., default: str = "", - func: Callable[[object], object] = ..., + func: Callable[[Any], Any] = ..., ) -> str: ... def clean_podcast_url(url: str) -> str: ... def random_uuidv4() -> str: ... -def make_dir(path: PathLike[AnyStr], to_screen: Callable[[str], object] | None = None) -> bool: ... +def make_dir(path: PathLike[AnyStr], to_screen: Callable[[str], Any] | None = None) -> bool: ... def get_executable_path() -> str: ... def get_user_config_dirs(package_name: str) -> Iterator[str]: ... def get_system_config_dirs(package_name: str) -> Iterator[str]: ... def time_seconds(**kwargs: float) -> int: ... -def jwt_encode_hs256(payload_data: object, key: str, headers: Mapping[str, object] = ...) -> bytes: ... 
-def jwt_decode_hs256(jwt: str) -> object: ... +def jwt_encode_hs256(payload_data: Any, key: str, headers: Mapping[str, Any] = ...) -> bytes: ... +def jwt_decode_hs256(jwt: str) -> Any: ... WINDOWS_VT_MODE: bool | None @@ -537,44 +553,46 @@ def supports_terminal_sequences(stream: IO[Any]) -> bool: ... def windows_enable_vt_mode() -> None: ... def remove_terminal_sequences(string: str) -> str: ... def number_of_digits(number: int) -> int: ... -def join_nonempty(*values: str, delim: str = "-", from_dict: Mapping[str, object] | None = None) -> str: ... +def join_nonempty(*values: str, delim: str = "-", from_dict: Mapping[str, Any] | None = None) -> str: ... def scale_thumbnails_to_max_format_width( - formats: Iterable[Mapping[str, object]], thumbnails: Iterable[Mapping[str, object]], url_width_re: str | Pattern[str] -) -> list[dict[str, object]]: ... + formats: Iterable[Mapping[str, Any]], thumbnails: Iterable[Mapping[str, Any]], url_width_re: str | Pattern[str] +) -> list[dict[str, Any]]: ... def parse_http_range(range: str | None) -> tuple[int | None, int | None, int | None]: ... -def read_stdin(what: str) -> TextIO | object: ... +def read_stdin(what: str) -> TextIO | Any: ... def determine_file_encoding(data: bytes) -> tuple[str | None, int]: ... class Config: - own_args: object | None + own_args: Any | None parsed_args: tuple[Values, list[str]] | None filename: str | None def __init__(self, parser: _YoutubeDLOptionParser, label: str | None = None) -> None: ... - def init(self, args: object | None = None, filename: str | None = None) -> bool: ... + def init(self, args: Any | None = None, filename: str | None = None) -> bool: ... def load_configs(self) -> bool: ... @staticmethod def read_file(filename: FileDescriptorOrPath, default: list[str] = []) -> list[str]: ... @staticmethod def hide_login_info(opts: Iterable[str]) -> list[str]: ... - def append_config(self, *args: object, label: str | None = None) -> None: ... + def append_config(self, *args: Any, label: str | None = None) -> None: ... @property def all_args(self) -> Iterator[str]: ... - def parse_known_args(self, **kwargs: object) -> tuple[Values, list[str]]: ... + def parse_known_args( + self, args: Sequence[str] | None = None, values: optparse.Values | None = None, strict: bool = True + ) -> tuple[Values, list[str]]: ... def parse_args(self) -> tuple[Values, list[str]]: ... -def merge_headers(*dicts: dict[str, object]) -> dict[str, object]: ... -def cached_method(f: Callable[..., object]) -> Callable[..., object]: ... +def merge_headers(*dicts: dict[str, Any]) -> dict[str, Any]: ... +def cached_method(f: Callable[..., Any]) -> Callable[..., Any]: ... class function_with_repr(Generic[_T]): def __init__(self, func: Callable[..., _T], repr_: str | None = None) -> None: ... - def __call__(self, *args: object, **kwargs: object) -> _T: ... + def __call__(self, *args: Any, **kwargs: Any) -> _T: ... @classmethod - def set_repr(cls, repr_: str) -> Callable[..., object]: ... + def set_repr(cls, repr_: str) -> Callable[..., Any]: ... class Namespace(types.SimpleNamespace): - def __iter__(self) -> Iterator[object]: ... + def __iter__(self) -> Iterator[Any]: ... @property - def items_(self) -> dict[str, object]: ... + def items_(self) -> dict[str, Any]: ... MEDIA_EXTENSIONS: Namespace KNOWN_EXTENSIONS: tuple[str, ...] 
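For reference, a minimal usage sketch of one of the utilities annotated above (the header value is an arbitrary example, not taken from the patch):

    from yt_dlp.utils import parse_http_range

    # Returns a (start, end, total) tuple of optional ints, matching the
    # tuple[int | None, int | None, int | None] annotation in the stub.
    start, end, total = parse_http_range("bytes 0-499/1234")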
@@ -589,8 +607,10 @@ class _UnsafeExtensionError(Exception): class RetryManager: attempt: int retries: int - error_callback: Callable[[BaseException, int, int], object] - def __init__(self, _retries: int | None, _error_callback: Callable[..., object], **kwargs: object) -> None: ... + error_callback: Callable[[BaseException, int, int], Any] + def __init__( + self, _retries: int | None, _error_callback: Callable[..., Any], **kwargs: Any # kwargs passed to _error_callback. + ) -> None: ... @property def error(self) -> None: ... @error.setter @@ -603,32 +623,28 @@ class RetryManager: retries: int, *, sleep_func: Callable[..., float | None], - info: Callable[[str], object], - warn: Callable[[str], object], - error: Callable[[str], object] | None = None, + info: Callable[[str], Any], + warn: Callable[[str], Any], + error: Callable[[str], Any] | None = None, suffix: str | None = None, ) -> None: ... def make_archive_id(ie: InfoExtractor, video_id: str) -> str: ... def truncate_string(s: str, left: int, right: int = 0) -> str: ... def orderedSet_from_options( - options: Sequence[str], - alias_dict: dict[str, Sequence[str]], - *, - use_regex: bool = False, - start: Iterable[object] | None = None, -) -> Iterator[object]: ... + options: Sequence[str], alias_dict: dict[str, Sequence[str]], *, use_regex: bool = False, start: Iterable[Any] | None = None +) -> Iterator[Any]: ... class FormatSorter: regex: str default: tuple[str, ...] ytdl_default: tuple[str, ...] - settings: dict[str, object] + settings: dict[str, Any] ydl: YoutubeDL def __init__(self, ydl: YoutubeDL, field_preference: _Params) -> None: ... def evaluate_params(self, params: _Params, sort_extractor: Collection[str]) -> None: ... def print_verbose_info(self, write_debug: Callable[..., None]) -> None: ... - def calculate_preference(self, format: dict[str, object]) -> tuple[int, ...]: ... + def calculate_preference(self, format: dict[str, Any]) -> tuple[int, ...]: ... @overload def filesize_from_tbr(tbr: None, duration: None) -> None: ... @@ -655,7 +671,7 @@ class _ProgressState(enum.Enum): WARNING = 4 ERROR = 2 @classmethod - def from_dict(cls, s: dict[str, object], /) -> _ProgressState: ... + def from_dict(cls, s: dict[str, Any], /) -> _ProgressState: ... def get_ansi_escape(self, /, percent: int | None = None) -> str: ... if sys.platform == "win32": @@ -663,24 +679,26 @@ if sys.platform == "win32": else: _ENV: TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] +# Much of this is the same as subprocess.Popen, but I do not think copying all of the overloads here is necessary to fix +# the Any types. class Popen(subprocess.Popen[AnyStr]): def __init__( self, args: StrOrBytesPath | Sequence[StrOrBytesPath], - *remaining: object, + *remaining: Any, env: _ENV | None = None, text: bool = False, shell: bool = False, - **kwargs: object, + **kwargs: Any, ) -> None: ... - def communicate_or_kill(self, *args: object, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... + def communicate_or_kill(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... def kill(self, *, timeout: int = 0) -> None: ... @classmethod - def run(cls, *args: object, timeout: int | None = None, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... + def run(cls, *args: Any, timeout: int | None = None, **kwargs: Any) -> tuple[AnyStr, AnyStr]: ... # Passed to cls.__init__() class classproperty: - def __new__(cls, func: Callable[..., object] | None = None, *args: object, **kwargs: object) -> Self: ... 
+ def __new__(cls, func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any) -> Self: ... def __init__( # pyright: ignore[reportInconsistentConstructor] - self, func: Callable[..., object], *, cache: bool = False + self, func: Callable[..., Any], *, cache: bool = False ) -> None: ... - def __get__(self, _: Unused, cls: type[object]) -> object: ... + def __get__(self, _: Unused, cls: type[Any]) -> Any: ... diff --git a/stubs/yt-dlp/yt_dlp/webvtt.pyi b/stubs/yt-dlp/yt_dlp/webvtt.pyi index 2dad26273adf..0a3b87f03980 100644 --- a/stubs/yt-dlp/yt_dlp/webvtt.pyi +++ b/stubs/yt-dlp/yt_dlp/webvtt.pyi @@ -1,6 +1,6 @@ import re from collections.abc import Generator, Mapping -from typing import TextIO, TypeVar +from typing import Any, TextIO, TypeVar _AT = TypeVar("_AT", int, str, re.Match[str], None) @@ -19,7 +19,7 @@ class ParseError(Exception): def __init__(self, parser: _MatchParser) -> None: ... class Block: - def __init__(self, **kwargs: object) -> None: ... + def __init__(self, **kwargs: Any) -> None: ... # Abstract. Accepts arbitrary keyword arguments. @classmethod def parse(cls, parser: _MatchParser) -> Block: ... def write_into(self, stream: TextIO) -> None: ... @@ -40,10 +40,10 @@ class CueBlock(Block): def parse(cls, parser: _MatchParser) -> CueBlock: ... def write_into(self, stream: TextIO) -> None: ... @property - def as_json(self) -> dict[str, object]: ... + def as_json(self) -> dict[str, Any]: ... def __eq__(self, other: object) -> bool: ... @classmethod - def from_json(cls, json: Mapping[str, object]) -> CueBlock: ... + def from_json(cls, json: Mapping[str, Any]) -> CueBlock: ... def hinges(self, other: Block) -> bool: ... def parse_fragment(frag_content: bytes) -> Generator[Block]: ... From ee2ab3de76056d93372e4a385e03b62ee8b32ae0 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 22:22:41 -0400 Subject: [PATCH 06/13] Fix kwarg --- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index f3b68e4405a7..0f1f7c901066 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -576,7 +576,7 @@ class Config: @property def all_args(self) -> Iterator[str]: ... def parse_known_args( - self, args: Sequence[str] | None = None, values: optparse.Values | None = None, strict: bool = True + self, args: Sequence[str] | None = None, *, values: optparse.Values | None = None, strict: bool = True ) -> tuple[Values, list[str]]: ... def parse_args(self) -> tuple[Values, list[str]]: ... From 77df0f5d56332a2769cc450fb43ef860710e3c23 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 24 Jul 2025 22:36:11 -0400 Subject: [PATCH 07/13] Fix kwarg --- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index 0f1f7c901066..c3e7094d6e31 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -575,9 +575,7 @@ class Config: def append_config(self, *args: Any, label: str | None = None) -> None: ... @property def all_args(self) -> Iterator[str]: ... - def parse_known_args( - self, args: Sequence[str] | None = None, *, values: optparse.Values | None = None, strict: bool = True - ) -> tuple[Values, list[str]]: ... + def parse_known_args(self, *, values: optparse.Values | None = None, strict: bool = True) -> tuple[Values, list[str]]: ... 
def parse_args(self) -> tuple[Values, list[str]]: ... def merge_headers(*dicts: dict[str, Any]) -> dict[str, Any]: ... From 67c6de255c9c5626f580dd674c29643e7aa549f0 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 10:13:21 -0400 Subject: [PATCH 08/13] More progress fixing Any/object types --- stubs/yt-dlp/METADATA.toml | 1 + stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi | 6 +- stubs/yt-dlp/yt_dlp/downloader/__init__.pyi | 4 +- stubs/yt-dlp/yt_dlp/downloader/common.pyi | 7 +- stubs/yt-dlp/yt_dlp/downloader/external.pyi | 6 +- stubs/yt-dlp/yt_dlp/downloader/fragment.pyi | 19 +- stubs/yt-dlp/yt_dlp/extractor/common.pyi | 325 +++++++++--------- stubs/yt-dlp/yt_dlp/globals.pyi | 14 +- stubs/yt-dlp/yt_dlp/jsinterp.pyi | 48 ++- stubs/yt-dlp/yt_dlp/networking/_helper.pyi | 16 +- stubs/yt-dlp/yt_dlp/networking/common.pyi | 30 +- stubs/yt-dlp/yt_dlp/networking/exceptions.pyi | 9 +- .../yt-dlp/yt_dlp/networking/impersonate.pyi | 12 +- stubs/yt-dlp/yt_dlp/networking/websocket.pyi | 6 +- stubs/yt-dlp/yt_dlp/plugins.pyi | 7 +- stubs/yt-dlp/yt_dlp/postprocessor/common.pyi | 20 +- stubs/yt-dlp/yt_dlp/socks.pyi | 12 +- stubs/yt-dlp/yt_dlp/utils/_legacy.pyi | 89 ++++- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 15 +- 19 files changed, 375 insertions(+), 271 deletions(-) diff --git a/stubs/yt-dlp/METADATA.toml b/stubs/yt-dlp/METADATA.toml index c971b23e8191..263a836c6994 100644 --- a/stubs/yt-dlp/METADATA.toml +++ b/stubs/yt-dlp/METADATA.toml @@ -1,2 +1,3 @@ version = "2025.05.*" upstream_repository = "https://github.com/yt-dlp/yt-dlp" +requires = ["websockets"] diff --git a/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi b/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi index 7a234e98c774..0ca913da582d 100644 --- a/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/compat/compat_utils.pyi @@ -1,6 +1,6 @@ import types from collections.abc import Callable, Collection -from typing import NamedTuple +from typing import Any, NamedTuple class _Package(NamedTuple): name: str @@ -10,12 +10,12 @@ def get_package_info(module: types.ModuleType) -> _Package: ... class EnhancedModule(types.ModuleType): def __bool__(self) -> bool: ... - def __getattribute__(self, attr: str) -> object: ... + def __getattribute__(self, attr: str) -> Any: ... def passthrough_module( parent: types.ModuleType, child: str | types.ModuleType, allowed_attributes: Collection[str] = ..., *, - callback: Callable[[object], object] = ..., + callback: Callable[[str], object] = ..., ) -> types.ModuleType: ... 
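A minimal sketch exercising the compat_utils annotations above (using yt_dlp itself as the inspected module; any module exposing a version attribute would do):

    import yt_dlp
    from yt_dlp.compat.compat_utils import get_package_info

    # get_package_info returns the _Package NamedTuple; name and version are str per the stub.
    pkg = get_package_info(yt_dlp)
    print(pkg.name, pkg.version)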
diff --git a/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi b/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi index 03ff26bf70b1..71fde97a6912 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/__init__.pyi @@ -1,5 +1,5 @@ from collections.abc import Mapping -from typing import Literal +from typing import Any, Literal from typing_extensions import TypeAlias from ..extractor.common import _InfoDict @@ -23,7 +23,7 @@ _Protocol: TypeAlias = Literal[ def get_suitable_downloader( info_dict: _InfoDict, - params: Mapping[str, object] = ..., + params: Mapping[str, Any] = ..., default: FileDownloader | type[NO_DEFAULT] = ..., protocol: _Protocol | None = None, to_stdout: bool = False, diff --git a/stubs/yt-dlp/yt_dlp/downloader/common.pyi b/stubs/yt-dlp/yt_dlp/downloader/common.pyi index fc687306a03e..e70d9ed12160 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/common.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/common.pyi @@ -34,7 +34,7 @@ class _FileDownloaderParams(TypedDict): class FileDownloader: params: _FileDownloaderParams | None def __init__(self, ydl: YoutubeDL, params: _FileDownloaderParams) -> None: ... - def to_screen(self, *args: object, **kargs: object) -> None: ... + def to_screen(self, message: str, skip_eol: bool = False, quiet: bool | None = None, only_once: bool = False) -> None: ... @property def FD_NAME(cls) -> str: ... @staticmethod @@ -67,14 +67,15 @@ class FileDownloader: def temp_name(self, filename: str) -> str: ... def undo_temp_name(self, filename: str) -> str: ... def ytdl_filename(self, filename: str) -> str: ... - def wrap_file_access(action: str, *, fatal: bool = False) -> object: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + # Wrapper that can accept arbitrary function. + def wrap_file_access(action: str, *, fatal: bool = False) -> Any: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] def sanitize_open(self, filename: str, open_mode: OpenTextMode | OpenBinaryMode) -> tuple[IO[Any], str]: ... def try_remove(self, filename: str) -> None: ... def try_rename(self, old_filename: str, new_filename: str) -> None: ... def try_utime(self, filename: str, last_modified_hdr: str | None) -> int | None: ... def report_destination(self, filename: str) -> None: ... ProgressStyles: Namespace - def report_progress(self, s: Mapping[str, object]) -> None: ... + def report_progress(self, s: Mapping[str, Any]) -> None: ... def report_resuming_byte(self, resume_len: int) -> None: ... def report_retry( self, err: str, count: int, retries: int, frag_index: int | type[NO_DEFAULT] = ..., fatal: bool = True diff --git a/stubs/yt-dlp/yt_dlp/downloader/external.pyi b/stubs/yt-dlp/yt_dlp/downloader/external.pyi index a44f7e857ab9..d181a0065903 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/external.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/external.pyi @@ -2,7 +2,7 @@ import enum import functools from _typeshed import Unused from collections.abc import Iterable, Mapping -from typing import Literal +from typing import Any, Literal from ..extractor.common import _InfoDict from .fragment import FragmentFD @@ -41,7 +41,7 @@ class Aria2cFD(ExternalFD): SUPPORTED_PROTOCOLS: tuple[str, ...] @staticmethod def supports_manifest(manifest: str) -> bool: ... - def aria2c_rpc(self, rpc_port: int, rpc_secret: str, method: str, params: Iterable[str] = ()) -> object: ... + def aria2c_rpc(self, rpc_port: int, rpc_secret: str, method: str, params: Iterable[str] = ()) -> Any: ... 
class HttpieFD(ExternalFD): AVAILABLE_OPT: str @@ -54,7 +54,7 @@ class FFmpegFD(ExternalFD): def available(cls, path: str | None = None) -> bool: ... # type: ignore[override] def on_process_started(self, proc: Unused, stdin: Unused) -> None: ... @classmethod - def can_merge_formats(cls, info_dict: _InfoDict, params: Mapping[str, object]) -> bool: ... + def can_merge_formats(cls, info_dict: _InfoDict, params: Mapping[str, Any]) -> bool: ... class AVconvFD(FFmpegFD): ... diff --git a/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi b/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi index 33a18419f527..7c3e4e6347ab 100644 --- a/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi +++ b/stubs/yt-dlp/yt_dlp/downloader/fragment.pyi @@ -1,23 +1,32 @@ from collections.abc import Callable, Collection, Mapping, Sequence from concurrent.futures.thread import ThreadPoolExecutor +from typing import Any from ..extractor.common import _InfoDict from .common import FileDownloader from .http import HttpFD class HttpQuietDownloader(HttpFD): - def to_screen(self, *args: object, **kargs: object) -> None: ... + def to_screen(self, *args: Any, **kargs: Any) -> None: ... # This method is a no-op. to_console_title = to_screen class FragmentFD(FileDownloader): def report_retry_fragment(self, err: str, frag_index: int, count: int, retries: int) -> None: ... def report_skip_fragment(self, frag_index: int, err: str | None = None) -> None: ... - def decrypter(self, info_dict: _InfoDict) -> Callable[[Mapping[str, object], bytes], bytes]: ... - def download_and_append_fragments_multiple(self, *args: object, **kwargs: object) -> bool: ... + def decrypter(self, info_dict: _InfoDict) -> Callable[[Mapping[str, Any], bytes], bytes]: ... + def download_and_append_fragments_multiple( + self, + *args: tuple[Mapping[str, Any], Collection[Mapping[str, Any]], _InfoDict], + is_fatal: Callable[[int], bool] = ..., + pack_func: Callable[[str, int], bytes] = ..., + finish_func: Callable[[], Any] | None = None, + tpe: ThreadPoolExecutor | None = None, + interrupt_trigger: Sequence[bool] = (True,), + ) -> bool: ... 
def download_and_append_fragments( self, - ctx: Mapping[str, object], - fragments: Collection[Mapping[str, object]], + ctx: Mapping[str, Any], + fragments: Collection[Mapping[str, Any]], info_dict: _InfoDict, *, is_fatal: Callable[[int], bool] = ..., diff --git a/stubs/yt-dlp/yt_dlp/extractor/common.pyi b/stubs/yt-dlp/yt_dlp/extractor/common.pyi index 5aa2770799be..63243d6f0f96 100644 --- a/stubs/yt-dlp/yt_dlp/extractor/common.pyi +++ b/stubs/yt-dlp/yt_dlp/extractor/common.pyi @@ -1,9 +1,9 @@ import re from collections.abc import Callable, Collection, Iterable, Iterator, Mapping, Sequence from functools import cached_property -from typing import Literal, TypedDict, TypeVar, type_check_only +from typing import Any, Literal, TypedDict, TypeVar, type_check_only from typing_extensions import Required, TypeAlias -from urllib.request import Request +from urllib.request import Request, _DataType from xml.etree import ElementTree as ET from ..cache import Cache @@ -20,7 +20,7 @@ class _InfoDict(TypedDict, total=False): creator: str | None comment_count: int | None duration: int | None - formats: list[object] | None + formats: list[dict[str, Any]] | None id: Required[str] like_count: int | None tags: list[str] | None @@ -65,10 +65,10 @@ class InfoExtractor: def _create_request( self, url_or_request: str | Request, - data: object | None = None, + data: _DataType | None = None, headers: Mapping[str, str] | None = None, query: str | Mapping[str, str] | None = None, - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> Request: ... def _download_webpage_handle( self, @@ -78,7 +78,7 @@ class InfoExtractor: errnote: str | None = None, fatal: bool = True, encoding: str | None = None, - data: object | None = None, + data: _DataType | None = None, headers: Mapping[str, str] | None = None, query: str | Mapping[str, str] | None = None, expected_status: int | None = None, @@ -106,15 +106,15 @@ class InfoExtractor: transform_source: Callable[..., str] | None = None, fatal: bool = True, errnote: str | None = None, - **parser_kwargs: object, - ) -> object: ... - def _parse_socket_response_as_json(self, data: str, *args: object, **kwargs: object) -> object: ... - def report_warning( - self, msg: str, video_id: str | None = None, *args: object, only_once: bool = False, **kwargs: object + **parser_kwargs: Any, + ) -> Any: ... + def _parse_socket_response_as_json(self, data: str, *args: Any, **kwargs: Any) -> Any: ... + def report_warning(self, msg: str, video_id: str | None = None, only_once: bool = False) -> None: ... + def to_screen( + self, msg: str, message: str, skip_eol: bool = False, quiet: bool | None = None, only_once: bool = False ) -> None: ... - def to_screen(self, msg: str, *args: object, **kwargs: object) -> None: ... - def write_debug(self, msg: str, *args: object, **kwargs: object) -> None: ... - def get_param(self, name: str, default: object | None = None, *args: object, **kwargs: object) -> object: ... + def write_debug(self, msg: str, *args: Any, **kwargs: Any) -> None: ... + def get_param(self, name: str, default: Any = None, *args: Any, **kwargs: Any) -> Any: ... def report_drm(self, video_id: str) -> None: ... def report_extraction(self, id_or_name: str) -> None: ... def report_download_webpage(self, video_id: str) -> None: ... @@ -138,19 +138,19 @@ class InfoExtractor: video_title: str | None = None, *, url_transparent: bool = False, - **kwargs: object, - ) -> dict[str, object]: ... + **kwargs: Any, + ) -> dict[str, Any]: ... 
@classmethod def playlist_from_matches( cls, - matches: object, + matches: Any, playlist_id: str | None = None, playlist_title: str | None = None, - getter: Callable[..., object] = ..., + getter: Callable[..., Any] = ..., ie: InfoExtractor | None = None, - video_kwargs: Mapping[str, object] | None = None, - **kwargs: object, - ) -> dict[str, object]: ... + video_kwargs: Mapping[str, Any] | None = None, + **kwargs: Any, + ) -> dict[str, Any]: ... @staticmethod def playlist_result( entries: Iterable[_InfoDict], @@ -159,13 +159,13 @@ class InfoExtractor: playlist_description: str | None = ..., *, multi_video: bool = ..., - **kwargs: object, + **kwargs: Any, ) -> _InfoDict: ... def http_scheme(self) -> str: ... @classmethod - def get_testcases(cls, include_onlymatching: bool = False) -> Iterator[dict[str, object]]: ... + def get_testcases(cls, include_onlymatching: bool = False) -> Iterator[dict[str, Any]]: ... @classmethod - def get_webpage_testcases(cls) -> Iterator[dict[str, object]]: ... + def get_webpage_testcases(cls) -> Iterator[dict[str, Any]]: ... @property def age_limit(cls) -> int: ... @classmethod @@ -174,10 +174,8 @@ class InfoExtractor: def is_suitable(cls, age_limit: int) -> bool: ... @classmethod def description(cls, *, markdown: bool = True, search_examples: Sequence[str] | None = None) -> str: ... - def extract_subtitles(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... - def _configuration_arg( - self, key: str, default: object = ..., *, ie_key: str | None = ..., casesense: bool = ... - ) -> object: ... + def extract_subtitles(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _configuration_arg(self, key: str, default: Any = ..., *, ie_key: str | None = ..., casesense: bool = ...) -> Any: ... # These are dynamically created. def _download_xml_handle( self, @@ -188,7 +186,7 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., @@ -204,7 +202,7 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., @@ -220,13 +218,13 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., impersonate: str | None = ..., require_impersonation: bool = ..., - ) -> tuple[dict[str, object], Response]: ... + ) -> tuple[dict[str, Any], Response]: ... def _download_socket_json( self, url_or_request: str | Request, @@ -236,13 +234,13 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., impersonate: str | None = ..., require_impersonation: bool = ..., - ) -> dict[str, object]: ... + ) -> dict[str, Any]: ... 
def _download_json_handle( self, url_or_request: str | Request, @@ -252,13 +250,13 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., impersonate: str | None = ..., require_impersonation: bool = ..., - ) -> tuple[dict[str, object], Response]: ... + ) -> tuple[dict[str, Any], Response]: ... def _download_json( self, url_or_request: str | Request, @@ -268,13 +266,13 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., impersonate: str | None = ..., require_impersonation: bool = ..., - ) -> dict[str, object]: ... + ) -> dict[str, Any]: ... def _download_webpage( self, url_or_request: str | Request, @@ -284,7 +282,7 @@ class InfoExtractor: transform_source: Callable[..., str] | None = ..., fatal: bool = ..., encoding: str | None = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., @@ -301,7 +299,7 @@ class InfoExtractor: ) -> ET.Element: ... def _parse_mpd_formats( self, mpd_doc: ET.Element, mpd_id: str | None = ..., mpd_base_url: str = ..., mpd_url: str | None = ... - ) -> list[object]: ... + ) -> list[Any]: ... def _real_extract(self, url: str) -> _InfoDict: ... @staticmethod def _availability( @@ -318,7 +316,7 @@ class InfoExtractor: note: str | None = ..., errnote: str | None = ..., fatal: bool = ..., - data: object = ..., + data: _DataType | None = ..., headers: Mapping[str, str] = ..., query: Mapping[str, str] = ..., expected_status: int | None = ..., @@ -346,8 +344,8 @@ class InfoExtractor: contains_pattern: str | re.Pattern[str] = r"{(?s:.+)}", fatal: bool = True, default: _StrNoDefaultOrNone = ..., - **kwargs: object, - ) -> object | None: ... + **kwargs: Any, + ) -> Any: ... def _html_search_regex( self, pattern: str | re.Pattern[str], @@ -367,15 +365,15 @@ class InfoExtractor: def _og_regexes(prop: str) -> list[str]: ... @staticmethod def _meta_regex(prop: str) -> str: ... - def _og_search_property(self, prop: str, html: str, name: str | None = None, **kargs: object) -> str | None: ... - def _og_search_thumbnail(self, html: str, **kargs: object) -> str | None: ... - def _og_search_description(self, html: str, **kargs: object) -> str | None: ... - def _og_search_title(self, html: str, *, fatal: bool = False, **kargs: object) -> str | None: ... - def _og_search_video_url(self, html: str, name: str = "video url", secure: bool = True, **kargs: object) -> str | None: ... - def _og_search_url(self, html: str, **kargs: object) -> str | None: ... - def _html_extract_title(self, html: str, name: str = "title", *, fatal: bool = False, **kwargs: object) -> str | None: ... + def _og_search_property(self, prop: str, html: str, name: str | None = None, **kargs: Any) -> str | None: ... + def _og_search_thumbnail(self, html: str, **kargs: Any) -> str | None: ... + def _og_search_description(self, html: str, **kargs: Any) -> str | None: ... + def _og_search_title(self, html: str, *, fatal: bool = False, **kargs: Any) -> str | None: ... 
+ def _og_search_video_url(self, html: str, name: str = "video url", secure: bool = True, **kargs: Any) -> str | None: ... + def _og_search_url(self, html: str, **kargs: Any) -> str | None: ... + def _html_extract_title(self, html: str, name: str = "title", *, fatal: bool = False, **kwargs: Any) -> str | None: ... def _html_search_meta( - self, name: str, html: str, display_name: str | None = None, fatal: bool = False, **kwargs: object + self, name: str, html: str, display_name: str | None = None, fatal: bool = False, **kwargs: Any ) -> str | None: ... def _dc_search_uploader(self, html: str) -> str | None: ... @staticmethod @@ -385,7 +383,7 @@ class InfoExtractor: def _twitter_search_player(self, html: str) -> str | None: ... def _yield_json_ld( self, html: str, video_id: str, *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ... - ) -> Iterator[dict[str, object]]: ... + ) -> Iterator[dict[str, Any]]: ... def _search_json_ld( self, html: str, @@ -394,13 +392,13 @@ class InfoExtractor: *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ..., - ) -> dict[str, object]: ... + ) -> dict[str, Any]: ... def _json_ld( - self, json_ld: object, video_id: str, fatal: bool = True, expected_type: Iterable[str] | str | None = None - ) -> dict[str, object]: ... + self, json_ld: Any, video_id: str, fatal: bool = True, expected_type: Iterable[str] | str | None = None + ) -> dict[str, Any]: ... def _search_nextjs_data( - self, webpage: str, video_id: str, *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ..., **kw: object - ) -> object: ... + self, webpage: str, video_id: str, *, fatal: bool = True, default: type[NO_DEFAULT] | bool = ..., **kw: Any + ) -> Any: ... def _search_nuxt_data( self, webpage: str, @@ -409,96 +407,91 @@ class InfoExtractor: *, fatal: bool = True, traverse: tuple[str, int] = ("data", 0), - ) -> object: ... + ) -> Any: ... @staticmethod - def _hidden_inputs(html: str) -> dict[str, object]: ... - def _form_hidden_inputs(self, form_id: str, html: str) -> dict[str, object]: ... - def _check_formats(self, formats: list[dict[str, object]], video_id: str) -> None: ... + def _hidden_inputs(html: str) -> dict[str, Any]: ... + def _form_hidden_inputs(self, form_id: str, html: str) -> dict[str, Any]: ... + def _check_formats(self, formats: list[dict[str, Any]], video_id: str) -> None: ... @staticmethod - def _remove_duplicate_formats(formats: list[dict[str, object]]) -> None: ... - def _is_valid_url(self, url: str, video_id: str, item: str = "video", headers: Mapping[str, object] = ...) -> bool: ... + def _remove_duplicate_formats(formats: list[dict[str, Any]]) -> None: ... + def _is_valid_url(self, url: str, video_id: str, item: str = "video", headers: Mapping[str, Any] = ...) -> bool: ... def _proto_relative_url(self, url: str, scheme: str | None = None) -> str: ... def _sleep(self, timeout: float, video_id: str, msg_template: str | None = None) -> None: ... def _extract_f4m_formats( self, manifest_url: str, video_id: str, - preference: object | None = None, - quality: object | None = None, + preference: Any = None, + quality: Any = None, f4m_id: str | None = None, transform_source: Callable[..., str] = ..., fatal: bool = True, m3u8_id: str | None = None, data: str | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., - ) -> list[dict[str, object]]: ... + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> list[dict[str, Any]]: ... 
def _parse_f4m_formats( self, manifest: str, manifest_url: str, video_id: str, - preference: object | None = None, - quality: object | None = None, + preference: Any = None, + quality: Any = None, f4m_id: str | None = None, transform_source: Callable[..., str] = ..., fatal: bool = True, m3u8_id: str | None = None, - ) -> list[dict[str, object]]: ... + ) -> list[dict[str, Any]]: ... def _m3u8_meta_format( - self, - m3u8_url: str, - ext: str | None = None, - preference: object | None = None, - quality: object | None = None, - m3u8_id: str | None = None, - ) -> dict[str, object]: ... + self, m3u8_url: str, ext: str | None = None, preference: Any = None, quality: Any = None, m3u8_id: str | None = None + ) -> dict[str, Any]: ... def _report_ignoring_subs(self, name: str) -> None: ... - def _extract_m3u8_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _extract_m3u8_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _extract_m3u8_formats_and_subtitles( self, m3u8_url: str, video_id: str, ext: str | None = None, entry_protocol: str = "m3u8_native", - preference: object | None = None, - quality: object | None = None, + preference: Any = None, + quality: Any = None, m3u8_id: str | None = None, note: str | None = None, errnote: str | None = None, fatal: bool = True, live: bool = False, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _parse_m3u8_formats_and_subtitles( self, m3u8_doc: str, m3u8_url: str | None = None, ext: str | None = None, entry_protocol: str = "m3u8_native", - preference: object | None = None, - quality: object | None = None, + preference: Any = None, + quality: Any = None, m3u8_id: str | None = None, live: bool = False, note: str | None = None, errnote: str | None = None, fatal: bool = True, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., video_id: str | None = None, - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _extract_m3u8_vod_duration( self, m3u8_vod_url: str, video_id: str, note: str | None = None, errnote: str | None = None, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., ) -> int | None: ... def _parse_m3u8_vod_duration(self, m3u8_vod: str, video_id: str) -> int: ... def _extract_mpd_vod_duration( @@ -507,9 +500,9 @@ class InfoExtractor: video_id: str, note: str | None = None, errnote: str | None = None, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., ) -> int | None: ... @staticmethod def _xpath_ns(path: str, namespace: str | None = None) -> str: ... 
@@ -518,41 +511,41 @@ class InfoExtractor: smil_url: str, video_id: str, fatal: bool = True, - f4m_params: Mapping[str, object] | None = None, + f4m_params: Mapping[str, Any] | None = None, transform_source: Callable[..., str] | None = None, - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... - def _extract_smil_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... + def _extract_smil_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _extract_smil_info( - self, smil_url: str, video_id: str, fatal: bool = True, f4m_params: Mapping[str, object] | None = None - ) -> dict[str, object]: ... + self, smil_url: str, video_id: str, fatal: bool = True, f4m_params: Mapping[str, Any] | None = None + ) -> dict[str, Any]: ... def _download_smil( self, smil_url: str, video_id: str, fatal: bool = True, transform_source: Callable[..., str] | None = None ) -> ET.Element: ... def _parse_smil( - self, smil: ET.Element, smil_url: str, video_id: str, f4m_params: Mapping[str, object] | None = None - ) -> dict[str, object]: ... + self, smil: ET.Element, smil_url: str, video_id: str, f4m_params: Mapping[str, Any] | None = None + ) -> dict[str, Any]: ... def _parse_smil_namespace(self, smil: str) -> str | None: ... - def _parse_smil_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _parse_smil_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _parse_smil_formats_and_subtitles( self, smil: ET.Element, smil_url: str, video_id: str, namespace: str | None = None, - f4m_params: Mapping[str, object] | None = None, + f4m_params: Mapping[str, Any] | None = None, transform_rtmp_url: Callable[[str, str], tuple[str, str]] | None = None, - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _parse_smil_subtitles( self, smil: ET.Element, namespace: str | None = None, subtitles_lang: str = "en" - ) -> list[dict[str, object]]: ... - def _extract_xspf_playlist(self, xspf_url: str, playlist_id: str, fatal: bool = True) -> list[dict[str, object]]: ... + ) -> list[dict[str, Any]]: ... + def _extract_xspf_playlist(self, xspf_url: str, playlist_id: str, fatal: bool = True) -> list[dict[str, Any]]: ... def _parse_xspf( self, xspf_doc: ET.Element, playlist_id: str, xspf_url: str | None = None, xspf_base_url: str | None = None - ) -> list[dict[str, object]]: ... - def _extract_mpd_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + ) -> list[dict[str, Any]]: ... + def _extract_mpd_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _extract_mpd_formats_and_subtitles( - self, *args: object, **kwargs: object - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + self, *args: Any, **kwargs: Any + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _extract_mpd_periods( self, mpd_url: str, @@ -561,18 +554,18 @@ class InfoExtractor: note: str | None = None, errnote: str | None = None, fatal: bool = True, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., - ) -> tuple[list[object], dict[str, object]]: ... + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> tuple[list[Any], dict[str, Any]]: ... 
def _parse_mpd_formats_and_subtitles( - self, *args: object, **kwargs: object - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... - def _merge_mpd_periods(self, periods: Iterable[Mapping[str, object]]) -> tuple[list[object], dict[str, object]]: ... + self, *args: Any, **kwargs: Any + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... + def _merge_mpd_periods(self, periods: Iterable[Mapping[str, Any]]) -> tuple[list[Any], dict[str, Any]]: ... def _parse_mpd_periods( self, mpd_doc: ET.Element, mpd_id: str | None = None, mpd_base_url: str = "", mpd_url: str | None = None - ) -> tuple[list[object], dict[str, object]]: ... - def _extract_ism_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + ) -> tuple[list[Any], dict[str, Any]]: ... + def _extract_ism_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _extract_ism_formats_and_subtitles( self, ism_url: str, @@ -581,13 +574,13 @@ class InfoExtractor: note: str | None = None, errnote: str | None = None, fatal: bool = True, - data: object | None = None, - headers: Mapping[str, object] = ..., - query: Mapping[str, object] = ..., - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _parse_ism_formats_and_subtitles( self, ism_doc: str, ism_url: str, ism_id: str | None = None - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _parse_html5_media_entries( self, base_url: str, @@ -596,44 +589,44 @@ class InfoExtractor: m3u8_id: str | None = None, m3u8_entry_protocol: str = "m3u8_native", mpd_id: str | None = None, - preference: object | None = None, - quality: object | None = None, - _headers: Mapping[str, object] | None = None, - ) -> list[dict[str, object]]: ... - def _extract_akamai_formats(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + preference: Any = None, + quality: Any = None, + _headers: Mapping[str, Any] | None = None, + ) -> list[dict[str, Any]]: ... + def _extract_akamai_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _extract_akamai_formats_and_subtitles( - self, manifest_url: str, video_id: str, hosts: Mapping[str, object] = ... - ) -> tuple[list[dict[str, object]], list[dict[str, object]]]: ... + self, manifest_url: str, video_id: str, hosts: Mapping[str, Any] = ... + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _extract_wowza_formats( self, url: str, video_id: str, m3u8_entry_protocol: str = "m3u8_native", skip_protocols: Collection[str] = ... - ) -> list[dict[str, object]]: ... + ) -> list[dict[str, Any]]: ... def _find_jwplayer_data( - self, webpage: str, video_id: str | None = None, transform_source: Callable[..., object] = ... - ) -> object | None: ... + self, webpage: str, video_id: str | None = None, transform_source: Callable[..., Any] = ... + ) -> Any: ... def _extract_jwplayer_data( - self, webpage: str, video_id: str, *args: object, transform_source: Callable[..., object] = ..., **kwargs: object - ) -> list[dict[str, object]]: ... + self, webpage: str, video_id: str, *args: Any, transform_source: Callable[..., Any] = ..., **kwargs: Any + ) -> list[dict[str, Any]]: ... 
def _parse_jwplayer_data( self, - jwplayer_data: Mapping[str, object], + jwplayer_data: Mapping[str, Any], video_id: str | None = None, require_title: bool = True, m3u8_id: str | None = None, mpd_id: str | None = None, - rtmp_params: Mapping[str, object] | None = None, + rtmp_params: Mapping[str, Any] | None = None, base_url: str | None = None, - ) -> list[dict[str, object]]: ... + ) -> list[dict[str, Any]]: ... def _parse_jwplayer_formats( self, - jwplayer_sources_data: Iterable[Mapping[str, object]], + jwplayer_sources_data: Iterable[Mapping[str, Any]], video_id: str | None = None, m3u8_id: str | None = None, mpd_id: str | None = None, - rtmp_params: Mapping[str, object] | None = None, + rtmp_params: Mapping[str, Any] | None = None, base_url: str | None = None, - ) -> list[dict[str, object]]: ... - def _int(self, v: object, name: str, fatal: bool = False, **kwargs: object) -> int | None: ... - def _float(self, v: object, name: str, fatal: bool = False, **kwargs: object) -> float | None: ... + ) -> list[dict[str, Any]]: ... + def _int(self, v: Any, name: str, fatal: bool = False, **kwargs: Any) -> int | None: ... + def _float(self, v: Any, name: str, fatal: bool = False, **kwargs: Any) -> float | None: ... def _set_cookie( self, domain: str, @@ -644,59 +637,59 @@ class InfoExtractor: path: str = "/", secure: bool = False, discard: bool = False, - rest: dict[str, object] = ..., - **kwargs: object, + rest: dict[str, Any] = ..., + **kwargs: Any, ) -> None: ... def _live_title(self, name: _T) -> _T: ... def _get_cookies(self, url: str) -> LenientSimpleCookie: ... def _apply_first_set_cookie_header(self, url_handle: Response, cookie: str) -> None: ... @property def _RETURN_TYPE(cls) -> str: ... - def _get_subtitles(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... - def extract_comments(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... - def _get_comments(self, *args: object, **kwargs: object) -> list[dict[str, object]]: ... + def _get_subtitles(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def extract_comments(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _get_comments(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... @staticmethod def _merge_subtitle_items( - subtitle_list1: Iterable[Mapping[str, object]], subtitle_list2: Iterable[Mapping[str, object]] - ) -> list[dict[str, object]]: ... + subtitle_list1: Iterable[Mapping[str, Any]], subtitle_list2: Iterable[Mapping[str, Any]] + ) -> list[dict[str, Any]]: ... @classmethod - def _merge_subtitles(cls, *dicts: dict[str, object], target: object | None = None) -> object: ... - def extract_automatic_captions(self, *args: object, **kwargs: object) -> dict[str, object]: ... + def _merge_subtitles(cls, *dicts: dict[str, Any], target: Any = None) -> Any: ... + def extract_automatic_captions(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... @cached_property def _cookies_passed(self) -> bool: ... - def _mark_watched(self, *args: object, **kwargs: object) -> object: ... + def _mark_watched(self, *args: Any, **kwargs: Any) -> Any: ... @staticmethod def _generic_id(url: str) -> str: ... def _generic_title(self, url: str = "", webpage: str = "", *, default: str | None = None) -> str | None: ... 
def _extract_chapters_helper( self, - chapter_list: Iterable[Mapping[str, object]], - start_function: Callable[..., object], - title_function: Callable[..., object], + chapter_list: Iterable[Mapping[str, Any]], + start_function: Callable[..., Any], + title_function: Callable[..., Any], duration: float, strict: bool = True, ) -> list[dict[str, int]] | None: ... def _extract_chapters_from_description( self, description: str | None, duration: str | None ) -> list[dict[str, int]] | None: ... - def mark_watched(self, *args: object, **kwargs: object) -> None: ... + def mark_watched(self, *args: Any, **kwargs: Any) -> None: ... def geo_verification_headers(self) -> dict[str, str]: ... - def RetryManager(self, **kwargs: object) -> _RetryManager: ... + def RetryManager(self, **kwargs: Any) -> _RetryManager: ... @classmethod def extract_from_webpage(cls, ydl: YoutubeDL, url: str, webpage: str) -> Iterator[_InfoDict]: ... def _yes_playlist( self, playlist_id: str, video_id: str, - smuggled_data: object | None = None, + smuggled_data: Any = None, *, playlist_label: str = "playlist", video_label: str = "video", ) -> bool: ... def _error_or_warning(self, err: str, _count: int | None = None, _retries: int = 0, *, fatal: bool = True) -> None: ... def _extract_generic_embeds( - self, url: str, *args: object, info_dict: _InfoDict = ..., note: str = "Extracting generic embeds", **kwargs: object - ) -> list[dict[str, object]]: ... + self, url: str, *args: Any, info_dict: _InfoDict = ..., note: str = "Extracting generic embeds", **kwargs: Any + ) -> list[dict[str, Any]]: ... @classmethod def _extract_from_webpage(cls, url: str, webpage: str) -> Iterator[_InfoDict]: ... @classmethod @@ -704,7 +697,7 @@ class InfoExtractor: @classmethod def _extract_url(cls, webpage: str) -> str | None: ... @classmethod - def __init_subclass__(cls, *, plugin_name: str | None = None, **kwargs: object) -> None: ... + def __init_subclass__(cls, *, plugin_name: str | None = None, **kwargs: Any) -> None: ... class StopExtraction(Exception): ... class CommentsDisabled(Exception): ... diff --git a/stubs/yt-dlp/yt_dlp/globals.pyi b/stubs/yt-dlp/yt_dlp/globals.pyi index 981bbb4e1745..45664ccc93d4 100644 --- a/stubs/yt-dlp/yt_dlp/globals.pyi +++ b/stubs/yt-dlp/yt_dlp/globals.pyi @@ -1,5 +1,5 @@ from collections import defaultdict -from typing import Generic, TypeVar +from typing import Any, Generic, TypeVar _T = TypeVar("_T") @@ -7,13 +7,13 @@ class Indirect(Generic[_T]): value: _T def __init__(self, initial: _T, /) -> None: ... 
-postprocessors: Indirect[dict[str, object]] -extractors: Indirect[dict[str, object]] +postprocessors: Indirect[dict[str, Any]] +extractors: Indirect[dict[str, Any]] all_plugins_loaded: Indirect[bool] -plugin_specs: Indirect[dict[str, object]] +plugin_specs: Indirect[dict[str, Any]] plugin_dirs: Indirect[list[str]] -plugin_ies: Indirect[dict[str, object]] -plugin_pps: Indirect[dict[str, object]] -plugin_ies_overrides: Indirect[defaultdict[str, object]] +plugin_ies: Indirect[dict[str, Any]] +plugin_pps: Indirect[dict[str, Any]] +plugin_ies_overrides: Indirect[defaultdict[str, Any]] IN_CLI: Indirect[bool] LAZY_EXTRACTORS: Indirect[None | bool] diff --git a/stubs/yt-dlp/yt_dlp/jsinterp.pyi b/stubs/yt-dlp/yt_dlp/jsinterp.pyi index 6edb2107bd84..f0612dbd7b15 100644 --- a/stubs/yt-dlp/yt_dlp/jsinterp.pyi +++ b/stubs/yt-dlp/yt_dlp/jsinterp.pyi @@ -1,8 +1,10 @@ import collections -from collections.abc import Callable, Collection, Mapping -from typing import NoReturn +import types +from collections.abc import Callable, Collection, Iterable, Mapping +from typing import Any, NoReturn from typing_extensions import TypeAlias +from yt_dlp.extractor.common import InfoExtractor from yt_dlp.utils._utils import function_with_repr from .utils import ExtractorError @@ -21,8 +23,8 @@ class JS_Throw(ExtractorError): error: BaseException def __init__(self, e: BaseException) -> None: ... -class LocalNameSpace(collections.ChainMap[str, object]): - def __setitem__(self, key: str, value: object) -> None: ... +class LocalNameSpace(collections.ChainMap[str, Any]): + def __setitem__(self, key: str, value: Any) -> None: ... def __delitem__(self, key: str) -> NoReturn: ... class Debugger: @@ -30,26 +32,38 @@ class Debugger: @staticmethod def write(*args: str, level: int = 100) -> None: ... @classmethod - # Callable[[Debugger, str, object, int, ...], tuple[object, bool]] but it also accepts *args, **kwargs. - def wrap_interpreter(cls, f: Callable[..., tuple[object, bool]]) -> Callable[..., tuple[object, bool]]: ... + # Callable[[Debugger, str, Any, int, ...], tuple[Any, bool]] but it also accepts *args, **kwargs. + def wrap_interpreter(cls, f: Callable[..., tuple[Any, bool]]) -> Callable[..., tuple[Any, bool]]: ... -_BuildFunctionReturnType: TypeAlias = Callable[[Collection[object], Mapping[str, object], int], object | None] +_BuildFunctionReturnType: TypeAlias = Callable[[Collection[Any], Mapping[str, Any], int], Any | None] class JSInterpreter: - def __init__(self, code: str, objects: Mapping[str, object] | None = None) -> None: ... + def __init__(self, code: str, objects: Mapping[str, Any] | None = None) -> None: ... class Exception(ExtractorError): - def __init__(self, msg: str, expr: str | None = None, *args: object, **kwargs: object) -> None: ... + def __init__( + self, + msg: str, + expr: str | None = None, + tb: types.TracebackType | None = None, + expected: bool = False, + cause: Exception | str | None = None, + video_id: str | None = None, + ie: InfoExtractor | None = None, + ) -> None: ... + # After wrapping, *args and **kwargs are added but do nothing for this fmethod. def interpret_statement( - self, stmt: str, local_vars: Mapping[str, object], allow_recursion: int, *args: object, **kwargs: object - ) -> tuple[object, bool]: ... - def interpret_expression(self, expr: str, local_vars: Mapping[str, object], allow_recursion: int) -> object: ... - def extract_object(self, objname: str, *global_stack: object) -> object: ... 
+ self, stmt: str, local_vars: Mapping[str, Any], allow_recursion: int, *args: Any, **kwargs: Any + ) -> tuple[Any, bool]: ... + def interpret_expression(self, expr: str, local_vars: Mapping[str, Any], allow_recursion: int) -> Any: ... + def extract_object(self, objname: str, *global_stack: Iterable[dict[str, Any]]) -> Any: ... def extract_function_code(self, funcname: str) -> tuple[list[str], tuple[str, str]]: ... - def extract_function(self, funcname: str, *global_stack: object) -> function_with_repr[object]: ... + def extract_function(self, funcname: str, *global_stack: Iterable[dict[str, Any]]) -> function_with_repr[Any]: ... def extract_function_from_code( - self, argnames: Collection[str], code: str, *global_stack: object + self, argnames: Collection[str], code: str, *global_stack: Iterable[dict[str, Any]] + ) -> _BuildFunctionReturnType: ... + def call_function(self, funcname: str, *args: Any) -> function_with_repr[Any]: ... + def build_function( + self, argnames: Collection[str], code: str, *global_stack: Iterable[dict[str, Any]] ) -> _BuildFunctionReturnType: ... - def call_function(self, funcname: str, *args: object) -> function_with_repr[object]: ... - def build_function(self, argnames: Collection[str], code: str, *global_stack: object) -> _BuildFunctionReturnType: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/_helper.pyi b/stubs/yt-dlp/yt_dlp/networking/_helper.pyi index be90056d322c..d93a606440ad 100644 --- a/stubs/yt-dlp/yt_dlp/networking/_helper.pyi +++ b/stubs/yt-dlp/yt_dlp/networking/_helper.pyi @@ -1,5 +1,6 @@ import ssl -from _typeshed import ReadableBuffer, StrOrBytesPath +from _socket import _Address +from _typeshed import StrOrBytesPath from collections.abc import Callable, Iterable, Mapping from socket import AddressFamily, SocketKind from typing import Any @@ -11,7 +12,7 @@ from .common import Request, RequestHandler, Response def ssl_load_certs(context: ssl.SSLContext, use_certifi: bool = True) -> None: ... def ssl_load_windows_store_certs(ssl_context: ssl.SSLContext, storename: str) -> None: ... -def make_socks_proxy_opts(socks_proxy: str) -> dict[str, object]: ... +def make_socks_proxy_opts(socks_proxy: str) -> dict[str, Any]: ... def get_redirect_method(method: str, status: int) -> str: ... def make_ssl_context( verify: bool = True, @@ -23,29 +24,24 @@ def make_ssl_context( ) -> ssl.SSLContext: ... class InstanceStoreMixin: - def __init__(self, **kwargs: object) -> None: ... + def __init__(self, **kwargs: Any) -> None: ... # Passed to non-existent parent so MRO works. def add_accept_encoding_header(headers: HTTPHeaderDict, supported_encodings: Iterable[str]) -> None: ... def wrap_request_errors( func: Callable[[RequestHandler, Request], Response | None], ) -> Callable[[RequestHandler, Request], None]: ... -_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer _IPAddress: TypeAlias = tuple[ AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes] ] def create_socks_proxy_socket( - dest_addr: object, - proxy_args: Mapping[str, object], - proxy_ip_addr: _IPAddress, - timeout: float | None, - source_address: _Address, + dest_addr: _Address, proxy_args: Mapping[str, Any], proxy_ip_addr: _IPAddress, timeout: float | None, source_address: _Address ) -> sockssocket: ... 
def create_connection( address: tuple[str, int], timeout: int = ..., source_address: _Address | None = None, *, - _create_socket_func: Callable[[_IPAddress, int, object], sockssocket] = ..., + _create_socket_func: Callable[[_IPAddress, int, _Address], sockssocket] = ..., ) -> sockssocket: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/common.pyi b/stubs/yt-dlp/yt_dlp/networking/common.pyi index aa7d386de4c9..ecfbdcecbec1 100644 --- a/stubs/yt-dlp/yt_dlp/networking/common.pyi +++ b/stubs/yt-dlp/yt_dlp/networking/common.pyi @@ -37,7 +37,7 @@ class RequestHandler(abc.ABC, metaclass=abc.ABCMeta): headers: HTTPHeaderDict | dict[str, str] cookiejar: YoutubeDLCookieJar | None timeout: float | int - proxies: Mapping[str, object] | dict[str, object] + proxies: Mapping[str, Any] | dict[str, Any] source_address: str | None verbose: bool prefer_system_certs: bool @@ -50,7 +50,7 @@ class RequestHandler(abc.ABC, metaclass=abc.ABCMeta): headers: HTTPHeaderDict | Mapping[str, str] | None = None, cookiejar: YoutubeDLCookieJar | None = None, timeout: float | None = None, - proxies: Mapping[str, object] | None = None, + proxies: Mapping[str, Any] | None = None, source_address: str | None = None, verbose: bool = False, prefer_system_certs: bool = False, @@ -70,17 +70,17 @@ class RequestHandler(abc.ABC, metaclass=abc.ABCMeta): def __exit__(self, *args: object) -> None: ... class Request: - proxies: Mapping[str, object] | dict[str, object] - extensions: Mapping[str, object] | dict[str, object] + proxies: Mapping[str, Any] | dict[str, Any] + extensions: Mapping[str, Any] | dict[str, Any] def __init__( self, url: str, data: _RequestData | None = None, headers: HTTPHeaderDict | Mapping[str, str] | None = None, - proxies: Mapping[str, object] | None = None, + proxies: Mapping[str, Any] | None = None, query: Mapping[str, str] | None = None, method: str | None = None, - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> None: ... @property def url(self) -> str: ... @@ -104,7 +104,7 @@ class Request: data: str | None = None, headers: HTTPHeaderDict | Mapping[str, str] | None = None, query: Mapping[str, str] | None = None, - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> None: ... def copy(self) -> Self: ... @@ -112,31 +112,31 @@ def HEADRequest( url: str, data: _RequestData | None = None, headers: HTTPHeaderDict | Mapping[str, str] | None = None, - proxies: Mapping[str, object] | None = None, + proxies: Mapping[str, Any] | None = None, query: Mapping[str, str] | None = None, *, method: str = "HEAD", - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> Request: ... def PATCHRequest( url: str, data: _RequestData | None = None, headers: HTTPHeaderDict | Mapping[str, str] | None = None, - proxies: Mapping[str, object] | None = None, + proxies: Mapping[str, Any] | None = None, query: Mapping[str, str] | None = None, *, method: str = "PATCH", - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> Request: ... def PUTRequest( url: str, data: _RequestData | None = None, headers: HTTPHeaderDict | Mapping[str, str] | None = None, - proxies: Mapping[str, object] | None = None, + proxies: Mapping[str, Any] | None = None, query: Mapping[str, str] | None = None, *, method: str = "PUT", - extensions: Mapping[str, object] | None = None, + extensions: Mapping[str, Any] | None = None, ) -> Request: ... 
class Response(io.IOBase): @@ -145,7 +145,7 @@ class Response(io.IOBase): status: int url: str reason: str | None - extensions: Mapping[str, object] | dict[str, object] + extensions: Mapping[str, Any] | dict[str, Any] def __init__( self, fp: io.IOBase, @@ -153,7 +153,7 @@ class Response(io.IOBase): headers: Mapping[str, str], status: int = 200, reason: str | None = None, - extensions: Mapping[str, object] | dict[str, object] | None = None, + extensions: Mapping[str, Any] | dict[str, Any] | None = None, ) -> None: ... def readable(self) -> bool: ... def read(self, amt: int | None = None) -> bytes: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi b/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi index e9f7eeaa703f..15668162344b 100644 --- a/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi +++ b/stubs/yt-dlp/yt_dlp/networking/exceptions.pyi @@ -27,7 +27,14 @@ class HTTPError(RequestError): def close(self) -> None: ... class IncompleteRead(TransportError): - def __init__(self, partial: int, expected: int | None = None, **kwargs: object) -> None: ... + def __init__( + self, + partial: int, + expected: int | None = None, + *, + cause: Exception | str | None = None, + handler: RequestHandler | None = None, + ) -> None: ... class SSLError(TransportError): ... class CertificateVerifyError(SSLError): ... diff --git a/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi b/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi index 475be6d14817..3fdc816bf04b 100644 --- a/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi +++ b/stubs/yt-dlp/yt_dlp/networking/impersonate.pyi @@ -1,5 +1,6 @@ from abc import ABC from dataclasses import dataclass +from typing import Any from typing_extensions import Self from .common import Request, RequestHandler @@ -17,9 +18,16 @@ class ImpersonateTarget: def from_str(cls, target: str) -> Self: ... class ImpersonateRequestHandler(RequestHandler, ABC): - _SUPPORTED_IMPERSONATE_TARGET_MAP: dict[ImpersonateTarget, object] = ... + _SUPPORTED_IMPERSONATE_TARGET_MAP: dict[ImpersonateTarget, Any] = ... # Copied from source. - def __init__(self, *, impersonate: ImpersonateTarget | None = None, **kwargs: object) -> None: ... + def __init__( + self, + *, + impersonate: ImpersonateTarget | None = None, + # All keyword arguments are ignored (passed to RequestHandler as **kwargs but RequestHandler.__init__() has **_ and does + # not use it). + **kwargs: Any, + ) -> None: ... @property def supported_targets(cls) -> tuple[ImpersonateTarget, ...]: ... def is_supported_target(self, target: ImpersonateTarget) -> bool: ... diff --git a/stubs/yt-dlp/yt_dlp/networking/websocket.pyi b/stubs/yt-dlp/yt_dlp/networking/websocket.pyi index 61f896c90a82..f842d6a72adf 100644 --- a/stubs/yt-dlp/yt_dlp/networking/websocket.pyi +++ b/stubs/yt-dlp/yt_dlp/networking/websocket.pyi @@ -1,9 +1,11 @@ import abc +from typing import Any from .common import RequestHandler, Response class WebSocketResponse(Response): - def send(self, message: bytes | str) -> object: ... - def recv(self) -> object: ... + # Both raise NotImplementedError. + def send(self, message: bytes | str) -> Any: ... + def recv(self) -> Any: ... class WebSocketRequestHandler(RequestHandler, abc.ABC, metaclass=abc.ABCMeta): ... 
diff --git a/stubs/yt-dlp/yt_dlp/plugins.pyi b/stubs/yt-dlp/yt_dlp/plugins.pyi index 52ae55de1120..6394f6fd036d 100644 --- a/stubs/yt-dlp/yt_dlp/plugins.pyi +++ b/stubs/yt-dlp/yt_dlp/plugins.pyi @@ -5,6 +5,7 @@ from collections.abc import Iterator from importlib.machinery import ModuleSpec from pathlib import Path from types import ModuleType +from typing import Any from .globals import Indirect @@ -25,8 +26,8 @@ COMPAT_PACKAGE_NAME: str = "ytdlp_plugins" class PluginSpec: module_name: str suffix: str - destination: Indirect[object] - plugin_destination: Indirect[object] + destination: Indirect[dict[str, Any]] + plugin_destination: Indirect[dict[str, Any]] class PluginLoader(importlib.abc.Loader): def exec_module(self, module: ModuleType) -> None: ... @@ -39,6 +40,6 @@ class PluginFinder(importlib.abc.MetaPathFinder): def invalidate_caches(self) -> None: ... def directories() -> list[str]: ... -def load_plugins(plugin_spec: PluginSpec) -> dict[str, type[object]]: ... +def load_plugins(plugin_spec: PluginSpec) -> dict[str, Any]: ... def load_all_plugins() -> None: ... def register_plugin_spec(plugin_spec: PluginSpec) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi b/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi index b8e75ec7d917..bb02ab145873 100644 --- a/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi +++ b/stubs/yt-dlp/yt_dlp/postprocessor/common.pyi @@ -8,19 +8,29 @@ from ..YoutubeDL import YoutubeDL class PostProcessorMetaClass(type): @staticmethod def run_wrapper(func: Callable[..., object]) -> Callable[..., object]: ... - def __new__(cls, name: str, bases: tuple[type[Any], ...], attrs: dict[str, object]) -> type[Any]: ... + def __new__(cls, name: str, bases: tuple[type[Any], ...], attrs: dict[str, Any]) -> type[Any]: ... class PostProcessor(metaclass=PostProcessorMetaClass): PP_NAME: str def __init__(self, downloader: YoutubeDL | None = None) -> None: ... @classmethod def pp_key(cls) -> str: ... - def to_screen(self, text: str, prefix: bool = True, *args: object, **kwargs: object) -> None: ... - def report_warning(self, text: str, *args: object, **kwargs: object) -> None: ... + def to_screen( + self, + text: str, + prefix: bool = True, + *, + message: str, + skip_eol: bool = False, + quiet: bool | None = None, + only_once: bool = False, + ) -> None: ... + def report_warning(self, text: str, only_once: bool = False) -> None: ... def deprecation_warning(self, msg: str) -> None: ... def deprecated_feature(self, msg: str) -> None: ... - def write_debug(self, text: str, *args: object, **kwargs: object) -> None: ... - def get_param(self, name: str, default: object | None = None, *args: object, **kwargs: object) -> object: ... + def write_debug(self, text: str, *, only_once: bool = False) -> None: ... + # *args and **kwargs are passed to .param.get() where param is normally a dict but does not have to be. + def get_param(self, name: str, default: Any = None, *args: Any, **kwargs: Any) -> Any: ... def set_downloader(self, downloader: YoutubeDL) -> None: ... def run(self, information: _InfoDict) -> tuple[list[str], _InfoDict]: ... def try_utime(self, path: StrPath, atime: int, mtime: int, errnote: str = "Cannot update utime of file") -> None: ... 
diff --git a/stubs/yt-dlp/yt_dlp/socks.pyi b/stubs/yt-dlp/yt_dlp/socks.pyi index 14fe447d41af..c4c908519ac1 100644 --- a/stubs/yt-dlp/yt_dlp/socks.pyi +++ b/stubs/yt-dlp/yt_dlp/socks.pyi @@ -1,7 +1,7 @@ import socket -from _typeshed import ReadableBuffer +from _socket import _Address from collections.abc import Mapping -from typing import NamedTuple +from typing import NamedTuple, SupportsIndex SOCKS4_VERSION: int SOCKS4_REPLY_VERSION: int @@ -57,7 +57,9 @@ class Proxy(NamedTuple): remote_dns: bool class sockssocket(socket.socket): - def __init__(self, *args: object, **kwargs: object) -> None: ... + def __init__( + self, family: int = -1, type: int = -1, proto: int = -1, fileno: SupportsIndex | bytes | None = None + ) -> None: ... def setproxy( self, proxytype: ProxyType, @@ -68,5 +70,5 @@ class sockssocket(socket.socket): password: str | None = None, ) -> None: ... def recvall(self, cnt: int) -> bytes: ... - def connect(self, address: tuple[object, ...] | str | ReadableBuffer) -> None: ... - def connect_ex(self, address: tuple[object, ...] | str | ReadableBuffer) -> int: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi index 3bf98c6ed792..b5fd6edf83fb 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi @@ -1,14 +1,21 @@ +import ssl import types import urllib.request +from _socket import _Address from _typeshed import Unused from asyncio.events import AbstractEventLoop -from collections.abc import Awaitable, Callable, Collection, Mapping +from collections.abc import AsyncIterable, Awaitable, Callable, Collection, Iterable, Mapping, Sequence from http.client import HTTPResponse from http.cookiejar import CookieJar +from socket import socket from subprocess import Popen -from typing import Any, AnyStr, Generic, TypeVar +from typing import Any, AnyStr, Generic, Literal, TypeVar, overload from typing_extensions import Self +from websockets import ClientConnection, HeadersLike, LoggerLike, Origin, Subprotocol +from websockets.asyncio.client import connect +from websockets.extensions import ClientExtensionFactory + has_certifi: bool has_websockets: bool _T = TypeVar("_T") @@ -16,29 +23,79 @@ _T = TypeVar("_T") class WebSocketsWrapper(Generic[_T]): pool: _T | None loop: AbstractEventLoop - conn: object - def __init__(self, url: str, headers: Mapping[str, str] | None = None, connect: bool = True, **ws_kwargs: object) -> None: ... 
+ conn: connect + def __init__( + self, + url: str, + headers: Mapping[str, str] | None = None, + connect: bool = True, + *, + # Passed to websockets.connect() + origin: Origin | None = None, + extensions: Sequence[ClientExtensionFactory] | None = None, + subprotocols: Sequence[Subprotocol] | None = None, + compression: str | None = "deflate", + additional_headers: HeadersLike | None = None, + user_agent_header: str | None = ..., + proxy: str | Literal[True] | None = True, + process_exception: Callable[[Exception], Exception | None] = ..., + open_timeout: float | None = 10, + ping_interval: float | None = 20, + ping_timeout: float | None = 20, + close_timeout: float | None = 10, + max_size: int | None = 1048576, + max_queue: int | None | tuple[int | None, int | None] = 16, + write_limit: int | tuple[int, int | None] = 32768, + logger: LoggerLike | None = None, + create_connection: type[ClientConnection] | None = None, + # Passed to AbstractEventLoop.connect() by websockets + ssl: bool | None | ssl.SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket | None = None, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> None: ... def __enter__(self) -> Self: ... - def send(self, *args: object) -> None: ... - def recv(self, *args: object) -> bytes: ... + def send( + self, message: str | bytes | Iterable[str | bytes] | AsyncIterable[str | bytes], text: bool | None = None + ) -> None: ... + @overload + def recv(self, decode: Literal[True]) -> str: ... + @overload + def recv(self, decode: Literal[False]) -> bytes: ... + @overload + def recv(self, decode: bool | None = None) -> str | bytes: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None ) -> None: ... @staticmethod def run_with_loop(main: Awaitable[_T], loop: AbstractEventLoop) -> _T: ... -def load_plugins(name: str, suffix: str, namespace: dict[str, object]) -> dict[str, type[object]]: ... -def traverse_dict(dictn: Mapping[str, object], keys: Collection[str], casesense: bool = True) -> object: ... +def load_plugins(name: str, suffix: str, namespace: dict[str, Any]) -> dict[str, type[Any]]: ... +def traverse_dict(dictn: Mapping[str, Any], keys: Collection[str], casesense: bool = True) -> Any: ... def decode_base(value: str, digits: str) -> int: ... def platform_name() -> str: ... def get_subprocess_encoding() -> str: ... def register_socks_protocols() -> None: ... -def handle_youtubedl_headers(headers: dict[str, object]) -> dict[str, object]: ... +def handle_youtubedl_headers(headers: dict[str, Any]) -> dict[str, Any]: ... def request_to_url(req: urllib.request.Request | str) -> str: ... -def sanitized_Request(url: str, *args: object, **kwargs: object) -> urllib.request.Request: ... +def sanitized_Request(url: str, *args: Any, **kwargs: Any) -> urllib.request.Request: ... class YoutubeDLHandler(urllib.request.AbstractHTTPHandler): - def __init__(self, params: Mapping[str, object], *args: object, **kwargs: object) -> None: ... + def __init__( + self, + params: Mapping[str, Any], + *, + context: Any = None, + debuglevel: int | None = None, + source_address: _Address | None = None, + ) -> None: ... 
YoutubeDLHTTPSHandler = YoutubeDLHandler @@ -48,10 +105,14 @@ class YoutubeDLCookieProcessor(urllib.request.HTTPCookieProcessor): https_request: Callable[[urllib.request.HTTPCookieProcessor, urllib.request.Request], HTTPResponse] # type: ignore[assignment] https_response = http_response -def make_HTTPS_handler(params: Mapping[str, object], **kwargs: object) -> YoutubeDLHTTPSHandler: ... -def process_communicate_or_kill(p: Popen[Any], *args: object, **kwargs: object) -> tuple[AnyStr, AnyStr]: ... +def make_HTTPS_handler( + params: Mapping[str, Any], *, debuglevel: int | None = None, source_address: _Address | None = None +) -> YoutubeDLHTTPSHandler: ... +def process_communicate_or_kill( + p: Popen[Any], *args: Any, **kwargs: Any # args/kwargs Passed to Popen.__init__(). +) -> tuple[AnyStr, AnyStr]: ... def encodeFilename(s: str, for_subprocess: Unused = False) -> bytes: ... def decodeFilename(b: bytes, for_subprocess: Unused = False) -> str: ... def decodeArgument(b: _T) -> _T: ... def decodeOption(optval: AnyStr) -> str: ... -def error_to_compat_str(err: object) -> str: ... +def error_to_compat_str(err: Any) -> str: ... # Calls str(err). diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index c3e7094d6e31..0c39a631eed3 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -160,7 +160,7 @@ class ExtractorError(YoutubeDLError): orig_msg: Any traceback: types.TracebackType | None expected: Any - cause: Any + cause: Exception | str | None video_id: str ie: InfoExtractor exc_info: ExcInfo @@ -169,7 +169,7 @@ class ExtractorError(YoutubeDLError): msg: str, tb: types.TracebackType | None = None, expected: bool = False, - cause: Any | None = None, + cause: Exception | str | None = None, video_id: str | None = None, ie: InfoExtractor | None = None, ) -> None: ... @@ -193,7 +193,7 @@ class GeoRestrictedError(ExtractorError): *, tb: types.TracebackType | None = None, expected: bool = False, - cause: Any | None = None, + cause: Exception | str | None = None, video_id: str | None = None, ie: InfoExtractor | None = None, ) -> None: ... @@ -205,7 +205,7 @@ class UserNotLive(ExtractorError): *, tb: types.TracebackType | None = None, expected: bool = False, - cause: Any | None = None, + cause: Exception | str | None = None, video_id: str | None = None, ie: InfoExtractor | None = None, ) -> None: ... @@ -677,8 +677,6 @@ if sys.platform == "win32": else: _ENV: TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] -# Much of this is the same as subprocess.Popen, but I do not think copying all of the overloads here is necessary to fix -# the Any types. class Popen(subprocess.Popen[AnyStr]): def __init__( self, @@ -687,12 +685,13 @@ class Popen(subprocess.Popen[AnyStr]): env: _ENV | None = None, text: bool = False, shell: bool = False, - **kwargs: Any, + **kwargs: Any, # Passed to subprocess.Popen.__init__(). ) -> None: ... def communicate_or_kill(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... def kill(self, *, timeout: int = 0) -> None: ... + # kwargs passed to cls.__init__(). @classmethod - def run(cls, *args: Any, timeout: int | None = None, **kwargs: Any) -> tuple[AnyStr, AnyStr]: ... # Passed to cls.__init__() + def run(cls, *args: Any, timeout: int | None = None, **kwargs: Any) -> tuple[AnyStr, AnyStr]: ... class classproperty: def __new__(cls, func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any) -> Self: ... 
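
A note on the object -> Any direction of the hunks above, since it recurs in the later patches: with dict[str, object] every field pulled out of a helper such as _download_json() needs a cast before it can be used, while dict[str, Any] matches how extractors actually consume the parsed JSON. Below is a minimal sketch of the calling pattern these annotations are meant to cover, assuming only the signatures shown above; the extractor name, domain and JSON fields are invented for illustration.

    from yt_dlp.extractor.common import InfoExtractor


    class ExampleIE(InfoExtractor):
        # Hypothetical extractor, for illustration only.
        _VALID_URL = r"https?://example\.invalid/video/(?P<id>\d+)"

        def _real_extract(self, url):
            video_id = self._match_id(url)
            # _download_json() now returns dict[str, Any], so the fields can be
            # used directly, which is how extractors are written in practice.
            info = self._download_json(
                f"https://example.invalid/api/video/{video_id}", video_id,
                note="Downloading video metadata")
            return {
                "id": video_id,
                "title": info["title"],
                "duration": info.get("duration"),
                "formats": self._extract_m3u8_formats(info["hls_url"], video_id, ext="mp4"),
            }
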
From 602a8f72ea10d3d64a1cfce48da5cab95ad303ec Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 10:47:06 -0400 Subject: [PATCH 09/13] More progress fixing Any/object types --- stubs/yt-dlp/yt_dlp/cache.pyi | 8 +-- stubs/yt-dlp/yt_dlp/extractor/common.pyi | 68 ++++++++++++++++++------ stubs/yt-dlp/yt_dlp/jsinterp.pyi | 3 +- stubs/yt-dlp/yt_dlp/utils/_legacy.pyi | 15 ++++-- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 24 ++++----- stubs/yt-dlp/yt_dlp/utils/networking.pyi | 1 + 6 files changed, 83 insertions(+), 36 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/cache.pyi b/stubs/yt-dlp/yt_dlp/cache.pyi index 7b27c3469634..ca24d93fddcf 100644 --- a/stubs/yt-dlp/yt_dlp/cache.pyi +++ b/stubs/yt-dlp/yt_dlp/cache.pyi @@ -6,14 +6,16 @@ class Cache: def __init__(self, ydl: YoutubeDL) -> None: ... @property def enabled(self) -> bool: ... - def store(self, section: str, key: str, data: Any, dtype: Literal["json"] = "json") -> None: ... + def store( + self, section: str, key: str, data: Any, dtype: Literal["json"] = "json" # data is anything JSON serializable. + ) -> None: ... def load( self, section: str, key: str, dtype: Literal["json"] = "json", - default: Any = None, # returned if not enabled or if the cache entry is not found + default: Any = None, # Returned if not enabled or if the cache entry is not found. *, min_ver: str | None = None, - ) -> Any: ... # Anything JSON serializable + ) -> Any: ... # Anything JSON serializable. def remove(self) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/extractor/common.pyi b/stubs/yt-dlp/yt_dlp/extractor/common.pyi index 63243d6f0f96..950da0bd6c55 100644 --- a/stubs/yt-dlp/yt_dlp/extractor/common.pyi +++ b/stubs/yt-dlp/yt_dlp/extractor/common.pyi @@ -1,6 +1,7 @@ import re from collections.abc import Callable, Collection, Iterable, Iterator, Mapping, Sequence from functools import cached_property +from json.decoder import JSONDecoder from typing import Any, Literal, TypedDict, TypeVar, type_check_only from typing_extensions import Required, TypeAlias from urllib.request import Request, _DataType @@ -106,14 +107,31 @@ class InfoExtractor: transform_source: Callable[..., str] | None = None, fatal: bool = True, errnote: str | None = None, - **parser_kwargs: Any, + *, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, + ) -> Any: ... + def _parse_socket_response_as_json( + self, + data: str, + video_id: str, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, ) -> Any: ... - def _parse_socket_response_as_json(self, data: str, *args: Any, **kwargs: Any) -> Any: ... def report_warning(self, msg: str, video_id: str | None = None, only_once: bool = False) -> None: ... def to_screen( self, msg: str, message: str, skip_eol: bool = False, quiet: bool | None = None, only_once: bool = False ) -> None: ... - def write_debug(self, msg: str, *args: Any, **kwargs: Any) -> None: ... + def write_debug(self, msg: str, only_once: bool = False) -> None: ... 
+ # *args and **kwargs are passed to .params.get() where params is normally a mapping but is not required to be. def get_param(self, name: str, default: Any = None, *args: Any, **kwargs: Any) -> Any: ... def report_drm(self, video_id: str) -> None: ... def report_extraction(self, id_or_name: str) -> None: ... @@ -138,18 +156,19 @@ class InfoExtractor: video_title: str | None = None, *, url_transparent: bool = False, - **kwargs: Any, + **kwargs: Any, # Added to the dict return value. ) -> dict[str, Any]: ... @classmethod def playlist_from_matches( cls, - matches: Any, + matches: Sequence[str], playlist_id: str | None = None, playlist_title: str | None = None, getter: Callable[..., Any] = ..., ie: InfoExtractor | None = None, video_kwargs: Mapping[str, Any] | None = None, - **kwargs: Any, + multi_video: bool = False, + **kwargs: Any, # Added to the dict return value. ) -> dict[str, Any]: ... @staticmethod def playlist_result( @@ -159,7 +178,7 @@ class InfoExtractor: playlist_description: str | None = ..., *, multi_video: bool = ..., - **kwargs: Any, + **kwargs: Any, # Added to the dict return value. ) -> _InfoDict: ... def http_scheme(self) -> str: ... @classmethod @@ -174,6 +193,7 @@ class InfoExtractor: def is_suitable(cls, age_limit: int) -> bool: ... @classmethod def description(cls, *, markdown: bool = True, search_examples: Sequence[str] | None = None) -> str: ... + # Calls _get_subtitles which only raises NotImplementedError here. def extract_subtitles(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... def _configuration_arg(self, key: str, default: Any = ..., *, ie_key: str | None = ..., casesense: bool = ...) -> Any: ... # These are dynamically created. @@ -344,7 +364,12 @@ class InfoExtractor: contains_pattern: str | re.Pattern[str] = r"{(?s:.+)}", fatal: bool = True, default: _StrNoDefaultOrNone = ..., - **kwargs: Any, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, ) -> Any: ... def _html_search_regex( self, @@ -365,12 +390,22 @@ class InfoExtractor: def _og_regexes(prop: str) -> list[str]: ... @staticmethod def _meta_regex(prop: str) -> str: ... - def _og_search_property(self, prop: str, html: str, name: str | None = None, **kargs: Any) -> str | None: ... - def _og_search_thumbnail(self, html: str, **kargs: Any) -> str | None: ... - def _og_search_description(self, html: str, **kargs: Any) -> str | None: ... - def _og_search_title(self, html: str, *, fatal: bool = False, **kargs: Any) -> str | None: ... - def _og_search_video_url(self, html: str, name: str = "video url", secure: bool = True, **kargs: Any) -> str | None: ... - def _og_search_url(self, html: str, **kargs: Any) -> str | None: ... + def _og_search_property( + self, prop: str, html: str, name: str | None = None, *, default: type[NO_DEFAULT] | str = ..., fatal: bool = False + ) -> str | None: ... + def _og_search_thumbnail(self, html: str, *, default: type[NO_DEFAULT] | str = ...) -> str | None: ... + def _og_search_description(self, html: str, *, default: type[NO_DEFAULT] | str = ...) -> str | None: ... + def _og_search_title(self, html: str, *, fatal: bool = False, default: type[NO_DEFAULT] | str = ...) -> str | None: ... 
+ def _og_search_video_url( + self, + html: str, + name: str = "video url", + secure: bool = True, + *, + default: type[NO_DEFAULT] | str = ..., + fatal: bool = False, + ) -> str | None: ... + def _og_search_url(self, html: str, *, default: type[NO_DEFAULT] | str = ..., fatal: bool = False) -> str | None: ... def _html_extract_title(self, html: str, name: str = "title", *, fatal: bool = False, **kwargs: Any) -> str | None: ... def _html_search_meta( self, name: str, html: str, display_name: str | None = None, fatal: bool = False, **kwargs: Any @@ -645,9 +680,10 @@ class InfoExtractor: def _apply_first_set_cookie_header(self, url_handle: Response, cookie: str) -> None: ... @property def _RETURN_TYPE(cls) -> str: ... - def _get_subtitles(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _get_subtitles(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... # Not implemented here. + # Passes *args and **kwargs to _get_comments. def extract_comments(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... - def _get_comments(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _get_comments(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... # Not implemented here. @staticmethod def _merge_subtitle_items( subtitle_list1: Iterable[Mapping[str, Any]], subtitle_list2: Iterable[Mapping[str, Any]] diff --git a/stubs/yt-dlp/yt_dlp/jsinterp.pyi b/stubs/yt-dlp/yt_dlp/jsinterp.pyi index f0612dbd7b15..580673f074c3 100644 --- a/stubs/yt-dlp/yt_dlp/jsinterp.pyi +++ b/stubs/yt-dlp/yt_dlp/jsinterp.pyi @@ -52,7 +52,7 @@ class JSInterpreter: ie: InfoExtractor | None = None, ) -> None: ... - # After wrapping, *args and **kwargs are added but do nothing for this fmethod. + # After wrapping, *args and **kwargs are added but do nothing for this method. def interpret_statement( self, stmt: str, local_vars: Mapping[str, Any], allow_recursion: int, *args: Any, **kwargs: Any ) -> tuple[Any, bool]: ... @@ -63,6 +63,7 @@ class JSInterpreter: def extract_function_from_code( self, argnames: Collection[str], code: str, *global_stack: Iterable[dict[str, Any]] ) -> _BuildFunctionReturnType: ... + # args are passed to the extracted function. def call_function(self, funcname: str, *args: Any) -> function_with_repr[Any]: ... def build_function( self, argnames: Collection[str], code: str, *global_stack: Iterable[dict[str, Any]] diff --git a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi index b5fd6edf83fb..8877b39066b8 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi @@ -2,9 +2,9 @@ import ssl import types import urllib.request from _socket import _Address -from _typeshed import Unused +from _typeshed import ReadableBuffer, SupportsRead, Unused from asyncio.events import AbstractEventLoop -from collections.abc import AsyncIterable, Awaitable, Callable, Collection, Iterable, Mapping, Sequence +from collections.abc import AsyncIterable, Awaitable, Callable, Collection, Iterable, Mapping, MutableMapping, Sequence from http.client import HTTPResponse from http.cookiejar import CookieJar from socket import socket @@ -85,7 +85,14 @@ def get_subprocess_encoding() -> str: ... def register_socks_protocols() -> None: ... def handle_youtubedl_headers(headers: dict[str, Any]) -> dict[str, Any]: ... def request_to_url(req: urllib.request.Request | str) -> str: ... -def sanitized_Request(url: str, *args: Any, **kwargs: Any) -> urllib.request.Request: ... 
+def sanitized_Request( + url: str, + data: ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None = None, + headers: MutableMapping[str, str] = {}, + origin_req_host: str | None = None, + unverifiable: bool = False, + method: str | None = None, +) -> urllib.request.Request: ... class YoutubeDLHandler(urllib.request.AbstractHTTPHandler): def __init__( @@ -109,7 +116,7 @@ def make_HTTPS_handler( params: Mapping[str, Any], *, debuglevel: int | None = None, source_address: _Address | None = None ) -> YoutubeDLHTTPSHandler: ... def process_communicate_or_kill( - p: Popen[Any], *args: Any, **kwargs: Any # args/kwargs Passed to Popen.__init__(). + p: Popen[Any], *args: Any, **kwargs: Any # args/kwargs passed to Popen.__init__(). ) -> tuple[AnyStr, AnyStr]: ... def encodeFilename(s: str, for_subprocess: Unused = False) -> bytes: ... def decodeFilename(b: bytes, for_subprocess: Unused = False) -> str: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index 0c39a631eed3..c03b254fd3db 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -53,7 +53,7 @@ NUMBER_RE: str @cache def preferredencoding() -> str: ... -def write_json_file(obj: Any, fn: str) -> None: ... +def write_json_file(obj: Any, fn: str) -> None: ... # obj passed to json.dump(). def partial_application(func: Callable[..., Any]) -> Callable[..., Any]: ... def find_xpath_attr(node: ET.ElementTree, xpath: str, key: str, val: str | None = None) -> ET.Element | None: ... def xpath_with_ns(path: str, ns_map: Mapping[str, str]) -> str: ... @@ -90,9 +90,9 @@ class HTMLBreakOnClosingTagParser(html.parser.HTMLParser): tagstack: deque[Any] def __init__(self) -> None: ... def __enter__(self) -> Self: ... - def __exit__(self, *_: object) -> None: ... + def __exit__(self, *_: Unused) -> None: ... def close(self) -> None: ... - def handle_starttag(self, tag: str, _: Any) -> None: ... + def handle_starttag(self, tag: str, _: Unused) -> None: ... def handle_endtag(self, tag: str) -> None: ... def get_element_text_and_html_by_tag(tag: str, html: str) -> str: ... @@ -302,7 +302,7 @@ class locked_file: ) -> None: ... def __enter__(self) -> Self: ... def unlock(self) -> None: ... - def __exit__(self, *_: object) -> None: ... + def __exit__(self, *_: Unused) -> None: ... open = __enter__ close = __exit__ def __getattr__(self, attr: str) -> Any: ... @@ -530,7 +530,7 @@ LINK_TEMPLATES: Mapping[str, str] def iri_to_uri(iri: str) -> str: ... def to_high_limit_path(path: PathLike[AnyStr]) -> str: ... def format_field( - obj: Any, + obj: Mapping[str, Any] | Sequence[Any], field: str | Collection[str] | None = None, template: str = "%s", ignore: type[NO_DEFAULT] | str | Collection[str] = ..., @@ -544,7 +544,7 @@ def get_executable_path() -> str: ... def get_user_config_dirs(package_name: str) -> Iterator[str]: ... def get_system_config_dirs(package_name: str) -> Iterator[str]: ... def time_seconds(**kwargs: float) -> int: ... -def jwt_encode_hs256(payload_data: Any, key: str, headers: Mapping[str, Any] = ...) -> bytes: ... +def jwt_encode_hs256(payload_data: Any, key: str, headers: Mapping[str, Any] = ...) -> bytes: ... # Passed to json.dumps(). def jwt_decode_hs256(jwt: str) -> Any: ... WINDOWS_VT_MODE: bool | None @@ -562,17 +562,17 @@ def read_stdin(what: str) -> TextIO | Any: ... def determine_file_encoding(data: bytes) -> tuple[str | None, int]: ... 
class Config: - own_args: Any | None + own_args: Sequence[str] | None parsed_args: tuple[Values, list[str]] | None filename: str | None def __init__(self, parser: _YoutubeDLOptionParser, label: str | None = None) -> None: ... - def init(self, args: Any | None = None, filename: str | None = None) -> bool: ... + def init(self, args: Sequence[str] | None = None, filename: str | None = None) -> bool: ... def load_configs(self) -> bool: ... @staticmethod def read_file(filename: FileDescriptorOrPath, default: list[str] = []) -> list[str]: ... @staticmethod def hide_login_info(opts: Iterable[str]) -> list[str]: ... - def append_config(self, *args: Any, label: str | None = None) -> None: ... + def append_config(self, args: Sequence[str] | None, filename: str | None, label: str | None = None) -> None: ... @property def all_args(self) -> Iterator[str]: ... def parse_known_args(self, *, values: optparse.Values | None = None, strict: bool = True) -> tuple[Values, list[str]]: ... @@ -583,7 +583,7 @@ def cached_method(f: Callable[..., Any]) -> Callable[..., Any]: ... class function_with_repr(Generic[_T]): def __init__(self, func: Callable[..., _T], repr_: str | None = None) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> _T: ... + def __call__(self, *args: Any, **kwargs: Any) -> _T: ... # Arbitrary arguments. @classmethod def set_repr(cls, repr_: str) -> Callable[..., Any]: ... @@ -681,7 +681,7 @@ class Popen(subprocess.Popen[AnyStr]): def __init__( self, args: StrOrBytesPath | Sequence[StrOrBytesPath], - *remaining: Any, + *remaining: Any, # Passed to subprocess.Popen.__init__(). env: _ENV | None = None, text: bool = False, shell: bool = False, @@ -694,7 +694,7 @@ class Popen(subprocess.Popen[AnyStr]): def run(cls, *args: Any, timeout: int | None = None, **kwargs: Any) -> tuple[AnyStr, AnyStr]: ... class classproperty: - def __new__(cls, func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, func: Callable[..., Any] | None = None, *, cache: bool = False) -> Self: ... def __init__( # pyright: ignore[reportInconsistentConstructor] self, func: Callable[..., Any], *, cache: bool = False ) -> None: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/networking.pyi b/stubs/yt-dlp/yt_dlp/utils/networking.pyi index 9eb0c1081dde..1d3d18310dd3 100644 --- a/stubs/yt-dlp/yt_dlp/utils/networking.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/networking.pyi @@ -9,6 +9,7 @@ def random_user_agent() -> str: ... _T = TypeVar("_T") class HTTPHeaderDict(dict[str, str]): + # *args and **kwargs are passed to dict.__init__(). def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... # *args is passed to filter: filter(None, args) def __init__(self, /, *args: Any, **kwargs: str) -> None: ... 
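
One more illustration for the _parse_json()/_search_json() change in this patch: naming the json.loads()/JSONDecoder options individually instead of taking **parser_kwargs: Any means a misspelled decoder option is a type error rather than a TypeError at run time, while the usual hooks stay available. A small sketch of the call style this is meant to type, assuming an InfoExtractor subclass; the class, method and payload are invented for illustration.

    import decimal

    from yt_dlp.extractor.common import InfoExtractor
    from yt_dlp.utils import js_to_json


    class PriceJsonIE(InfoExtractor):
        # Hypothetical extractor, for illustration only.
        def _parse_prices(self, raw: str, video_id: str):
            # transform_source is applied first; the decoder hooks end up in
            # json.loads(), matching the parameter list spelled out above.
            return self._parse_json(
                raw, video_id,
                transform_source=js_to_json,
                parse_float=decimal.Decimal)
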
From d3a432032056120b22b490a6a296525e8383f385 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 10:56:20 -0400 Subject: [PATCH 10/13] More progress fixing Any/object types --- stubs/yt-dlp/yt_dlp/extractor/common.pyi | 3 +- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 38 ++++++++++++++++++++---- 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/extractor/common.pyi b/stubs/yt-dlp/yt_dlp/extractor/common.pyi index 950da0bd6c55..16936322d8a6 100644 --- a/stubs/yt-dlp/yt_dlp/extractor/common.pyi +++ b/stubs/yt-dlp/yt_dlp/extractor/common.pyi @@ -1,4 +1,5 @@ import re +from _typeshed import Unused from collections.abc import Callable, Collection, Iterable, Iterator, Mapping, Sequence from functools import cached_property from json.decoder import JSONDecoder @@ -673,7 +674,7 @@ class InfoExtractor: secure: bool = False, discard: bool = False, rest: dict[str, Any] = ..., - **kwargs: Any, + **kwargs: Unused, ) -> None: ... def _live_title(self, name: _T) -> _T: ... def _get_cookies(self, url: str) -> LenientSimpleCookie: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index c03b254fd3db..63756a1ec300 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -25,6 +25,7 @@ from os import PathLike from re import Pattern from typing import IO, Any, AnyStr, BinaryIO, Generic, NamedTuple, TextIO, TypeVar, overload from typing_extensions import Self, TypeAlias +from urllib.parse import _QueryType, _QuoteVia from xml.etree import ElementTree as ET from yt_dlp.networking import Response @@ -115,11 +116,16 @@ def clean_html(html: str | None) -> str | None: ... class LenientJSONDecoder(json.JSONDecoder): def __init__( self, - *args: Any, + *, transform_source: Callable[[str], str] | None = None, ignore_extra: bool = False, close_objects: int = 0, - **kwargs: Any, + object_hook: Callable[[dict[str, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + strict: bool = True, + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, ) -> None: ... def decode(self, s: str) -> Any: ... # type: ignore[override] @@ -310,7 +316,9 @@ class locked_file: def get_filesystem_encoding() -> str: ... def shell_quote(args: str | Collection[str], *, shell: bool = False) -> str: ... -def smuggle_url(url: str, data: Any) -> str: ... +def smuggle_url(url: str, data: Any) -> str: ... # data may be anything JSON serializable. + +# default is simply returned if #__youtubedl_smuggle is not present. def unsmuggle_url(smug_url: str, default: Any | None = None) -> tuple[str, Any]: ... def format_decimal_suffix(num: float, fmt: str = "%d%s", *, factor: int = 1000) -> str: ... def format_bytes(bytes: int) -> str: ... @@ -402,9 +410,27 @@ _V = TypeVar("_V") def uppercase_escape(s: str) -> str: ... def lowercase_escape(s: str) -> str: ... -def parse_qs(url: str, **kwargs: Any) -> dict[AnyStr, list[AnyStr]]: ... +def parse_qs( + url: str, + *, + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", +) -> dict[AnyStr, list[AnyStr]]: ... def read_batch_urls(batch_fd: FileDescriptorLike) -> list[str]: ... -def urlencode_postdata(*args: Any, **kargs: Any) -> bytes: ...
+def urlencode_postdata( + query: _QueryType, + doseq: bool = False, + safe: str | bytes = "", + encoding: str | None = None, + errors: str | None = None, + quote_via: _QuoteVia = ..., +) -> bytes: ... + +# Passes kwargs to NamedTuple._replace(). def update_url(url: str, *, query_update: Mapping[str, str] | None = None, **kwargs: Any) -> str: ... def update_url_query(url: str, query: Mapping[str, str]) -> str: ... def multipart_encode(data: Mapping[AnyStr, AnyStr], boundary: str | None = None) -> tuple[bytes, str]: ... @@ -545,7 +571,7 @@ def get_user_config_dirs(package_name: str) -> Iterator[str]: ... def get_system_config_dirs(package_name: str) -> Iterator[str]: ... def time_seconds(**kwargs: float) -> int: ... def jwt_encode_hs256(payload_data: Any, key: str, headers: Mapping[str, Any] = ...) -> bytes: ... # Passed to json.dumps(). -def jwt_decode_hs256(jwt: str) -> Any: ... +def jwt_decode_hs256(jwt: str) -> Any: ... # Returns json.loads() output. WINDOWS_VT_MODE: bool | None From 5e16a12da976af1411936d2a053a11561671b2ff Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 14:31:44 -0400 Subject: [PATCH 11/13] More progress fixing Any/object types --- stubs/yt-dlp/yt_dlp/utils/_legacy.pyi | 4 ++-- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi index 8877b39066b8..838771c834a9 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_legacy.pyi @@ -98,10 +98,10 @@ class YoutubeDLHandler(urllib.request.AbstractHTTPHandler): def __init__( self, params: Mapping[str, Any], - *, + *args: Any, # args passed to urllib.request.AbstractHTTPHandler.__init__(). context: Any = None, - debuglevel: int | None = None, source_address: _Address | None = None, + debuglevel: int | None = None, ) -> None: ... YoutubeDLHTTPSHandler = YoutubeDLHandler diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index 63756a1ec300..f1079f3a9352 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -116,7 +116,7 @@ def clean_html(html: str | None) -> str | None: ... class LenientJSONDecoder(json.JSONDecoder): def __init__( self, - *, + *args: Unused, transform_source: Callable[[str], str] | None = None, ignore_extra: bool = False, close_objects: int = 0, @@ -591,7 +591,7 @@ class Config: own_args: Sequence[str] | None parsed_args: tuple[Values, list[str]] | None filename: str | None - def __init__(self, parser: _YoutubeDLOptionParser, label: str | None = None) -> None: ... + def __init__(self, parser: _YoutubeDLOptionParser, *, label: str | None = None) -> None: ... def init(self, args: Sequence[str] | None = None, filename: str | None = None) -> bool: ... def load_configs(self) -> bool: ... @staticmethod @@ -720,7 +720,8 @@ class Popen(subprocess.Popen[AnyStr]): def run(cls, *args: Any, timeout: int | None = None, **kwargs: Any) -> tuple[AnyStr, AnyStr]: ... class classproperty: - def __new__(cls, func: Callable[..., Any] | None = None, *, cache: bool = False) -> Self: ... + # args passed to func(). + def __new__(cls, func: Callable[..., Any] | None = None, *args: Any, cache: bool = False) -> Self: ... def __init__( # pyright: ignore[reportInconsistentConstructor] self, func: Callable[..., Any], *, cache: bool = False ) -> None: ... 
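The expanded signatures for parse_qs, urlencode_postdata and LenientJSONDecoder in patches 10 and 11 are easiest to judge against a concrete caller, as in the sketch below. The keyword names and types come from the stubs; the data is made up, and the lenient-decoding behaviour (trailing junk tolerated when ignore_extra=True) is an assumption about the runtime rather than something the stubs guarantee.

    from yt_dlp.utils import LenientJSONDecoder, parse_qs, urlencode_postdata

    # Query-string helpers with their urllib-derived keyword arguments.
    query = parse_qs("https://example.com/watch?v=abc&empty=", keep_blank_values=True)
    body = urlencode_postdata({"v": "abc", "page": "1"})  # bytes, usable as request data

    # Keyword-only decoder options as declared in the stub.
    decoder = LenientJSONDecoder(transform_source=str.strip, ignore_extra=True)
    info = decoder.decode(' {"ok": true} trailing junk')
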
From b31e807aae265426d0b42bd1057e8cd0a29b2453 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 14:57:04 -0400 Subject: [PATCH 12/13] More progress fixing Any/object types --- stubs/yt-dlp/yt_dlp/extractor/common.pyi | 151 ++++++++++++++++++++--- stubs/yt-dlp/yt_dlp/utils/_utils.pyi | 20 +-- 2 files changed, 144 insertions(+), 27 deletions(-) diff --git a/stubs/yt-dlp/yt_dlp/extractor/common.pyi b/stubs/yt-dlp/yt_dlp/extractor/common.pyi index 16936322d8a6..bcb4dd51fce7 100644 --- a/stubs/yt-dlp/yt_dlp/extractor/common.pyi +++ b/stubs/yt-dlp/yt_dlp/extractor/common.pyi @@ -407,9 +407,24 @@ class InfoExtractor: fatal: bool = False, ) -> str | None: ... def _og_search_url(self, html: str, *, default: type[NO_DEFAULT] | str = ..., fatal: bool = False) -> str | None: ... - def _html_extract_title(self, html: str, name: str = "title", *, fatal: bool = False, **kwargs: Any) -> str | None: ... + def _html_extract_title( + self, + html: str, + name: str = "title", + *, + default: type[NO_DEFAULT] | str = ..., + flags: int = 0, + group: tuple[int, ...] | list[int] | None = None, + fatal: bool = False, + ) -> str | None: ... def _html_search_meta( - self, name: str, html: str, display_name: str | None = None, fatal: bool = False, **kwargs: Any + self, + name: str, + html: str, + display_name: str | None = None, + fatal: bool = False, + flags: int = 0, + group: tuple[int, ...] | list[int] | None = None, ) -> str | None: ... def _dc_search_uploader(self, html: str) -> str | None: ... @staticmethod @@ -429,6 +444,7 @@ class InfoExtractor: fatal: bool = True, default: type[NO_DEFAULT] | bool = ..., ) -> dict[str, Any]: ... + # json_ld parameter is passed to json.loads(). def _json_ld( self, json_ld: Any, video_id: str, fatal: bool = True, expected_type: Iterable[str] | str | None = None ) -> dict[str, Any]: ... @@ -483,7 +499,23 @@ class InfoExtractor: self, m3u8_url: str, ext: str | None = None, preference: Any = None, quality: Any = None, m3u8_id: str | None = None ) -> dict[str, Any]: ... def _report_ignoring_subs(self, name: str) -> None: ... - def _extract_m3u8_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _extract_m3u8_formats( + self, + m3u8_url: str, + video_id: str, + ext: str | None = None, + entry_protocol: str = "m3u8_native", + preference: Any = None, + quality: Any = None, + m3u8_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + live: bool = False, + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> list[dict[str, Any]]: ... def _extract_m3u8_formats_and_subtitles( self, m3u8_url: str, @@ -550,7 +582,15 @@ class InfoExtractor: f4m_params: Mapping[str, Any] | None = None, transform_source: Callable[..., str] | None = None, ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... - def _extract_smil_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _extract_smil_formats( + self, + smil: ET.Element, + smil_url: str, + video_id: str, + namespace: str | None = None, + f4m_params: Mapping[str, Any] | None = None, + transform_rtmp_url: Callable[[str, str], tuple[str, str]] | None = None, + ) -> list[dict[str, Any]]: ... def _extract_smil_info( self, smil_url: str, video_id: str, fatal: bool = True, f4m_params: Mapping[str, Any] | None = None ) -> dict[str, Any]: ... 
@@ -561,7 +601,15 @@ class InfoExtractor: self, smil: ET.Element, smil_url: str, video_id: str, f4m_params: Mapping[str, Any] | None = None ) -> dict[str, Any]: ... def _parse_smil_namespace(self, smil: str) -> str | None: ... - def _parse_smil_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _parse_smil_formats( + self, + smil: ET.Element, + smil_url: str, + video_id: str, + namespace: str | None = None, + f4m_params: Mapping[str, Any] | None = None, + transform_rtmp_url: Callable[[str, str], tuple[str, str]] | None = None, + ) -> list[dict[str, Any]]: ... def _parse_smil_formats_and_subtitles( self, smil: ET.Element, @@ -578,9 +626,29 @@ class InfoExtractor: def _parse_xspf( self, xspf_doc: ET.Element, playlist_id: str, xspf_url: str | None = None, xspf_base_url: str | None = None ) -> list[dict[str, Any]]: ... - def _extract_mpd_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _extract_mpd_formats( + self, + mpd_url: str, + video_id: str, + mpd_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> list[dict[str, Any]]: ... def _extract_mpd_formats_and_subtitles( - self, *args: Any, **kwargs: Any + self, + mpd_url: str, + video_id: str, + mpd_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _extract_mpd_periods( self, @@ -595,13 +663,33 @@ class InfoExtractor: query: Mapping[str, Any] = ..., ) -> tuple[list[Any], dict[str, Any]]: ... def _parse_mpd_formats_and_subtitles( - self, *args: Any, **kwargs: Any + self, + mpd_url: str, + video_id: str, + mpd_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... def _merge_mpd_periods(self, periods: Iterable[Mapping[str, Any]]) -> tuple[list[Any], dict[str, Any]]: ... def _parse_mpd_periods( self, mpd_doc: ET.Element, mpd_id: str | None = None, mpd_base_url: str = "", mpd_url: str | None = None ) -> tuple[list[Any], dict[str, Any]]: ... - def _extract_ism_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _extract_ism_formats( + self, + ism_url: str, + video_id: str, + ism_id: str | None = None, + note: str | None = None, + errnote: str | None = None, + fatal: bool = True, + data: Any = None, + headers: Mapping[str, Any] = ..., + query: Mapping[str, Any] = ..., + ) -> list[dict[str, Any]]: ... def _extract_ism_formats_and_subtitles( self, ism_url: str, @@ -629,7 +717,9 @@ class InfoExtractor: quality: Any = None, _headers: Mapping[str, Any] | None = None, ) -> list[dict[str, Any]]: ... - def _extract_akamai_formats(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]: ... + def _extract_akamai_formats( + self, manifest_url: str, video_id: str, hosts: Mapping[str, Any] = ... + ) -> list[dict[str, Any]]: ... def _extract_akamai_formats_and_subtitles( self, manifest_url: str, video_id: str, hosts: Mapping[str, Any] = ... ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: ... 
@@ -637,10 +727,19 @@ class InfoExtractor: self, url: str, video_id: str, m3u8_entry_protocol: str = "m3u8_native", skip_protocols: Collection[str] = ... ) -> list[dict[str, Any]]: ... def _find_jwplayer_data( - self, webpage: str, video_id: str | None = None, transform_source: Callable[..., Any] = ... + self, webpage: str, video_id: str | None = None, transform_source: Callable[..., str] = ... ) -> Any: ... def _extract_jwplayer_data( - self, webpage: str, video_id: str, *args: Any, transform_source: Callable[..., Any] = ..., **kwargs: Any + self, + webpage: str, + video_id: str, + *args: Any, + transform_source: Callable[..., str] = ..., + require_title: bool = True, + m3u8_id: str | None = None, + mpd_id: str | None = None, + rtmp_params: Mapping[str, Any] | None = None, + base_url: str | None = None, ) -> list[dict[str, Any]]: ... def _parse_jwplayer_data( self, @@ -661,8 +760,21 @@ class InfoExtractor: rtmp_params: Mapping[str, Any] | None = None, base_url: str | None = None, ) -> list[dict[str, Any]]: ... - def _int(self, v: Any, name: str, fatal: bool = False, **kwargs: Any) -> int | None: ... - def _float(self, v: Any, name: str, fatal: bool = False, **kwargs: Any) -> float | None: ... + def _int( + self, + v: Any, + name: str, + fatal: bool = False, + *, + scale: int = 1, + default: int | None = None, + get_attr: str | None = None, + invscale: int = 1, + base: int | None = None, + ) -> int | None: ... + def _float( + self, v: Any, name: str, fatal: bool = False, *, scale: int = 1, invscale: int = 1, default: float | None = None + ) -> float | None: ... def _set_cookie( self, domain: str, @@ -690,11 +802,12 @@ class InfoExtractor: subtitle_list1: Iterable[Mapping[str, Any]], subtitle_list2: Iterable[Mapping[str, Any]] ) -> list[dict[str, Any]]: ... @classmethod - def _merge_subtitles(cls, *dicts: dict[str, Any], target: Any = None) -> Any: ... + def _merge_subtitles(cls, *dicts: dict[str, Any], target: Any = None) -> dict[str, Any]: ... + # Calls _get_automatic_captions which only raises NotImplementedError here. def extract_automatic_captions(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... @cached_property def _cookies_passed(self) -> bool: ... - def _mark_watched(self, *args: Any, **kwargs: Any) -> Any: ... + def _mark_watched(self, *args: Any, **kwargs: Any) -> Any: ... # Not implemented here. @staticmethod def _generic_id(url: str) -> str: ... def _generic_title(self, url: str = "", webpage: str = "", *, default: str | None = None) -> str | None: ... @@ -709,9 +822,11 @@ class InfoExtractor: def _extract_chapters_from_description( self, description: str | None, duration: str | None ) -> list[dict[str, int]] | None: ... + # Passes *args and **kwargs to _mark_watched which only raises NotImplementedError here. def mark_watched(self, *args: Any, **kwargs: Any) -> None: ... def geo_verification_headers(self) -> dict[str, str]: ... - def RetryManager(self, **kwargs: Any) -> _RetryManager: ... + # kwargs passed to _error_callback. + def RetryManager(self, *, _retries: int | None, _error_callback: Callable[..., Any], **kwargs: Any) -> _RetryManager: ... @classmethod def extract_from_webpage(cls, ydl: YoutubeDL, url: str, webpage: str) -> Iterator[_InfoDict]: ... def _yes_playlist( @@ -725,7 +840,7 @@ class InfoExtractor: ) -> bool: ... def _error_or_warning(self, err: str, _count: int | None = None, _retries: int = 0, *, fatal: bool = True) -> None: ... 
def _extract_generic_embeds( - self, url: str, *args: Any, info_dict: _InfoDict = ..., note: str = "Extracting generic embeds", **kwargs: Any + self, url: str, *args: Unused, info_dict: _InfoDict = ..., note: str = "Extracting generic embeds", **kwargs: Unused ) -> list[dict[str, Any]]: ... @classmethod def _extract_from_webpage(cls, url: str, webpage: str) -> Iterator[_InfoDict]: ... diff --git a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi index f1079f3a9352..37545191f656 100644 --- a/stubs/yt-dlp/yt_dlp/utils/_utils.pyi +++ b/stubs/yt-dlp/yt_dlp/utils/_utils.pyi @@ -72,16 +72,18 @@ def xpath_attr( fatal: bool = False, default: str | type[NO_DEFAULT] = ..., ) -> str | None: ... -def get_element_by_id(id: str, html: str, **kwargs: Any) -> str | None: ... -def get_element_html_by_id(id: str, html: str, **kwargs: Any) -> str | None: ... +def get_element_by_id(id: str, html: str, *, tag: str, escape_value: bool = True) -> str | None: ... +def get_element_html_by_id(id: str, html: str, *, tag: str, escape_value: bool = True) -> str | None: ... def get_element_by_class(class_name: str, html: str) -> str: ... def get_element_html_by_class(class_name: str, html: str) -> str: ... -def get_element_by_attribute(attribute: str, value: str, html: str, **kwargs: Any) -> str: ... -def get_element_html_by_attribute(attribute: str, value: str, html: str, **kargs: Any) -> list[str]: ... -def get_elements_by_class(class_name: str, html: str, **kargs: Any) -> list[str]: ... +def get_element_by_attribute(attribute: str, value: str, html: str, *, tag: str, escape_value: bool = True) -> str: ... +def get_element_html_by_attribute(attribute: str, value: str, html: str, *, tag: str, escape_value: bool = True) -> list[str]: ... +def get_elements_by_class(class_name: str, html: str, **kargs: Unused) -> list[str]: ... def get_elements_html_by_class(class_name: str, html: str) -> list[str]: ... -def get_elements_by_attribute(*args: Any, **kwargs: Any) -> list[str]: ... -def get_elements_html_by_attribute(*args: Any, **kwargs: Any) -> list[str]: ... +def get_elements_by_attribute(attribute: str, value: str, html: str, *, tag: str, escape_value: bool = True) -> list[str]: ... +def get_elements_html_by_attribute( + attribute: str, value: str, html: str, *, tag: str = "[\\w:.-]+", escape_value: bool = True +) -> list[str]: ... def get_elements_text_and_html_by_attribute( attribute: str, value: str, html: str, *, tag: str = "[\\w:.-]+", escape_value: bool = True ) -> Iterator[str]: ... @@ -591,14 +593,14 @@ class Config: own_args: Sequence[str] | None parsed_args: tuple[Values, list[str]] | None filename: str | None - def __init__(self, parser: _YoutubeDLOptionParser, *, label: str | None = None) -> None: ... + def __init__(self, parser: _YoutubeDLOptionParser, label: str | None = None) -> None: ... def init(self, args: Sequence[str] | None = None, filename: str | None = None) -> bool: ... def load_configs(self) -> bool: ... @staticmethod def read_file(filename: FileDescriptorOrPath, default: list[str] = []) -> list[str]: ... @staticmethod def hide_login_info(opts: Iterable[str]) -> list[str]: ... - def append_config(self, args: Sequence[str] | None, filename: str | None, label: str | None = None) -> None: ... + def append_config(self, args: Sequence[str] | None, filename: str | None, *, label: str | None = None) -> None: ... @property def all_args(self) -> Iterator[str]: ... 
def parse_known_args(self, *, values: optparse.Values | None = None, strict: bool = True) -> tuple[Values, list[str]]: ... From 44a9721e2ff7d46ccdaf9bcb795acbea7980db9b Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Sat, 26 Jul 2025 15:13:55 -0400 Subject: [PATCH 13/13] Add new items since last version --- stubs/yt-dlp/@tests/stubtest_allowlist.txt | 2 ++ stubs/yt-dlp/METADATA.toml | 2 +- stubs/yt-dlp/yt_dlp/jsinterp.pyi | 2 ++ stubs/yt-dlp/yt_dlp/utils/jslib/__init__.pyi | 0 stubs/yt-dlp/yt_dlp/utils/jslib/devalue.pyi | 9 +++++++++ 5 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 stubs/yt-dlp/yt_dlp/utils/jslib/__init__.pyi create mode 100644 stubs/yt-dlp/yt_dlp/utils/jslib/devalue.pyi diff --git a/stubs/yt-dlp/@tests/stubtest_allowlist.txt b/stubs/yt-dlp/@tests/stubtest_allowlist.txt index c4222fabcefd..eec37a338479 100644 --- a/stubs/yt-dlp/@tests/stubtest_allowlist.txt +++ b/stubs/yt-dlp/@tests/stubtest_allowlist.txt @@ -24,3 +24,5 @@ yt_dlp.utils.(_utils.)?is_iterable_like # Generated with functools.partial. yt_dlp.utils.(_utils.)?prepend_extension yt_dlp.utils.(_utils.)?replace_extension +# Unsure why this is here. +yt_dlp.utils.jslib.devalue.TYPE_CHECKING diff --git a/stubs/yt-dlp/METADATA.toml b/stubs/yt-dlp/METADATA.toml index 263a836c6994..c382568e945e 100644 --- a/stubs/yt-dlp/METADATA.toml +++ b/stubs/yt-dlp/METADATA.toml @@ -1,3 +1,3 @@ -version = "2025.05.*" +version = "2025.07.21" upstream_repository = "https://github.com/yt-dlp/yt-dlp" requires = ["websockets"] diff --git a/stubs/yt-dlp/yt_dlp/jsinterp.pyi b/stubs/yt-dlp/yt_dlp/jsinterp.pyi index 580673f074c3..4fbd25299b5c 100644 --- a/stubs/yt-dlp/yt_dlp/jsinterp.pyi +++ b/stubs/yt-dlp/yt_dlp/jsinterp.pyi @@ -26,6 +26,8 @@ class JS_Throw(ExtractorError): class LocalNameSpace(collections.ChainMap[str, Any]): def __setitem__(self, key: str, value: Any) -> None: ... def __delitem__(self, key: str) -> NoReturn: ... + def set_local(self, key: str, value: Any) -> None: ... + def get_local(self, key: str) -> Any: ... class Debugger: ENABLED: bool diff --git a/stubs/yt-dlp/yt_dlp/utils/jslib/__init__.pyi b/stubs/yt-dlp/yt_dlp/utils/jslib/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/yt-dlp/yt_dlp/utils/jslib/devalue.pyi b/stubs/yt-dlp/yt_dlp/utils/jslib/devalue.pyi new file mode 100644 index 000000000000..5ca1e8a6d36d --- /dev/null +++ b/stubs/yt-dlp/yt_dlp/utils/jslib/devalue.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable, Generator +from typing import Any + +def parse_iter( + parsed: Any, /, *, revivers: dict[str, Callable[[list[Any]], Any]] | None = None # parsed: Any from the signature. +) -> Generator[TypeError | IndexError | ValueError, Any, float | None]: ... +def parse( + parsed: Any, /, *, revivers: dict[str, Callable[[Any], Any]] | None = None # parsed: Any from the signature. +) -> Any: ... # returns StopIteration.value if it is raised.
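
Beyond stubtest, the finished package is easiest to smoke-test by running a type checker over a small consumer script such as the one below. The embedding API used here (YoutubeDL, extract_info, sanitize_info) is yt-dlp's documented public interface; the URL is a placeholder, and whatever extract_info is annotated to return comes from YoutubeDL.pyi, which is outside the hunks in this series.

    from yt_dlp import YoutubeDL

    # Placeholder URL; the point is only that this should type-check cleanly
    # once the stubs are installed alongside yt-dlp.
    with YoutubeDL({"quiet": True, "skip_download": True}) as ydl:
        info = ydl.extract_info("https://example.com/some-video", download=False)
        print(ydl.sanitize_info(info) if info else None)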