diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/VERSIONS b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/VERSIONS index a8526aa..2b2a66d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/VERSIONS +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/VERSIONS @@ -34,11 +34,14 @@ _dummy_thread: 3.0-3.8 _dummy_threading: 3.0-3.8 _heapq: 3.0- _imp: 3.0- +_interpchannels: 3.13- +_interpqueues: 3.13- +_interpreters: 3.13- _json: 3.0- _locale: 3.0- _lsprof: 3.0- _markupbase: 3.0- -_msi: 3.0- +_msi: 3.0-3.12 _operator: 3.4- _osx_support: 3.0- _posixsubprocess: 3.2- @@ -47,6 +50,8 @@ _pydecimal: 3.5- _random: 3.0- _sitebuiltins: 3.4- _socket: 3.0- # present in 3.0 at runtime, but not in typeshed +_sqlite3: 3.0- +_ssl: 3.0- _stat: 3.4- _thread: 3.0- _threading_local: 3.0- @@ -65,9 +70,9 @@ array: 3.0- ast: 3.0- asynchat: 3.0-3.11 asyncio: 3.4- -asyncio.mixins: 3.10- asyncio.exceptions: 3.8- asyncio.format_helpers: 3.7- +asyncio.mixins: 3.10- asyncio.runners: 3.7- asyncio.staggered: 3.8- asyncio.taskgroups: 3.11- @@ -111,6 +116,7 @@ curses: 3.0- dataclasses: 3.7- datetime: 3.0- dbm: 3.0- +dbm.sqlite3: 3.13- decimal: 3.0- difflib: 3.0- dis: 3.0- @@ -154,8 +160,11 @@ importlib: 3.0- importlib._abc: 3.10- importlib.metadata: 3.8- importlib.metadata._meta: 3.10- +importlib.metadata.diagnose: 3.13- importlib.readers: 3.10- importlib.resources: 3.7- +importlib.resources._common: 3.11- +importlib.resources._functional: 3.13- importlib.resources.abc: 3.11- importlib.resources.readers: 3.11- importlib.resources.simple: 3.11- @@ -270,6 +279,7 @@ threading: 3.0- time: 3.0- timeit: 3.0- tkinter: 3.0- +tkinter.tix: 3.0-3.12 token: 3.0- tokenize: 3.0- tomllib: 3.11- diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ast.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ast.pyi index e1c23cc..8dc1bcb 100644 --- 
a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ast.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ast.pyi @@ -1,1473 +1,149 @@ import sys -import typing_extensions -from typing import Any, ClassVar, Generic, Literal, TypedDict, overload -from typing_extensions import Unpack +from ast import ( + AST as AST, + Add as Add, + And as And, + AnnAssign as AnnAssign, + Assert as Assert, + Assign as Assign, + AsyncFor as AsyncFor, + AsyncFunctionDef as AsyncFunctionDef, + AsyncWith as AsyncWith, + Attribute as Attribute, + AugAssign as AugAssign, + Await as Await, + BinOp as BinOp, + BitAnd as BitAnd, + BitOr as BitOr, + BitXor as BitXor, + BoolOp as BoolOp, + Break as Break, + Call as Call, + ClassDef as ClassDef, + Compare as Compare, + Constant as Constant, + Continue as Continue, + Del as Del, + Delete as Delete, + Dict as Dict, + DictComp as DictComp, + Div as Div, + Eq as Eq, + ExceptHandler as ExceptHandler, + Expr as Expr, + Expression as Expression, + FloorDiv as FloorDiv, + For as For, + FormattedValue as FormattedValue, + FunctionDef as FunctionDef, + FunctionType as FunctionType, + GeneratorExp as GeneratorExp, + Global as Global, + Gt as Gt, + GtE as GtE, + If as If, + IfExp as IfExp, + Import as Import, + ImportFrom as ImportFrom, + In as In, + Interactive as Interactive, + Invert as Invert, + Is as Is, + IsNot as IsNot, + JoinedStr as JoinedStr, + Lambda as Lambda, + List as List, + ListComp as ListComp, + Load as Load, + LShift as LShift, + Lt as Lt, + LtE as LtE, + MatMult as MatMult, + Mod as Mod, + Module as Module, + Mult as Mult, + Name as Name, + NamedExpr as NamedExpr, + Nonlocal as Nonlocal, + Not as Not, + NotEq as NotEq, + NotIn as NotIn, + Or as Or, + Pass as Pass, + Pow as Pow, + Raise as Raise, + Return as Return, + RShift as RShift, + Set as Set, + SetComp as SetComp, + Slice as Slice, + Starred as Starred, + Store as Store, + Sub as Sub, + Subscript as Subscript, + Try as Try, + Tuple as Tuple, + 
TypeIgnore as TypeIgnore, + UAdd as UAdd, + UnaryOp as UnaryOp, + USub as USub, + While as While, + With as With, + Yield as Yield, + YieldFrom as YieldFrom, + alias as alias, + arg as arg, + arguments as arguments, + boolop as boolop, + cmpop as cmpop, + comprehension as comprehension, + excepthandler as excepthandler, + expr as expr, + expr_context as expr_context, + keyword as keyword, + mod as mod, + operator as operator, + stmt as stmt, + type_ignore as type_ignore, + unaryop as unaryop, + withitem as withitem, +) +from typing import Literal -PyCF_ONLY_AST: Literal[1024] -PyCF_TYPE_COMMENTS: Literal[4096] -PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] - -if sys.version_info >= (3, 13): - PyCF_OPTIMIZED_AST: Literal[33792] - -# Used for node end positions in constructor keyword arguments -_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) # noqa: Y023 - -# Alias used for fields that must always be valid identifiers -# A string `x` counts as a valid identifier if both the following are True -# (1) `x.isidentifier()` evaluates to `True` -# (2) `keyword.iskeyword(x)` evaluates to `False` -_Identifier: typing_extensions.TypeAlias = str - -# Corresponds to the names in the `_attributes` class variable which is non-empty in certain AST nodes -class _Attributes(TypedDict, Generic[_EndPositionT], total=False): - lineno: int - col_offset: int - end_lineno: _EndPositionT - end_col_offset: _EndPositionT - -class AST: - if sys.version_info >= (3, 10): - __match_args__ = () - _attributes: ClassVar[tuple[str, ...]] - _fields: ClassVar[tuple[str, ...]] - if sys.version_info >= (3, 13): - _field_types: ClassVar[dict[str, Any]] - -class mod(AST): - """ - mod = Module(stmt* body, type_ignore* type_ignores) - | Interactive(stmt* body) - | Expression(expr body) - | FunctionType(expr* argtypes, expr returns) - """ - ... -class type_ignore(AST): - """type_ignore = TypeIgnore(int lineno, string tag)""" - ... 
- -class TypeIgnore(type_ignore): - """TypeIgnore(int lineno, string tag)""" - if sys.version_info >= (3, 10): - __match_args__ = ("lineno", "tag") - lineno: int - tag: str - def __init__(self, lineno: int, tag: str) -> None: ... - -class FunctionType(mod): - """FunctionType(expr* argtypes, expr returns)""" - if sys.version_info >= (3, 10): - __match_args__ = ("argtypes", "returns") - argtypes: list[expr] - returns: expr - if sys.version_info >= (3, 13): - @overload - def __init__(self, argtypes: list[expr], returns: expr) -> None: ... - @overload - def __init__(self, argtypes: list[expr] = ..., *, returns: expr) -> None: ... - else: - def __init__(self, argtypes: list[expr], returns: expr) -> None: ... - -class Module(mod): - """Module(stmt* body, type_ignore* type_ignores)""" - if sys.version_info >= (3, 10): - __match_args__ = ("body", "type_ignores") - body: list[stmt] - type_ignores: list[TypeIgnore] - if sys.version_info >= (3, 13): - def __init__(self, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> None: ... - else: - def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ... - -class Interactive(mod): - """Interactive(stmt* body)""" - if sys.version_info >= (3, 10): - __match_args__ = ("body",) - body: list[stmt] - if sys.version_info >= (3, 13): - def __init__(self, body: list[stmt] = ...) -> None: ... - else: - def __init__(self, body: list[stmt]) -> None: ... - -class Expression(mod): - """Expression(expr body)""" - if sys.version_info >= (3, 10): - __match_args__ = ("body",) - body: expr - def __init__(self, body: expr) -> None: ... - -class stmt(AST): - """ - stmt = FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) - | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? 
type_comment, type_param* type_params) - | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) - | Return(expr? value) - | Delete(expr* targets) - | Assign(expr* targets, expr value, string? type_comment) - | TypeAlias(expr name, type_param* type_params, expr value) - | AugAssign(expr target, operator op, expr value) - | AnnAssign(expr target, expr annotation, expr? value, int simple) - | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | While(expr test, stmt* body, stmt* orelse) - | If(expr test, stmt* body, stmt* orelse) - | With(withitem* items, stmt* body, string? type_comment) - | AsyncWith(withitem* items, stmt* body, string? type_comment) - | Match(expr subject, match_case* cases) - | Raise(expr? exc, expr? cause) - | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | Assert(expr test, expr? msg) - | Import(alias* names) - | ImportFrom(identifier? module, alias* names, int? level) - | Global(identifier* names) - | Nonlocal(identifier* names) - | Expr(expr value) - | Pass - | Break - | Continue - """ - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... - -class FunctionDef(stmt): - """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? 
type_comment, type_param* type_params)""" - if sys.version_info >= (3, 12): - __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") - elif sys.version_info >= (3, 10): - __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") - name: _Identifier - args: arguments - body: list[stmt] - decorator_list: list[expr] - returns: expr | None - type_comment: str | None - if sys.version_info >= (3, 12): - type_params: list[type_param] - if sys.version_info >= (3, 13): - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt] = ..., - decorator_list: list[expr] = ..., - returns: expr | None = None, - type_comment: str | None = None, - type_params: list[type_param] = ..., - **kwargs: Unpack[_Attributes], - ) -> None: ... - elif sys.version_info >= (3, 12): - @overload - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None, - type_comment: str | None, - type_params: list[type_param], - **kwargs: Unpack[_Attributes], - ) -> None: ... - @overload - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None = None, - type_comment: str | None = None, - *, - type_params: list[type_param], - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None = None, - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class AsyncFunctionDef(stmt): - """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? 
type_comment, type_param* type_params)""" - if sys.version_info >= (3, 12): - __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") - elif sys.version_info >= (3, 10): - __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") - name: _Identifier - args: arguments - body: list[stmt] - decorator_list: list[expr] - returns: expr | None - type_comment: str | None - if sys.version_info >= (3, 12): - type_params: list[type_param] - if sys.version_info >= (3, 13): - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt] = ..., - decorator_list: list[expr] = ..., - returns: expr | None = None, - type_comment: str | None = None, - type_params: list[type_param] = ..., - **kwargs: Unpack[_Attributes], - ) -> None: ... - elif sys.version_info >= (3, 12): - @overload - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None, - type_comment: str | None, - type_params: list[type_param], - **kwargs: Unpack[_Attributes], - ) -> None: ... - @overload - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None = None, - type_comment: str | None = None, - *, - type_params: list[type_param], - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - name: _Identifier, - args: arguments, - body: list[stmt], - decorator_list: list[expr], - returns: expr | None = None, - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... 
- -class ClassDef(stmt): - """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params)""" - if sys.version_info >= (3, 12): - __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") - elif sys.version_info >= (3, 10): - __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") - name: _Identifier - bases: list[expr] - keywords: list[keyword] - body: list[stmt] - decorator_list: list[expr] - if sys.version_info >= (3, 12): - type_params: list[type_param] - if sys.version_info >= (3, 13): - def __init__( - self, - name: _Identifier, - bases: list[expr] = ..., - keywords: list[keyword] = ..., - body: list[stmt] = ..., - decorator_list: list[expr] = ..., - type_params: list[type_param] = ..., - **kwargs: Unpack[_Attributes], - ) -> None: ... - elif sys.version_info >= (3, 12): - def __init__( - self, - name: _Identifier, - bases: list[expr], - keywords: list[keyword], - body: list[stmt], - decorator_list: list[expr], - type_params: list[type_param], - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - name: _Identifier, - bases: list[expr], - keywords: list[keyword], - body: list[stmt], - decorator_list: list[expr], - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class Return(stmt): - """Return(expr? value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value",) - value: expr | None - def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - -class Delete(stmt): - """Delete(expr* targets)""" - if sys.version_info >= (3, 10): - __match_args__ = ("targets",) - targets: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class Assign(stmt): - """Assign(expr* targets, expr value, string? 
type_comment)""" - if sys.version_info >= (3, 10): - __match_args__ = ("targets", "value", "type_comment") - targets: list[expr] - value: expr - type_comment: str | None - if sys.version_info >= (3, 13): - @overload - def __init__( - self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - @overload - def __init__( - self, targets: list[expr] = ..., *, value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__( - self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class AugAssign(stmt): - """AugAssign(expr target, operator op, expr value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "op", "value") - target: Name | Attribute | Subscript - op: operator - value: expr - def __init__( - self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class AnnAssign(stmt): - """AnnAssign(expr target, expr annotation, expr? value, int simple)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "annotation", "value", "simple") - target: Name | Attribute | Subscript - annotation: expr - value: expr | None - simple: int - @overload - def __init__( - self, - target: Name | Attribute | Subscript, - annotation: expr, - value: expr | None, - simple: int, - **kwargs: Unpack[_Attributes], - ) -> None: ... - @overload - def __init__( - self, - target: Name | Attribute | Subscript, - annotation: expr, - value: expr | None = None, - *, - simple: int, - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class For(stmt): - """For(expr target, expr iter, stmt* body, stmt* orelse, string? 
type_comment)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "iter", "body", "orelse", "type_comment") - target: expr - iter: expr - body: list[stmt] - orelse: list[stmt] - type_comment: str | None - if sys.version_info >= (3, 13): - def __init__( - self, - target: expr, - iter: expr, - body: list[stmt] = ..., - orelse: list[stmt] = ..., - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - target: expr, - iter: expr, - body: list[stmt], - orelse: list[stmt], - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class AsyncFor(stmt): - """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "iter", "body", "orelse", "type_comment") - target: expr - iter: expr - body: list[stmt] - orelse: list[stmt] - type_comment: str | None - if sys.version_info >= (3, 13): - def __init__( - self, - target: expr, - iter: expr, - body: list[stmt] = ..., - orelse: list[stmt] = ..., - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - target: expr, - iter: expr, - body: list[stmt], - orelse: list[stmt], - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class While(stmt): - """While(expr test, stmt* body, stmt* orelse)""" - if sys.version_info >= (3, 10): - __match_args__ = ("test", "body", "orelse") - test: expr - body: list[stmt] - orelse: list[stmt] - if sys.version_info >= (3, 13): - def __init__( - self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... 
- -class If(stmt): - """If(expr test, stmt* body, stmt* orelse)""" - if sys.version_info >= (3, 10): - __match_args__ = ("test", "body", "orelse") - test: expr - body: list[stmt] - orelse: list[stmt] - if sys.version_info >= (3, 13): - def __init__( - self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... - -class With(stmt): - """With(withitem* items, stmt* body, string? type_comment)""" - if sys.version_info >= (3, 10): - __match_args__ = ("items", "body", "type_comment") - items: list[withitem] - body: list[stmt] - type_comment: str | None - if sys.version_info >= (3, 13): - def __init__( - self, - items: list[withitem] = ..., - body: list[stmt] = ..., - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class AsyncWith(stmt): - """AsyncWith(withitem* items, stmt* body, string? type_comment)""" - if sys.version_info >= (3, 10): - __match_args__ = ("items", "body", "type_comment") - items: list[withitem] - body: list[stmt] - type_comment: str | None - if sys.version_info >= (3, 13): - def __init__( - self, - items: list[withitem] = ..., - body: list[stmt] = ..., - type_comment: str | None = None, - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class Raise(stmt): - """Raise(expr? exc, expr? cause)""" - if sys.version_info >= (3, 10): - __match_args__ = ("exc", "cause") - exc: expr | None - cause: expr | None - def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... 
- -class Try(stmt): - """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" - if sys.version_info >= (3, 10): - __match_args__ = ("body", "handlers", "orelse", "finalbody") - body: list[stmt] - handlers: list[ExceptHandler] - orelse: list[stmt] - finalbody: list[stmt] - if sys.version_info >= (3, 13): - def __init__( - self, - body: list[stmt] = ..., - handlers: list[ExceptHandler] = ..., - orelse: list[stmt] = ..., - finalbody: list[stmt] = ..., - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - body: list[stmt], - handlers: list[ExceptHandler], - orelse: list[stmt], - finalbody: list[stmt], - **kwargs: Unpack[_Attributes], - ) -> None: ... +if sys.version_info >= (3, 12): + from ast import ParamSpec as ParamSpec, TypeVar as TypeVar, TypeVarTuple as TypeVarTuple, type_param as type_param if sys.version_info >= (3, 11): - class TryStar(stmt): - """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" - __match_args__ = ("body", "handlers", "orelse", "finalbody") - body: list[stmt] - handlers: list[ExceptHandler] - orelse: list[stmt] - finalbody: list[stmt] - if sys.version_info >= (3, 13): - def __init__( - self, - body: list[stmt] = ..., - handlers: list[ExceptHandler] = ..., - orelse: list[stmt] = ..., - finalbody: list[stmt] = ..., - **kwargs: Unpack[_Attributes], - ) -> None: ... - else: - def __init__( - self, - body: list[stmt], - handlers: list[ExceptHandler], - orelse: list[stmt], - finalbody: list[stmt], - **kwargs: Unpack[_Attributes], - ) -> None: ... - -class Assert(stmt): - """Assert(expr test, expr? msg)""" - if sys.version_info >= (3, 10): - __match_args__ = ("test", "msg") - test: expr - msg: expr | None - def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... 
- -class Import(stmt): - """Import(alias* names)""" - if sys.version_info >= (3, 10): - __match_args__ = ("names",) - names: list[alias] - if sys.version_info >= (3, 13): - def __init__(self, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ... - -class ImportFrom(stmt): - """ImportFrom(identifier? module, alias* names, int? level)""" - if sys.version_info >= (3, 10): - __match_args__ = ("module", "names", "level") - module: str | None - names: list[alias] - level: int - if sys.version_info >= (3, 13): - @overload - def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... - @overload - def __init__( - self, module: str | None = None, names: list[alias] = ..., *, level: int, **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - @overload - def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... - @overload - def __init__( - self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class Global(stmt): - """Global(identifier* names)""" - if sys.version_info >= (3, 10): - __match_args__ = ("names",) - names: list[_Identifier] - if sys.version_info >= (3, 13): - def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... - -class Nonlocal(stmt): - """Nonlocal(identifier* names)""" - if sys.version_info >= (3, 10): - __match_args__ = ("names",) - names: list[_Identifier] - if sys.version_info >= (3, 13): - def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... 
- -class Expr(stmt): - """Expr(expr value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value",) - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Pass(stmt): - """Pass""" - ... -class Break(stmt): - """Break""" - ... -class Continue(stmt): - """Continue""" - ... - -class expr(AST): - """ - expr = BoolOp(boolop op, expr* values) - | NamedExpr(expr target, expr value) - | BinOp(expr left, operator op, expr right) - | UnaryOp(unaryop op, expr operand) - | Lambda(arguments args, expr body) - | IfExp(expr test, expr body, expr orelse) - | Dict(expr* keys, expr* values) - | Set(expr* elts) - | ListComp(expr elt, comprehension* generators) - | SetComp(expr elt, comprehension* generators) - | DictComp(expr key, expr value, comprehension* generators) - | GeneratorExp(expr elt, comprehension* generators) - | Await(expr value) - | Yield(expr? value) - | YieldFrom(expr value) - | Compare(expr left, cmpop* ops, expr* comparators) - | Call(expr func, expr* args, keyword* keywords) - | FormattedValue(expr value, int conversion, expr? format_spec) - | JoinedStr(expr* values) - | Constant(constant value, string? kind) - | Attribute(expr value, identifier attr, expr_context ctx) - | Subscript(expr value, expr slice, expr_context ctx) - | Starred(expr value, expr_context ctx) - | Name(identifier id, expr_context ctx) - | List(expr* elts, expr_context ctx) - | Tuple(expr* elts, expr_context ctx) - | Slice(expr? lower, expr? upper, expr? step) - """ - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... - -class BoolOp(expr): - """BoolOp(boolop op, expr* values)""" - if sys.version_info >= (3, 10): - __match_args__ = ("op", "values") - op: boolop - values: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, op: boolop, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
- else: - def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class BinOp(expr): - """BinOp(expr left, operator op, expr right)""" - if sys.version_info >= (3, 10): - __match_args__ = ("left", "op", "right") - left: expr - op: operator - right: expr - def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class UnaryOp(expr): - """UnaryOp(unaryop op, expr operand)""" - if sys.version_info >= (3, 10): - __match_args__ = ("op", "operand") - op: unaryop - operand: expr - def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Lambda(expr): - """Lambda(arguments args, expr body)""" - if sys.version_info >= (3, 10): - __match_args__ = ("args", "body") - args: arguments - body: expr - def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class IfExp(expr): - """IfExp(expr test, expr body, expr orelse)""" - if sys.version_info >= (3, 10): - __match_args__ = ("test", "body", "orelse") - test: expr - body: expr - orelse: expr - def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Dict(expr): - """Dict(expr* keys, expr* values)""" - if sys.version_info >= (3, 10): - __match_args__ = ("keys", "values") - keys: list[expr | None] - values: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class Set(expr): - """Set(expr* elts)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elts",) - elts: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
- else: - def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class ListComp(expr): - """ListComp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elt", "generators") - elt: expr - generators: list[comprehension] - if sys.version_info >= (3, 13): - def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... - -class SetComp(expr): - """SetComp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elt", "generators") - elt: expr - generators: list[comprehension] - if sys.version_info >= (3, 13): - def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... - -class DictComp(expr): - """DictComp(expr key, expr value, comprehension* generators)""" - if sys.version_info >= (3, 10): - __match_args__ = ("key", "value", "generators") - key: expr - value: expr - generators: list[comprehension] - if sys.version_info >= (3, 13): - def __init__( - self, key: expr, value: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... - -class GeneratorExp(expr): - """GeneratorExp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elt", "generators") - elt: expr - generators: list[comprehension] - if sys.version_info >= (3, 13): - def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
- else: - def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... - -class Await(expr): - """Await(expr value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value",) - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Yield(expr): - """Yield(expr? value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value",) - value: expr | None - def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - -class YieldFrom(expr): - """YieldFrom(expr value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value",) - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Compare(expr): - """Compare(expr left, cmpop* ops, expr* comparators)""" - if sys.version_info >= (3, 10): - __match_args__ = ("left", "ops", "comparators") - left: expr - ops: list[cmpop] - comparators: list[expr] - if sys.version_info >= (3, 13): - def __init__( - self, left: expr, ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class Call(expr): - """Call(expr func, expr* args, keyword* keywords)""" - if sys.version_info >= (3, 10): - __match_args__ = ("func", "args", "keywords") - func: expr - args: list[expr] - keywords: list[keyword] - if sys.version_info >= (3, 13): - def __init__( - self, func: expr, args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ... - -class FormattedValue(expr): - """FormattedValue(expr value, int conversion, expr? 
format_spec)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value", "conversion", "format_spec") - value: expr - conversion: int - format_spec: expr | None - def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - -class JoinedStr(expr): - """JoinedStr(expr* values)""" - if sys.version_info >= (3, 10): - __match_args__ = ("values",) - values: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... - -class Constant(expr): - """Constant(constant value, string? kind)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value", "kind") - value: Any # None, str, bytes, bool, int, float, complex, Ellipsis - kind: str | None - # Aliases for value, for backwards compatibility - s: Any - n: int | float | complex - def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - -class NamedExpr(expr): - """NamedExpr(expr target, expr value)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "value") - target: Name - value: expr - def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class Attribute(expr): - """Attribute(expr value, identifier attr, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value", "attr", "ctx") - value: expr - attr: _Identifier - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - -if sys.version_info >= (3, 9): - _Slice: typing_extensions.TypeAlias = expr - _SliceAttributes: typing_extensions.TypeAlias = _Attributes -else: - class slice(AST): ... 
- _Slice: typing_extensions.TypeAlias = slice - - class _SliceAttributes(TypedDict): ... - -class Slice(_Slice): - """Slice(expr? lower, expr? upper, expr? step)""" - if sys.version_info >= (3, 10): - __match_args__ = ("lower", "upper", "step") - lower: expr | None - upper: expr | None - step: expr | None - def __init__( - self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes] - ) -> None: ... - -if sys.version_info < (3, 9): - class ExtSlice(slice): - dims: list[slice] - def __init__(self, dims: list[slice], **kwargs: Unpack[_SliceAttributes]) -> None: ... - - class Index(slice): - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_SliceAttributes]) -> None: ... - -class Subscript(expr): - """Subscript(expr value, expr slice, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value", "slice", "ctx") - value: expr - slice: _Slice - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - -class Starred(expr): - """Starred(expr value, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("value", "ctx") - value: expr - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - -class Name(expr): - """Name(identifier id, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("id", "ctx") - id: _Identifier - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
- -class List(expr): - """List(expr* elts, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elts", "ctx") - elts: list[expr] - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - if sys.version_info >= (3, 13): - def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - -class Tuple(expr): - """Tuple(expr* elts, expr_context ctx)""" - if sys.version_info >= (3, 10): - __match_args__ = ("elts", "ctx") - elts: list[expr] - ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` - if sys.version_info >= (3, 9): - dims: list[expr] - if sys.version_info >= (3, 13): - def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... - -class expr_context(AST): - """expr_context = Load | Store | Del""" - ... - -if sys.version_info < (3, 9): - class AugLoad(expr_context): ... - class AugStore(expr_context): ... - class Param(expr_context): ... - - class Suite(mod): - body: list[stmt] - def __init__(self, body: list[stmt]) -> None: ... - -class Del(expr_context): - """Del""" - ... -class Load(expr_context): - """Load""" - ... -class Store(expr_context): - """Store""" - ... -class boolop(AST): - """boolop = And | Or""" - ... -class And(boolop): - """And""" - ... -class Or(boolop): - """Or""" - ... -class operator(AST): - """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv""" - ... -class Add(operator): - """Add""" - ... -class BitAnd(operator): - """BitAnd""" - ... -class BitOr(operator): - """BitOr""" - ... -class BitXor(operator): - """BitXor""" - ... -class Div(operator): - """Div""" - ... 
-class FloorDiv(operator): - """FloorDiv""" - ... -class LShift(operator): - """LShift""" - ... -class Mod(operator): - """Mod""" - ... -class Mult(operator): - """Mult""" - ... -class MatMult(operator): - """MatMult""" - ... -class Pow(operator): - """Pow""" - ... -class RShift(operator): - """RShift""" - ... -class Sub(operator): - """Sub""" - ... -class unaryop(AST): - """unaryop = Invert | Not | UAdd | USub""" - ... -class Invert(unaryop): - """Invert""" - ... -class Not(unaryop): - """Not""" - ... -class UAdd(unaryop): - """UAdd""" - ... -class USub(unaryop): - """USub""" - ... -class cmpop(AST): - """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn""" - ... -class Eq(cmpop): - """Eq""" - ... -class Gt(cmpop): - """Gt""" - ... -class GtE(cmpop): - """GtE""" - ... -class In(cmpop): - """In""" - ... -class Is(cmpop): - """Is""" - ... -class IsNot(cmpop): - """IsNot""" - ... -class Lt(cmpop): - """Lt""" - ... -class LtE(cmpop): - """LtE""" - ... -class NotEq(cmpop): - """NotEq""" - ... -class NotIn(cmpop): - """NotIn""" - ... - -class comprehension(AST): - """comprehension(expr target, expr iter, expr* ifs, int is_async)""" - if sys.version_info >= (3, 10): - __match_args__ = ("target", "iter", "ifs", "is_async") - target: expr - iter: expr - ifs: list[expr] - is_async: int - if sys.version_info >= (3, 13): - @overload - def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... - @overload - def __init__(self, target: expr, iter: expr, ifs: list[expr] = ..., *, is_async: int) -> None: ... - else: - def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... - -class excepthandler(AST): - """excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body)""" - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... - -class ExceptHandler(excepthandler): - """ExceptHandler(expr? 
type, identifier? name, stmt* body)""" - if sys.version_info >= (3, 10): - __match_args__ = ("type", "name", "body") - type: expr | None - name: _Identifier | None - body: list[stmt] - if sys.version_info >= (3, 13): - def __init__( - self, type: expr | None = None, name: _Identifier | None = None, body: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> None: ... - else: - @overload - def __init__( - self, type: expr | None, name: _Identifier | None, body: list[stmt], **kwargs: Unpack[_Attributes] - ) -> None: ... - @overload - def __init__( - self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes] - ) -> None: ... - -class arguments(AST): - """arguments(arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults)""" - if sys.version_info >= (3, 10): - __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") - posonlyargs: list[arg] - args: list[arg] - vararg: arg | None - kwonlyargs: list[arg] - kw_defaults: list[expr | None] - kwarg: arg | None - defaults: list[expr] - if sys.version_info >= (3, 13): - def __init__( - self, - posonlyargs: list[arg] = ..., - args: list[arg] = ..., - vararg: arg | None = None, - kwonlyargs: list[arg] = ..., - kw_defaults: list[expr | None] = ..., - kwarg: arg | None = None, - defaults: list[expr] = ..., - ) -> None: ... - else: - @overload - def __init__( - self, - posonlyargs: list[arg], - args: list[arg], - vararg: arg | None, - kwonlyargs: list[arg], - kw_defaults: list[expr | None], - kwarg: arg | None, - defaults: list[expr], - ) -> None: ... - @overload - def __init__( - self, - posonlyargs: list[arg], - args: list[arg], - vararg: arg | None, - kwonlyargs: list[arg], - kw_defaults: list[expr | None], - kwarg: arg | None = None, - *, - defaults: list[expr], - ) -> None: ... 
- @overload - def __init__( - self, - posonlyargs: list[arg], - args: list[arg], - vararg: arg | None = None, - *, - kwonlyargs: list[arg], - kw_defaults: list[expr | None], - kwarg: arg | None = None, - defaults: list[expr], - ) -> None: ... - -class arg(AST): - """arg(identifier arg, expr? annotation, string? type_comment)""" - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - if sys.version_info >= (3, 10): - __match_args__ = ("arg", "annotation", "type_comment") - arg: _Identifier - annotation: expr | None - type_comment: str | None - def __init__( - self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes] - ) -> None: ... - -class keyword(AST): - """keyword(identifier? arg, expr value)""" - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - if sys.version_info >= (3, 10): - __match_args__ = ("arg", "value") - arg: _Identifier | None - value: expr - @overload - def __init__(self, arg: _Identifier | None, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - @overload - def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - -class alias(AST): - """alias(identifier name, identifier? asname)""" - lineno: int - col_offset: int - end_lineno: int | None - end_col_offset: int | None - if sys.version_info >= (3, 10): - __match_args__ = ("name", "asname") - name: str - asname: _Identifier | None - def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - -class withitem(AST): - """withitem(expr context_expr, expr? optional_vars)""" - if sys.version_info >= (3, 10): - __match_args__ = ("context_expr", "optional_vars") - context_expr: expr - optional_vars: expr | None - def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ... 
+ from ast import TryStar as TryStar if sys.version_info >= (3, 10): - class Match(stmt): - """Match(expr subject, match_case* cases)""" - __match_args__ = ("subject", "cases") - subject: expr - cases: list[match_case] - if sys.version_info >= (3, 13): - def __init__(self, subject: expr, cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - else: - def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ... - - class pattern(AST): - """ - pattern = MatchValue(expr value) - | MatchSingleton(constant value) - | MatchSequence(pattern* patterns) - | MatchMapping(expr* keys, pattern* patterns, identifier? rest) - | MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) - | MatchStar(identifier? name) - | MatchAs(pattern? pattern, identifier? name) - | MatchOr(pattern* patterns) - """ - lineno: int - col_offset: int - end_lineno: int - end_col_offset: int - def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... - - # Without the alias, Pyright complains variables named pattern are recursively defined - _Pattern: typing_extensions.TypeAlias = pattern - - class match_case(AST): - """match_case(pattern pattern, expr? guard, stmt* body)""" - __match_args__ = ("pattern", "guard", "body") - pattern: _Pattern - guard: expr | None - body: list[stmt] - if sys.version_info >= (3, 13): - def __init__(self, pattern: _Pattern, guard: expr | None = None, body: list[stmt] = ...) -> None: ... - else: - @overload - def __init__(self, pattern: _Pattern, guard: expr | None, body: list[stmt]) -> None: ... - @overload - def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ... - - class MatchValue(pattern): - """MatchValue(expr value)""" - __match_args__ = ("value",) - value: expr - def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... 
- - class MatchSingleton(pattern): - """MatchSingleton(constant value)""" - __match_args__ = ("value",) - value: Literal[True, False] | None - def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... - - class MatchSequence(pattern): - """MatchSequence(pattern* patterns)""" - __match_args__ = ("patterns",) - patterns: list[pattern] - if sys.version_info >= (3, 13): - def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... - else: - def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... - - class MatchStar(pattern): - """MatchStar(identifier? name)""" - __match_args__ = ("name",) - name: _Identifier | None - def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + from ast import ( + MatchAs as MatchAs, + MatchClass as MatchClass, + MatchMapping as MatchMapping, + MatchOr as MatchOr, + MatchSequence as MatchSequence, + MatchSingleton as MatchSingleton, + MatchStar as MatchStar, + MatchValue as MatchValue, + match_case as match_case, + pattern as pattern, + ) - class MatchMapping(pattern): - """MatchMapping(expr* keys, pattern* patterns, identifier? rest)""" - __match_args__ = ("keys", "patterns", "rest") - keys: list[expr] - patterns: list[pattern] - rest: _Identifier | None - if sys.version_info >= (3, 13): - def __init__( - self, - keys: list[expr] = ..., - patterns: list[pattern] = ..., - rest: _Identifier | None = None, - **kwargs: Unpack[_Attributes[int]], - ) -> None: ... - else: - def __init__( - self, - keys: list[expr], - patterns: list[pattern], - rest: _Identifier | None = None, - **kwargs: Unpack[_Attributes[int]], - ) -> None: ... 
- - class MatchClass(pattern): - """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns)""" - __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") - cls: expr - patterns: list[pattern] - kwd_attrs: list[_Identifier] - kwd_patterns: list[pattern] - if sys.version_info >= (3, 13): - def __init__( - self, - cls: expr, - patterns: list[pattern] = ..., - kwd_attrs: list[_Identifier] = ..., - kwd_patterns: list[pattern] = ..., - **kwargs: Unpack[_Attributes[int]], - ) -> None: ... - else: - def __init__( - self, - cls: expr, - patterns: list[pattern], - kwd_attrs: list[_Identifier], - kwd_patterns: list[pattern], - **kwargs: Unpack[_Attributes[int]], - ) -> None: ... - - class MatchAs(pattern): - """MatchAs(pattern? pattern, identifier? name)""" - __match_args__ = ("pattern", "name") - pattern: _Pattern | None - name: _Identifier | None - def __init__( - self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... - - class MatchOr(pattern): - """MatchOr(pattern* patterns)""" - __match_args__ = ("patterns",) - patterns: list[pattern] - if sys.version_info >= (3, 13): - def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... - else: - def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... - -if sys.version_info >= (3, 12): - class type_param(AST): - """ - type_param = TypeVar(identifier name, expr? bound) - | ParamSpec(identifier name) - | TypeVarTuple(identifier name) - """ - lineno: int - col_offset: int - end_lineno: int - end_col_offset: int - def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... - - class TypeVar(type_param): - """TypeVar(identifier name, expr? 
bound)""" - if sys.version_info >= (3, 13): - __match_args__ = ("name", "bound", "default_value") - else: - __match_args__ = ("name", "bound") - name: _Identifier - bound: expr | None - if sys.version_info >= (3, 13): - default_value: expr | None - def __init__( - self, - name: _Identifier, - bound: expr | None = None, - default_value: expr | None = None, - **kwargs: Unpack[_Attributes[int]], - ) -> None: ... - else: - def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... - - class ParamSpec(type_param): - """ParamSpec(identifier name)""" - if sys.version_info >= (3, 13): - __match_args__ = ("name", "default_value") - else: - __match_args__ = ("name",) - name: _Identifier - if sys.version_info >= (3, 13): - default_value: expr | None - def __init__( - self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... - else: - def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... +if sys.version_info < (3, 9): + from ast import ( + AugLoad as AugLoad, + AugStore as AugStore, + ExtSlice as ExtSlice, + Index as Index, + Param as Param, + Suite as Suite, + slice as slice, + ) - class TypeVarTuple(type_param): - """TypeVarTuple(identifier name)""" - if sys.version_info >= (3, 13): - __match_args__ = ("name", "default_value") - else: - __match_args__ = ("name",) - name: _Identifier - if sys.version_info >= (3, 13): - default_value: expr | None - def __init__( - self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... - else: - def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... 
+PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] +PyCF_ONLY_AST: Literal[1024] +PyCF_TYPE_COMMENTS: Literal[4096] - class TypeAlias(stmt): - """TypeAlias(expr name, type_param* type_params, expr value)""" - __match_args__ = ("name", "type_params", "value") - name: Name - type_params: list[type_param] - value: expr - if sys.version_info >= (3, 13): - @overload - def __init__( - self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... - @overload - def __init__( - self, name: Name, type_params: list[type_param] = ..., *, value: expr, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... - else: - def __init__( - self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] - ) -> None: ... +if sys.version_info >= (3, 13): + PyCF_OPTIMIZED_AST: Literal[33792] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_collections_abc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_collections_abc.pyi index 2acdaf4..6503bc0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_collections_abc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_collections_abc.pyi @@ -7,13 +7,12 @@ Unit tests are in test_collections. 
import sys from abc import abstractmethod from types import MappingProxyType -from typing import ( # noqa: Y022,Y038,Y057 +from typing import ( # noqa: Y022,Y038 AbstractSet as Set, AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, Awaitable as Awaitable, - ByteString as ByteString, Callable as Callable, Collection as Collection, Container as Container, @@ -65,8 +64,12 @@ __all__ = [ "ValuesView", "Sequence", "MutableSequence", - "ByteString", ] +if sys.version_info < (3, 14): + from typing import ByteString as ByteString # noqa: Y057 + + __all__ += ["ByteString"] + if sys.version_info >= (3, 12): __all__ += ["Buffer"] @@ -78,6 +81,10 @@ class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: """Return self==value.""" ... + if sys.version_info >= (3, 13): + def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: + """Return True if the view and the given iterable have a null intersection.""" + ... if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: @@ -97,6 +104,10 @@ class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: """Return self==value.""" ... + if sys.version_info >= (3, 13): + def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: + """Return True if the view and the given iterable have a null intersection.""" + ... if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_csv.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_csv.pyi index a8af39e..1ccc833 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_csv.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_csv.pyi @@ -1,82 +1,20 @@ -r""" -CSV parsing and writing. 
- -This module provides classes that assist in the reading and writing -of Comma Separated Value (CSV) files, and implements the interface -described by PEP 305. Although many CSV files are simple to parse, -the format is not formally defined by a stable specification and -is subtle enough that parsing lines of a CSV file with something -like line.split(",") is bound to fail. The module supports three -basic APIs: reading, writing, and registration of dialects. - - -DIALECT REGISTRATION: - -Readers and writers support a dialect argument, which is a convenient -handle on a group of settings. When the dialect argument is a string, -it identifies one of the dialects previously registered with the module. -If it is a class or instance, the attributes of the argument are used as -the settings for the reader or writer: - - class excel: - delimiter = ',' - quotechar = '"' - escapechar = None - doublequote = True - skipinitialspace = False - lineterminator = '\r\n' - quoting = QUOTE_MINIMAL - -SETTINGS: - - * quotechar - specifies a one-character string to use as the - quoting character. It defaults to '"'. - * delimiter - specifies a one-character string to use as the - field separator. It defaults to ','. - * skipinitialspace - specifies how to interpret spaces which - immediately follow a delimiter. It defaults to False, which - means that spaces immediately following a delimiter is part - of the following field. - * lineterminator - specifies the character sequence which should - terminate rows. - * quoting - controls when quotes should be generated by the writer. - It can take on any of the following module constants: - - csv.QUOTE_MINIMAL means only when required, for example, when a - field contains either the quotechar or the delimiter - csv.QUOTE_ALL means that quotes are always placed around fields. - csv.QUOTE_NONNUMERIC means that quotes are always placed around - fields which do not parse as integers or floating point - numbers. 
- csv.QUOTE_STRINGS means that quotes are always placed around - fields which are strings. Note that the Python value None - is not a string. - csv.QUOTE_NOTNULL means that quotes are only placed around fields - that are not the Python value None. - csv.QUOTE_NONE means that quotes are never placed around fields. - * escapechar - specifies a one-character string used to escape - the delimiter when quoting is set to QUOTE_NONE. - * doublequote - controls the handling of quotes inside fields. When - True, two consecutive quotes are interpreted as one during read, - and when writing, each quote character embedded in the data is - written as two quotes -""" +"""CSV parsing and writing.""" import sys from _typeshed import SupportsWrite from collections.abc import Iterable, Iterator -from typing import Any, Final, Literal +from typing import Any, Final from typing_extensions import TypeAlias __version__: Final[str] -QUOTE_ALL: Literal[1] -QUOTE_MINIMAL: Literal[0] -QUOTE_NONE: Literal[3] -QUOTE_NONNUMERIC: Literal[2] +QUOTE_ALL: Final = 1 +QUOTE_MINIMAL: Final = 0 +QUOTE_NONE: Final = 3 +QUOTE_NONNUMERIC: Final = 2 if sys.version_info >= (3, 12): - QUOTE_STRINGS: Literal[4] - QUOTE_NOTNULL: Literal[5] + QUOTE_STRINGS: Final = 4 + QUOTE_NOTNULL: Final = 5 # Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC` # However, using literals in situations like these can cause false-positives (see #7258) @@ -84,6 +22,8 @@ _QuotingType: TypeAlias = int class Error(Exception): ... +_DialectLike: TypeAlias = str | Dialect | type[Dialect] + class Dialect: """ CSV dialect @@ -98,9 +38,18 @@ class Dialect: lineterminator: str quoting: _QuotingType strict: bool - def __init__(self) -> None: ... 
- -_DialectLike: TypeAlias = str | Dialect | type[Dialect] + def __init__( + self, + dialect: _DialectLike | None = ..., + delimiter: str = ",", + doublequote: bool = True, + escapechar: str | None = None, + lineterminator: str = "\r\n", + quotechar: str | None = '"', + quoting: _QuotingType = 0, + skipinitialspace: bool = False, + strict: bool = False, + ) -> None: ... class _reader(Iterator[list[str]]): @property diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ctypes.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ctypes.pyi index fdbb2ed..018700e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ctypes.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ctypes.pyi @@ -53,8 +53,8 @@ class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] - def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _CData(metaclass=_CDataMeta): _b_base_: int @@ -66,7 +66,6 @@ class _CData(metaclass=_CDataMeta): # Structure.from_buffer(...) # valid at runtime # Structure(...).from_buffer(...) # invalid at runtime # - @classmethod def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... @classmethod @@ -74,7 +73,7 @@ class _CData(metaclass=_CDataMeta): @classmethod def from_address(cls, address: int) -> Self: ... 
@classmethod - def from_param(cls, obj: Any) -> Self | _CArgObject: ... + def from_param(cls, value: Any, /) -> Self | _CArgObject: ... @classmethod def in_dll(cls, library: CDLL, name: str) -> Self: ... def __buffer__(self, flags: int, /) -> memoryview: ... @@ -110,8 +109,26 @@ class _Pointer(_PointerLike, _CData, Generic[_CT]): """Set self[key] to value.""" ... -def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ... -def pointer(arg: _CT, /) -> _Pointer[_CT]: ... +def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: + """ + Create and return a new ctypes pointer type. + + type + A ctypes type. + + Pointer types are cached and reused internally, + so calling this function repeatedly is cheap. + """ + ... +def pointer(obj: _CT, /) -> _Pointer[_CT]: + """ + Create a new pointer instance, pointing to 'obj'. + + The returned object is of the type POINTER(type(obj)). Note that if you + just want to pass a pointer to an object to a foreign function call, you + should use byref(obj) which is much faster. + """ + ... class _CArgObject: ... @@ -182,7 +199,15 @@ class Structure(_StructUnionBase): ... class Array(_CData, Generic[_CT]): - """XXX to be provided""" + """ + Abstract base class for arrays. + + The recommended way to create concrete array types is by multiplying any + ctypes data type with a non-negative integer. Alternatively, you can subclass + this type and define _length_ and _type_ class variables. Array elements can + be read and written using standard subscript and slice accesses for slice + reads, the resulting object is not itself an Array. + """ @property @abstractmethod def _length_(self) -> int: ... @@ -239,13 +264,13 @@ class Array(_CData, Generic[_CT]): """See PEP 585""" ... -def addressof(obj: _CData) -> int: +def addressof(obj: _CData, /) -> int: """ addressof(C instance) -> integer Return the address of the C instance internal buffer """ ... 
-def alignment(obj_or_type: _CData | type[_CData]) -> int: +def alignment(obj_or_type: _CData | type[_CData], /) -> int: """ alignment(C type) -> integer alignment(C instance) -> integer @@ -253,11 +278,11 @@ def alignment(obj_or_type: _CData | type[_CData]) -> int: """ ... def get_errno() -> int: ... -def resize(obj: _CData, size: int) -> None: +def resize(obj: _CData, size: int, /) -> None: """Resize the memory buffer of a ctypes instance""" ... -def set_errno(value: int) -> int: ... -def sizeof(obj_or_type: _CData | type[_CData]) -> int: +def set_errno(value: int, /) -> int: ... +def sizeof(obj_or_type: _CData | type[_CData], /) -> int: """ sizeof(C type) -> integer sizeof(C instance) -> integer diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_curses.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_curses.pyi index 1c6e623..6ed95d0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_curses.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_curses.pyi @@ -63,8 +63,7 @@ A_COLOR: int A_DIM: int A_HORIZONTAL: int A_INVIS: int -if sys.platform != "darwin": - A_ITALIC: int +A_ITALIC: int A_LEFT: int A_LOW: int A_NORMAL: int @@ -429,7 +428,7 @@ def getsyx() -> tuple[int, int]: Return a (y, x) tuple. If leaveok is currently true, return (-1, -1). """ ... -def getwin(file: SupportsRead[bytes], /) -> _CursesWindow: +def getwin(file: SupportsRead[bytes], /) -> window: """ Read window related data stored in the file by an earlier putwin() call. @@ -508,7 +507,7 @@ def init_pair(pair_number: int, fg: int, bg: int, /) -> None: all occurrences of that color-pair are changed to the new definition. """ ... -def initscr() -> _CursesWindow: +def initscr() -> window: """ Initialize the library. @@ -586,7 +585,7 @@ def napms(ms: int, /) -> int: Duration in milliseconds. """ ... 
-def newpad(nlines: int, ncols: int, /) -> _CursesWindow: +def newpad(nlines: int, ncols: int, /) -> window: """ Create and return a pointer to a new pad data structure. @@ -596,7 +595,7 @@ def newpad(nlines: int, ncols: int, /) -> _CursesWindow: Width. """ ... -def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> _CursesWindow: +def newwin(nlines: int, ncols: int, begin_y: int = ..., begin_x: int = ..., /) -> window: """ newwin(nlines, ncols, [begin_y=0, begin_x=0]) Return a new window. @@ -887,13 +886,9 @@ def unctrl(ch: _ChType, /) -> bytes: for example as ^C. Printing characters are left as they are. """ ... - -if sys.version_info < (3, 12) or sys.platform != "darwin": - # The support for macos was dropped in 3.12 - def unget_wch(ch: int | str, /) -> None: - """Push ch so the next get_wch() will return it.""" - ... - +def unget_wch(ch: int | str, /) -> None: + """Push ch so the next get_wch() will return it.""" + ... def ungetch(ch: _ChType, /) -> None: """Push ch so the next getch() will return it.""" ... @@ -931,25 +926,167 @@ def use_env(flag: bool, /) -> None: class error(Exception): ... @final -class _CursesWindow: +class window: # undocumented encoding: str @overload - def addch(self, ch: _ChType, attr: int = ...) -> None: ... + def addch(self, ch: _ChType, attr: int = ...) -> None: + """ + addch([y, x,] ch, [attr=_curses.A_NORMAL]) + Paint the character. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to add. + attr + Attributes for the character. + + Paint character ch at (y, x) with attributes attr, + overwriting any character previously painted at that location. + By default, the character position and attributes are the + current settings for the window object. + """ + ... @overload - def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: + """ + addch([y, x,] ch, [attr=_curses.A_NORMAL]) + Paint the character. 
+ + y + Y-coordinate. + x + X-coordinate. + ch + Character to add. + attr + Attributes for the character. + + Paint character ch at (y, x) with attributes attr, + overwriting any character previously painted at that location. + By default, the character position and attributes are the + current settings for the window object. + """ + ... @overload - def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... + def addnstr(self, str: str, n: int, attr: int = ...) -> None: + """ + addnstr([y, x,] str, n, [attr]) + Paint at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + n + Maximal number of characters. + attr + Attributes for characters. + + Paint at most n characters of the string str at (y, x) with + attributes attr, overwriting anything previously on the display. + By default, the character position and attributes are the + current settings for the window object. + """ + ... @overload - def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: + """ + addnstr([y, x,] str, n, [attr]) + Paint at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + n + Maximal number of characters. + attr + Attributes for characters. + + Paint at most n characters of the string str at (y, x) with + attributes attr, overwriting anything previously on the display. + By default, the character position and attributes are the + current settings for the window object. + """ + ... @overload - def addstr(self, str: str, attr: int = ...) -> None: ... + def addstr(self, str: str, attr: int = ...) -> None: + """ + addstr([y, x,] str, [attr]) + Paint the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + attr + Attributes for characters. + + Paint the string str at (y, x) with attributes attr, + overwriting anything previously on the display. 
+ By default, the character position and attributes are the + current settings for the window object. + """ + ... @overload - def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... - def attroff(self, attr: int, /) -> None: ... - def attron(self, attr: int, /) -> None: ... - def attrset(self, attr: int, /) -> None: ... - def bkgd(self, ch: _ChType, attr: int = ..., /) -> None: ... - def bkgdset(self, ch: _ChType, attr: int = ..., /) -> None: ... + def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: + """ + addstr([y, x,] str, [attr]) + Paint the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + attr + Attributes for characters. + + Paint the string str at (y, x) with attributes attr, + overwriting anything previously on the display. + By default, the character position and attributes are the + current settings for the window object. + """ + ... + def attroff(self, attr: int, /) -> None: + """Remove attribute attr from the "background" set.""" + ... + def attron(self, attr: int, /) -> None: + """Add attribute attr from the "background" set.""" + ... + def attrset(self, attr: int, /) -> None: + """Set the "background" set of attributes.""" + ... + def bkgd(self, ch: _ChType, attr: int = ..., /) -> None: + """ + Set the background property of the window. + + ch + Background character. + attr + Background attributes. + """ + ... + def bkgdset(self, ch: _ChType, attr: int = ..., /) -> None: + """ + Set the window's background. + + ch + Background character. + attr + Background attributes. + """ + ... def border( self, ls: _ChType = ..., @@ -960,11 +1097,63 @@ class _CursesWindow: tr: _ChType = ..., bl: _ChType = ..., br: _ChType = ..., - ) -> None: ... + ) -> None: + """ + Draw a border around the edges of the window. + + ls + Left side. + rs + Right side. + ts + Top side. + bs + Bottom side. + tl + Upper-left corner. + tr + Upper-right corner. + bl + Bottom-left corner. + br + Bottom-right corner. 
+ + Each parameter specifies the character to use for a specific part of the + border. The characters can be specified as integers or as one-character + strings. A 0 value for any parameter will cause the default character to be + used for that parameter. + """ + ... @overload - def box(self) -> None: ... + def box(self) -> None: + """ + box([verch=0, horch=0]) + Draw a border around the edges of the window. + + verch + Left and right side. + horch + Top and bottom side. + + Similar to border(), but both ls and rs are verch and both ts and bs are + horch. The default corner characters are always used by this function. + """ + ... @overload - def box(self, vertch: _ChType = ..., horch: _ChType = ...) -> None: ... + def box(self, vertch: _ChType = ..., horch: _ChType = ...) -> None: + """ + box([verch=0, horch=0]) + Draw a border around the edges of the window. + + verch + Left and right side. + horch + Top and bottom side. + + Similar to border(), but both ls and rs are verch and both ts and bs are + horch. The default corner characters are always used by this function. + """ + ... @overload def chgat(self, attr: int) -> None: ... @overload @@ -979,34 +1168,189 @@ class _CursesWindow: def clrtoeol(self) -> None: ... def cursyncup(self) -> None: ... @overload - def delch(self) -> None: ... + def delch(self) -> None: + """ + delch([y, x]) + Delete any character at (y, x). + + y + Y-coordinate. + x + X-coordinate. + """ + ... @overload - def delch(self, y: int, x: int) -> None: ... + def delch(self, y: int, x: int) -> None: + """ + delch([y, x]) + Delete any character at (y, x). + + y + Y-coordinate. + x + X-coordinate. + """ + ... def deleteln(self) -> None: ... @overload - def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + def derwin(self, begin_y: int, begin_x: int) -> window: + """ + derwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (window-relative coordinates). + + nlines + Height. + ncols + Width. 
+ begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + derwin() is the same as calling subwin(), except that begin_y and begin_x + are relative to the origin of the window, rather than relative to the entire + screen. + """ + ... @overload - def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... - def echochar(self, ch: _ChType, attr: int = ..., /) -> None: ... - def enclose(self, y: int, x: int, /) -> bool: ... + def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: + """ + derwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (window-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + derwin() is the same as calling subwin(), except that begin_y and begin_x + are relative to the origin of the window, rather than relative to the entire + screen. + """ + ... + def echochar(self, ch: _ChType, attr: int = ..., /) -> None: + """ + Add character ch with attribute attr, and refresh. + + ch + Character to add. + attr + Attributes for the character. + """ + ... + def enclose(self, y: int, x: int, /) -> bool: + """ + Return True if the screen-relative coordinates are enclosed by the window. + + y + Y-coordinate. + x + X-coordinate. + """ + ... def erase(self) -> None: ... def getbegyx(self) -> tuple[int, int]: ... - def getbkgd(self) -> tuple[int, int]: ... + def getbkgd(self) -> tuple[int, int]: + """Return the window's current background character/attribute pair.""" + ... + @overload + def getch(self) -> int: + """ + getch([y, x]) + Get a character code from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + + The integer returned does not have to be in ASCII range: function keys, + keypad keys and so on return numbers higher than 256. In no-delay mode, -1 + is returned if there is no input, else getch() waits until a key is pressed. + """ + ... 
+ @overload + def getch(self, y: int, x: int) -> int: + """ + getch([y, x]) + Get a character code from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + + The integer returned does not have to be in ASCII range: function keys, + keypad keys and so on return numbers higher than 256. In no-delay mode, -1 + is returned if there is no input, else getch() waits until a key is pressed. + """ + ... @overload - def getch(self) -> int: ... + def get_wch(self) -> int | str: + """ + get_wch([y, x]) + Get a wide character from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + + Return a character for most keys, or an integer for function keys, + keypad keys, and other special keys. + """ + ... @overload - def getch(self, y: int, x: int) -> int: ... - if sys.version_info < (3, 12) or sys.platform != "darwin": - # The support for macos was dropped in 3.12 - @overload - def get_wch(self) -> int | str: ... - @overload - def get_wch(self, y: int, x: int) -> int | str: ... + def get_wch(self, y: int, x: int) -> int | str: + """ + get_wch([y, x]) + Get a wide character from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + Return a character for most keys, or an integer for function keys, + keypad keys, and other special keys. + """ + ... @overload - def getkey(self) -> str: ... + def getkey(self) -> str: + """ + getkey([y, x]) + Get a character (string) from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + + Returning a string instead of an integer, as getch() does. Function keys, + keypad keys and other special keys return a multibyte string containing the + key name. In no-delay mode, an exception is raised if there is no input. + """ + ... @overload - def getkey(self, y: int, x: int) -> str: ... + def getkey(self, y: int, x: int) -> str: + """ + getkey([y, x]) + Get a character (string) from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + + Returning a string instead of an integer, as getch() does. 
Function keys, + keypad keys and other special keys return a multibyte string containing the + key name. In no-delay mode, an exception is raised if there is no input. + """ + ... def getmaxyx(self) -> tuple[int, int]: ... def getparyx(self) -> tuple[int, int]: ... @overload @@ -1019,37 +1363,222 @@ class _CursesWindow: def getstr(self, y: int, x: int, n: int) -> bytes: ... def getyx(self) -> tuple[int, int]: ... @overload - def hline(self, ch: _ChType, n: int) -> None: ... + def hline(self, ch: _ChType, n: int) -> None: + """ + hline([y, x,] ch, n, [attr=_curses.A_NORMAL]) + Display a horizontal line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the characters. + """ + ... @overload - def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: + """ + hline([y, x,] ch, n, [attr=_curses.A_NORMAL]) + Display a horizontal line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the characters. + """ + ... def idcok(self, flag: bool) -> None: ... def idlok(self, yes: bool) -> None: ... def immedok(self, flag: bool) -> None: ... @overload - def inch(self) -> int: ... + def inch(self) -> int: + """ + inch([y, x]) + Return the character at the given position in the window. + + y + Y-coordinate. + x + X-coordinate. + + The bottom 8 bits are the character proper, and upper bits are the attributes. + """ + ... @overload - def inch(self, y: int, x: int) -> int: ... + def inch(self, y: int, x: int) -> int: + """ + inch([y, x]) + Return the character at the given position in the window. + + y + Y-coordinate. + x + X-coordinate. + + The bottom 8 bits are the character proper, and upper bits are the attributes. + """ + ... @overload - def insch(self, ch: _ChType, attr: int = ...) -> None: ... + def insch(self, ch: _ChType, attr: int = ...) 
-> None: + """ + insch([y, x,] ch, [attr=_curses.A_NORMAL]) + Insert a character before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to insert. + attr + Attributes for the character. + + All characters to the right of the cursor are shifted one position right, with + the rightmost characters on the line being lost. + """ + ... @overload - def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: + """ + insch([y, x,] ch, [attr=_curses.A_NORMAL]) + Insert a character before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to insert. + attr + Attributes for the character. + + All characters to the right of the cursor are shifted one position right, with + the rightmost characters on the line being lost. + """ + ... def insdelln(self, nlines: int) -> None: ... def insertln(self) -> None: ... @overload - def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... + def insnstr(self, str: str, n: int, attr: int = ...) -> None: + """ + insnstr([y, x,] str, n, [attr]) + Insert at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + n + Maximal number of characters. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor, up to n characters. If n is zero + or negative, the entire string is inserted. All characters to the right + of the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, if + specified). + """ + ... @overload - def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) 
-> None: + """ + insnstr([y, x,] str, n, [attr]) + Insert at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + n + Maximal number of characters. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor, up to n characters. If n is zero + or negative, the entire string is inserted. All characters to the right + of the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, if + specified). + """ + ... @overload - def insstr(self, str: str, attr: int = ...) -> None: ... + def insstr(self, str: str, attr: int = ...) -> None: + """ + insstr([y, x,] str, [attr]) + Insert the string before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor. All characters to the right of + the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, + if specified). + """ + ... @overload - def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: + """ + insstr([y, x,] str, [attr]) + Insert the string before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor. All characters to the right of + the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, + if specified). + """ + ... 
@overload def instr(self, n: int = ...) -> bytes: ... @overload def instr(self, y: int, x: int, n: int = ...) -> bytes: ... - def is_linetouched(self, line: int, /) -> bool: ... + def is_linetouched(self, line: int, /) -> bool: + """ + Return True if the specified line was modified, otherwise return False. + + line + Line number. + + Raise a curses.error exception if line is not valid for the given window. + """ + ... def is_wintouched(self) -> bool: ... - def keypad(self, yes: bool) -> None: ... + def keypad(self, yes: bool, /) -> None: ... def leaveok(self, yes: bool) -> None: ... def move(self, new_y: int, new_x: int) -> None: ... def mvderwin(self, y: int, x: int) -> None: ... @@ -1057,53 +1586,309 @@ class _CursesWindow: def nodelay(self, yes: bool) -> None: ... def notimeout(self, yes: bool) -> None: ... @overload - def noutrefresh(self) -> None: ... + def noutrefresh(self) -> None: + """ + noutrefresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) + Mark for refresh but wait. + + This function updates the data structure representing the desired state of the + window, but does not force an update of the physical screen. To accomplish + that, call doupdate(). + """ + ... @overload - def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: + """ + noutrefresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) + Mark for refresh but wait. + + This function updates the data structure representing the desired state of the + window, but does not force an update of the physical screen. To accomplish + that, call doupdate(). + """ + ... @overload - def overlay(self, destwin: _CursesWindow) -> None: ... + def overlay(self, destwin: window) -> None: + """ + overlay(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol]) + Overlay the window on top of destwin. 
+ + The windows need not be the same size, only the overlapping region is copied. + This copy is non-destructive, which means that the current background + character does not overwrite the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overlay() can be used. sminrow and smincol are the upper-left coordinates + of the source window, and the other variables mark a rectangle in the + destination window. + """ + ... @overload def overlay( - self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int - ) -> None: ... + self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: + """ + overlay(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol]) + Overlay the window on top of destwin. + + The windows need not be the same size, only the overlapping region is copied. + This copy is non-destructive, which means that the current background + character does not overwrite the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overlay() can be used. sminrow and smincol are the upper-left coordinates + of the source window, and the other variables mark a rectangle in the + destination window. + """ + ... @overload - def overwrite(self, destwin: _CursesWindow) -> None: ... + def overwrite(self, destwin: window) -> None: + """ + overwrite(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, + dmaxcol]) + Overwrite the window on top of destwin. + + The windows need not be the same size, in which case only the overlapping + region is copied. This copy is destructive, which means that the current + background character overwrites the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overwrite() can be used. 
sminrow and smincol are the upper-left coordinates + of the source window, the other variables mark a rectangle in the destination + window. + """ + ... @overload def overwrite( - self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int - ) -> None: ... - def putwin(self, file: IO[Any], /) -> None: ... - def redrawln(self, beg: int, num: int, /) -> None: ... + self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: + """ + overwrite(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, + dmaxcol]) + Overwrite the window on top of destwin. + + The windows need not be the same size, in which case only the overlapping + region is copied. This copy is destructive, which means that the current + background character overwrites the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overwrite() can be used. sminrow and smincol are the upper-left coordinates + of the source window, the other variables mark a rectangle in the destination + window. + """ + ... + def putwin(self, file: IO[Any], /) -> None: + """ + Write all data associated with the window into the provided file object. + + This information can be later retrieved using the getwin() function. + """ + ... + def redrawln(self, beg: int, num: int, /) -> None: + """ + Mark the specified lines corrupted. + + beg + Starting line number. + num + The number of lines. + + They should be completely redrawn on the next refresh() call. + """ + ... def redrawwin(self) -> None: ... @overload - def refresh(self) -> None: ... + def refresh(self) -> None: + """ + refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) + Update the display immediately. + + Synchronize actual screen with previous drawing/deleting methods. + The 6 optional arguments can only be specified when the window is a pad + created with newpad(). 
The additional parameters are needed to indicate + what part of the pad and screen are involved. pminrow and pmincol specify + the upper left-hand corner of the rectangle to be displayed in the pad. + sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to + be displayed on the screen. The lower right-hand corner of the rectangle to + be displayed in the pad is calculated from the screen coordinates, since the + rectangles must be the same size. Both rectangles must be entirely contained + within their respective structures. Negative values of pminrow, pmincol, + sminrow, or smincol are treated as if they were zero. + """ + ... @overload - def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: + """ + refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) + Update the display immediately. + + Synchronize actual screen with previous drawing/deleting methods. + The 6 optional arguments can only be specified when the window is a pad + created with newpad(). The additional parameters are needed to indicate + what part of the pad and screen are involved. pminrow and pmincol specify + the upper left-hand corner of the rectangle to be displayed in the pad. + sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to + be displayed on the screen. The lower right-hand corner of the rectangle to + be displayed in the pad is calculated from the screen coordinates, since the + rectangles must be the same size. Both rectangles must be entirely contained + within their respective structures. Negative values of pminrow, pmincol, + sminrow, or smincol are treated as if they were zero. + """ + ... def resize(self, nlines: int, ncols: int) -> None: ... - def scroll(self, lines: int = ...) -> None: ... + def scroll(self, lines: int = ...) 
-> None: + """ + scroll([lines=1]) + Scroll the screen or scrolling region. + + lines + Number of lines to scroll. + + Scroll upward if the argument is positive and downward if it is negative. + """ + ... def scrollok(self, flag: bool) -> None: ... - def setscrreg(self, top: int, bottom: int, /) -> None: ... + def setscrreg(self, top: int, bottom: int, /) -> None: + """ + Define a software scrolling region. + + top + First line number. + bottom + Last line number. + + All scrolling actions will take place in this region. + """ + ... def standend(self) -> None: ... def standout(self) -> None: ... @overload - def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + def subpad(self, begin_y: int, begin_x: int) -> window: + """ + subwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + ... @overload - def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: + """ + subwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + ... @overload - def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + def subwin(self, begin_y: int, begin_x: int) -> window: + """ + subwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. 
+ + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + ... @overload - def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: + """ + subwin([nlines=0, ncols=0,] begin_y, begin_x) + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + ... def syncdown(self) -> None: ... def syncok(self, flag: bool) -> None: ... def syncup(self) -> None: ... def timeout(self, delay: int) -> None: ... - def touchline(self, start: int, count: int, changed: bool = ...) -> None: ... + def touchline(self, start: int, count: int, changed: bool = ...) -> None: + """ + touchline(start, count, [changed=True]) + Pretend count lines have been changed, starting with line start. + + If changed is supplied, it specifies whether the affected lines are marked + as having been changed (changed=True) or unchanged (changed=False). + """ + ... def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload - def vline(self, ch: _ChType, n: int) -> None: ... + def vline(self, ch: _ChType, n: int) -> None: + """ + vline([y, x,] ch, n, [attr=_curses.A_NORMAL]) + Display a vertical line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the character. + """ + ... @overload - def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: + """ + vline([y, x,] ch, n, [attr=_curses.A_NORMAL]) + Display a vertical line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. 
+ attr + Attributes for the character. + """ + ... class _ncurses_version(NamedTuple): major: int @@ -1111,4 +1896,3 @@ class _ncurses_version(NamedTuple): patch: int ncurses_version: _ncurses_version -window = _CursesWindow # undocumented diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_decimal.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_decimal.pyi index 43662dc..864d094 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_decimal.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_decimal.pyi @@ -1,54 +1,55 @@ """C decimal arithmetic module""" -import numbers import sys -from collections.abc import Container, Sequence +from decimal import ( + Clamped as Clamped, + Context as Context, + ConversionSyntax as ConversionSyntax, + Decimal as Decimal, + DecimalException as DecimalException, + DecimalTuple as DecimalTuple, + DivisionByZero as DivisionByZero, + DivisionImpossible as DivisionImpossible, + DivisionUndefined as DivisionUndefined, + FloatOperation as FloatOperation, + Inexact as Inexact, + InvalidContext as InvalidContext, + InvalidOperation as InvalidOperation, + Overflow as Overflow, + Rounded as Rounded, + Subnormal as Subnormal, + Underflow as Underflow, +) from types import TracebackType -from typing import Any, ClassVar, Final, Literal, NamedTuple, overload -from typing_extensions import Self, TypeAlias +from typing import Final +from typing_extensions import TypeAlias -_Decimal: TypeAlias = Decimal | int -_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] -_ComparableNum: TypeAlias = Decimal | float | numbers.Rational +_TrapType: TypeAlias = type[DecimalException] + +class _ContextManager: + new_context: Context + saved_context: Context + def __init__(self, new_context: Context) -> None: ... + def __enter__(self) -> Context: ... 
+ def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... __version__: Final[str] __libmpdec_version__: Final[str] -class DecimalTuple(NamedTuple): - """DecimalTuple(sign, digits, exponent)""" - sign: int - digits: tuple[int, ...] - exponent: int | Literal["n", "N", "F"] - -ROUND_DOWN: str -ROUND_HALF_UP: str -ROUND_HALF_EVEN: str -ROUND_CEILING: str -ROUND_FLOOR: str -ROUND_UP: str -ROUND_HALF_DOWN: str -ROUND_05UP: str -HAVE_CONTEXTVAR: bool -HAVE_THREADS: bool -MAX_EMAX: int -MAX_PREC: int -MIN_EMIN: int -MIN_ETINY: int - -class DecimalException(ArithmeticError): ... -class Clamped(DecimalException): ... -class InvalidOperation(DecimalException): ... -class ConversionSyntax(InvalidOperation): ... -class DivisionByZero(DecimalException, ZeroDivisionError): ... -class DivisionImpossible(InvalidOperation): ... -class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... -class Inexact(DecimalException): ... -class InvalidContext(InvalidOperation): ... -class Rounded(DecimalException): ... -class Subnormal(DecimalException): ... -class Overflow(Inexact, Rounded): ... -class Underflow(Inexact, Rounded, Subnormal): ... -class FloatOperation(DecimalException, TypeError): ... +ROUND_DOWN: Final[str] +ROUND_HALF_UP: Final[str] +ROUND_HALF_EVEN: Final[str] +ROUND_CEILING: Final[str] +ROUND_FLOOR: Final[str] +ROUND_UP: Final[str] +ROUND_HALF_DOWN: Final[str] +ROUND_05UP: Final[str] +HAVE_CONTEXTVAR: Final[bool] +HAVE_THREADS: Final[bool] +MAX_EMAX: Final[int] +MAX_PREC: Final[int] +MIN_EMIN: Final[int] +MIN_ETINY: Final[int] def setcontext(context: Context, /) -> None: """Set a new default context.""" @@ -79,824 +80,7 @@ if sys.version_info >= (3, 11): ... 
else: - def localcontext(ctx: Context | None = None) -> _ContextManager: - """ - Return a context manager that will set the default context to a copy of ctx - on entry to the with-statement and restore the previous default context when - exiting the with-statement. If no context is specified, a copy of the current - default context is used. - """ - ... - -class Decimal: - """ - Construct a new Decimal object. 'value' can be an integer, string, tuple, - or another Decimal object. If no value is given, return Decimal('0'). The - context does not affect the conversion and is only passed to determine if - the InvalidOperation trap is active. - """ - def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... - @classmethod - def from_float(cls, f: float, /) -> Self: - """ - Class method that converts a float to a decimal number, exactly. - Since 0.1 is not exactly representable in binary floating point, - Decimal.from_float(0.1) is not the same as Decimal('0.1'). - - >>> Decimal.from_float(0.1) - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_float(float('nan')) - Decimal('NaN') - >>> Decimal.from_float(float('inf')) - Decimal('Infinity') - >>> Decimal.from_float(float('-inf')) - Decimal('-Infinity') - """ - ... - def __bool__(self) -> bool: - """True if self else False""" - ... - def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Compare self to other. Return a decimal value: - - a or b is a NaN ==> Decimal('NaN') - a < b ==> Decimal('-1') - a == b ==> Decimal('0') - a > b ==> Decimal('1') - """ - ... - def __hash__(self) -> int: - """Return hash(self).""" - ... - def as_tuple(self) -> DecimalTuple: - """Return a tuple representation of the number.""" - ... 
- def as_integer_ratio(self) -> tuple[int, int]: - """ - Decimal.as_integer_ratio() -> (int, int) - - Return a pair of integers, whose ratio is exactly equal to the original - Decimal and with a positive denominator. The ratio is in lowest terms. - Raise OverflowError on infinities and a ValueError on NaNs. - """ - ... - def to_eng_string(self, context: Context | None = None) -> str: - """ - Convert to an engineering-type string. Engineering notation has an exponent - which is a multiple of 3, so there are up to 3 digits left of the decimal - place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). - - The value of context.capitals determines whether the exponent sign is lower - or upper case. Otherwise, the context does not affect the operation. - """ - ... - def __abs__(self) -> Decimal: - """abs(self)""" - ... - def __add__(self, value: _Decimal, /) -> Decimal: - """Return self+value.""" - ... - def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(self, value).""" - ... - def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... - def __floordiv__(self, value: _Decimal, /) -> Decimal: - """Return self//value.""" - ... - def __ge__(self, value: _ComparableNum, /) -> bool: - """Return self>=value.""" - ... - def __gt__(self, value: _ComparableNum, /) -> bool: - """Return self>value.""" - ... - def __le__(self, value: _ComparableNum, /) -> bool: - """Return self<=value.""" - ... - def __lt__(self, value: _ComparableNum, /) -> bool: - """Return self Decimal: - """Return self%value.""" - ... - def __mul__(self, value: _Decimal, /) -> Decimal: - """Return self*value.""" - ... - def __neg__(self) -> Decimal: - """-self""" - ... - def __pos__(self) -> Decimal: - """+self""" - ... - def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: - """Return pow(self, value, mod).""" - ... - def __radd__(self, value: _Decimal, /) -> Decimal: - """Return value+self.""" - ... 
- def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(value, self).""" - ... - def __rfloordiv__(self, value: _Decimal, /) -> Decimal: - """Return value//self.""" - ... - def __rmod__(self, value: _Decimal, /) -> Decimal: - """Return value%self.""" - ... - def __rmul__(self, value: _Decimal, /) -> Decimal: - """Return value*self.""" - ... - def __rsub__(self, value: _Decimal, /) -> Decimal: - """Return value-self.""" - ... - def __rtruediv__(self, value: _Decimal, /) -> Decimal: - """Return value/self.""" - ... - def __sub__(self, value: _Decimal, /) -> Decimal: - """Return self-value.""" - ... - def __truediv__(self, value: _Decimal, /) -> Decimal: - """Return self/value.""" - ... - def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Return the remainder from dividing self by other. This differs from - self % other in that the sign of the remainder is chosen so as to minimize - its absolute value. More precisely, the return value is self - n * other - where n is the integer nearest to the exact value of self / other, and - if two integers are equally near then the even one is chosen. - - If the result is zero then its sign will be the sign of self. - """ - ... - def __float__(self) -> float: - """float(self)""" - ... - def __int__(self) -> int: - """int(self)""" - ... - def __trunc__(self) -> int: ... - @property - def real(self) -> Decimal: ... - @property - def imag(self) -> Decimal: ... - def conjugate(self) -> Decimal: - """Return self.""" - ... - def __complex__(self) -> complex: ... - @overload - def __round__(self) -> int: ... - @overload - def __round__(self, ndigits: int, /) -> Decimal: ... - def __floor__(self) -> int: ... - def __ceil__(self) -> int: ... - def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: - """ - Fused multiply-add. Return self*other+third with no rounding of the - intermediate product self*other. 
- - >>> Decimal(2).fma(3, 5) - Decimal('11') - """ - ... - def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: - """Return pow(value, self, mod).""" - ... - def normalize(self, context: Context | None = None) -> Decimal: - """ - Normalize the number by stripping the rightmost trailing zeros and - converting any result equal to Decimal('0') to Decimal('0e0'). Used - for producing canonical values for members of an equivalence class. - For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize - to the equivalent value Decimal('32.1'). - """ - ... - def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: - """ - Return a value equal to the first operand after rounding and having the - exponent of the second operand. - - >>> Decimal('1.41421356').quantize(Decimal('1.000')) - Decimal('1.414') - - Unlike other operations, if the length of the coefficient after the quantize - operation would be greater than precision, then an InvalidOperation is signaled. - This guarantees that, unless there is an error condition, the quantized exponent - is always equal to that of the right-hand operand. - - Also unlike other operations, quantize never signals Underflow, even if the - result is subnormal and inexact. - - If the exponent of the second operand is larger than that of the first, then - rounding may be necessary. In this case, the rounding mode is determined by the - rounding argument if given, else by the given context argument; if neither - argument is given, the rounding mode of the current thread's context is used. - """ - ... - def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: - """ - Test whether self and other have the same exponent or whether both are NaN. - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. 
As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - """ - ... - def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """ - Round to the nearest integer, signaling Inexact or Rounded as appropriate if - rounding occurs. The rounding mode is determined by the rounding parameter - if given, else by the given context. If neither parameter is given, then the - rounding mode of the current default context is used. - """ - ... - def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """ - Round to the nearest integer without signaling Inexact or Rounded. The - rounding mode is determined by the rounding parameter if given, else by - the given context. If neither parameter is given, then the rounding mode - of the current default context is used. - """ - ... - def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """ - Identical to the to_integral_value() method. The to_integral() name has been - kept for compatibility with older versions. - """ - ... - def sqrt(self, context: Context | None = None) -> Decimal: - """ - Return the square root of the argument to full precision. The result is - correctly rounded using the ROUND_HALF_EVEN rounding mode. - """ - ... - def max(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Maximum of self and other. If one operand is a quiet NaN and the other is - numeric, the numeric operand is returned. - """ - ... - def min(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Minimum of self and other. If one operand is a quiet NaN and the other is - numeric, the numeric operand is returned. - """ - ... - def adjusted(self) -> int: - """Return the adjusted exponent of the number. Defined as exp + digits - 1.""" - ... 
- def canonical(self) -> Decimal: - """ - Return the canonical encoding of the argument. Currently, the encoding - of a Decimal instance is always canonical, so this operation returns its - argument unchanged. - """ - ... - def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Identical to compare, except that all NaNs signal.""" - ... - def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Compare two operands using their abstract representation rather than - their numerical value. Similar to the compare() method, but the result - gives a total ordering on Decimal instances. Two Decimal instances with - the same numeric value but different representations compare unequal - in this ordering: - - >>> Decimal('12.0').compare_total(Decimal('12')) - Decimal('-1') - - Quiet and signaling NaNs are also included in the total ordering. The result - of this function is Decimal('0') if both operands have the same representation, - Decimal('-1') if the first operand is lower in the total order than the second, - and Decimal('1') if the first operand is higher in the total order than the - second operand. See the specification for details of the total order. - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - """ - ... - def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Compare two operands using their abstract representation rather than their - value as in compare_total(), but ignoring the sign of each operand. - - x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. 
As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - """ - ... - def copy_abs(self) -> Decimal: - """ - Return the absolute value of the argument. This operation is unaffected by - context and is quiet: no flags are changed and no rounding is performed. - """ - ... - def copy_negate(self) -> Decimal: - """ - Return the negation of the argument. This operation is unaffected by context - and is quiet: no flags are changed and no rounding is performed. - """ - ... - def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Return a copy of the first operand with the sign set to be the same as the - sign of the second operand. For example: - - >>> Decimal('2.3').copy_sign(Decimal('-1.5')) - Decimal('-2.3') - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - """ - ... - def exp(self, context: Context | None = None) -> Decimal: - """ - Return the value of the (natural) exponential function e**x at the given - number. The function always uses the ROUND_HALF_EVEN mode and the result - is correctly rounded. - """ - ... - def is_canonical(self) -> bool: - """ - Return True if the argument is canonical and False otherwise. Currently, - a Decimal instance is always canonical, so this operation always returns - True. - """ - ... - def is_finite(self) -> bool: - """ - Return True if the argument is a finite number, and False if the argument - is infinite or a NaN. - """ - ... - def is_infinite(self) -> bool: - """ - Return True if the argument is either positive or negative infinity and - False otherwise. - """ - ... - def is_nan(self) -> bool: - """ - Return True if the argument is a (quiet or signaling) NaN and False - otherwise. - """ - ... 
- def is_normal(self, context: Context | None = None) -> bool: - """ - Return True if the argument is a normal finite non-zero number with an - adjusted exponent greater than or equal to Emin. Return False if the - argument is zero, subnormal, infinite or a NaN. - """ - ... - def is_qnan(self) -> bool: - """Return True if the argument is a quiet NaN, and False otherwise.""" - ... - def is_signed(self) -> bool: - """ - Return True if the argument has a negative sign and False otherwise. - Note that both zeros and NaNs can carry signs. - """ - ... - def is_snan(self) -> bool: - """Return True if the argument is a signaling NaN and False otherwise.""" - ... - def is_subnormal(self, context: Context | None = None) -> bool: - """ - Return True if the argument is subnormal, and False otherwise. A number is - subnormal if it is non-zero, finite, and has an adjusted exponent less - than Emin. - """ - ... - def is_zero(self) -> bool: - """ - Return True if the argument is a (positive or negative) zero and False - otherwise. - """ - ... - def ln(self, context: Context | None = None) -> Decimal: - """ - Return the natural (base e) logarithm of the operand. The function always - uses the ROUND_HALF_EVEN mode and the result is correctly rounded. - """ - ... - def log10(self, context: Context | None = None) -> Decimal: - """ - Return the base ten logarithm of the operand. The function always uses the - ROUND_HALF_EVEN mode and the result is correctly rounded. - """ - ... - def logb(self, context: Context | None = None) -> Decimal: - """ - For a non-zero number, return the adjusted exponent of the operand as a - Decimal instance. If the operand is a zero, then Decimal('-Infinity') is - returned and the DivisionByZero condition is raised. If the operand is - an infinity then Decimal('Infinity') is returned. - """ - ... - def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'and' of the two (logical) operands.""" - ... 
- def logical_invert(self, context: Context | None = None) -> Decimal: - """Return the digit-wise inversion of the (logical) operand.""" - ... - def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'or' of the two (logical) operands.""" - ... - def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'exclusive or' of the two (logical) operands.""" - ... - def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Similar to the max() method, but the comparison is done using the absolute - values of the operands. - """ - ... - def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Similar to the min() method, but the comparison is done using the absolute - values of the operands. - """ - ... - def next_minus(self, context: Context | None = None) -> Decimal: - """ - Return the largest number representable in the given context (or in the - current default context if no context is given) that is smaller than the - given operand. - """ - ... - def next_plus(self, context: Context | None = None) -> Decimal: - """ - Return the smallest number representable in the given context (or in the - current default context if no context is given) that is larger than the - given operand. - """ - ... - def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - If the two operands are unequal, return the number closest to the first - operand in the direction of the second operand. If both operands are - numerically equal, return a copy of the first operand with the sign set - to be the same as the sign of the second operand. - """ - ... - def number_class(self, context: Context | None = None) -> str: - """ - Return a string describing the class of the operand. The returned value - is one of the following ten strings: - - * '-Infinity', indicating that the operand is negative infinity. 
- * '-Normal', indicating that the operand is a negative normal number. - * '-Subnormal', indicating that the operand is negative and subnormal. - * '-Zero', indicating that the operand is a negative zero. - * '+Zero', indicating that the operand is a positive zero. - * '+Subnormal', indicating that the operand is positive and subnormal. - * '+Normal', indicating that the operand is a positive normal number. - * '+Infinity', indicating that the operand is positive infinity. - * 'NaN', indicating that the operand is a quiet NaN (Not a Number). - * 'sNaN', indicating that the operand is a signaling NaN. - """ - ... - def radix(self) -> Decimal: - """ - Return Decimal(10), the radix (base) in which the Decimal class does - all its arithmetic. Included for compatibility with the specification. - """ - ... - def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Return the result of rotating the digits of the first operand by an amount - specified by the second operand. The second operand must be an integer in - the range -precision through precision. The absolute value of the second - operand gives the number of places to rotate. If the second operand is - positive then rotation is to the left; otherwise rotation is to the right. - The coefficient of the first operand is padded on the left with zeros to - length precision if necessary. The sign and exponent of the first operand are - unchanged. - """ - ... - def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Return the first operand with the exponent adjusted the second. Equivalently, - return the first operand multiplied by 10**other. The second operand must be - an integer. - """ - ... - def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: - """ - Return the result of shifting the digits of the first operand by an amount - specified by the second operand. 
The second operand must be an integer in - the range -precision through precision. The absolute value of the second - operand gives the number of places to shift. If the second operand is - positive, then the shift is to the left; otherwise the shift is to the - right. Digits shifted into the coefficient are zeros. The sign and exponent - of the first operand are unchanged. - """ - ... - def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... - def __copy__(self) -> Self: ... - def __deepcopy__(self, memo: Any, /) -> Self: ... - def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ... - -class _ContextManager: - new_context: Context - saved_context: Context - def __init__(self, new_context: Context) -> None: ... - def __enter__(self) -> Context: ... - def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - -_TrapType: TypeAlias = type[DecimalException] - -class Context: - """ - The context affects almost all operations and controls rounding, - Over/Underflow, raising of exceptions and much more. A new context - can be constructed as follows: - - >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, - ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, - ... traps=[InvalidOperation, DivisionByZero, Overflow], - ... flags=[]) - >>> - """ - # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, - # even settable attributes like `prec` and `rounding`, - # but that's inexpressable in the stub. 
- # Type checkers either ignore it or misinterpret it - # if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub - prec: int - rounding: str - Emin: int - Emax: int - capitals: int - clamp: int - traps: dict[_TrapType, bool] - flags: dict[_TrapType, bool] - def __init__( - self, - prec: int | None = ..., - rounding: str | None = ..., - Emin: int | None = ..., - Emax: int | None = ..., - capitals: int | None = ..., - clamp: int | None = ..., - flags: None | dict[_TrapType, bool] | Container[_TrapType] = ..., - traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., - _ignored_flags: list[_TrapType] | None = ..., - ) -> None: ... - def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... - def clear_flags(self) -> None: - """Reset all flags to False.""" - ... - def clear_traps(self) -> None: - """Set all traps to False.""" - ... - def copy(self) -> Context: - """Return a duplicate of the context with all flags cleared.""" - ... - def __copy__(self) -> Context: ... - # see https://github.com/python/cpython/issues/94107 - __hash__: ClassVar[None] # type: ignore[assignment] - def Etiny(self) -> int: - """ - Return a value equal to Emin - prec + 1, which is the minimum exponent value - for subnormal results. When underflow occurs, the exponent is set to Etiny. - """ - ... - def Etop(self) -> int: - """ - Return a value equal to Emax - prec + 1. This is the maximum exponent - if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() - must not be negative. - """ - ... - def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: - """ - Create a new Decimal instance from num, using self as the context. Unlike the - Decimal constructor, this function observes the context limits. - """ - ... - def create_decimal_from_float(self, f: float, /) -> Decimal: - """ - Create a new Decimal instance from float f. Unlike the Decimal.from_float() - class method, this function observes the context limits. - """ - ... 
- def abs(self, x: _Decimal, /) -> Decimal: - """Return the absolute value of x.""" - ... - def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the sum of x and y.""" - ... - def canonical(self, x: Decimal, /) -> Decimal: - """Return a new instance of x.""" - ... - def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically.""" - ... - def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically. All NaNs signal.""" - ... - def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation.""" - ... - def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation, ignoring sign.""" - ... - def copy_abs(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign set to 0.""" - ... - def copy_decimal(self, x: _Decimal, /) -> Decimal: - """Return a copy of Decimal x.""" - ... - def copy_negate(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign inverted.""" - ... - def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Copy the sign from y to x.""" - ... - def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y.""" - ... - def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y, truncated to an integer.""" - ... - def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return quotient and remainder of the division x / y.""" - ... - def exp(self, x: _Decimal, /) -> Decimal: - """Return e ** x.""" - ... - def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: - """Return x multiplied by y, plus z.""" - ... - def is_canonical(self, x: _Decimal, /) -> bool: - """Return True if x is canonical, False otherwise.""" - ... - def is_finite(self, x: _Decimal, /) -> bool: - """Return True if x is finite, False otherwise.""" - ... 
- def is_infinite(self, x: _Decimal, /) -> bool: - """Return True if x is infinite, False otherwise.""" - ... - def is_nan(self, x: _Decimal, /) -> bool: - """Return True if x is a qNaN or sNaN, False otherwise.""" - ... - def is_normal(self, x: _Decimal, /) -> bool: - """Return True if x is a normal number, False otherwise.""" - ... - def is_qnan(self, x: _Decimal, /) -> bool: - """Return True if x is a quiet NaN, False otherwise.""" - ... - def is_signed(self, x: _Decimal, /) -> bool: - """Return True if x is negative, False otherwise.""" - ... - def is_snan(self, x: _Decimal, /) -> bool: - """Return True if x is a signaling NaN, False otherwise.""" - ... - def is_subnormal(self, x: _Decimal, /) -> bool: - """Return True if x is subnormal, False otherwise.""" - ... - def is_zero(self, x: _Decimal, /) -> bool: - """Return True if x is a zero, False otherwise.""" - ... - def ln(self, x: _Decimal, /) -> Decimal: - """Return the natural (base e) logarithm of x.""" - ... - def log10(self, x: _Decimal, /) -> Decimal: - """Return the base 10 logarithm of x.""" - ... - def logb(self, x: _Decimal, /) -> Decimal: - """Return the exponent of the magnitude of the operand's MSD.""" - ... - def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise and of x and y.""" - ... - def logical_invert(self, x: _Decimal, /) -> Decimal: - """Invert all digits of x.""" - ... - def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise or of x and y.""" - ... - def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise xor of x and y.""" - ... - def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the maximum.""" - ... - def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored.""" - ... - def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the minimum.""" - ... 
- def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored.""" - ... - def minus(self, x: _Decimal, /) -> Decimal: - """ - Minus corresponds to the unary prefix minus operator in Python, but applies - the context to the result. - """ - ... - def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the product of x and y.""" - ... - def next_minus(self, x: _Decimal, /) -> Decimal: - """Return the largest representable number smaller than x.""" - ... - def next_plus(self, x: _Decimal, /) -> Decimal: - """Return the smallest representable number larger than x.""" - ... - def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the number closest to x, in the direction towards y.""" - ... - def normalize(self, x: _Decimal, /) -> Decimal: - """Reduce x to its simplest form. Alias for reduce(x).""" - ... - def number_class(self, x: _Decimal, /) -> str: - """Return an indication of the class of x.""" - ... - def plus(self, x: _Decimal, /) -> Decimal: - """ - Plus corresponds to the unary prefix plus operator in Python, but applies - the context to the result. - """ - ... - def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: - """ - Compute a**b. If 'a' is negative, then 'b' must be integral. The result - will be inexact unless 'a' is integral and the result is finite and can - be expressed exactly in 'precision' digits. In the Python version the - result is always correctly rounded, in the C version the result is almost - always correctly rounded. - - If modulo is given, compute (a**b) % modulo. The following restrictions - hold: - - * all three arguments must be integral - * 'b' must be nonnegative - * at least one of 'a' or 'b' must be nonzero - * modulo must be nonzero and less than 10**prec in absolute value - """ - ... 
- def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a value equal to x (rounded), having the exponent of y.""" - ... - def radix(self) -> Decimal: - """Return 10.""" - ... - def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """ - Return the remainder from integer division. The sign of the result, - if non-zero, is the same as that of the original dividend. - """ - ... - def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """ - Return x - y * n, where n is the integer nearest the exact value of x / y - (if the result is 0 then its sign will be the sign of x). - """ - ... - def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, rotated by y places.""" - ... - def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: - """Return True if the two operands have the same exponent.""" - ... - def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the first operand after adding the second value to its exp.""" - ... - def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, shifted by y places.""" - ... - def sqrt(self, x: _Decimal, /) -> Decimal: - """Square root of a non-negative number to context precision.""" - ... - def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the difference between x and y.""" - ... - def to_eng_string(self, x: _Decimal, /) -> str: - """Convert a number to a string, using engineering notation.""" - ... - def to_sci_string(self, x: _Decimal, /) -> str: - """Convert a number to a string using scientific notation.""" - ... - def to_integral_exact(self, x: _Decimal, /) -> Decimal: - """Round to an integer. Signal if the result is rounded or inexact.""" - ... - def to_integral_value(self, x: _Decimal, /) -> Decimal: - """Round to an integer.""" - ... - def to_integral(self, x: _Decimal, /) -> Decimal: - """Identical to to_integral_value(x).""" - ... 
+ def localcontext(ctx: Context | None = None) -> _ContextManager: ... DefaultContext: Context BasicContext: Context diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_imp.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_imp.pyi index 37c476d..a005478 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_imp.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_imp.pyi @@ -78,6 +78,4 @@ if sys.version_info >= (3, 11): ... else: - def get_frozen_object(name: str, /) -> types.CodeType: - """Create a code object for a frozen module.""" - ... + def get_frozen_object(name: str, /) -> types.CodeType: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpchannels.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpchannels.pyi new file mode 100644 index 0000000..b1e175c --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpchannels.pyi @@ -0,0 +1,258 @@ +""" +This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. +""" + +from _typeshed import structseq +from typing import Any, Final, Literal, SupportsIndex, final +from typing_extensions import Buffer, Self + +class ChannelError(RuntimeError): ... +class ChannelClosedError(ChannelError): ... +class ChannelEmptyError(ChannelError): ... +class ChannelNotEmptyError(ChannelError): ... +class ChannelNotFoundError(ChannelError): ... + +# Mark as final, since instantiating ChannelID is not supported. +@final +class ChannelID: + """A channel ID identifies a channel and may be used as an int.""" + @property + def end(self) -> Literal["send", "recv", "both"]: + """'send', 'recv', or 'both'""" + ... + @property + def send(self) -> Self: + """the 'send' end of the channel""" + ... + @property + def recv(self) -> Self: + """the 'recv' end of the channel""" + ... 
+ def __eq__(self, other: object) -> bool: + """Return self==value.""" + ... + def __ge__(self, other: ChannelID) -> bool: + """Return self>=value.""" + ... + def __gt__(self, other: ChannelID) -> bool: + """Return self>value.""" + ... + def __hash__(self) -> int: + """Return hash(self).""" + ... + def __index__(self) -> int: + """Return self converted to an integer, if self is suitable for use as an index into a list.""" + ... + def __int__(self) -> int: + """int(self)""" + ... + def __le__(self, other: ChannelID) -> bool: + """Return self<=value.""" + ... + def __lt__(self, other: ChannelID) -> bool: + """Return self bool: + """Return self!=value.""" + ... + +@final +class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]): + """ + ChannelInfo + + A named tuple of a channel's state. + """ + __match_args__: Final = ( + "open", + "closing", + "closed", + "count", + "num_interp_send", + "num_interp_send_released", + "num_interp_recv", + "num_interp_recv_released", + ) + @property + def open(self) -> bool: + """both ends are open""" + ... + @property + def closing(self) -> bool: + """send is closed, recv is non-empty""" + ... + @property + def closed(self) -> bool: + """both ends are closed""" + ... + @property + def count(self) -> int: + """queued objects""" + ... + @property + def num_interp_send(self) -> int: + """interpreters bound to the send end""" + ... + @property + def num_interp_send_released(self) -> int: + """interpreters bound to the send end and released""" + ... + @property + def num_interp_recv(self) -> int: + """interpreters bound to the send end""" + ... + @property + def num_interp_recv_released(self) -> int: + """interpreters bound to the send end and released""" + ... + @property + def num_interp_both(self) -> int: + """interpreters bound to both ends""" + ... + @property + def num_interp_both_recv_released(self) -> int: + """interpreters bound to both ends and released_from_the recv end""" + ... 
+ @property + def num_interp_both_send_released(self) -> int: + """interpreters bound to both ends and released_from_the send end""" + ... + @property + def num_interp_both_released(self) -> int: + """interpreters bound to both ends and released_from_both""" + ... + @property + def recv_associated(self) -> bool: + """current interpreter is bound to the recv end""" + ... + @property + def recv_released(self) -> bool: + """current interpreter *was* bound to the recv end""" + ... + @property + def send_associated(self) -> bool: + """current interpreter is bound to the send end""" + ... + @property + def send_released(self) -> bool: + """current interpreter *was* bound to the send end""" + ... + +def create(unboundop: Literal[1, 2, 3]) -> ChannelID: + """ + channel_create(unboundop) -> cid + + Create a new cross-interpreter channel and return a unique generated ID. + """ + ... +def destroy(cid: SupportsIndex) -> None: + """ + channel_destroy(cid) + + Close and finalize the channel. Afterward attempts to use the channel + will behave as though it never existed. + """ + ... +def list_all() -> list[ChannelID]: + """ + channel_list_all() -> [cid] + + Return the list of all IDs for active channels. + """ + ... +def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: + """ + channel_list_interpreters(cid, *, send) -> [id] + + Return the list of all interpreter IDs associated with an end of the channel. + + The 'send' argument should be a boolean indicating whether to use the send or + receive end. + """ + ... +def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: + """ + channel_send(cid, obj, *, blocking=True, timeout=None) + + Add the object's data to the channel's queue. + By default this waits for the object to be received. + """ + ... 
+def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: + """ + channel_send_buffer(cid, obj, *, blocking=True, timeout=None) + + Add the object's buffer to the channel's queue. + By default this waits for the object to be received. + """ + ... +def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: + """ + channel_recv(cid, [default]) -> (obj, unboundop) + + Return a new object from the data at the front of the channel's queue. + + If there is nothing to receive then raise ChannelEmptyError, unless + a default value is provided. In that case return it. + """ + ... +def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: + """ + channel_close(cid, *, send=None, recv=None, force=False) + + Close the channel for all interpreters. + + If the channel is empty then the keyword args are ignored and both + ends are immediately closed. Otherwise, if 'force' is True then + all queued items are released and both ends are immediately + closed. + + If the channel is not empty *and* 'force' is False then following + happens: + + * recv is True (regardless of send): + - raise ChannelNotEmptyError + * recv is None and send is None: + - raise ChannelNotEmptyError + * send is True and recv is not True: + - fully close the 'send' end + - close the 'recv' end to interpreters not already receiving + - fully close it once empty + + Closing an already closed channel results in a ChannelClosedError. + + Once the channel's ID has no more ref counts in any interpreter + the channel will be destroyed. + """ + ... +def get_count(cid: SupportsIndex) -> int: + """ + get_count(cid) + + Return the number of items in the channel. + """ + ... +def get_info(cid: SupportsIndex) -> ChannelInfo: + """ + get_info(cid) + + Return details about the channel. + """ + ... 
+def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: + """ + get_channel_defaults(cid) + + Return the channel's default values, set when it was created. + """ + ... +def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: + """ + channel_release(cid, *, send=None, recv=None, force=True) + + Close the channel for the current interpreter. 'send' and 'recv' + (bool) may be used to indicate the ends to close. By default both + ends are closed. Closing an already closed end is a noop. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpqueues.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpqueues.pyi new file mode 100644 index 0000000..c51960a --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpqueues.pyi @@ -0,0 +1,100 @@ +""" +This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. +""" + +from typing import Any, SupportsIndex + +class QueueError(RuntimeError): + """Indicates that a queue-related error happened.""" + ... +class QueueNotFoundError(QueueError): ... + +def bind(qid: SupportsIndex) -> None: + """ + bind(qid) + + Take a reference to the identified queue. + The queue is not destroyed until there are no references left. + """ + ... +def create(maxsize: SupportsIndex, fmt: SupportsIndex) -> int: + """ + create(maxsize, fmt, unboundop) -> qid + + Create a new cross-interpreter queue and return its unique generated ID. + It is a new reference as though bind() had been called on the queue. + + The caller is responsible for calling destroy() for the new queue + before the runtime is finalized. + """ + ... +def destroy(qid: SupportsIndex) -> None: + """ + destroy(qid) + + Clear and destroy the queue. Afterward attempts to use the queue + will behave as though it never existed. + """ + ... 
+def get(qid: SupportsIndex) -> tuple[Any, int]: + """ + get(qid) -> (obj, fmt) + + Return a new object from the data at the front of the queue. + The object's format is also returned. + + If there is nothing to receive then raise QueueEmpty. + """ + ... +def get_count(qid: SupportsIndex) -> int: + """ + get_count(qid) + + Return the number of items in the queue. + """ + ... +def get_maxsize(qid: SupportsIndex) -> int: + """ + get_maxsize(qid) + + Return the maximum number of items in the queue. + """ + ... +def get_queue_defaults(qid: SupportsIndex) -> tuple[int]: + """ + get_queue_defaults(qid) + + Return the queue's default values, set when it was created. + """ + ... +def is_full(qid: SupportsIndex) -> bool: + """ + is_full(qid) + + Return true if the queue has a maxsize and has reached it. + """ + ... +def list_all() -> list[tuple[int, int]]: + """ + list_all() -> [(qid, fmt)] + + Return the list of IDs for all queues. + Each corresponding default format is also included. + """ + ... +def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex) -> None: + """ + put(qid, obj, fmt) + + Add the object's data to the queue. + """ + ... +def release(qid: SupportsIndex) -> None: + """ + release(qid) + + Release a reference to the queue. + The queue is destroyed once there are no references left. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpreters.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpreters.pyi new file mode 100644 index 0000000..19443fb --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_interpreters.pyi @@ -0,0 +1,204 @@ +""" +This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. 
+""" + +import types +from collections.abc import Callable, Mapping +from typing import Final, Literal, SupportsIndex +from typing_extensions import TypeAlias + +_Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""] + +class InterpreterError(Exception): + """A cross-interpreter operation failed""" + ... +class InterpreterNotFoundError(InterpreterError): + """An interpreter was not found""" + ... +class NotShareableError(Exception): ... + +class CrossInterpreterBufferView: + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object.""" + ... + +def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: + """ + new_config(name='isolated', /, **overrides) -> type.SimpleNamespace + + Return a representation of a new PyInterpreterConfig. + + The name determines the initial values of the config. Supported named + configs are: default, isolated, legacy, and empty. + + Any keyword arguments are set on the corresponding config fields, + overriding the initial values. + """ + ... +def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: + """ + create([config], *, reqrefs=False) -> ID + + Create a new interpreter and return a unique generated ID. + + The caller is responsible for destroying the interpreter before exiting, + typically by using _interpreters.destroy(). This can be managed + automatically by passing "reqrefs=True" and then using _incref() and + _decref()` appropriately. + + "config" must be a valid interpreter config or the name of a + predefined config ("isolated" or "legacy"). The default + is "isolated". + """ + ... +def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: + """ + destroy(id, *, restrict=False) + + Destroy the identified interpreter. + + Attempting to destroy the current interpreter raises InterpreterError. + So does an unrecognized ID. + """ + ... 
+def list_all(*, require_ready: bool) -> list[tuple[int, int]]: + """ + list_all() -> [(ID, whence)] + + Return a list containing the ID of every existing interpreter. + """ + ... +def get_current() -> tuple[int, int]: + """ + get_current() -> (ID, whence) + + Return the ID of current interpreter. + """ + ... +def get_main() -> tuple[int, int]: + """ + get_main() -> (ID, whence) + + Return the ID of main interpreter. + """ + ... +def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: + """ + is_running(id, *, restrict=False) -> bool + + Return whether or not the identified interpreter is running. + """ + ... +def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: + """ + get_config(id, *, restrict=False) -> types.SimpleNamespace + + Return a representation of the config used to initialize the interpreter. + """ + ... +def whence(id: SupportsIndex) -> int: + """ + whence(id) -> int + + Return an identifier for where the interpreter was created. + """ + ... +def exec(id: SupportsIndex, code: str, shared: bool | None = None, *, restrict: bool = False) -> None: + """ + exec(id, code, shared=None, *, restrict=False) + + Execute the provided code in the identified interpreter. + This is equivalent to running the builtin exec() under the target + interpreter, using the __dict__ of its __main__ module as both + globals and locals. + + "code" may be a string containing the text of a Python script. + + Functions (and code objects) are also supported, with some restrictions. + The code/function must not take any arguments or be a closure + (i.e. have cell vars). Methods and other callables are not supported. + + If a function is provided, its code object is used and all its state + is ignored, including its __globals__ dict. + """ + ... +def call( + id: SupportsIndex, + callable: Callable[..., object], + args: tuple[object, ...] 
| None = None, + kwargs: dict[str, object] | None = None, + *, + restrict: bool = False, +) -> object: + """ + call(id, callable, args=None, kwargs=None, *, restrict=False) + + Call the provided object in the identified interpreter. + Pass the given args and kwargs, if possible. + + "callable" may be a plain function with no free vars that takes + no arguments. + + The function's code object is used and all its state + is ignored, including its __globals__ dict. + """ + ... +def run_string( + id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: bool | None = None, *, restrict: bool = False +) -> None: + """ + run_string(id, script, shared=None, *, restrict=False) + + Execute the provided string in the identified interpreter. + + (See _interpreters.exec(). + """ + ... +def run_func( + id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: bool | None = None, *, restrict: bool = False +) -> None: + """ + run_func(id, func, shared=None, *, restrict=False) + + Execute the body of the provided function in the identified interpreter. + Code objects are also supported. In both cases, closures and args + are not supported. Methods and other callables are not supported either. + + (See _interpreters.exec(). + """ + ... +def set___main___attrs(id: SupportsIndex, updates: Mapping[str, object], *, restrict: bool = False) -> None: + """ + set___main___attrs(id, ns, *, restrict=False) + + Bind the given attributes in the interpreter's __main__ module. + """ + ... +def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ... +def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ... +def is_shareable(obj: object) -> bool: + """ + is_shareable(obj) -> bool + + Return True if the object's data may be shared between interpreters and + False otherwise. + """ + ... 
+def capture_exception(exc: BaseException | None = None) -> types.SimpleNamespace: + """ + capture_exception(exc=None) -> types.SimpleNamespace + + Return a snapshot of an exception. If "exc" is None + then the current exception, if any, is used (but not cleared). + + The returned snapshot is the same as what _interpreters.exec() returns. + """ + ... + +WHENCE_UNKNOWN: Final = 0 +WHENCE_RUNTIME: Final = 1 +WHENCE_LEGACY_CAPI: Final = 2 +WHENCE_CAPI: Final = 3 +WHENCE_XI: Final = 4 +WHENCE_STDLIB: Final = 5 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_json.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_json.pyi index 8028b6f..c95e873 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_json.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_json.pyi @@ -5,7 +5,7 @@ from typing import Any, final @final class make_encoder: - """_iterencode(obj, _current_indent_level) -> iterable""" + """Encoder(markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan)""" @property def sort_keys(self) -> bool: """sort_keys""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_locale.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_locale.pyi index 9cb3123..a515a32 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_locale.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_locale.pyi @@ -2,20 +2,41 @@ import sys from _typeshed import StrPath -from collections.abc import Mapping - -LC_CTYPE: int -LC_COLLATE: int -LC_TIME: int -LC_MONETARY: int -LC_NUMERIC: int -LC_ALL: int -CHAR_MAX: int +from typing import Final, Literal, TypedDict, type_check_only + +@type_check_only +class _LocaleConv(TypedDict): + decimal_point: str + grouping: list[int] + thousands_sep: str + int_curr_symbol: str + currency_symbol: str + p_cs_precedes: Literal[0, 1, 127] + n_cs_precedes: 
Literal[0, 1, 127] + p_sep_by_space: Literal[0, 1, 127] + n_sep_by_space: Literal[0, 1, 127] + mon_decimal_point: str + frac_digits: int + int_frac_digits: int + mon_thousands_sep: str + mon_grouping: list[int] + positive_sign: str + negative_sign: str + p_sign_posn: Literal[0, 1, 2, 3, 4, 127] + n_sign_posn: Literal[0, 1, 2, 3, 4, 127] + +LC_CTYPE: Final[int] +LC_COLLATE: Final[int] +LC_TIME: Final[int] +LC_MONETARY: Final[int] +LC_NUMERIC: Final[int] +LC_ALL: Final[int] +CHAR_MAX: Final = 127 def setlocale(category: int, locale: str | None = None, /) -> str: """Activates/queries locale processing.""" ... -def localeconv() -> Mapping[str, int | str | list[int]]: +def localeconv() -> _LocaleConv: """Returns numeric and monetary locale-specific parameters.""" ... @@ -37,67 +58,67 @@ def strxfrm(string: str, /) -> str: if sys.platform != "win32": LC_MESSAGES: int - ABDAY_1: int - ABDAY_2: int - ABDAY_3: int - ABDAY_4: int - ABDAY_5: int - ABDAY_6: int - ABDAY_7: int - - ABMON_1: int - ABMON_2: int - ABMON_3: int - ABMON_4: int - ABMON_5: int - ABMON_6: int - ABMON_7: int - ABMON_8: int - ABMON_9: int - ABMON_10: int - ABMON_11: int - ABMON_12: int - - DAY_1: int - DAY_2: int - DAY_3: int - DAY_4: int - DAY_5: int - DAY_6: int - DAY_7: int - - ERA: int - ERA_D_T_FMT: int - ERA_D_FMT: int - ERA_T_FMT: int - - MON_1: int - MON_2: int - MON_3: int - MON_4: int - MON_5: int - MON_6: int - MON_7: int - MON_8: int - MON_9: int - MON_10: int - MON_11: int - MON_12: int - - CODESET: int - D_T_FMT: int - D_FMT: int - T_FMT: int - T_FMT_AMPM: int - AM_STR: int - PM_STR: int - - RADIXCHAR: int - THOUSEP: int - YESEXPR: int - NOEXPR: int - CRNCYSTR: int - ALT_DIGITS: int + ABDAY_1: Final[int] + ABDAY_2: Final[int] + ABDAY_3: Final[int] + ABDAY_4: Final[int] + ABDAY_5: Final[int] + ABDAY_6: Final[int] + ABDAY_7: Final[int] + + ABMON_1: Final[int] + ABMON_2: Final[int] + ABMON_3: Final[int] + ABMON_4: Final[int] + ABMON_5: Final[int] + ABMON_6: Final[int] + ABMON_7: Final[int] + 
ABMON_8: Final[int] + ABMON_9: Final[int] + ABMON_10: Final[int] + ABMON_11: Final[int] + ABMON_12: Final[int] + + DAY_1: Final[int] + DAY_2: Final[int] + DAY_3: Final[int] + DAY_4: Final[int] + DAY_5: Final[int] + DAY_6: Final[int] + DAY_7: Final[int] + + ERA: Final[int] + ERA_D_T_FMT: Final[int] + ERA_D_FMT: Final[int] + ERA_T_FMT: Final[int] + + MON_1: Final[int] + MON_2: Final[int] + MON_3: Final[int] + MON_4: Final[int] + MON_5: Final[int] + MON_6: Final[int] + MON_7: Final[int] + MON_8: Final[int] + MON_9: Final[int] + MON_10: Final[int] + MON_11: Final[int] + MON_12: Final[int] + + CODESET: Final[int] + D_T_FMT: Final[int] + D_FMT: Final[int] + T_FMT: Final[int] + T_FMT_AMPM: Final[int] + AM_STR: Final[int] + PM_STR: Final[int] + + RADIXCHAR: Final[int] + THOUSEP: Final[int] + YESEXPR: Final[int] + NOEXPR: Final[int] + CRNCYSTR: Final[int] + ALT_DIGITS: Final[int] def nl_langinfo(key: int, /) -> str: """Return the value for the locale information associated with key.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_msi.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_msi.pyi index f3f268e..779fda3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_msi.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_msi.pyi @@ -1,5 +1,3 @@ -"""Documentation""" - import sys if sys.platform == "win32": diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_operator.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_operator.pyi index e466922..9ecbf29 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_operator.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_operator.pyi @@ -12,7 +12,7 @@ import sys from _typeshed import SupportsGetItem from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence from typing import Any, AnyStr, Generic, Protocol, 
SupportsAbs, SupportsIndex, TypeVar, final, overload -from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack +from typing_extensions import ParamSpec, TypeAlias, TypeIs, TypeVarTuple, Unpack _R = TypeVar("_R") _T = TypeVar("_T") @@ -315,3 +315,7 @@ def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: types and lengths of a and b--but not their values. """ ... + +if sys.version_info >= (3, 14): + def is_none(a: object, /) -> TypeIs[None]: ... + def is_not_none(a: _T | None, /) -> TypeIs[_T]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_osx_support.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_osx_support.pyi index c98780b..466fd61 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_osx_support.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_osx_support.pyi @@ -1,7 +1,7 @@ """Shared OS X support functions.""" from collections.abc import Iterable, Sequence -from typing import TypeVar +from typing import Final, TypeVar _T = TypeVar("_T") _K = TypeVar("_K") @@ -9,9 +9,9 @@ _V = TypeVar("_V") __all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"] -_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented -_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented -_INITPRE: str # undocumented +_UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented +_COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented +_INITPRE: Final[str] # undocumented def _find_executable(executable: str, path: str | None = None) -> str | None: """ @@ -28,7 +28,7 @@ def _find_build_tool(toolname: str) -> str: """Find a build tool on current path or using xcrun""" ... 
-_SYSTEM_VERSION: str | None # undocumented +_SYSTEM_VERSION: Final[str | None] # undocumented def _get_system_version() -> str: """Return the OS X system version as a string""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_posixsubprocess.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_posixsubprocess.pyi index 2666bbc..6bbd16a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_posixsubprocess.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_posixsubprocess.pyi @@ -34,22 +34,18 @@ if sys.platform != "win32": /, ) -> int: """ - fork_exec(args, executable_list, close_fds, pass_fds, cwd, env, - p2cread, p2cwrite, c2pread, c2pwrite, - errread, errwrite, errpipe_read, errpipe_write, - restore_signals, call_setsid, - gid, groups_list, uid, - preexec_fn) - - Forks a child process, closes parent file descriptors as appropriate in the - child and dups the few that are needed before calling exec() in the child - process. - - If close_fds is true, close file descriptors 3 and higher, except those listed + Spawn a fresh new child process. + + Fork a child process, close parent file descriptors as appropriate in the + child and duplicate the few that are needed before calling exec() in the + child process. + + If close_fds is True, close file descriptors 3 and higher, except those listed in the sorted tuple pass_fds. The preexec_fn, if supplied, will be called immediately before closing file descriptors and exec. + WARNING: preexec_fn is NOT SAFE if your application uses threads. It may trigger infrequent, difficult to debug deadlocks. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_socket.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_socket.pyi index 12359d3..13989a6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_socket.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_socket.pyi @@ -7,6 +7,7 @@ See the socket module for documentation. import sys from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterable +from socket import error as error, gaierror as gaierror, herror as herror, timeout as timeout from typing import Any, SupportsIndex, overload from typing_extensions import TypeAlias @@ -672,18 +673,6 @@ if sys.platform != "win32": if sys.platform != "win32" and sys.platform != "darwin": IPX_TYPE: int -# ===== Exceptions ===== - -error = OSError - -class herror(error): ... -class gaierror(error): ... - -if sys.version_info >= (3, 10): - timeout = TimeoutError -else: - class timeout(error): ... - # ===== Classes ===== class socket: @@ -745,6 +734,7 @@ class socket: def proto(self) -> int: """the socket protocol""" ... + # F811: "Redefinition of unused `timeout`" @property def timeout(self) -> float | None: """the socket timeout""" @@ -1208,11 +1198,7 @@ def ntohl(x: int, /) -> int: """ ... def ntohs(x: int, /) -> int: - """ - ntohs(integer) -> integer - - Convert a 16-bit unsigned integer from network to host byte order. - """ + """Convert a 16-bit unsigned integer from network to host byte order.""" ... def htonl(x: int, /) -> int: """ @@ -1222,26 +1208,13 @@ def htonl(x: int, /) -> int: """ ... def htons(x: int, /) -> int: - """ - htons(integer) -> integer - - Convert a 16-bit unsigned integer from host to network byte order. - """ + """Convert a 16-bit unsigned integer from host to network byte order.""" ... 
def inet_aton(ip_addr: str, /) -> bytes: - """ - inet_aton(string) -> bytes giving packed 32-bit IP representation - - Convert an IP address in string format (123.45.67.89) to the 32-bit packed - binary format used in low-level network functions. - """ + """Convert an IP address in string format (123.45.67.89) to the 32-bit packed binary format used in low-level network functions.""" ... def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: - """ - inet_ntoa(packed_ip) -> ip_address_string - - Convert an IP address from 32-bit packed binary format to string format - """ + """Convert an IP address from 32-bit packed binary format to string format.""" ... def inet_pton(address_family: int, ip_string: str, /) -> bytes: """ @@ -1267,6 +1240,8 @@ def getdefaulttimeout() -> float | None: When the socket module is first imported, the default is None. """ ... + +# F811: "Redefinition of unused `timeout`" def setdefaulttimeout(timeout: float | None, /) -> None: """ setdefaulttimeout(timeout) @@ -1329,11 +1304,7 @@ def if_nameindex() -> list[tuple[int, str]]: """ ... def if_nametoindex(oname: str, /) -> int: - """ - if_nametoindex(if_name) - - Returns the interface index corresponding to the interface name if_name. - """ + """Returns the interface index corresponding to the interface name if_name.""" ... 
def if_indextoname(index: int, /) -> str: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_sqlite3.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_sqlite3.pyi new file mode 100644 index 0000000..b53aca1 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_sqlite3.pyi @@ -0,0 +1,360 @@ +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable +from sqlite3 import ( + Connection as Connection, + Cursor as Cursor, + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + PrepareProtocol as PrepareProtocol, + ProgrammingError as ProgrammingError, + Row as Row, + Warning as Warning, +) +from typing import Any, Final, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 11): + from sqlite3 import Blob as Blob + +_T = TypeVar("_T") +_ConnectionT = TypeVar("_ConnectionT", bound=Connection) +_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None +_Adapter: TypeAlias = Callable[[_T], _SqliteData] +_Converter: TypeAlias = Callable[[bytes], Any] + +PARSE_COLNAMES: Final[int] +PARSE_DECLTYPES: Final[int] +SQLITE_ALTER_TABLE: Final[int] +SQLITE_ANALYZE: Final[int] +SQLITE_ATTACH: Final[int] +SQLITE_CREATE_INDEX: Final[int] +SQLITE_CREATE_TABLE: Final[int] +SQLITE_CREATE_TEMP_INDEX: Final[int] +SQLITE_CREATE_TEMP_TABLE: Final[int] +SQLITE_CREATE_TEMP_TRIGGER: Final[int] +SQLITE_CREATE_TEMP_VIEW: Final[int] +SQLITE_CREATE_TRIGGER: Final[int] +SQLITE_CREATE_VIEW: Final[int] +SQLITE_CREATE_VTABLE: Final[int] +SQLITE_DELETE: Final[int] +SQLITE_DENY: Final[int] +SQLITE_DETACH: Final[int] +SQLITE_DONE: Final[int] +SQLITE_DROP_INDEX: Final[int] +SQLITE_DROP_TABLE: Final[int] +SQLITE_DROP_TEMP_INDEX: 
Final[int] +SQLITE_DROP_TEMP_TABLE: Final[int] +SQLITE_DROP_TEMP_TRIGGER: Final[int] +SQLITE_DROP_TEMP_VIEW: Final[int] +SQLITE_DROP_TRIGGER: Final[int] +SQLITE_DROP_VIEW: Final[int] +SQLITE_DROP_VTABLE: Final[int] +SQLITE_FUNCTION: Final[int] +SQLITE_IGNORE: Final[int] +SQLITE_INSERT: Final[int] +SQLITE_OK: Final[int] +SQLITE_PRAGMA: Final[int] +SQLITE_READ: Final[int] +SQLITE_RECURSIVE: Final[int] +SQLITE_REINDEX: Final[int] +SQLITE_SAVEPOINT: Final[int] +SQLITE_SELECT: Final[int] +SQLITE_TRANSACTION: Final[int] +SQLITE_UPDATE: Final[int] +adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]] +converters: dict[str, _Converter] +sqlite_version: str + +if sys.version_info < (3, 12): + version: str + +if sys.version_info >= (3, 12): + LEGACY_TRANSACTION_CONTROL: Final[int] + SQLITE_DBCONFIG_DEFENSIVE: Final[int] + SQLITE_DBCONFIG_DQS_DDL: Final[int] + SQLITE_DBCONFIG_DQS_DML: Final[int] + SQLITE_DBCONFIG_ENABLE_FKEY: Final[int] + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: Final[int] + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: Final[int] + SQLITE_DBCONFIG_ENABLE_QPSG: Final[int] + SQLITE_DBCONFIG_ENABLE_TRIGGER: Final[int] + SQLITE_DBCONFIG_ENABLE_VIEW: Final[int] + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: Final[int] + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: Final[int] + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: Final[int] + SQLITE_DBCONFIG_RESET_DATABASE: Final[int] + SQLITE_DBCONFIG_TRIGGER_EQP: Final[int] + SQLITE_DBCONFIG_TRUSTED_SCHEMA: Final[int] + SQLITE_DBCONFIG_WRITABLE_SCHEMA: Final[int] + +if sys.version_info >= (3, 11): + SQLITE_ABORT: Final[int] + SQLITE_ABORT_ROLLBACK: Final[int] + SQLITE_AUTH: Final[int] + SQLITE_AUTH_USER: Final[int] + SQLITE_BUSY: Final[int] + SQLITE_BUSY_RECOVERY: Final[int] + SQLITE_BUSY_SNAPSHOT: Final[int] + SQLITE_BUSY_TIMEOUT: Final[int] + SQLITE_CANTOPEN: Final[int] + SQLITE_CANTOPEN_CONVPATH: Final[int] + SQLITE_CANTOPEN_DIRTYWAL: Final[int] + SQLITE_CANTOPEN_FULLPATH: Final[int] + SQLITE_CANTOPEN_ISDIR: Final[int] + SQLITE_CANTOPEN_NOTEMPDIR: 
Final[int] + SQLITE_CANTOPEN_SYMLINK: Final[int] + SQLITE_CONSTRAINT: Final[int] + SQLITE_CONSTRAINT_CHECK: Final[int] + SQLITE_CONSTRAINT_COMMITHOOK: Final[int] + SQLITE_CONSTRAINT_FOREIGNKEY: Final[int] + SQLITE_CONSTRAINT_FUNCTION: Final[int] + SQLITE_CONSTRAINT_NOTNULL: Final[int] + SQLITE_CONSTRAINT_PINNED: Final[int] + SQLITE_CONSTRAINT_PRIMARYKEY: Final[int] + SQLITE_CONSTRAINT_ROWID: Final[int] + SQLITE_CONSTRAINT_TRIGGER: Final[int] + SQLITE_CONSTRAINT_UNIQUE: Final[int] + SQLITE_CONSTRAINT_VTAB: Final[int] + SQLITE_CORRUPT: Final[int] + SQLITE_CORRUPT_INDEX: Final[int] + SQLITE_CORRUPT_SEQUENCE: Final[int] + SQLITE_CORRUPT_VTAB: Final[int] + SQLITE_EMPTY: Final[int] + SQLITE_ERROR: Final[int] + SQLITE_ERROR_MISSING_COLLSEQ: Final[int] + SQLITE_ERROR_RETRY: Final[int] + SQLITE_ERROR_SNAPSHOT: Final[int] + SQLITE_FORMAT: Final[int] + SQLITE_FULL: Final[int] + SQLITE_INTERNAL: Final[int] + SQLITE_INTERRUPT: Final[int] + SQLITE_IOERR: Final[int] + SQLITE_IOERR_ACCESS: Final[int] + SQLITE_IOERR_AUTH: Final[int] + SQLITE_IOERR_BEGIN_ATOMIC: Final[int] + SQLITE_IOERR_BLOCKED: Final[int] + SQLITE_IOERR_CHECKRESERVEDLOCK: Final[int] + SQLITE_IOERR_CLOSE: Final[int] + SQLITE_IOERR_COMMIT_ATOMIC: Final[int] + SQLITE_IOERR_CONVPATH: Final[int] + SQLITE_IOERR_CORRUPTFS: Final[int] + SQLITE_IOERR_DATA: Final[int] + SQLITE_IOERR_DELETE: Final[int] + SQLITE_IOERR_DELETE_NOENT: Final[int] + SQLITE_IOERR_DIR_CLOSE: Final[int] + SQLITE_IOERR_DIR_FSYNC: Final[int] + SQLITE_IOERR_FSTAT: Final[int] + SQLITE_IOERR_FSYNC: Final[int] + SQLITE_IOERR_GETTEMPPATH: Final[int] + SQLITE_IOERR_LOCK: Final[int] + SQLITE_IOERR_MMAP: Final[int] + SQLITE_IOERR_NOMEM: Final[int] + SQLITE_IOERR_RDLOCK: Final[int] + SQLITE_IOERR_READ: Final[int] + SQLITE_IOERR_ROLLBACK_ATOMIC: Final[int] + SQLITE_IOERR_SEEK: Final[int] + SQLITE_IOERR_SHMLOCK: Final[int] + SQLITE_IOERR_SHMMAP: Final[int] + SQLITE_IOERR_SHMOPEN: Final[int] + SQLITE_IOERR_SHMSIZE: Final[int] + SQLITE_IOERR_SHORT_READ: Final[int] 
+ SQLITE_IOERR_TRUNCATE: Final[int] + SQLITE_IOERR_UNLOCK: Final[int] + SQLITE_IOERR_VNODE: Final[int] + SQLITE_IOERR_WRITE: Final[int] + SQLITE_LIMIT_ATTACHED: Final[int] + SQLITE_LIMIT_COLUMN: Final[int] + SQLITE_LIMIT_COMPOUND_SELECT: Final[int] + SQLITE_LIMIT_EXPR_DEPTH: Final[int] + SQLITE_LIMIT_FUNCTION_ARG: Final[int] + SQLITE_LIMIT_LENGTH: Final[int] + SQLITE_LIMIT_LIKE_PATTERN_LENGTH: Final[int] + SQLITE_LIMIT_SQL_LENGTH: Final[int] + SQLITE_LIMIT_TRIGGER_DEPTH: Final[int] + SQLITE_LIMIT_VARIABLE_NUMBER: Final[int] + SQLITE_LIMIT_VDBE_OP: Final[int] + SQLITE_LIMIT_WORKER_THREADS: Final[int] + SQLITE_LOCKED: Final[int] + SQLITE_LOCKED_SHAREDCACHE: Final[int] + SQLITE_LOCKED_VTAB: Final[int] + SQLITE_MISMATCH: Final[int] + SQLITE_MISUSE: Final[int] + SQLITE_NOLFS: Final[int] + SQLITE_NOMEM: Final[int] + SQLITE_NOTADB: Final[int] + SQLITE_NOTFOUND: Final[int] + SQLITE_NOTICE: Final[int] + SQLITE_NOTICE_RECOVER_ROLLBACK: Final[int] + SQLITE_NOTICE_RECOVER_WAL: Final[int] + SQLITE_OK_LOAD_PERMANENTLY: Final[int] + SQLITE_OK_SYMLINK: Final[int] + SQLITE_PERM: Final[int] + SQLITE_PROTOCOL: Final[int] + SQLITE_RANGE: Final[int] + SQLITE_READONLY: Final[int] + SQLITE_READONLY_CANTINIT: Final[int] + SQLITE_READONLY_CANTLOCK: Final[int] + SQLITE_READONLY_DBMOVED: Final[int] + SQLITE_READONLY_DIRECTORY: Final[int] + SQLITE_READONLY_RECOVERY: Final[int] + SQLITE_READONLY_ROLLBACK: Final[int] + SQLITE_ROW: Final[int] + SQLITE_SCHEMA: Final[int] + SQLITE_TOOBIG: Final[int] + SQLITE_WARNING: Final[int] + SQLITE_WARNING_AUTOINDEX: Final[int] + threadsafety: Final[int] + +# Can take or return anything depending on what's in the registry. +@overload +def adapt(obj: Any, proto: Any, /) -> Any: + """Adapt given object to given protocol.""" + ... +@overload +def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: + """Adapt given object to given protocol.""" + ... 
+def complete_statement(statement: str) -> bool: + """Checks if a string contains a complete SQL statement.""" + ... + +if sys.version_info >= (3, 12): + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", + check_same_thread: bool = True, + cached_statements: int = 128, + uri: bool = False, + *, + autocommit: bool = ..., + ) -> Connection: + """ + Open a connection to the SQLite database file 'database'. + + You can use ":memory:" to open a database connection to a database that + resides in RAM instead of on disk. + + Note: Passing more than 1 positional argument to _sqlite3.connect() is + deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', + 'check_same_thread', 'factory', 'cached_statements' and 'uri' will + become keyword-only parameters in Python 3.15. + """ + ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float, + detect_types: int, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None, + check_same_thread: bool, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + *, + autocommit: bool = ..., + ) -> _ConnectionT: + """ + Open a connection to the SQLite database file 'database'. + + You can use ":memory:" to open a database connection to a database that + resides in RAM instead of on disk. + + Note: Passing more than 1 positional argument to _sqlite3.connect() is + deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', + 'check_same_thread', 'factory', 'cached_statements' and 'uri' will + become keyword-only parameters in Python 3.15. + """ + ... 
+ @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", + check_same_thread: bool = True, + *, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + autocommit: bool = ..., + ) -> _ConnectionT: + """ + Open a connection to the SQLite database file 'database'. + + You can use ":memory:" to open a database connection to a database that + resides in RAM instead of on disk. + + Note: Passing more than 1 positional argument to _sqlite3.connect() is + deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', + 'check_same_thread', 'factory', 'cached_statements' and 'uri' will + become keyword-only parameters in Python 3.15. + """ + ... + +else: + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", + check_same_thread: bool = True, + cached_statements: int = 128, + uri: bool = False, + ) -> Connection: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float, + detect_types: int, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None, + check_same_thread: bool, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + ) -> _ConnectionT: ... + @overload + def connect( + database: StrOrBytesPath, + timeout: float = 5.0, + detect_types: int = 0, + isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", + check_same_thread: bool = True, + *, + factory: type[_ConnectionT], + cached_statements: int = 128, + uri: bool = False, + ) -> _ConnectionT: ... + +def enable_callback_tracebacks(enable: bool, /) -> None: + """Enable or disable callback functions throwing errors to stderr.""" + ... 
+ +if sys.version_info < (3, 12): + # takes a pos-or-keyword argument because there is a C wrapper + def enable_shared_cache(do_enable: int) -> None: ... + +if sys.version_info >= (3, 10): + def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: + """Register a function to adapt Python objects to SQLite values.""" + ... + def register_converter(typename: str, converter: _Converter, /) -> None: + """Register a function to convert SQLite values to Python objects.""" + ... + +else: + def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: ... + def register_converter(name: str, converter: _Converter, /) -> None: ... + +if sys.version_info < (3, 10): + OptimizedUnicode = str diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ssl.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ssl.pyi new file mode 100644 index 0000000..2282b1c --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_ssl.pyi @@ -0,0 +1,399 @@ +""" +Implementation module for SSL socket operations. See the socket module +for documentation. +""" + +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable +from ssl import ( + SSLCertVerificationError as SSLCertVerificationError, + SSLContext, + SSLEOFError as SSLEOFError, + SSLError as SSLError, + SSLObject, + SSLSyscallError as SSLSyscallError, + SSLWantReadError as SSLWantReadError, + SSLWantWriteError as SSLWantWriteError, + SSLZeroReturnError as SSLZeroReturnError, +) +from typing import Any, Literal, TypedDict, final, overload +from typing_extensions import NotRequired, Self, TypeAlias + +_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] 
+_PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] + +class _Cipher(TypedDict): + aead: bool + alg_bits: int + auth: str + description: str + digest: str | None + id: int + kea: str + name: str + protocol: str + strength_bits: int + symmetric: str + +class _CertInfo(TypedDict): + subject: tuple[tuple[tuple[str, str], ...], ...] + issuer: tuple[tuple[tuple[str, str], ...], ...] + version: int + serialNumber: str + notBefore: str + notAfter: str + subjectAltName: NotRequired[tuple[tuple[str, str], ...] | None] + OCSP: NotRequired[tuple[str, ...] | None] + caIssuers: NotRequired[tuple[str, ...] | None] + crlDistributionPoints: NotRequired[tuple[str, ...] | None] + +def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: + """ + Mix string into the OpenSSL PRNG state. + + entropy (a float) is a lower bound on the entropy contained in + string. See RFC 4086. + """ + ... +def RAND_bytes(n: int, /) -> bytes: + """Generate n cryptographically strong pseudo-random bytes.""" + ... + +if sys.version_info < (3, 12): + def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: ... + +if sys.version_info < (3, 10): + def RAND_egd(path: str) -> None: ... + +def RAND_status() -> bool: + """ + Returns True if the OpenSSL PRNG has been seeded with enough data and False if not. + + It is necessary to seed the PRNG with RAND_add() on some platforms before + using the ssl() function. + """ + ... +def get_default_verify_paths() -> tuple[str, str, str, str]: + """ + Return search paths and environment vars that are used by SSLContext's set_default_verify_paths() to load default CAs. + + The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. + """ + ... + +if sys.platform == "win32": + _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] + def enum_certificates(store_name: str) -> _EnumRetType: ... + def enum_crls(store_name: str) -> _EnumRetType: ... 
+ +def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: + """ + Lookup NID, short name, long name and OID of an ASN1_OBJECT. + + By default objects are looked up by OID. With name=True short and + long name are also matched. + """ + ... +def nid2obj(nid: int, /) -> tuple[int, str, str, str]: + """Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID.""" + ... + +class _SSLContext: + check_hostname: bool + keylog_filename: str | None + maximum_version: int + minimum_version: int + num_tickets: int + options: int + post_handshake_auth: bool + protocol: int + if sys.version_info >= (3, 10): + security_level: int + sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None + verify_flags: int + verify_mode: int + def __new__(cls, protocol: int, /) -> Self: ... + def cert_store_stats(self) -> dict[str, int]: + """ + Returns quantities of loaded X.509 certificates. + + X.509 certificates with a CA extension and certificate revocation lists + inside the context's cert store. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ + ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: + """ + Returns a list of dicts with information of loaded CA certs. + + If the optional argument is True, returns a DER-encoded copy of the CA + certificate. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ + ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: + """ + Returns a list of dicts with information of loaded CA certs. + + If the optional argument is True, returns a DER-encoded copy of the CA + certificate. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ + ... 
+ @overload + def get_ca_certs(self, binary_form: bool = False) -> Any: + """ + Returns a list of dicts with information of loaded CA certs. + + If the optional argument is True, returns a DER-encoded copy of the CA + certificate. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ + ... + def get_ciphers(self) -> list[_Cipher]: ... + def load_cert_chain( + self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None + ) -> None: ... + def load_dh_params(self, path: str, /) -> None: ... + def load_verify_locations( + self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> None: ... + def session_stats(self) -> dict[str, int]: ... + def set_ciphers(self, cipherlist: str, /) -> None: ... + def set_default_verify_paths(self) -> None: ... + def set_ecdh_curve(self, name: str, /) -> None: ... + if sys.version_info >= (3, 13): + def set_psk_client_callback(self, callback: Callable[[str | None], tuple[str | None, bytes]] | None) -> None: ... + def set_psk_server_callback( + self, callback: Callable[[str | None], tuple[str | None, bytes]] | None, identity_hint: str | None = None + ) -> None: ... + +@final +class MemoryBIO: + eof: bool + pending: int + def __new__(self) -> Self: ... + def read(self, size: int = -1, /) -> bytes: + """ + Read up to size bytes from the memory BIO. + + If size is not specified, read the entire buffer. + If the return value is an empty bytes instance, this means either + EOF or that no data is available. Use the "eof" property to + distinguish between the two. + """ + ... + def write(self, b: ReadableBuffer, /) -> int: + """ + Writes the bytes b into the memory BIO. + + Returns the number of bytes written. + """ + ... + def write_eof(self) -> None: + """ + Write an EOF marker to the memory BIO. + + When all data has been read, the "eof" property will be True. 
+ """ + ... + +@final +class SSLSession: + @property + def has_ticket(self) -> bool: + """Does the session contain a ticket?""" + ... + @property + def id(self) -> bytes: + """Session id""" + ... + @property + def ticket_lifetime_hint(self) -> int: + """Ticket life time hint.""" + ... + @property + def time(self) -> int: + """Session creation time (seconds since epoch).""" + ... + @property + def timeout(self) -> int: + """Session timeout (delta in seconds).""" + ... + +# _ssl.Certificate is weird: it can't be instantiated or subclassed. +# Instances can only be created via methods of the private _ssl._SSLSocket class, +# for which the relevant method signatures are: +# +# class _SSLSocket: +# def get_unverified_chain(self) -> list[Certificate] | None: ... +# def get_verified_chain(self) -> list[Certificate] | None: ... +# +# You can find a _ssl._SSLSocket object as the _sslobj attribute of a ssl.SSLSocket object + +if sys.version_info >= (3, 10): + @final + class Certificate: + def get_info(self) -> _CertInfo: ... + @overload + def public_bytes(self) -> str: ... + @overload + def public_bytes(self, format: Literal[1] = 1, /) -> str: ... # ENCODING_PEM + @overload + def public_bytes(self, format: Literal[2], /) -> bytes: ... # ENCODING_DER + @overload + def public_bytes(self, format: int, /) -> str | bytes: ... 
+ +if sys.version_info < (3, 12): + err_codes_to_names: dict[tuple[int, int], str] + err_names_to_codes: dict[str, tuple[int, int]] + lib_codes_to_names: dict[int, str] + +_DEFAULT_CIPHERS: str + +# SSL error numbers +SSL_ERROR_ZERO_RETURN: int +SSL_ERROR_WANT_READ: int +SSL_ERROR_WANT_WRITE: int +SSL_ERROR_WANT_X509_LOOKUP: int +SSL_ERROR_SYSCALL: int +SSL_ERROR_SSL: int +SSL_ERROR_WANT_CONNECT: int +SSL_ERROR_EOF: int +SSL_ERROR_INVALID_ERROR_CODE: int + +# verify modes +CERT_NONE: int +CERT_OPTIONAL: int +CERT_REQUIRED: int + +# verify flags +VERIFY_DEFAULT: int +VERIFY_CRL_CHECK_LEAF: int +VERIFY_CRL_CHECK_CHAIN: int +VERIFY_X509_STRICT: int +VERIFY_X509_TRUSTED_FIRST: int +if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: int + VERIFY_X509_PARTIAL_CHAIN: int + +# alert descriptions +ALERT_DESCRIPTION_CLOSE_NOTIFY: int +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: int +ALERT_DESCRIPTION_BAD_RECORD_MAC: int +ALERT_DESCRIPTION_RECORD_OVERFLOW: int +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: int +ALERT_DESCRIPTION_HANDSHAKE_FAILURE: int +ALERT_DESCRIPTION_BAD_CERTIFICATE: int +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: int +ALERT_DESCRIPTION_CERTIFICATE_REVOKED: int +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: int +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: int +ALERT_DESCRIPTION_ILLEGAL_PARAMETER: int +ALERT_DESCRIPTION_UNKNOWN_CA: int +ALERT_DESCRIPTION_ACCESS_DENIED: int +ALERT_DESCRIPTION_DECODE_ERROR: int +ALERT_DESCRIPTION_DECRYPT_ERROR: int +ALERT_DESCRIPTION_PROTOCOL_VERSION: int +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: int +ALERT_DESCRIPTION_INTERNAL_ERROR: int +ALERT_DESCRIPTION_USER_CANCELLED: int +ALERT_DESCRIPTION_NO_RENEGOTIATION: int +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: int +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: int +ALERT_DESCRIPTION_UNRECOGNIZED_NAME: int +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: int +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: int +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: int + +# protocol versions 
+PROTOCOL_SSLv23: int +PROTOCOL_TLS: int +PROTOCOL_TLS_CLIENT: int +PROTOCOL_TLS_SERVER: int +PROTOCOL_TLSv1: int +PROTOCOL_TLSv1_1: int +PROTOCOL_TLSv1_2: int + +# protocol options +OP_ALL: int +OP_NO_SSLv2: int +OP_NO_SSLv3: int +OP_NO_TLSv1: int +OP_NO_TLSv1_1: int +OP_NO_TLSv1_2: int +OP_NO_TLSv1_3: int +OP_CIPHER_SERVER_PREFERENCE: int +OP_SINGLE_DH_USE: int +OP_NO_TICKET: int +OP_SINGLE_ECDH_USE: int +OP_NO_COMPRESSION: int +OP_ENABLE_MIDDLEBOX_COMPAT: int +OP_NO_RENEGOTIATION: int +if sys.version_info >= (3, 11): + OP_IGNORE_UNEXPECTED_EOF: int +elif sys.version_info >= (3, 8) and sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: int +if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT: int + OP_ENABLE_KTLS: int + +# host flags +HOSTFLAG_ALWAYS_CHECK_SUBJECT: int +HOSTFLAG_NEVER_CHECK_SUBJECT: int +HOSTFLAG_NO_WILDCARDS: int +HOSTFLAG_NO_PARTIAL_WILDCARDS: int +HOSTFLAG_MULTI_LABEL_WILDCARDS: int +HOSTFLAG_SINGLE_LABEL_SUBDOMAINS: int + +if sys.version_info >= (3, 10): + # certificate file types + # Typed as Literal so the overload on Certificate.public_bytes can work properly. 
+ ENCODING_PEM: Literal[1] + ENCODING_DER: Literal[2] + +# protocol versions +PROTO_MINIMUM_SUPPORTED: int +PROTO_MAXIMUM_SUPPORTED: int +PROTO_SSLv3: int +PROTO_TLSv1: int +PROTO_TLSv1_1: int +PROTO_TLSv1_2: int +PROTO_TLSv1_3: int + +# feature support +HAS_SNI: bool +HAS_TLS_UNIQUE: bool +HAS_ECDH: bool +HAS_NPN: bool +if sys.version_info >= (3, 13): + HAS_PSK: bool +HAS_ALPN: bool +HAS_SSLv2: bool +HAS_SSLv3: bool +HAS_TLSv1: bool +HAS_TLSv1_1: bool +HAS_TLSv1_2: bool +HAS_TLSv1_3: bool + +# version info +OPENSSL_VERSION_NUMBER: int +OPENSSL_VERSION_INFO: tuple[int, int, int, int, int] +OPENSSL_VERSION: str +_OPENSSL_API_VERSION: tuple[int, int, int, int, int] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_stat.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_stat.pyi index c0b57d4..5222e22 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_stat.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_stat.pyi @@ -31,18 +31,29 @@ S_IROTH: read by others S_IWOTH: write by others S_IXOTH: execute by others +UF_SETTABLE: mask of owner changable flags UF_NODUMP: do not dump file UF_IMMUTABLE: file may not be changed UF_APPEND: file may only be appended to UF_OPAQUE: directory is opaque when viewed through a union stack UF_NOUNLINK: file may not be renamed or deleted -UF_COMPRESSED: OS X: file is hfs-compressed -UF_HIDDEN: OS X: file should not be displayed +UF_COMPRESSED: macOS: file is hfs-compressed +UF_TRACKED: used for dealing with document IDs +UF_DATAVAULT: entitlement required for reading and writing +UF_HIDDEN: macOS: file should not be displayed +SF_SETTABLE: mask of super user changeable flags SF_ARCHIVED: file may be archived SF_IMMUTABLE: file may not be changed SF_APPEND: file may only be appended to +SF_RESTRICTED: entitlement required for writing SF_NOUNLINK: file may not be renamed or deleted SF_SNAPSHOT: file is a snapshot file +SF_FIRMLINK: file is a firmlink 
+SF_DATALESS: file is a dataless object + +On macOS: +SF_SUPPORTED: mask of super user supported flags +SF_SYNTHETIC: mask of read-only synthetic flags ST_MODE ST_INO @@ -60,70 +71,70 @@ FILE_ATTRIBUTE_*: Windows file attribute constants """ import sys -from typing import Literal - -SF_APPEND: Literal[0x00040000] -SF_ARCHIVED: Literal[0x00010000] -SF_IMMUTABLE: Literal[0x00020000] -SF_NOUNLINK: Literal[0x00100000] -SF_SNAPSHOT: Literal[0x00200000] - -ST_MODE: Literal[0] -ST_INO: Literal[1] -ST_DEV: Literal[2] -ST_NLINK: Literal[3] -ST_UID: Literal[4] -ST_GID: Literal[5] -ST_SIZE: Literal[6] -ST_ATIME: Literal[7] -ST_MTIME: Literal[8] -ST_CTIME: Literal[9] - -S_IFIFO: Literal[0o010000] -S_IFLNK: Literal[0o120000] -S_IFREG: Literal[0o100000] -S_IFSOCK: Literal[0o140000] -S_IFBLK: Literal[0o060000] -S_IFCHR: Literal[0o020000] -S_IFDIR: Literal[0o040000] +from typing import Final + +SF_APPEND: Final = 0x00040000 +SF_ARCHIVED: Final = 0x00010000 +SF_IMMUTABLE: Final = 0x00020000 +SF_NOUNLINK: Final = 0x00100000 +SF_SNAPSHOT: Final = 0x00200000 + +ST_MODE: Final = 0 +ST_INO: Final = 1 +ST_DEV: Final = 2 +ST_NLINK: Final = 3 +ST_UID: Final = 4 +ST_GID: Final = 5 +ST_SIZE: Final = 6 +ST_ATIME: Final = 7 +ST_MTIME: Final = 8 +ST_CTIME: Final = 9 + +S_IFIFO: Final = 0o010000 +S_IFLNK: Final = 0o120000 +S_IFREG: Final = 0o100000 +S_IFSOCK: Final = 0o140000 +S_IFBLK: Final = 0o060000 +S_IFCHR: Final = 0o020000 +S_IFDIR: Final = 0o040000 # These are 0 on systems that don't support the specific kind of file. # Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux. 
-S_IFDOOR: int -S_IFPORT: int -S_IFWHT: int - -S_ISUID: Literal[0o4000] -S_ISGID: Literal[0o2000] -S_ISVTX: Literal[0o1000] - -S_IRWXU: Literal[0o0700] -S_IRUSR: Literal[0o0400] -S_IWUSR: Literal[0o0200] -S_IXUSR: Literal[0o0100] - -S_IRWXG: Literal[0o0070] -S_IRGRP: Literal[0o0040] -S_IWGRP: Literal[0o0020] -S_IXGRP: Literal[0o0010] - -S_IRWXO: Literal[0o0007] -S_IROTH: Literal[0o0004] -S_IWOTH: Literal[0o0002] -S_IXOTH: Literal[0o0001] - -S_ENFMT: Literal[0o2000] -S_IREAD: Literal[0o0400] -S_IWRITE: Literal[0o0200] -S_IEXEC: Literal[0o0100] - -UF_APPEND: Literal[0x00000004] -UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only -UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only -UF_IMMUTABLE: Literal[0x00000002] -UF_NODUMP: Literal[0x00000001] -UF_NOUNLINK: Literal[0x00000010] -UF_OPAQUE: Literal[0x00000008] +S_IFDOOR: Final[int] +S_IFPORT: Final[int] +S_IFWHT: Final[int] + +S_ISUID: Final = 0o4000 +S_ISGID: Final = 0o2000 +S_ISVTX: Final = 0o1000 + +S_IRWXU: Final = 0o0700 +S_IRUSR: Final = 0o0400 +S_IWUSR: Final = 0o0200 +S_IXUSR: Final = 0o0100 + +S_IRWXG: Final = 0o0070 +S_IRGRP: Final = 0o0040 +S_IWGRP: Final = 0o0020 +S_IXGRP: Final = 0o0010 + +S_IRWXO: Final = 0o0007 +S_IROTH: Final = 0o0004 +S_IWOTH: Final = 0o0002 +S_IXOTH: Final = 0o0001 + +S_ENFMT: Final = 0o2000 +S_IREAD: Final = 0o0400 +S_IWRITE: Final = 0o0200 +S_IEXEC: Final = 0o0100 + +UF_APPEND: Final = 0x00000004 +UF_COMPRESSED: Final = 0x00000020 # OS X 10.6+ only +UF_HIDDEN: Final = 0x00008000 # OX X 10.5+ only +UF_IMMUTABLE: Final = 0x00000002 +UF_NODUMP: Final = 0x00000001 +UF_NOUNLINK: Final = 0x00000010 +UF_OPAQUE: Final = 0x00000008 def S_IMODE(mode: int, /) -> int: """Return the portion of the file's mode that can be set by os.chmod().""" @@ -206,39 +217,41 @@ def filemode(mode: int, /) -> str: ... 
if sys.platform == "win32": - IO_REPARSE_TAG_SYMLINK: int - IO_REPARSE_TAG_MOUNT_POINT: int - IO_REPARSE_TAG_APPEXECLINK: int + IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C + IO_REPARSE_TAG_MOUNT_POINT: Final = 0xA0000003 + IO_REPARSE_TAG_APPEXECLINK: Final = 0x8000001B if sys.platform == "win32": - FILE_ATTRIBUTE_ARCHIVE: Literal[32] - FILE_ATTRIBUTE_COMPRESSED: Literal[2048] - FILE_ATTRIBUTE_DEVICE: Literal[64] - FILE_ATTRIBUTE_DIRECTORY: Literal[16] - FILE_ATTRIBUTE_ENCRYPTED: Literal[16384] - FILE_ATTRIBUTE_HIDDEN: Literal[2] - FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768] - FILE_ATTRIBUTE_NORMAL: Literal[128] - FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192] - FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072] - FILE_ATTRIBUTE_OFFLINE: Literal[4096] - FILE_ATTRIBUTE_READONLY: Literal[1] - FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024] - FILE_ATTRIBUTE_SPARSE_FILE: Literal[512] - FILE_ATTRIBUTE_SYSTEM: Literal[4] - FILE_ATTRIBUTE_TEMPORARY: Literal[256] - FILE_ATTRIBUTE_VIRTUAL: Literal[65536] + FILE_ATTRIBUTE_ARCHIVE: Final = 32 + FILE_ATTRIBUTE_COMPRESSED: Final = 2048 + FILE_ATTRIBUTE_DEVICE: Final = 64 + FILE_ATTRIBUTE_DIRECTORY: Final = 16 + FILE_ATTRIBUTE_ENCRYPTED: Final = 16384 + FILE_ATTRIBUTE_HIDDEN: Final = 2 + FILE_ATTRIBUTE_INTEGRITY_STREAM: Final = 32768 + FILE_ATTRIBUTE_NORMAL: Final = 128 + FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Final = 8192 + FILE_ATTRIBUTE_NO_SCRUB_DATA: Final = 131072 + FILE_ATTRIBUTE_OFFLINE: Final = 4096 + FILE_ATTRIBUTE_READONLY: Final = 1 + FILE_ATTRIBUTE_REPARSE_POINT: Final = 1024 + FILE_ATTRIBUTE_SPARSE_FILE: Final = 512 + FILE_ATTRIBUTE_SYSTEM: Final = 4 + FILE_ATTRIBUTE_TEMPORARY: Final = 256 + FILE_ATTRIBUTE_VIRTUAL: Final = 65536 if sys.version_info >= (3, 13): - SF_SETTABLE: Literal[0x3FFF0000] + # Varies by platform. 
+ SF_SETTABLE: Final[int] # https://github.com/python/cpython/issues/114081#issuecomment-2119017790 # SF_RESTRICTED: Literal[0x00080000] - SF_FIRMLINK: Literal[0x00800000] - SF_DATALESS: Literal[0x40000000] + SF_FIRMLINK: Final = 0x00800000 + SF_DATALESS: Final = 0x40000000 - SF_SUPPORTED: Literal[0x9F0000] - SF_SYNTHETIC: Literal[0xC0000000] + if sys.platform == "darwin": + SF_SUPPORTED: Final = 0x9F0000 + SF_SYNTHETIC: Final = 0xC0000000 - UF_TRACKED: Literal[0x00000040] - UF_DATAVAULT: Literal[0x00000080] - UF_SETTABLE: Literal[0x0000FFFF] + UF_TRACKED: Final = 0x00000040 + UF_DATAVAULT: Final = 0x00000080 + UF_SETTABLE: Final = 0x0000FFFF diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_thread.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_thread.pyi index 332aa0c..e155726 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_thread.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_thread.pyi @@ -3,6 +3,7 @@ This module provides primitive operations to write multi-threaded programs. The 'threading' module provides a more convenient interface. """ +import signal import sys from _typeshed import structseq from collections.abc import Callable @@ -17,8 +18,6 @@ error = RuntimeError def _count() -> int: """ - _count() -> integer - Return the number of currently running Python threads, excluding the main thread. The returned number comprises all threads created through `start_new_thread()` as well as `threading.Thread`, and not @@ -42,11 +41,8 @@ class LockType: unlock it. A thread attempting to lock a lock that it has already locked will block until another thread unlocks it. Deadlocks may ensue. """ - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: """ - acquire(blocking=True, timeout=-1) -> bool - (acquire_lock() is an obsolete synonym) - Lock the lock. 
Without argument, this blocks if the lock is already locked (even by the same thread), waiting for another thread to release the lock, and return True once the lock is acquired. @@ -57,110 +53,114 @@ class LockType: ... def release(self) -> None: """ - release() - (release_lock() is an obsolete synonym) - Release the lock, allowing another thread that is blocked waiting for the lock to acquire the lock. The lock must be in the locked state, but it needn't be locked by the same thread that unlocks it. """ ... def locked(self) -> bool: - """ - locked() -> bool - (locked_lock() is an obsolete synonym) - - Return whether the lock is in the locked state. - """ + """Return whether the lock is in the locked state.""" + ... + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: + """An obsolete synonym of acquire().""" + ... + def release_lock(self) -> None: + """An obsolete synonym of release().""" + ... + def locked_lock(self) -> bool: + """An obsolete synonym of locked().""" ... def __enter__(self) -> bool: - """ - acquire(blocking=True, timeout=-1) -> bool - (acquire_lock() is an obsolete synonym) - - Lock the lock. Without argument, this blocks if the lock is already - locked (even by the same thread), waiting for another thread to release - the lock, and return True once the lock is acquired. - With an argument, this will only block if the argument is true, - and the return value reflects whether the lock is acquired. - The blocking operation is interruptible. - """ + """Lock the lock.""" ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: - """ - release() - (release_lock() is an obsolete synonym) + """Release the lock.""" + ... - Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. The lock must be in the locked state, - but it needn't be locked by the same thread that unlocks it. 
+if sys.version_info >= (3, 13): + @final + class _ThreadHandle: + ident: int + + def join(self, timeout: float | None = None, /) -> None: ... + def is_done(self) -> bool: ... + def _set_done(self) -> None: ... + + def start_joinable_thread( + function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True + ) -> _ThreadHandle: + """ + *For internal use only*: start a new thread. + + Like start_new_thread(), this starts a new thread calling the given function. + Unlike start_new_thread(), this returns a handle object with methods to join + or detach the given thread. + This function is not for third-party code, please use the + `threading` module instead. During finalization the runtime will not wait for + the thread to exit if daemon is True. If handle is provided it must be a + newly created thread._ThreadHandle instance. """ ... + lock = LockType @overload -def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> int: +def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: """ - start_new_thread(function, args[, kwargs]) - (start_new() is an obsolete synonym) + Start a new thread and return its identifier. - Start a new thread and return its identifier. The thread will call the - function with positional arguments from the tuple args and keyword arguments - taken from the optional dictionary kwargs. The thread exits when the - function returns; the return value is ignored. The thread will also exit - when the function raises an unhandled exception; a stack trace will be - printed unless the exception is SystemExit. + The thread will call the function with positional arguments from the + tuple args and keyword arguments taken from the optional dictionary + kwargs. The thread exits when the function returns; the return value + is ignored. 
The thread will also exit when the function raises an + unhandled exception; a stack trace will be printed unless the exception + is SystemExit. """ ... @overload -def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any]) -> int: +def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: """ - start_new_thread(function, args[, kwargs]) - (start_new() is an obsolete synonym) + Start a new thread and return its identifier. - Start a new thread and return its identifier. The thread will call the - function with positional arguments from the tuple args and keyword arguments - taken from the optional dictionary kwargs. The thread exits when the - function returns; the return value is ignored. The thread will also exit - when the function raises an unhandled exception; a stack trace will be - printed unless the exception is SystemExit. + The thread will call the function with positional arguments from the + tuple args and keyword arguments taken from the optional dictionary + kwargs. The thread exits when the function returns; the return value + is ignored. The thread will also exit when the function raises an + unhandled exception; a stack trace will be printed unless the exception + is SystemExit. """ ... -def interrupt_main() -> None: - """ - interrupt_main(signum=signal.SIGINT, /) - Simulate the arrival of the given signal in the main thread, - where the corresponding signal handler will be executed. - If *signum* is omitted, SIGINT is assumed. - A subthread can use this function to interrupt the main thread. +if sys.version_info >= (3, 10): + def interrupt_main(signum: signal.Signals = ..., /) -> None: + """ + Simulate the arrival of the given signal in the main thread, + where the corresponding signal handler will be executed. + If *signum* is omitted, SIGINT is assumed. + A subthread can use this function to interrupt the main thread. 
+ + Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. + """ + ... + +else: + def interrupt_main() -> None: ... - Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. - """ - ... def exit() -> NoReturn: """ - exit() - (exit_thread() is an obsolete synonym) - This is synonymous to ``raise SystemExit''. It will cause the current thread to exit silently unless the exception is caught. """ ... def allocate_lock() -> LockType: """ - allocate_lock() -> lock object - (allocate() is an obsolete synonym) - Create a new lock object. See help(type(threading.Lock())) for information about locks. """ ... def get_ident() -> int: """ - get_ident() -> integer - Return a non-zero integer that uniquely identifies the current thread amongst other threads that exist simultaneously. This may be used to identify per-thread resources. @@ -170,10 +170,8 @@ def get_ident() -> int: A thread's identity may be reused for another thread after it exits. """ ... -def stack_size(size: int = ...) -> int: +def stack_size(size: int = 0, /) -> int: """ - stack_size([size]) -> size - Return the thread stack size used when creating new threads. The optional size argument specifies the stack size (in bytes) to be used for subsequently created threads, and must be 0 (use platform or @@ -197,8 +195,6 @@ TIMEOUT_MAX: float def get_native_id() -> int: """ - get_native_id() -> integer - Return a non-negative integer identifying the thread as reported by the OS (kernel). This may be used to uniquely identify a particular thread within a system. @@ -236,8 +232,6 @@ _excepthook: Callable[[_ExceptHookArgs], Any] if sys.version_info >= (3, 12): def daemon_threads_allowed() -> bool: """ - daemon_threads_allowed() - Return True if daemon threads are allowed in the current interpreter, and False otherwise. 
""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_tkinter.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_tkinter.pyi index a8ab7dc..480a444 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_tkinter.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_tkinter.pyi @@ -1,5 +1,7 @@ import sys -from typing import Any, ClassVar, Literal, final +from collections.abc import Callable +from typing import Any, ClassVar, Final, final +from typing_extensions import TypeAlias # _tkinter is meant to be only used internally by tkinter, but some tkinter # functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl @@ -46,6 +48,8 @@ class Tcl_Obj: class TclError(Exception): ... +_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object] + # This class allows running Tcl code. Tkinter uses it internally a lot, and # it's often handy to drop a piece of Tcl code into a tkinter program. Example: # @@ -102,21 +106,28 @@ class TkappType: def unsetvar(self, *args, **kwargs): ... def wantobjects(self, *args, **kwargs): ... def willdispatch(self): ... + if sys.version_info >= (3, 12): + def gettrace(self, /) -> _TkinterTraceFunc | None: + """Get the tracing function.""" + ... + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: + """Set the tracing function.""" + ... 
# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS -ALL_EVENTS: Literal[-3] -FILE_EVENTS: Literal[8] -IDLE_EVENTS: Literal[32] -TIMER_EVENTS: Literal[16] -WINDOW_EVENTS: Literal[4] +ALL_EVENTS: Final = -3 +FILE_EVENTS: Final = 8 +IDLE_EVENTS: Final = 32 +TIMER_EVENTS: Final = 16 +WINDOW_EVENTS: Final = 4 -DONT_WAIT: Literal[2] -EXCEPTION: Literal[8] -READABLE: Literal[2] -WRITABLE: Literal[4] +DONT_WAIT: Final = 2 +EXCEPTION: Final = 8 +READABLE: Final = 2 +WRITABLE: Final = 4 -TCL_VERSION: str -TK_VERSION: str +TCL_VERSION: Final[str] +TK_VERSION: Final[str] @final class TkttType: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_warnings.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_warnings.pyi index 8b56817..a93567b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_warnings.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_warnings.pyi @@ -66,13 +66,9 @@ if sys.version_info >= (3, 12): else: @overload - def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: - """Issue a warning, or maybe ignore it or raise an exception.""" - ... + def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload - def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: - """Issue a warning, or maybe ignore it or raise an exception.""" - ... + def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... 
@overload def warn_explicit( diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_weakref.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_weakref.pyi index ad96497..24ef53d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_weakref.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_weakref.pyi @@ -1,50 +1,12 @@ """Weak-reference support module.""" -import sys from collections.abc import Callable -from typing import Any, Generic, TypeVar, final, overload -from typing_extensions import Self - -if sys.version_info >= (3, 9): - from types import GenericAlias +from typing import Any, TypeVar, overload +from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType, ref as ref _C = TypeVar("_C", bound=Callable[..., Any]) _T = TypeVar("_T") -@final -class CallableProxyType(Generic[_C]): # "weakcallableproxy" - def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... - def __getattr__(self, attr: str) -> Any: ... - __call__: _C - -@final -class ProxyType(Generic[_T]): # "weakproxy" - def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... - def __getattr__(self, attr: str) -> Any: ... - -class ReferenceType(Generic[_T]): - __callback__: Callable[[ReferenceType[_T]], Any] - def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ..., /) -> Self: ... - def __call__(self) -> _T | None: - """Call self as a function.""" - ... - def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... - def __hash__(self) -> int: - """Return hash(self).""" - ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - ... - -ref = ReferenceType - def getweakrefcount(object: Any, /) -> int: """Return the number of weak references to 'object'.""" ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_winapi.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_winapi.pyi index c6fb048..0f71a06 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_winapi.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/_winapi.pyi @@ -1,117 +1,131 @@ import sys from _typeshed import ReadableBuffer from collections.abc import Sequence -from typing import Any, Literal, NoReturn, final, overload +from typing import Any, Final, Literal, NoReturn, final, overload if sys.platform == "win32": - ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000] - BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000] - - CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000] - CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000] - CREATE_NO_WINDOW: Literal[0x8000000] - CREATE_NEW_CONSOLE: Literal[0x10] - CREATE_NEW_PROCESS_GROUP: Literal[0x200] - - DETACHED_PROCESS: Literal[8] - DUPLICATE_CLOSE_SOURCE: Literal[1] - DUPLICATE_SAME_ACCESS: Literal[2] - - ERROR_ALREADY_EXISTS: Literal[183] - ERROR_BROKEN_PIPE: Literal[109] - ERROR_IO_PENDING: Literal[997] - ERROR_MORE_DATA: Literal[234] - ERROR_NETNAME_DELETED: Literal[64] - ERROR_NO_DATA: Literal[232] - ERROR_NO_SYSTEM_RESOURCES: Literal[1450] - ERROR_OPERATION_ABORTED: Literal[995] - ERROR_PIPE_BUSY: Literal[231] - ERROR_PIPE_CONNECTED: Literal[535] - ERROR_SEM_TIMEOUT: Literal[121] - - FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000] - FILE_FLAG_OVERLAPPED: Literal[0x40000000] - - FILE_GENERIC_READ: Literal[1179785] - FILE_GENERIC_WRITE: Literal[1179926] - - FILE_MAP_ALL_ACCESS: Literal[983071] - FILE_MAP_COPY: Literal[1] - FILE_MAP_EXECUTE: Literal[32] - FILE_MAP_READ: Literal[4] - FILE_MAP_WRITE: Literal[2] - - FILE_TYPE_CHAR: Literal[2] - FILE_TYPE_DISK: Literal[1] - FILE_TYPE_PIPE: Literal[3] - FILE_TYPE_REMOTE: Literal[32768] - FILE_TYPE_UNKNOWN: Literal[0] - - GENERIC_READ: Literal[0x80000000] - GENERIC_WRITE: Literal[0x40000000] - 
HIGH_PRIORITY_CLASS: Literal[0x80] - INFINITE: Literal[0xFFFFFFFF] + ABOVE_NORMAL_PRIORITY_CLASS: Final = 0x8000 + BELOW_NORMAL_PRIORITY_CLASS: Final = 0x4000 + + CREATE_BREAKAWAY_FROM_JOB: Final = 0x1000000 + CREATE_DEFAULT_ERROR_MODE: Final = 0x4000000 + CREATE_NO_WINDOW: Final = 0x8000000 + CREATE_NEW_CONSOLE: Final = 0x10 + CREATE_NEW_PROCESS_GROUP: Final = 0x200 + + DETACHED_PROCESS: Final = 8 + DUPLICATE_CLOSE_SOURCE: Final = 1 + DUPLICATE_SAME_ACCESS: Final = 2 + + ERROR_ALREADY_EXISTS: Final = 183 + ERROR_BROKEN_PIPE: Final = 109 + ERROR_IO_PENDING: Final = 997 + ERROR_MORE_DATA: Final = 234 + ERROR_NETNAME_DELETED: Final = 64 + ERROR_NO_DATA: Final = 232 + ERROR_NO_SYSTEM_RESOURCES: Final = 1450 + ERROR_OPERATION_ABORTED: Final = 995 + ERROR_PIPE_BUSY: Final = 231 + ERROR_PIPE_CONNECTED: Final = 535 + ERROR_SEM_TIMEOUT: Final = 121 + + FILE_FLAG_FIRST_PIPE_INSTANCE: Final = 0x80000 + FILE_FLAG_OVERLAPPED: Final = 0x40000000 + + FILE_GENERIC_READ: Final = 1179785 + FILE_GENERIC_WRITE: Final = 1179926 + + FILE_MAP_ALL_ACCESS: Final = 983071 + FILE_MAP_COPY: Final = 1 + FILE_MAP_EXECUTE: Final = 32 + FILE_MAP_READ: Final = 4 + FILE_MAP_WRITE: Final = 2 + + FILE_TYPE_CHAR: Final = 2 + FILE_TYPE_DISK: Final = 1 + FILE_TYPE_PIPE: Final = 3 + FILE_TYPE_REMOTE: Final = 32768 + FILE_TYPE_UNKNOWN: Final = 0 + + GENERIC_READ: Final = 0x80000000 + GENERIC_WRITE: Final = 0x40000000 + HIGH_PRIORITY_CLASS: Final = 0x80 + INFINITE: Final = 0xFFFFFFFF # Ignore the Flake8 error -- flake8-pyi assumes # most numbers this long will be implementation details, # but here we can see that it's a power of 2 - INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] # noqa: Y054 - IDLE_PRIORITY_CLASS: Literal[0x40] - NORMAL_PRIORITY_CLASS: Literal[0x20] - REALTIME_PRIORITY_CLASS: Literal[0x100] - NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF] - - MEM_COMMIT: Literal[0x1000] - MEM_FREE: Literal[0x10000] - MEM_IMAGE: Literal[0x1000000] - MEM_MAPPED: Literal[0x40000] - MEM_PRIVATE: 
Literal[0x20000] - MEM_RESERVE: Literal[0x2000] - - NULL: Literal[0] - OPEN_EXISTING: Literal[3] - - PIPE_ACCESS_DUPLEX: Literal[3] - PIPE_ACCESS_INBOUND: Literal[1] - PIPE_READMODE_MESSAGE: Literal[2] - PIPE_TYPE_MESSAGE: Literal[4] - PIPE_UNLIMITED_INSTANCES: Literal[255] - PIPE_WAIT: Literal[0] - - PAGE_EXECUTE: Literal[0x10] - PAGE_EXECUTE_READ: Literal[0x20] - PAGE_EXECUTE_READWRITE: Literal[0x40] - PAGE_EXECUTE_WRITECOPY: Literal[0x80] - PAGE_GUARD: Literal[0x100] - PAGE_NOACCESS: Literal[0x1] - PAGE_NOCACHE: Literal[0x200] - PAGE_READONLY: Literal[0x2] - PAGE_READWRITE: Literal[0x4] - PAGE_WRITECOMBINE: Literal[0x400] - PAGE_WRITECOPY: Literal[0x8] - - PROCESS_ALL_ACCESS: Literal[0x1FFFFF] - PROCESS_DUP_HANDLE: Literal[0x40] - - SEC_COMMIT: Literal[0x8000000] - SEC_IMAGE: Literal[0x1000000] - SEC_LARGE_PAGES: Literal[0x80000000] - SEC_NOCACHE: Literal[0x10000000] - SEC_RESERVE: Literal[0x4000000] - SEC_WRITECOMBINE: Literal[0x40000000] - - STARTF_USESHOWWINDOW: Literal[0x1] - STARTF_USESTDHANDLES: Literal[0x100] - - STD_ERROR_HANDLE: Literal[0xFFFFFFF4] - STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5] - STD_INPUT_HANDLE: Literal[0xFFFFFFF6] - - STILL_ACTIVE: Literal[259] - SW_HIDE: Literal[0] - SYNCHRONIZE: Literal[0x100000] - WAIT_ABANDONED_0: Literal[128] - WAIT_OBJECT_0: Literal[0] - WAIT_TIMEOUT: Literal[258] + INVALID_HANDLE_VALUE: Final = 0xFFFFFFFFFFFFFFFF # noqa: Y054 + IDLE_PRIORITY_CLASS: Final = 0x40 + NORMAL_PRIORITY_CLASS: Final = 0x20 + REALTIME_PRIORITY_CLASS: Final = 0x100 + NMPWAIT_WAIT_FOREVER: Final = 0xFFFFFFFF + + MEM_COMMIT: Final = 0x1000 + MEM_FREE: Final = 0x10000 + MEM_IMAGE: Final = 0x1000000 + MEM_MAPPED: Final = 0x40000 + MEM_PRIVATE: Final = 0x20000 + MEM_RESERVE: Final = 0x2000 + + NULL: Final = 0 + OPEN_EXISTING: Final = 3 + + PIPE_ACCESS_DUPLEX: Final = 3 + PIPE_ACCESS_INBOUND: Final = 1 + PIPE_READMODE_MESSAGE: Final = 2 + PIPE_TYPE_MESSAGE: Final = 4 + PIPE_UNLIMITED_INSTANCES: Final = 255 + PIPE_WAIT: Final = 0 + + PAGE_EXECUTE: 
Final = 0x10 + PAGE_EXECUTE_READ: Final = 0x20 + PAGE_EXECUTE_READWRITE: Final = 0x40 + PAGE_EXECUTE_WRITECOPY: Final = 0x80 + PAGE_GUARD: Final = 0x100 + PAGE_NOACCESS: Final = 0x1 + PAGE_NOCACHE: Final = 0x200 + PAGE_READONLY: Final = 0x2 + PAGE_READWRITE: Final = 0x4 + PAGE_WRITECOMBINE: Final = 0x400 + PAGE_WRITECOPY: Final = 0x8 + + PROCESS_ALL_ACCESS: Final = 0x1FFFFF + PROCESS_DUP_HANDLE: Final = 0x40 + + SEC_COMMIT: Final = 0x8000000 + SEC_IMAGE: Final = 0x1000000 + SEC_LARGE_PAGES: Final = 0x80000000 + SEC_NOCACHE: Final = 0x10000000 + SEC_RESERVE: Final = 0x4000000 + SEC_WRITECOMBINE: Final = 0x40000000 + + if sys.version_info >= (3, 13): + STARTF_FORCEOFFFEEDBACK: Final = 0x80 + STARTF_FORCEONFEEDBACK: Final = 0x40 + STARTF_PREVENTPINNING: Final = 0x2000 + STARTF_RUNFULLSCREEN: Final = 0x20 + STARTF_TITLEISAPPID: Final = 0x1000 + STARTF_TITLEISLINKNAME: Final = 0x800 + STARTF_UNTRUSTEDSOURCE: Final = 0x8000 + STARTF_USECOUNTCHARS: Final = 0x8 + STARTF_USEFILLATTRIBUTE: Final = 0x10 + STARTF_USEHOTKEY: Final = 0x200 + STARTF_USEPOSITION: Final = 0x4 + STARTF_USESIZE: Final = 0x2 + + STARTF_USESHOWWINDOW: Final = 0x1 + STARTF_USESTDHANDLES: Final = 0x100 + + STD_ERROR_HANDLE: Final = 0xFFFFFFF4 + STD_OUTPUT_HANDLE: Final = 0xFFFFFFF5 + STD_INPUT_HANDLE: Final = 0xFFFFFFF6 + + STILL_ACTIVE: Final = 259 + SW_HIDE: Final = 0 + SYNCHRONIZE: Final = 0x100000 + WAIT_ABANDONED_0: Final = 128 + WAIT_OBJECT_0: Final = 0 + WAIT_TIMEOUT: Final = 258 if sys.version_info >= (3, 10): LOCALE_NAME_INVARIANT: str @@ -131,32 +145,32 @@ if sys.platform == "win32": LCMAP_UPPERCASE: int if sys.version_info >= (3, 12): - COPYFILE2_CALLBACK_CHUNK_STARTED: Literal[1] - COPYFILE2_CALLBACK_CHUNK_FINISHED: Literal[2] - COPYFILE2_CALLBACK_STREAM_STARTED: Literal[3] - COPYFILE2_CALLBACK_STREAM_FINISHED: Literal[4] - COPYFILE2_CALLBACK_POLL_CONTINUE: Literal[5] - COPYFILE2_CALLBACK_ERROR: Literal[6] - - COPYFILE2_PROGRESS_CONTINUE: Literal[0] - COPYFILE2_PROGRESS_CANCEL: Literal[1] - 
COPYFILE2_PROGRESS_STOP: Literal[2] - COPYFILE2_PROGRESS_QUIET: Literal[3] - COPYFILE2_PROGRESS_PAUSE: Literal[4] - - COPY_FILE_FAIL_IF_EXISTS: Literal[0x1] - COPY_FILE_RESTARTABLE: Literal[0x2] - COPY_FILE_OPEN_SOURCE_FOR_WRITE: Literal[0x4] - COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Literal[0x8] - COPY_FILE_COPY_SYMLINK: Literal[0x800] - COPY_FILE_NO_BUFFERING: Literal[0x1000] - COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Literal[0x2000] - COPY_FILE_RESUME_FROM_PAUSE: Literal[0x4000] - COPY_FILE_NO_OFFLOAD: Literal[0x40000] - COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Literal[0x10000000] - - ERROR_ACCESS_DENIED: Literal[5] - ERROR_PRIVILEGE_NOT_HELD: Literal[1314] + COPYFILE2_CALLBACK_CHUNK_STARTED: Final = 1 + COPYFILE2_CALLBACK_CHUNK_FINISHED: Final = 2 + COPYFILE2_CALLBACK_STREAM_STARTED: Final = 3 + COPYFILE2_CALLBACK_STREAM_FINISHED: Final = 4 + COPYFILE2_CALLBACK_POLL_CONTINUE: Final = 5 + COPYFILE2_CALLBACK_ERROR: Final = 6 + + COPYFILE2_PROGRESS_CONTINUE: Final = 0 + COPYFILE2_PROGRESS_CANCEL: Final = 1 + COPYFILE2_PROGRESS_STOP: Final = 2 + COPYFILE2_PROGRESS_QUIET: Final = 3 + COPYFILE2_PROGRESS_PAUSE: Final = 4 + + COPY_FILE_FAIL_IF_EXISTS: Final = 0x1 + COPY_FILE_RESTARTABLE: Final = 0x2 + COPY_FILE_OPEN_SOURCE_FOR_WRITE: Final = 0x4 + COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Final = 0x8 + COPY_FILE_COPY_SYMLINK: Final = 0x800 + COPY_FILE_NO_BUFFERING: Final = 0x1000 + COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Final = 0x2000 + COPY_FILE_RESUME_FROM_PAUSE: Final = 0x4000 + COPY_FILE_NO_OFFLOAD: Final = 0x40000 + COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Final = 0x10000000 + + ERROR_ACCESS_DENIED: Final = 5 + ERROR_PRIVILEGE_NOT_HELD: Final = 1314 def CloseHandle(handle: int, /) -> None: ... @overload @@ -250,6 +264,20 @@ if sys.platform == "win32": def cancel(self) -> None: ... def getbuffer(self) -> bytes | None: ... 
+ if sys.version_info >= (3, 13): + def BatchedWaitForMultipleObjects( + handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF + ) -> list[int]: ... + def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ... + def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ... + def GetLongPathName(path: str) -> str: ... + def GetShortPathName(path: str) -> str: ... + def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ... + def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ... + def ReleaseMutex(mutex: int) -> None: ... + def ResetEvent(event: int) -> None: ... + def SetEvent(event: int) -> None: ... + if sys.version_info >= (3, 12): def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/abc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/abc.pyi index e8006e0..a1f9fdd 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/abc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/abc.pyi @@ -73,7 +73,7 @@ def abstractmethod(funcobj: _FuncT) -> _FuncT: ... """ ... -@deprecated("Deprecated, use 'classmethod' with 'abstractmethod' instead") +@deprecated("Use 'classmethod' with 'abstractmethod' instead") class abstractclassmethod(classmethod[_T, _P, _R_co]): """ A decorator indicating abstract classmethods. @@ -89,7 +89,7 @@ class abstractclassmethod(classmethod[_T, _P, _R_co]): __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... 
-@deprecated("Deprecated, use 'staticmethod' with 'abstractmethod' instead") +@deprecated("Use 'staticmethod' with 'abstractmethod' instead") class abstractstaticmethod(staticmethod[_P, _R_co]): """ A decorator indicating abstract staticmethods. @@ -105,7 +105,7 @@ class abstractstaticmethod(staticmethod[_P, _R_co]): __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[_P, _R_co]) -> None: ... -@deprecated("Deprecated, use 'property' with 'abstractmethod' instead") +@deprecated("Use 'property' with 'abstractmethod' instead") class abstractproperty(property): """ A decorator indicating abstract properties. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/aifc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/aifc.pyi index ca9c027..05bf539 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/aifc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/aifc.pyi @@ -1,140 +1,3 @@ -""" -Stuff to parse AIFF-C and AIFF files. - -Unless explicitly stated otherwise, the description below is true -both for AIFF-C files and AIFF files. - -An AIFF-C file has the following structure. - - +-----------------+ - | FORM | - +-----------------+ - | | - +----+------------+ - | | AIFC | - | +------------+ - | | | - | | . | - | | . | - | | . | - +----+------------+ - -An AIFF file has the string "AIFF" instead of "AIFC". - -A chunk consists of an identifier (4 bytes) followed by a size (4 bytes, -big endian order), followed by the data. The size field does not include -the size of the 8 byte header. - -The following chunk types are recognized. - - FVER - (AIFF-C only). 
- MARK - <# of markers> (2 bytes) - list of markers: - (2 bytes, must be > 0) - (4 bytes) - ("pstring") - COMM - <# of channels> (2 bytes) - <# of sound frames> (4 bytes) - (2 bytes) - (10 bytes, IEEE 80-bit extended - floating point) - in AIFF-C files only: - (4 bytes) - ("pstring") - SSND - (4 bytes, not used by this program) - (4 bytes, not used by this program) - - -A pstring consists of 1 byte length, a string of characters, and 0 or 1 -byte pad to make the total length even. - -Usage. - -Reading AIFF files: - f = aifc.open(file, 'r') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods read(), seek(), and close(). -In some types of audio files, if the setpos() method is not used, -the seek() method is not necessary. - -This returns an instance of a class with the following public methods: - getnchannels() -- returns number of audio channels (1 for - mono, 2 for stereo) - getsampwidth() -- returns sample width in bytes - getframerate() -- returns sampling frequency - getnframes() -- returns number of audio frames - getcomptype() -- returns compression type ('NONE' for AIFF files) - getcompname() -- returns human-readable version of - compression type ('not compressed' for AIFF files) - getparams() -- returns a namedtuple consisting of all of the - above in the above order - getmarkers() -- get the list of marks in the audio file or None - if there are no marks - getmark(id) -- get mark with the specified id (raises an error - if the mark does not exist) - readframes(n) -- returns at most n frames of audio - rewind() -- rewind to the beginning of the audio stream - setpos(pos) -- seek to the specified position - tell() -- return the current position - close() -- close the instance (make it unusable) -The position returned by tell(), the position given to setpos() and -the position of marks are all compatible and have nothing to do with -the actual position in the file. 
-The close() method is called automatically when the class instance -is destroyed. - -Writing AIFF files: - f = aifc.open(file, 'w') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods write(), tell(), seek(), and -close(). - -This returns an instance of a class with the following public methods: - aiff() -- create an AIFF file (AIFF-C default) - aifc() -- create an AIFF-C file - setnchannels(n) -- set the number of channels - setsampwidth(n) -- set the sample width - setframerate(n) -- set the frame rate - setnframes(n) -- set the number of frames - setcomptype(type, name) - -- set the compression type and the - human-readable compression type - setparams(tuple) - -- set all parameters at once - setmark(id, pos, name) - -- add specified mark to the list of marks - tell() -- return current position in output file (useful - in combination with setmark()) - writeframesraw(data) - -- write audio frames without pathing up the - file header - writeframes(data) - -- write audio frames and patch up the file header - close() -- patch up the file header and close the - output file -You should set the parameters before the first writeframesraw or -writeframes. The total number of frames does not need to be set, -but when it is set to the correct value, the header does not have to -be patched up. -It is best to first set all parameters, perhaps possibly the -compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes(b'') or -close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, you must call -close() after all frames have been written. -The close() method is called automatically when the class instance -is destroyed. - -When a file is opened with the extension '.aiff', an AIFF file is -written, otherwise an AIFF-C file is written. 
This default can be -changed by calling aiff() or aifc() before the first writeframes or -writeframesraw. -""" - import sys from types import TracebackType from typing import IO, Any, Literal, NamedTuple, overload @@ -148,7 +11,6 @@ else: class Error(Exception): ... class _aifc_params(NamedTuple): - """_aifc_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" nchannels: int sampwidth: int framerate: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/argparse.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/argparse.pyi index bc67752..fb6a247 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/argparse.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/argparse.pyi @@ -64,7 +64,7 @@ import sys from _typeshed import sentinel from collections.abc import Callable, Generator, Iterable, Sequence from re import Pattern -from typing import IO, Any, Generic, Literal, NewType, NoReturn, Protocol, TypeVar, overload +from typing import IO, Any, Final, Generic, NewType, NoReturn, Protocol, TypeVar, overload from typing_extensions import Self, TypeAlias, deprecated __all__ = [ @@ -94,6 +94,7 @@ _T = TypeVar("_T") _ActionT = TypeVar("_ActionT", bound=Action) _ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) _N = TypeVar("_N") +_ActionType: TypeAlias = Callable[[str], Any] | FileType | str # more precisely, Literal["store", "store_const", "store_true", # "store_false", "append", "append_const", "count", "help", "version", # "extend"], but using this would make it hard to annotate callers @@ -104,15 +105,15 @@ _ActionStr: TypeAlias = str # callers that don't use a literal argument _NArgsStr: TypeAlias = str -ONE_OR_MORE: Literal["+"] -OPTIONAL: Literal["?"] -PARSER: Literal["A..."] -REMAINDER: Literal["..."] +ONE_OR_MORE: Final = "+" +OPTIONAL: Final = "?" +PARSER: Final = "A..." +REMAINDER: Final = "..." 
_SUPPRESS_T = NewType("_SUPPRESS_T", str) SUPPRESS: _SUPPRESS_T | str # not using Literal because argparse sometimes compares SUPPRESS with is # the | str is there so that foo = argparse.SUPPRESS; foo = "test" checks out in mypy -ZERO_OR_MORE: Literal["*"] -_UNRECOGNIZED_ARGS_ATTR: str # undocumented +ZERO_OR_MORE: Final = "*" +_UNRECOGNIZED_ARGS_ATTR: Final[str] # undocumented class ArgumentError(Exception): """ @@ -165,7 +166,7 @@ class _ActionsContainer: nargs: int | _NArgsStr | _SUPPRESS_T | None = None, const: Any = ..., default: Any = ..., - type: Callable[[str], _T] | FileType = ..., + type: _ActionType = ..., choices: Iterable[_T] | None = ..., required: bool = ..., help: str | None = ..., @@ -510,7 +511,7 @@ class Action(_AttributeHolder): nargs: int | str | None const: Any default: Any - type: Callable[[str], Any] | FileType | None + type: _ActionType | None choices: Iterable[Any] | None required: bool help: str | None @@ -553,7 +554,17 @@ class Action(_AttributeHolder): if sys.version_info >= (3, 12): class BooleanOptionalAction(Action): - if sys.version_info >= (3, 13): + if sys.version_info >= (3, 14): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: bool | None = None, + required: bool = False, + help: str | None = None, + deprecated: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 13): @overload def __init__( self, @@ -920,6 +931,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): add_help: bool = ..., allow_abbrev: bool = ..., exit_on_error: bool = ..., + **kwargs: Any, # Accepting any additional kwargs for custom parser classes ) -> _ArgumentParserT: ... elif sys.version_info >= (3, 9): def add_parser( @@ -942,6 +954,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): add_help: bool = ..., allow_abbrev: bool = ..., exit_on_error: bool = ..., + **kwargs: Any, # Accepting any additional kwargs for custom parser classes ) -> _ArgumentParserT: ... 
else: def add_parser( @@ -963,6 +976,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): conflict_handler: str = ..., add_help: bool = ..., allow_abbrev: bool = ..., + **kwargs: Any, # Accepting any additional kwargs for custom parser classes ) -> _ArgumentParserT: ... def _get_subactions(self) -> list[Action]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/array.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/array.pyi index bffe263..a270681 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/array.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/array.pyi @@ -1,6 +1,6 @@ """ This module defines an object type which can efficiently represent -an array of basic values: characters, integers, floating point +an array of basic values: characters, integers, floating-point numbers. Arrays are sequence types and behave very much like lists, except that the type of objects stored in them is constrained. """ @@ -50,8 +50,8 @@ class array(MutableSequence[_T]): 'L' unsigned integer 4 'q' signed integer 8 (see note) 'Q' unsigned integer 8 (see note) - 'f' floating point 4 - 'd' floating point 8 + 'f' floating-point 4 + 'd' floating-point 8 NOTE: The 'u' typecode corresponds to Python's unicode character. On narrow builds this is 2-bytes on wide builds this is 4-bytes. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ast.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ast.pyi index 6cea7f0..103b801 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ast.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ast.pyi @@ -27,261 +27,1976 @@ that work tightly with the python syntax (template engines for example). 
import os import sys -from _ast import * +import typing_extensions +from _ast import ( + PyCF_ALLOW_TOP_LEVEL_AWAIT as PyCF_ALLOW_TOP_LEVEL_AWAIT, + PyCF_ONLY_AST as PyCF_ONLY_AST, + PyCF_TYPE_COMMENTS as PyCF_TYPE_COMMENTS, +) from _typeshed import ReadableBuffer, Unused from collections.abc import Iterator -from typing import Any, Literal, TypeVar as _TypeVar, overload -from typing_extensions import deprecated +from typing import Any, ClassVar, Generic, Literal, TypedDict, TypeVar as _TypeVar, overload +from typing_extensions import Self, Unpack, deprecated -class _ABC(type): - if sys.version_info >= (3, 9): - def __init__(cls, *args: Unused) -> None: ... +if sys.version_info >= (3, 13): + from _ast import PyCF_OPTIMIZED_AST as PyCF_OPTIMIZED_AST -@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14") -class Num(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - value: int | float | complex +# Alias used for fields that must always be valid identifiers +# A string `x` counts as a valid identifier if both the following are True +# (1) `x.isidentifier()` evaluates to `True` +# (2) `keyword.iskeyword(x)` evaluates to `False` +_Identifier: typing_extensions.TypeAlias = str -@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14") -class Str(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - value: str - # Aliases for value, for backwards compatibility - s: str +# Used for node end positions in constructor keyword arguments +_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) -@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14") -class Bytes(Constant, metaclass=_ABC): - """Deprecated AST node class. 
Use ast.Constant instead""" - value: bytes - # Aliases for value, for backwards compatibility - s: bytes +# Corresponds to the names in the `_attributes` class variable which is non-empty in certain AST nodes +class _Attributes(TypedDict, Generic[_EndPositionT], total=False): + lineno: int + col_offset: int + end_lineno: _EndPositionT + end_col_offset: _EndPositionT -@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14") -class NameConstant(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - ... +# The various AST classes are implemented in C, and imported from _ast at runtime, +# but they consider themselves to live in the ast module, +# so we'll define the stubs in this file. +class AST: + if sys.version_info >= (3, 10): + __match_args__ = () + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] + if sys.version_info >= (3, 13): + _field_types: ClassVar[dict[str, Any]] -@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14") -class Ellipsis(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" + if sys.version_info >= (3, 14): + def __replace__(self) -> Self: ... + +class mod(AST): + """ + mod = Module(stmt* body, type_ignore* type_ignores) + | Interactive(stmt* body) + | Expression(expr body) + | FunctionType(expr* argtypes, expr returns) + """ ... -if sys.version_info >= (3, 9): - class slice(AST): - """Deprecated AST node class.""" - ... - class ExtSlice(slice): - """Deprecated AST node class. Use ast.Tuple instead.""" - ... - class Index(slice): - """Deprecated AST node class. Use the index value directly instead.""" - ... - class Suite(mod): - """Deprecated AST node class. Unused in Python 3.""" - ... - class AugLoad(expr_context): - """Deprecated AST node class. Unused in Python 3.""" - ... - class AugStore(expr_context): - """Deprecated AST node class. Unused in Python 3.""" - ... 
- class Param(expr_context): - """Deprecated AST node class. Unused in Python 3.""" - ... +class Module(mod): + """Module(stmt* body, type_ignore* type_ignores)""" + if sys.version_info >= (3, 10): + __match_args__ = ("body", "type_ignores") + body: list[stmt] + type_ignores: list[TypeIgnore] + if sys.version_info >= (3, 13): + def __init__(self, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> None: ... + else: + def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ... -class NodeVisitor: + if sys.version_info >= (3, 14): + def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ... + +class Interactive(mod): + """Interactive(stmt* body)""" + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__(self, body: list[stmt] = ...) -> None: ... + else: + def __init__(self, body: list[stmt]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, body: list[stmt] = ...) -> Self: ... + +class Expression(mod): + """Expression(expr body)""" + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: expr + def __init__(self, body: expr) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, body: expr = ...) -> Self: ... + +class FunctionType(mod): + """FunctionType(expr* argtypes, expr returns)""" + if sys.version_info >= (3, 10): + __match_args__ = ("argtypes", "returns") + argtypes: list[expr] + returns: expr + if sys.version_info >= (3, 13): + @overload + def __init__(self, argtypes: list[expr], returns: expr) -> None: ... + @overload + def __init__(self, argtypes: list[expr] = ..., *, returns: expr) -> None: ... + else: + def __init__(self, argtypes: list[expr], returns: expr) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ... 
+ +class stmt(AST): """ - A node visitor base class that walks the abstract syntax tree and calls a - visitor function for every node found. This function may return a value - which is forwarded by the `visit` method. + stmt = FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) + | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) + | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) + | Return(expr? value) + | Delete(expr* targets) + | Assign(expr* targets, expr value, string? type_comment) + | TypeAlias(expr name, type_param* type_params, expr value) + | AugAssign(expr target, operator op, expr value) + | AnnAssign(expr target, expr annotation, expr? value, int simple) + | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | While(expr test, stmt* body, stmt* orelse) + | If(expr test, stmt* body, stmt* orelse) + | With(withitem* items, stmt* body, string? type_comment) + | AsyncWith(withitem* items, stmt* body, string? type_comment) + | Match(expr subject, match_case* cases) + | Raise(expr? exc, expr? cause) + | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | Assert(expr test, expr? msg) + | Import(alias* names) + | ImportFrom(identifier? module, alias* names, int? level) + | Global(identifier* names) + | Nonlocal(identifier* names) + | Expr(expr value) + | Pass + | Break + | Continue + """ + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... 
- This class is meant to be subclassed, with the subclass adding visitor - methods. + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... - Per default the visitor functions for the nodes are ``'visit_'`` + - class name of the node. So a `TryFinally` node visit function would - be `visit_TryFinally`. This behavior can be changed by overriding - the `visit` method. If no visitor function exists for a node - (return value `None`) the `generic_visit` visitor is used instead. +class FunctionDef(stmt): + """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = None, + type_comment: str | None = None, + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + elif sys.version_info >= (3, 12): + @overload + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None, + type_comment: str | None, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ @overload + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + *, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... - Don't use the `NodeVisitor` if you want to apply changes to nodes during - traversing. For this a special visitor exists (`NodeTransformer`) that - allows modifications. + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: _Identifier = ..., + args: arguments = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = ..., + type_comment: str | None = ..., + type_params: list[type_param] = ..., + ) -> Self: ... + +class AsyncFunctionDef(stmt): + """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + returns: expr | None = None, + type_comment: str | None = None, + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ elif sys.version_info >= (3, 12): + @overload + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None, + type_comment: str | None, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + @overload + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + *, + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: _Identifier, + args: arguments, + body: list[stmt], + decorator_list: list[expr], + returns: expr | None = None, + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: _Identifier = ..., + args: arguments = ..., + body: list[stmt], + decorator_list: list[expr], + returns: expr | None, + type_comment: str | None, + type_params: list[type_param], + ) -> Self: ... + +class ClassDef(stmt): + """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params)""" + if sys.version_info >= (3, 12): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") + elif sys.version_info >= (3, 10): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") + name: _Identifier + bases: list[expr] + keywords: list[keyword] + body: list[stmt] + decorator_list: list[expr] + if sys.version_info >= (3, 12): + type_params: list[type_param] + if sys.version_info >= (3, 13): + def __init__( + self, + name: _Identifier, + bases: list[expr] = ..., + keywords: list[keyword] = ..., + body: list[stmt] = ..., + decorator_list: list[expr] = ..., + type_params: list[type_param] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ elif sys.version_info >= (3, 12): + def __init__( + self, + name: _Identifier, + bases: list[expr], + keywords: list[keyword], + body: list[stmt], + decorator_list: list[expr], + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + name: _Identifier, + bases: list[expr], + keywords: list[keyword], + body: list[stmt], + decorator_list: list[expr], + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: _Identifier, + bases: list[expr], + keywords: list[keyword], + body: list[stmt], + decorator_list: list[expr], + type_params: list[type_param], + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class Return(stmt): + """Return(expr? value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Delete(stmt): + """Delete(expr* targets)""" + if sys.version_info >= (3, 10): + __match_args__ = ("targets",) + targets: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Assign(stmt): + """Assign(expr* targets, expr value, string? 
type_comment)""" + if sys.version_info >= (3, 10): + __match_args__ = ("targets", "value", "type_comment") + targets: list[expr] + value: expr + type_comment: str | None + if sys.version_info >= (3, 13): + @overload + def __init__( + self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + @overload + def __init__( + self, targets: list[expr] = ..., *, value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__( + self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +if sys.version_info >= (3, 12): + class TypeAlias(stmt): + """TypeAlias(expr name, type_param* type_params, expr value)""" + __match_args__ = ("name", "type_params", "value") + name: Name + type_params: list[type_param] + value: expr + if sys.version_info >= (3, 13): + @overload + def __init__( + self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + @overload + def __init__( + self, name: Name, type_params: list[type_param] = ..., *, value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + else: + def __init__( + self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: Name = ..., + type_params: list[type_param] = ..., + value: expr = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... 
+ +class AugAssign(stmt): + """AugAssign(expr target, operator op, expr value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "op", "value") + target: Name | Attribute | Subscript + op: operator + value: expr + def __init__( + self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: Name | Attribute | Subscript = ..., + op: operator = ..., + value: expr = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AnnAssign(stmt): + """AnnAssign(expr target, expr annotation, expr? value, int simple)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "annotation", "value", "simple") + target: Name | Attribute | Subscript + annotation: expr + value: expr | None + simple: int + @overload + def __init__( + self, + target: Name | Attribute | Subscript, + annotation: expr, + value: expr | None, + simple: int, + **kwargs: Unpack[_Attributes], + ) -> None: ... + @overload + def __init__( + self, + target: Name | Attribute | Subscript, + annotation: expr, + value: expr | None = None, + *, + simple: int, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: Name | Attribute | Subscript = ..., + annotation: expr = ..., + value: expr | None = ..., + simple: int = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class For(stmt): + """For(expr target, expr iter, stmt* body, stmt* orelse, string? 
type_comment)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt], + orelse: list[stmt], + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: expr = ..., + iter: expr = ..., + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AsyncFor(stmt): + """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + target: expr, + iter: expr, + body: list[stmt], + orelse: list[stmt], + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + target: expr = ..., + iter: expr = ..., + body: list[stmt] = ..., + orelse: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... 
+ +class While(stmt): + """While(expr test, stmt* body, stmt* orelse)""" + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> Self: ... + +class If(stmt): + """If(expr test, stmt* body, stmt* orelse)""" + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, test: expr, body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class With(stmt): + """With(withitem* items, stmt* body, string? type_comment)""" + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class AsyncWith(stmt): + """AsyncWith(withitem* items, stmt* body, string? type_comment)""" + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + type_comment: str | None + if sys.version_info >= (3, 13): + def __init__( + self, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = None, + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + items: list[withitem] = ..., + body: list[stmt] = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +if sys.version_info >= (3, 10): + class Match(stmt): + """Match(expr subject, match_case* cases)""" + __match_args__ = ("subject", "cases") + subject: expr + cases: list[match_case] + if sys.version_info >= (3, 13): + def __init__(self, subject: expr, cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Raise(stmt): + """Raise(expr? exc, expr? cause)""" + if sys.version_info >= (3, 10): + __match_args__ = ("exc", "cause") + exc: expr | None + cause: expr | None + def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Try(stmt): + """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" + if sys.version_info >= (3, 10): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + body: list[stmt], + handlers: list[ExceptHandler], + orelse: list[stmt], + finalbody: list[stmt], + **kwargs: Unpack[_Attributes], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +if sys.version_info >= (3, 11): + class TryStar(stmt): + """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> None: ... + else: + def __init__( + self, + body: list[stmt], + handlers: list[ExceptHandler], + orelse: list[stmt], + finalbody: list[stmt], + **kwargs: Unpack[_Attributes], + ) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + body: list[stmt] = ..., + handlers: list[ExceptHandler] = ..., + orelse: list[stmt] = ..., + finalbody: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... + +class Assert(stmt): + """Assert(expr test, expr? msg)""" + if sys.version_info >= (3, 10): + __match_args__ = ("test", "msg") + test: expr + msg: expr | None + def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, test: expr, msg: expr | None, **kwargs: Unpack[_Attributes]) -> Self: ... + +class Import(stmt): + """Import(alias* names)""" + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[alias] + if sys.version_info >= (3, 13): + def __init__(self, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class ImportFrom(stmt): + """ImportFrom(identifier? module, alias* names, int? level)""" + if sys.version_info >= (3, 10): + __match_args__ = ("module", "names", "level") + module: str | None + names: list[alias] + level: int + if sys.version_info >= (3, 13): + @overload + def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... + @overload + def __init__( + self, module: str | None = None, names: list[alias] = ..., *, level: int, **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + @overload + def __init__(self, module: str | None, names: list[alias], level: int, **kwargs: Unpack[_Attributes]) -> None: ... + @overload + def __init__( + self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes] + ) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Global(stmt): + """Global(identifier* names)""" + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[_Identifier] + if sys.version_info >= (3, 13): + def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> Self: ... + +class Nonlocal(stmt): + """Nonlocal(identifier* names)""" + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[_Identifier] + if sys.version_info >= (3, 13): + def __init__(self, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Expr(stmt): + """Expr(expr value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Pass(stmt): + """Pass""" + ... +class Break(stmt): + """Break""" + ... +class Continue(stmt): + """Continue""" + ... + +class expr(AST): """ - def visit(self, node: AST) -> Any: - """Visit a node.""" - ... - def generic_visit(self, node: AST) -> Any: - """Called if no explicit visitor function exists for a node.""" - ... - def visit_Module(self, node: Module) -> Any: ... - def visit_Interactive(self, node: Interactive) -> Any: ... 
- def visit_Expression(self, node: Expression) -> Any: ... - def visit_FunctionDef(self, node: FunctionDef) -> Any: ... - def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any: ... - def visit_ClassDef(self, node: ClassDef) -> Any: ... - def visit_Return(self, node: Return) -> Any: ... - def visit_Delete(self, node: Delete) -> Any: ... - def visit_Assign(self, node: Assign) -> Any: ... - def visit_AugAssign(self, node: AugAssign) -> Any: ... - def visit_AnnAssign(self, node: AnnAssign) -> Any: ... - def visit_For(self, node: For) -> Any: ... - def visit_AsyncFor(self, node: AsyncFor) -> Any: ... - def visit_While(self, node: While) -> Any: ... - def visit_If(self, node: If) -> Any: ... - def visit_With(self, node: With) -> Any: ... - def visit_AsyncWith(self, node: AsyncWith) -> Any: ... - def visit_Raise(self, node: Raise) -> Any: ... - def visit_Try(self, node: Try) -> Any: ... - def visit_Assert(self, node: Assert) -> Any: ... - def visit_Import(self, node: Import) -> Any: ... - def visit_ImportFrom(self, node: ImportFrom) -> Any: ... - def visit_Global(self, node: Global) -> Any: ... - def visit_Nonlocal(self, node: Nonlocal) -> Any: ... - def visit_Expr(self, node: Expr) -> Any: ... - def visit_Pass(self, node: Pass) -> Any: ... - def visit_Break(self, node: Break) -> Any: ... - def visit_Continue(self, node: Continue) -> Any: ... - def visit_Slice(self, node: Slice) -> Any: ... - def visit_BoolOp(self, node: BoolOp) -> Any: ... - def visit_BinOp(self, node: BinOp) -> Any: ... - def visit_UnaryOp(self, node: UnaryOp) -> Any: ... - def visit_Lambda(self, node: Lambda) -> Any: ... - def visit_IfExp(self, node: IfExp) -> Any: ... - def visit_Dict(self, node: Dict) -> Any: ... - def visit_Set(self, node: Set) -> Any: ... - def visit_ListComp(self, node: ListComp) -> Any: ... - def visit_SetComp(self, node: SetComp) -> Any: ... - def visit_DictComp(self, node: DictComp) -> Any: ... - def visit_GeneratorExp(self, node: GeneratorExp) -> Any: ... 
- def visit_Await(self, node: Await) -> Any: ... - def visit_Yield(self, node: Yield) -> Any: ... - def visit_YieldFrom(self, node: YieldFrom) -> Any: ... - def visit_Compare(self, node: Compare) -> Any: ... - def visit_Call(self, node: Call) -> Any: ... - def visit_FormattedValue(self, node: FormattedValue) -> Any: ... - def visit_JoinedStr(self, node: JoinedStr) -> Any: ... - def visit_Constant(self, node: Constant) -> Any: ... - def visit_NamedExpr(self, node: NamedExpr) -> Any: ... - def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... - def visit_Attribute(self, node: Attribute) -> Any: ... - def visit_Subscript(self, node: Subscript) -> Any: ... - def visit_Starred(self, node: Starred) -> Any: ... - def visit_Name(self, node: Name) -> Any: ... - def visit_List(self, node: List) -> Any: ... - def visit_Tuple(self, node: Tuple) -> Any: ... - def visit_Del(self, node: Del) -> Any: ... - def visit_Load(self, node: Load) -> Any: ... - def visit_Store(self, node: Store) -> Any: ... - def visit_And(self, node: And) -> Any: ... - def visit_Or(self, node: Or) -> Any: ... - def visit_Add(self, node: Add) -> Any: ... - def visit_BitAnd(self, node: BitAnd) -> Any: ... - def visit_BitOr(self, node: BitOr) -> Any: ... - def visit_BitXor(self, node: BitXor) -> Any: ... - def visit_Div(self, node: Div) -> Any: ... - def visit_FloorDiv(self, node: FloorDiv) -> Any: ... - def visit_LShift(self, node: LShift) -> Any: ... - def visit_Mod(self, node: Mod) -> Any: ... - def visit_Mult(self, node: Mult) -> Any: ... - def visit_MatMult(self, node: MatMult) -> Any: ... - def visit_Pow(self, node: Pow) -> Any: ... - def visit_RShift(self, node: RShift) -> Any: ... - def visit_Sub(self, node: Sub) -> Any: ... - def visit_Invert(self, node: Invert) -> Any: ... - def visit_Not(self, node: Not) -> Any: ... - def visit_UAdd(self, node: UAdd) -> Any: ... - def visit_USub(self, node: USub) -> Any: ... - def visit_Eq(self, node: Eq) -> Any: ... - def visit_Gt(self, node: Gt) -> Any: ... 
- def visit_GtE(self, node: GtE) -> Any: ... - def visit_In(self, node: In) -> Any: ... - def visit_Is(self, node: Is) -> Any: ... - def visit_IsNot(self, node: IsNot) -> Any: ... - def visit_Lt(self, node: Lt) -> Any: ... - def visit_LtE(self, node: LtE) -> Any: ... - def visit_NotEq(self, node: NotEq) -> Any: ... - def visit_NotIn(self, node: NotIn) -> Any: ... - def visit_comprehension(self, node: comprehension) -> Any: ... - def visit_ExceptHandler(self, node: ExceptHandler) -> Any: ... - def visit_arguments(self, node: arguments) -> Any: ... - def visit_arg(self, node: arg) -> Any: ... - def visit_keyword(self, node: keyword) -> Any: ... - def visit_alias(self, node: alias) -> Any: ... - def visit_withitem(self, node: withitem) -> Any: ... + expr = BoolOp(boolop op, expr* values) + | NamedExpr(expr target, expr value) + | BinOp(expr left, operator op, expr right) + | UnaryOp(unaryop op, expr operand) + | Lambda(arguments args, expr body) + | IfExp(expr test, expr body, expr orelse) + | Dict(expr* keys, expr* values) + | Set(expr* elts) + | ListComp(expr elt, comprehension* generators) + | SetComp(expr elt, comprehension* generators) + | DictComp(expr key, expr value, comprehension* generators) + | GeneratorExp(expr elt, comprehension* generators) + | Await(expr value) + | Yield(expr? value) + | YieldFrom(expr value) + | Compare(expr left, cmpop* ops, expr* comparators) + | Call(expr func, expr* args, keyword* keywords) + | FormattedValue(expr value, int conversion, expr? format_spec) + | JoinedStr(expr* values) + | Constant(constant value, string? kind) + | Attribute(expr value, identifier attr, expr_context ctx) + | Subscript(expr value, expr slice, expr_context ctx) + | Starred(expr value, expr_context ctx) + | Name(identifier id, expr_context ctx) + | List(expr* elts, expr_context ctx) + | Tuple(expr* elts, expr_context ctx) + | Slice(expr? lower, expr? upper, expr? 
step) + """ + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... + +class BoolOp(expr): + """BoolOp(boolop op, expr* values)""" + if sys.version_info >= (3, 10): + __match_args__ = ("op", "values") + op: boolop + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, op: boolop, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class NamedExpr(expr): + """NamedExpr(expr target, expr value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "value") + target: Name + value: expr + def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class BinOp(expr): + """BinOp(expr left, operator op, expr right)""" + if sys.version_info >= (3, 10): + __match_args__ = ("left", "op", "right") + left: expr + op: operator + right: expr + def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class UnaryOp(expr): + """UnaryOp(unaryop op, expr operand)""" + if sys.version_info >= (3, 10): + __match_args__ = ("op", "operand") + op: unaryop + operand: expr + def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Lambda(expr): + """Lambda(arguments args, expr body)""" + if sys.version_info >= (3, 10): + __match_args__ = ("args", "body") + args: arguments + body: expr + def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class IfExp(expr): + """IfExp(expr test, expr body, expr orelse)""" + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: expr + orelse: expr + def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Dict(expr): + """Dict(expr* keys, expr* values)""" + if sys.version_info >= (3, 10): + __match_args__ = ("keys", "values") + keys: list[expr | None] + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Set(expr): + """Set(expr* elts)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elts",) + elts: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class ListComp(expr): + """ListComp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class SetComp(expr): + """SetComp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class DictComp(expr): + """DictComp(expr key, expr value, comprehension* generators)""" + if sys.version_info >= (3, 10): + __match_args__ = ("key", "value", "generators") + key: expr + value: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__( + self, key: expr, value: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__( + self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class GeneratorExp(expr): + """GeneratorExp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + if sys.version_info >= (3, 13): + def __init__(self, elt: expr, generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Await(expr): + """Await(expr value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Yield(expr): + """Yield(expr? value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class YieldFrom(expr): + """YieldFrom(expr value)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
+ +class Compare(expr): + """Compare(expr left, cmpop* ops, expr* comparators)""" + if sys.version_info >= (3, 10): + __match_args__ = ("left", "ops", "comparators") + left: expr + ops: list[cmpop] + comparators: list[expr] + if sys.version_info >= (3, 13): + def __init__( + self, left: expr, ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Call(expr): + """Call(expr func, expr* args, keyword* keywords)""" + if sys.version_info >= (3, 10): + __match_args__ = ("func", "args", "keywords") + func: expr + args: list[expr] + keywords: list[keyword] + if sys.version_info >= (3, 13): + def __init__( + self, func: expr, args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class FormattedValue(expr): + """FormattedValue(expr value, int conversion, expr? format_spec)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value", "conversion", "format_spec") + value: expr + conversion: int + format_spec: expr | None + def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... 
+ +class JoinedStr(expr): + """JoinedStr(expr* values)""" + if sys.version_info >= (3, 10): + __match_args__ = ("values",) + values: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Constant(expr): + """Constant(constant value, string? kind)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value", "kind") + value: Any # None, str, bytes, bool, int, float, complex, Ellipsis + kind: str | None + if sys.version_info < (3, 14): + # Aliases for value, for backwards compatibility + s: Any + n: int | float | complex + + def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: Any = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Attribute(expr): + """Attribute(expr value, identifier attr, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value", "attr", "ctx") + value: expr + attr: _Identifier + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... 
+ +class Subscript(expr): + """Subscript(expr value, expr slice, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value", "slice", "ctx") + value: expr + slice: _Slice + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, value: expr = ..., slice: _Slice = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... + +class Starred(expr): + """Starred(expr value, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("value", "ctx") + value: expr + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Name(expr): + """Name(identifier id, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("id", "ctx") + id: _Identifier + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class List(expr): + """List(expr* elts, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
+ else: + def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class Tuple(expr): + """Tuple(expr* elts, expr_context ctx)""" + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__` + if sys.version_info >= (3, 9): + dims: list[expr] + if sys.version_info >= (3, 13): + def __init__(self, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + else: + def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + +class slice(AST): + """Deprecated AST node class.""" + ... + +if sys.version_info >= (3, 9): + _Slice: typing_extensions.TypeAlias = expr + _SliceAttributes: typing_extensions.TypeAlias = _Attributes +else: + # alias for use with variables named slice + _Slice: typing_extensions.TypeAlias = slice + + class _SliceAttributes(TypedDict): ... + +class Slice(_Slice): + """Slice(expr? lower, expr? upper, expr? step)""" + if sys.version_info >= (3, 10): + __match_args__ = ("lower", "upper", "step") + lower: expr | None + upper: expr | None + step: expr | None + def __init__( + self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + lower: expr | None = ..., + upper: expr | None = ..., + step: expr | None = ..., + **kwargs: Unpack[_SliceAttributes], + ) -> Self: ... 
+ +class ExtSlice(slice): # deprecated and moved to ast.py if sys.version_info >= (3, 9) + """Deprecated AST node class. Use ast.Tuple instead.""" + dims: list[slice] + def __init__(self, dims: list[slice], **kwargs: Unpack[_SliceAttributes]) -> None: ... + +class Index(slice): # deprecated and moved to ast.py if sys.version_info >= (3, 9) + """Deprecated AST node class. Use the index value directly instead.""" + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_SliceAttributes]) -> None: ... + +class expr_context(AST): + """expr_context = Load | Store | Del""" + ... +class AugLoad(expr_context): + """Deprecated AST node class. Unused in Python 3.""" + ... +class AugStore(expr_context): + """Deprecated AST node class. Unused in Python 3.""" + ... +class Param(expr_context): + """Deprecated AST node class. Unused in Python 3.""" + ... + +class Suite(mod): # deprecated and moved to ast.py if sys.version_info >= (3, 9) + """Deprecated AST node class. Unused in Python 3.""" + body: list[stmt] + def __init__(self, body: list[stmt]) -> None: ... + +class Load(expr_context): + """Load""" + ... +class Store(expr_context): + """Store""" + ... +class Del(expr_context): + """Del""" + ... +class boolop(AST): + """boolop = And | Or""" + ... +class And(boolop): + """And""" + ... +class Or(boolop): + """Or""" + ... +class operator(AST): + """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv""" + ... +class Add(operator): + """Add""" + ... +class Sub(operator): + """Sub""" + ... +class Mult(operator): + """Mult""" + ... +class MatMult(operator): + """MatMult""" + ... +class Div(operator): + """Div""" + ... +class Mod(operator): + """Mod""" + ... +class Pow(operator): + """Pow""" + ... +class LShift(operator): + """LShift""" + ... +class RShift(operator): + """RShift""" + ... +class BitOr(operator): + """BitOr""" + ... +class BitXor(operator): + """BitXor""" + ... 
+class BitAnd(operator): + """BitAnd""" + ... +class FloorDiv(operator): + """FloorDiv""" + ... +class unaryop(AST): + """unaryop = Invert | Not | UAdd | USub""" + ... +class Invert(unaryop): + """Invert""" + ... +class Not(unaryop): + """Not""" + ... +class UAdd(unaryop): + """UAdd""" + ... +class USub(unaryop): + """USub""" + ... +class cmpop(AST): + """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn""" + ... +class Eq(cmpop): + """Eq""" + ... +class NotEq(cmpop): + """NotEq""" + ... +class Lt(cmpop): + """Lt""" + ... +class LtE(cmpop): + """LtE""" + ... +class Gt(cmpop): + """Gt""" + ... +class GtE(cmpop): + """GtE""" + ... +class Is(cmpop): + """Is""" + ... +class IsNot(cmpop): + """IsNot""" + ... +class In(cmpop): + """In""" + ... +class NotIn(cmpop): + """NotIn""" + ... + +class comprehension(AST): + """comprehension(expr target, expr iter, expr* ifs, int is_async)""" + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "ifs", "is_async") + target: expr + iter: expr + ifs: list[expr] + is_async: int + if sys.version_info >= (3, 13): + @overload + def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... + @overload + def __init__(self, target: expr, iter: expr, ifs: list[expr] = ..., *, is_async: int) -> None: ... + else: + def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ... + +class excepthandler(AST): + """excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body)""" + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ... 
+ ) -> Self: ... + +class ExceptHandler(excepthandler): + """ExceptHandler(expr? type, identifier? name, stmt* body)""" if sys.version_info >= (3, 10): - def visit_Match(self, node: Match) -> Any: ... - def visit_match_case(self, node: match_case) -> Any: ... - def visit_MatchValue(self, node: MatchValue) -> Any: ... - def visit_MatchSequence(self, node: MatchSequence) -> Any: ... - def visit_MatchSingleton(self, node: MatchSingleton) -> Any: ... - def visit_MatchStar(self, node: MatchStar) -> Any: ... - def visit_MatchMapping(self, node: MatchMapping) -> Any: ... - def visit_MatchClass(self, node: MatchClass) -> Any: ... - def visit_MatchAs(self, node: MatchAs) -> Any: ... - def visit_MatchOr(self, node: MatchOr) -> Any: ... + __match_args__ = ("type", "name", "body") + type: expr | None + name: _Identifier | None + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__( + self, type: expr | None = None, name: _Identifier | None = None, body: list[stmt] = ..., **kwargs: Unpack[_Attributes] + ) -> None: ... + else: + @overload + def __init__( + self, type: expr | None, name: _Identifier | None, body: list[stmt], **kwargs: Unpack[_Attributes] + ) -> None: ... + @overload + def __init__( + self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes] + ) -> None: ... - if sys.version_info >= (3, 11): - def visit_TryStar(self, node: TryStar) -> Any: ... + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + type: expr | None = ..., + name: _Identifier | None = ..., + body: list[stmt] = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... - if sys.version_info >= (3, 12): - def visit_TypeVar(self, node: TypeVar) -> Any: ... - def visit_ParamSpec(self, node: ParamSpec) -> Any: ... - def visit_TypeVarTuple(self, node: TypeVarTuple) -> Any: ... - def visit_TypeAlias(self, node: TypeAlias) -> Any: ... +class arguments(AST): + """arguments(arg* posonlyargs, arg* args, arg? 
vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults)""" + if sys.version_info >= (3, 10): + __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") + posonlyargs: list[arg] + args: list[arg] + vararg: arg | None + kwonlyargs: list[arg] + kw_defaults: list[expr | None] + kwarg: arg | None + defaults: list[expr] + if sys.version_info >= (3, 13): + def __init__( + self, + posonlyargs: list[arg] = ..., + args: list[arg] = ..., + vararg: arg | None = None, + kwonlyargs: list[arg] = ..., + kw_defaults: list[expr | None] = ..., + kwarg: arg | None = None, + defaults: list[expr] = ..., + ) -> None: ... + else: + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None, + defaults: list[expr], + ) -> None: ... + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None = None, + *, + defaults: list[expr], + ) -> None: ... + @overload + def __init__( + self, + posonlyargs: list[arg], + args: list[arg], + vararg: arg | None = None, + *, + kwonlyargs: list[arg], + kw_defaults: list[expr | None], + kwarg: arg | None = None, + defaults: list[expr], + ) -> None: ... - # visit methods for deprecated nodes - def visit_ExtSlice(self, node: ExtSlice) -> Any: ... - def visit_Index(self, node: Index) -> Any: ... - def visit_Suite(self, node: Suite) -> Any: ... - def visit_AugLoad(self, node: AugLoad) -> Any: ... - def visit_AugStore(self, node: AugStore) -> Any: ... - def visit_Param(self, node: Param) -> Any: ... - def visit_Num(self, node: Num) -> Any: ... - def visit_Str(self, node: Str) -> Any: ... - def visit_Bytes(self, node: Bytes) -> Any: ... - def visit_NameConstant(self, node: NameConstant) -> Any: ... - def visit_Ellipsis(self, node: Ellipsis) -> Any: ... 
+ if sys.version_info >= (3, 14): + def __replace__( + self, + *, + posonlyargs: list[arg] = ..., + args: list[arg] = ..., + vararg: arg | None = ..., + kwonlyargs: list[arg] = ..., + kw_defaults: list[expr | None] = ..., + kwarg: arg | None = ..., + defaults: list[expr] = ..., + ) -> Self: ... -class NodeTransformer(NodeVisitor): - """ - A :class:`NodeVisitor` subclass that walks the abstract syntax tree and - allows modification of nodes. +class arg(AST): + """arg(identifier arg, expr? annotation, string? type_comment)""" + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "annotation", "type_comment") + arg: _Identifier + annotation: expr | None + type_comment: str | None + def __init__( + self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes] + ) -> None: ... - The `NodeTransformer` will walk the AST and use the return value of the - visitor methods to replace or remove the old node. If the return value of - the visitor method is ``None``, the node will be removed from its location, - otherwise it is replaced with the return value. The return value may be the - original node in which case no replacement takes place. + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + arg: _Identifier = ..., + annotation: expr | None = ..., + type_comment: str | None = ..., + **kwargs: Unpack[_Attributes], + ) -> Self: ... - Here is an example transformer that rewrites all occurrences of name lookups - (``foo``) to ``data['foo']``:: +class keyword(AST): + """keyword(identifier? arg, expr value)""" + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "value") + arg: _Identifier | None + value: expr + @overload + def __init__(self, arg: _Identifier | None, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... 
+ @overload + def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... - class RewriteName(NodeTransformer): + if sys.version_info >= (3, 14): + def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... - def visit_Name(self, node): - return Subscript( - value=Name(id='data', ctx=Load()), - slice=Constant(value=node.id), - ctx=node.ctx - ) +class alias(AST): + """alias(identifier name, identifier? asname)""" + lineno: int + col_offset: int + end_lineno: int | None + end_col_offset: int | None + if sys.version_info >= (3, 10): + __match_args__ = ("name", "asname") + name: str + asname: _Identifier | None + def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ... - Keep in mind that if the node you're operating on has child nodes you must - either transform the child nodes yourself or call the :meth:`generic_visit` - method for the node first. + if sys.version_info >= (3, 14): + def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... - For nodes that were part of a collection of statements (that applies to all - statement nodes), the visitor may also return a list of nodes rather than - just a single node. +class withitem(AST): + """withitem(expr context_expr, expr? optional_vars)""" + if sys.version_info >= (3, 10): + __match_args__ = ("context_expr", "optional_vars") + context_expr: expr + optional_vars: expr | None + def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ... - Usually you use the transformer like this:: + if sys.version_info >= (3, 14): + def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ... - node = YourTransformer().visit(node) - """ - def generic_visit(self, node: AST) -> AST: ... - # TODO: Override the visit_* methods with better return types. 
- # The usual return type is AST | None, but Iterable[AST] - # is also allowed in some cases -- this needs to be mapped. +if sys.version_info >= (3, 10): + class match_case(AST): + """match_case(pattern pattern, expr? guard, stmt* body)""" + __match_args__ = ("pattern", "guard", "body") + pattern: _Pattern + guard: expr | None + body: list[stmt] + if sys.version_info >= (3, 13): + def __init__(self, pattern: _Pattern, guard: expr | None = None, body: list[stmt] = ...) -> None: ... + else: + @overload + def __init__(self, pattern: _Pattern, guard: expr | None, body: list[stmt]) -> None: ... + @overload + def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, pattern: _Pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ... + + class pattern(AST): + """ + pattern = MatchValue(expr value) + | MatchSingleton(constant value) + | MatchSequence(pattern* patterns) + | MatchMapping(expr* keys, pattern* patterns, identifier? rest) + | MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) + | MatchStar(identifier? name) + | MatchAs(pattern? pattern, identifier? name) + | MatchOr(pattern* patterns) + """ + lineno: int + col_offset: int + end_lineno: int + end_col_offset: int + def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ... + ) -> Self: ... + + # Without the alias, Pyright complains variables named pattern are recursively defined + _Pattern: typing_extensions.TypeAlias = pattern + + class MatchValue(pattern): + """MatchValue(expr value)""" + __match_args__ = ("value",) + value: expr + def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... 
+ + if sys.version_info >= (3, 14): + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchSingleton(pattern): + """MatchSingleton(constant value)""" + __match_args__ = ("value",) + value: Literal[True, False] | None + def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchSequence(pattern): + """MatchSequence(pattern* patterns)""" + __match_args__ = ("patterns",) + patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchMapping(pattern): + """MatchMapping(expr* keys, pattern* patterns, identifier? rest)""" + __match_args__ = ("keys", "patterns", "rest") + keys: list[expr] + patterns: list[pattern] + rest: _Identifier | None + if sys.version_info >= (3, 13): + def __init__( + self, + keys: list[expr] = ..., + patterns: list[pattern] = ..., + rest: _Identifier | None = None, + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + else: + def __init__( + self, + keys: list[expr], + patterns: list[pattern], + rest: _Identifier | None = None, + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + keys: list[expr] = ..., + patterns: list[pattern] = ..., + rest: _Identifier | None = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... 
+ + class MatchClass(pattern): + """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns)""" + __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") + cls: expr + patterns: list[pattern] + kwd_attrs: list[_Identifier] + kwd_patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__( + self, + cls: expr, + patterns: list[pattern] = ..., + kwd_attrs: list[_Identifier] = ..., + kwd_patterns: list[pattern] = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + else: + def __init__( + self, + cls: expr, + patterns: list[pattern], + kwd_attrs: list[_Identifier], + kwd_patterns: list[pattern], + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + cls: expr = ..., + patterns: list[pattern] = ..., + kwd_attrs: list[_Identifier] = ..., + kwd_patterns: list[pattern] = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + + class MatchStar(pattern): + """MatchStar(identifier? name)""" + __match_args__ = ("name",) + name: _Identifier | None + def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class MatchAs(pattern): + """MatchAs(pattern? pattern, identifier? name)""" + __match_args__ = ("pattern", "name") + pattern: _Pattern | None + name: _Identifier | None + def __init__( + self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... 
+ + class MatchOr(pattern): + """MatchOr(pattern* patterns)""" + __match_args__ = ("patterns",) + patterns: list[pattern] + if sys.version_info >= (3, 13): + def __init__(self, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> None: ... + else: + def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + +class type_ignore(AST): + """type_ignore = TypeIgnore(int lineno, string tag)""" + ... + +class TypeIgnore(type_ignore): + """TypeIgnore(int lineno, string tag)""" + if sys.version_info >= (3, 10): + __match_args__ = ("lineno", "tag") + lineno: int + tag: str + def __init__(self, lineno: int, tag: str) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ... + +if sys.version_info >= (3, 12): + class type_param(AST): + """ + type_param = TypeVar(identifier name, expr? bound, expr? default_value) + | ParamSpec(identifier name, expr? default_value) + | TypeVarTuple(identifier name, expr? default_value) + """ + lineno: int + col_offset: int + end_lineno: int + end_col_offset: int + def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ... + + class TypeVar(type_param): + """TypeVar(identifier name, expr? bound, expr? default_value)""" + if sys.version_info >= (3, 13): + __match_args__ = ("name", "bound", "default_value") + else: + __match_args__ = ("name", "bound") + name: _Identifier + bound: expr | None + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__( + self, + name: _Identifier, + bound: expr | None = None, + default_value: expr | None = None, + **kwargs: Unpack[_Attributes[int]], + ) -> None: ... 
+ else: + def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, + *, + name: _Identifier = ..., + bound: expr | None = ..., + default_value: expr | None = ..., + **kwargs: Unpack[_Attributes[int]], + ) -> Self: ... + + class ParamSpec(type_param): + """ParamSpec(identifier name, expr? default_value)""" + if sys.version_info >= (3, 13): + __match_args__ = ("name", "default_value") + else: + __match_args__ = ("name",) + name: _Identifier + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__( + self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + else: + def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... + + class TypeVarTuple(type_param): + """TypeVarTuple(identifier name, expr? default_value)""" + if sys.version_info >= (3, 13): + __match_args__ = ("name", "default_value") + else: + __match_args__ = ("name",) + name: _Identifier + if sys.version_info >= (3, 13): + default_value: expr | None + def __init__( + self, name: _Identifier, default_value: expr | None = None, **kwargs: Unpack[_Attributes[int]] + ) -> None: ... + else: + def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ... + + if sys.version_info >= (3, 14): + def __replace__( + self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] + ) -> Self: ... + +class _ABC(type): + if sys.version_info >= (3, 9): + def __init__(cls, *args: Unused) -> None: ... 
+ +if sys.version_info < (3, 14): + @deprecated("Replaced by ast.Constant; removed in Python 3.14") + class Num(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead""" + value: int | float | complex + + @deprecated("Replaced by ast.Constant; removed in Python 3.14") + class Str(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead""" + value: str + # Aliases for value, for backwards compatibility + s: str + + @deprecated("Replaced by ast.Constant; removed in Python 3.14") + class Bytes(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead""" + value: bytes + # Aliases for value, for backwards compatibility + s: bytes + + @deprecated("Replaced by ast.Constant; removed in Python 3.14") + class NameConstant(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead""" + ... + + @deprecated("Replaced by ast.Constant; removed in Python 3.14") + class Ellipsis(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead""" + ... + +# everything below here is defined in ast.py _T = _TypeVar("_T", bound=AST) @@ -295,7 +2010,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Module: ... + ) -> Module: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -305,7 +2026,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Expression: ... + ) -> Expression: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). 
+ Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -315,7 +2042,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> FunctionType: ... + ) -> FunctionType: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -325,7 +2058,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Interactive: ... + ) -> Interactive: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -334,7 +2073,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Expression: ... + ) -> Expression: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -343,7 +2088,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> FunctionType: ... + ) -> FunctionType: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... 
@overload def parse( source: str | ReadableBuffer, @@ -352,7 +2103,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Interactive: ... + ) -> Interactive: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... @overload def parse( source: str | ReadableBuffer, @@ -362,7 +2119,13 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> AST: ... + ) -> AST: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + ... else: @overload @@ -373,13 +2136,7 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Module: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> Module: ... @overload def parse( source: str | ReadableBuffer, @@ -388,13 +2145,7 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Expression: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> Expression: ... @overload def parse( source: str | ReadableBuffer, @@ -403,13 +2154,7 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> FunctionType: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). 
- Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> FunctionType: ... @overload def parse( source: str | ReadableBuffer, @@ -418,13 +2163,7 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Interactive: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> Interactive: ... @overload def parse( source: str | ReadableBuffer, @@ -432,13 +2171,7 @@ else: mode: Literal["eval"], type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Expression: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> Expression: ... @overload def parse( source: str | ReadableBuffer, @@ -446,13 +2179,7 @@ else: mode: Literal["func_type"], type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> FunctionType: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> FunctionType: ... @overload def parse( source: str | ReadableBuffer, @@ -460,13 +2187,7 @@ else: mode: Literal["single"], type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Interactive: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... + ) -> Interactive: ... 
@overload def parse( source: str | ReadableBuffer, @@ -475,21 +2196,16 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> AST: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - ... - -if sys.version_info >= (3, 9): - def unparse(ast_obj: AST) -> str: ... + ) -> AST: ... -def copy_location(new_node: _T, old_node: AST) -> _T: +def literal_eval(node_or_string: str | AST) -> Any: """ - Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` - attributes) from *old_node* to *new_node* if possible, and return *new_node*. + Evaluate an expression node or a string containing only a Python + expression. The string or node provided may only consist of the following + Python literal structures: strings, bytes, numbers, tuples, lists, dicts, + sets, booleans, and None. + + Caution: A complex expression can overflow the C stack and cause a crash. """ ... @@ -501,11 +2217,6 @@ if sys.version_info >= (3, 13): *, indent: int | str | None = None, show_empty: bool = False, - ) -> str: ... - -elif sys.version_info >= (3, 9): - def dump( - node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None ) -> str: """ Return a formatted dump of the tree in node. This is mainly useful for @@ -517,12 +2228,25 @@ elif sys.version_info >= (3, 9): include_attributes can be set to true. If indent is a non-negative integer or string, then the tree will be pretty-printed with that indent level. None (the default) selects the single line representation. + If show_empty is False, then empty lists and fields that are None + will be omitted from the output for better readability. """ ... 
+elif sys.version_info >= (3, 9): + def dump( + node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None + ) -> str: ... + else: def dump(node: AST, annotate_fields: bool = True, include_attributes: bool = False) -> str: ... +def copy_location(new_node: _T, old_node: AST) -> _T: + """ + Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` + attributes) from *old_node* to *new_node* if possible, and return *new_node*. + """ + ... def fix_missing_locations(node: _T) -> _T: """ When you compile a node tree with compile(), the compiler expects lineno and @@ -532,16 +2256,6 @@ def fix_missing_locations(node: _T) -> _T: parent node. It works recursively starting at *node*. """ ... -def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: - """ - Return the docstring for the given node or None if no docstring can - be found. If the node provided does not have docstrings a TypeError - will be raised. - - If *clean* is `True`, all tabs are expanded to spaces and any whitespace - that can be uniformly removed from the second line onwards is removed. - """ - ... def increment_lineno(node: _T, n: int = 1) -> _T: """ Increment the line number and end line number of each node in the tree @@ -549,26 +2263,26 @@ def increment_lineno(node: _T, n: int = 1) -> _T: location in a file. """ ... -def iter_child_nodes(node: AST) -> Iterator[AST]: - """ - Yield all direct child nodes of *node*, that is, all fields that are nodes - and all items of fields that are lists of nodes. - """ - ... def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: """ Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` that is present on *node*. """ ... -def literal_eval(node_or_string: str | AST) -> Any: +def iter_child_nodes(node: AST) -> Iterator[AST]: """ - Evaluate an expression node or a string containing only a Python - expression. 
The string or node provided may only consist of the following - Python literal structures: strings, bytes, numbers, tuples, lists, dicts, - sets, booleans, and None. + Yield all direct child nodes of *node*, that is, all fields that are nodes + and all items of fields that are lists of nodes. + """ + ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: + """ + Return the docstring for the given node or None if no docstring can + be found. If the node provided does not have docstrings a TypeError + will be raised. - Caution: A complex expression can overflow the C stack and cause a crash. + If *clean* is `True`, all tabs are expanded to spaces and any whitespace + that can be uniformly removed from the second line onwards is removed. """ ... def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: @@ -590,8 +2304,205 @@ def walk(node: AST) -> Iterator[AST]: """ ... -if sys.version_info >= (3, 9): - def main() -> None: ... - if sys.version_info >= (3, 14): def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... + +class NodeVisitor: + """ + A node visitor base class that walks the abstract syntax tree and calls a + visitor function for every node found. This function may return a value + which is forwarded by the `visit` method. + + This class is meant to be subclassed, with the subclass adding visitor + methods. + + Per default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `visit` method. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + + Don't use the `NodeVisitor` if you want to apply changes to nodes during + traversing. For this a special visitor exists (`NodeTransformer`) that + allows modifications. 
+ """ + def visit(self, node: AST) -> Any: + """Visit a node.""" + ... + def generic_visit(self, node: AST) -> Any: + """Called if no explicit visitor function exists for a node.""" + ... + def visit_Module(self, node: Module) -> Any: ... + def visit_Interactive(self, node: Interactive) -> Any: ... + def visit_Expression(self, node: Expression) -> Any: ... + def visit_FunctionDef(self, node: FunctionDef) -> Any: ... + def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any: ... + def visit_ClassDef(self, node: ClassDef) -> Any: ... + def visit_Return(self, node: Return) -> Any: ... + def visit_Delete(self, node: Delete) -> Any: ... + def visit_Assign(self, node: Assign) -> Any: ... + def visit_AugAssign(self, node: AugAssign) -> Any: ... + def visit_AnnAssign(self, node: AnnAssign) -> Any: ... + def visit_For(self, node: For) -> Any: ... + def visit_AsyncFor(self, node: AsyncFor) -> Any: ... + def visit_While(self, node: While) -> Any: ... + def visit_If(self, node: If) -> Any: ... + def visit_With(self, node: With) -> Any: ... + def visit_AsyncWith(self, node: AsyncWith) -> Any: ... + def visit_Raise(self, node: Raise) -> Any: ... + def visit_Try(self, node: Try) -> Any: ... + def visit_Assert(self, node: Assert) -> Any: ... + def visit_Import(self, node: Import) -> Any: ... + def visit_ImportFrom(self, node: ImportFrom) -> Any: ... + def visit_Global(self, node: Global) -> Any: ... + def visit_Nonlocal(self, node: Nonlocal) -> Any: ... + def visit_Expr(self, node: Expr) -> Any: ... + def visit_Pass(self, node: Pass) -> Any: ... + def visit_Break(self, node: Break) -> Any: ... + def visit_Continue(self, node: Continue) -> Any: ... + def visit_Slice(self, node: Slice) -> Any: ... + def visit_BoolOp(self, node: BoolOp) -> Any: ... + def visit_BinOp(self, node: BinOp) -> Any: ... + def visit_UnaryOp(self, node: UnaryOp) -> Any: ... + def visit_Lambda(self, node: Lambda) -> Any: ... + def visit_IfExp(self, node: IfExp) -> Any: ... 
+ def visit_Dict(self, node: Dict) -> Any: ... + def visit_Set(self, node: Set) -> Any: ... + def visit_ListComp(self, node: ListComp) -> Any: ... + def visit_SetComp(self, node: SetComp) -> Any: ... + def visit_DictComp(self, node: DictComp) -> Any: ... + def visit_GeneratorExp(self, node: GeneratorExp) -> Any: ... + def visit_Await(self, node: Await) -> Any: ... + def visit_Yield(self, node: Yield) -> Any: ... + def visit_YieldFrom(self, node: YieldFrom) -> Any: ... + def visit_Compare(self, node: Compare) -> Any: ... + def visit_Call(self, node: Call) -> Any: ... + def visit_FormattedValue(self, node: FormattedValue) -> Any: ... + def visit_JoinedStr(self, node: JoinedStr) -> Any: ... + def visit_Constant(self, node: Constant) -> Any: ... + def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... + def visit_Attribute(self, node: Attribute) -> Any: ... + def visit_Subscript(self, node: Subscript) -> Any: ... + def visit_Starred(self, node: Starred) -> Any: ... + def visit_Name(self, node: Name) -> Any: ... + def visit_List(self, node: List) -> Any: ... + def visit_Tuple(self, node: Tuple) -> Any: ... + def visit_Del(self, node: Del) -> Any: ... + def visit_Load(self, node: Load) -> Any: ... + def visit_Store(self, node: Store) -> Any: ... + def visit_And(self, node: And) -> Any: ... + def visit_Or(self, node: Or) -> Any: ... + def visit_Add(self, node: Add) -> Any: ... + def visit_BitAnd(self, node: BitAnd) -> Any: ... + def visit_BitOr(self, node: BitOr) -> Any: ... + def visit_BitXor(self, node: BitXor) -> Any: ... + def visit_Div(self, node: Div) -> Any: ... + def visit_FloorDiv(self, node: FloorDiv) -> Any: ... + def visit_LShift(self, node: LShift) -> Any: ... + def visit_Mod(self, node: Mod) -> Any: ... + def visit_Mult(self, node: Mult) -> Any: ... + def visit_MatMult(self, node: MatMult) -> Any: ... + def visit_Pow(self, node: Pow) -> Any: ... + def visit_RShift(self, node: RShift) -> Any: ... 
+ def visit_Sub(self, node: Sub) -> Any: ... + def visit_Invert(self, node: Invert) -> Any: ... + def visit_Not(self, node: Not) -> Any: ... + def visit_UAdd(self, node: UAdd) -> Any: ... + def visit_USub(self, node: USub) -> Any: ... + def visit_Eq(self, node: Eq) -> Any: ... + def visit_Gt(self, node: Gt) -> Any: ... + def visit_GtE(self, node: GtE) -> Any: ... + def visit_In(self, node: In) -> Any: ... + def visit_Is(self, node: Is) -> Any: ... + def visit_IsNot(self, node: IsNot) -> Any: ... + def visit_Lt(self, node: Lt) -> Any: ... + def visit_LtE(self, node: LtE) -> Any: ... + def visit_NotEq(self, node: NotEq) -> Any: ... + def visit_NotIn(self, node: NotIn) -> Any: ... + def visit_comprehension(self, node: comprehension) -> Any: ... + def visit_ExceptHandler(self, node: ExceptHandler) -> Any: ... + def visit_arguments(self, node: arguments) -> Any: ... + def visit_arg(self, node: arg) -> Any: ... + def visit_keyword(self, node: keyword) -> Any: ... + def visit_alias(self, node: alias) -> Any: ... + def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_match_case(self, node: match_case) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchSingleton(self, node: MatchSingleton) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... + def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + + if sys.version_info >= (3, 12): + def visit_TypeVar(self, node: TypeVar) -> Any: ... + def visit_ParamSpec(self, node: ParamSpec) -> Any: ... + def visit_TypeVarTuple(self, node: TypeVarTuple) -> Any: ... 
+ def visit_TypeAlias(self, node: TypeAlias) -> Any: ... + + # visit methods for deprecated nodes + def visit_ExtSlice(self, node: ExtSlice) -> Any: ... + def visit_Index(self, node: Index) -> Any: ... + def visit_Suite(self, node: Suite) -> Any: ... + def visit_AugLoad(self, node: AugLoad) -> Any: ... + def visit_AugStore(self, node: AugStore) -> Any: ... + def visit_Param(self, node: Param) -> Any: ... + def visit_Num(self, node: Num) -> Any: ... + def visit_Str(self, node: Str) -> Any: ... + def visit_Bytes(self, node: Bytes) -> Any: ... + def visit_NameConstant(self, node: NameConstant) -> Any: ... + def visit_Ellipsis(self, node: Ellipsis) -> Any: ... + +class NodeTransformer(NodeVisitor): + """ + A :class:`NodeVisitor` subclass that walks the abstract syntax tree and + allows modification of nodes. + + The `NodeTransformer` will walk the AST and use the return value of the + visitor methods to replace or remove the old node. If the return value of + the visitor method is ``None``, the node will be removed from its location, + otherwise it is replaced with the return value. The return value may be the + original node in which case no replacement takes place. + + Here is an example transformer that rewrites all occurrences of name lookups + (``foo``) to ``data['foo']``:: + + class RewriteName(NodeTransformer): + + def visit_Name(self, node): + return Subscript( + value=Name(id='data', ctx=Load()), + slice=Constant(value=node.id), + ctx=node.ctx + ) + + Keep in mind that if the node you're operating on has child nodes you must + either transform the child nodes yourself or call the :meth:`generic_visit` + method for the node first. + + For nodes that were part of a collection of statements (that applies to all + statement nodes), the visitor may also return a list of nodes rather than + just a single node. + + Usually you use the transformer like this:: + + node = YourTransformer().visit(node) + """ + def generic_visit(self, node: AST) -> AST: ... 
+ # TODO: Override the visit_* methods with better return types. + # The usual return type is AST | None, but Iterable[AST] + # is also allowed in some cases -- this needs to be mapped. + +if sys.version_info >= (3, 9): + def unparse(ast_obj: AST) -> str: ... + +if sys.version_info >= (3, 9): + def main() -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asynchat.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asynchat.pyi index 96a33da..79a70d1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asynchat.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asynchat.pyi @@ -1,25 +1,3 @@ -r""" -A class supporting chat-style (command/response) protocols. - -This class adds support for 'chat' style protocols - where one side -sends a 'command', and the other sends a response (examples would be -the common internet protocols - smtp, nntp, ftp, etc..). - -The handle_read() method looks at the input stream for the current -'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n' -for multi-line output), calling self.found_terminator() on its -receipt. - -for example: -Say you build an async nntp client using this class. At the start -of the connection, you'll have self.terminator set to '\r\n', in -order to process the single-line greeting. Just before issuing a -'LIST' command you'll set it to '\r\n.\r\n'. The output of the LIST -command will be accumulated (using your own 'collect_incoming_data' -method) up to the terminator, and then control will be returned to -you - by calling your self.found_terminator() method. -""" - import asyncore from abc import abstractmethod @@ -28,28 +6,16 @@ class simple_producer: def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): - """ - This is an abstract class. 
You must derive from this class, and add - the two methods collect_incoming_data() and found_terminator() - """ ac_in_buffer_size: int ac_out_buffer_size: int @abstractmethod def collect_incoming_data(self, data: bytes) -> None: ... @abstractmethod def found_terminator(self) -> None: ... - def set_terminator(self, term: bytes | int | None) -> None: - """ - Set the input delimiter. - - Can be a fixed string of any length, an integer, or None. - """ - ... + def set_terminator(self, term: bytes | int | None) -> None: ... def get_terminator(self) -> bytes | int | None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... - def close_when_done(self) -> None: - """automatically close this channel once the outgoing queue is empty""" - ... + def close_when_done(self) -> None: ... def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_events.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_events.pyi index 4a91aea..387d4ad 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_events.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_events.pyi @@ -65,6 +65,10 @@ class Server(AbstractServer): ssl_handshake_timeout: float | None, ) -> None: ... + if sys.version_info >= (3, 13): + def close_clients(self) -> None: ... + def abort_clients(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... def is_serving(self) -> bool: ... async def start_serving(self) -> None: ... @@ -203,13 +207,7 @@ class BaseEventLoop(AbstractEventLoop): """ ... else: - def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: - """ - Schedule a coroutine object. - - Return a task object. - """ - ... + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ... 
def set_task_factory(self, factory: _TaskFactory | None) -> None: """ @@ -370,20 +368,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: - """ - Connect to a TCP server. - - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. - - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ - ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -401,21 +386,10 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: - """ - Connect to a TCP server. - - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. + ) -> tuple[Transport, _ProtocolT]: ... - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ - ... - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 13): + # 3.13 added `keep_alive`. 
@overload async def create_server( self, @@ -430,6 +404,7 @@ class BaseEventLoop(AbstractEventLoop): ssl: _SSLContext = None, reuse_address: bool | None = None, reuse_port: bool | None = None, + keep_alive: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, @@ -465,6 +440,7 @@ class BaseEventLoop(AbstractEventLoop): ssl: _SSLContext = None, reuse_address: bool | None = None, reuse_port: bool | None = None, + keep_alive: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, @@ -486,33 +462,43 @@ class BaseEventLoop(AbstractEventLoop): This method is a coroutine. """ ... - async def start_tls( + elif sys.version_info >= (3, 11): + @overload + async def create_server( self, - transport: BaseTransport, - protocol: BaseProtocol, - sslcontext: ssl.SSLContext, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., *, - server_side: bool = False, - server_hostname: str | None = None, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: - """ - Upgrade transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - ... - async def connect_accepted_socket( + start_serving: bool = True, + ) -> Server: ... 
+ @overload + async def create_server( self, - protocol_factory: Callable[[], _ProtocolT], - sock: socket, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + start_serving: bool = True, + ) -> Server: ... else: @overload async def create_server( @@ -530,24 +516,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """ - Create a TCP server. - - The host parameter can be a string, in that case the TCP server is - bound to host and port. - - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. - - Return a Server object which can be used to stop the service. - - This method is a coroutine. - """ - ... + ) -> Server: ... @overload async def create_server( self, @@ -564,24 +533,9 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """ - Create a TCP server. - - The host parameter can be a string, in that case the TCP server is - bound to host and port. - - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. + ) -> Server: ... 
- Return a Server object which can be used to stop the service. - - This method is a coroutine. - """ - ... + if sys.version_info >= (3, 11): async def start_tls( self, transport: BaseTransport, @@ -591,6 +545,7 @@ class BaseEventLoop(AbstractEventLoop): server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> Transport | None: """ Upgrade transport to TLS. @@ -599,6 +554,26 @@ class BaseEventLoop(AbstractEventLoop): immediately. """ ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -668,9 +643,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: - """Create datagram connection.""" - ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -803,8 +776,6 @@ class BaseEventLoop(AbstractEventLoop): """ ... elif sys.version_info >= (3, 9): - async def shutdown_default_executor(self) -> None: - """Schedule the shutdown of the default executor.""" - ... + async def shutdown_default_executor(self) -> None: ... def __del__(self) -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_futures.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_futures.pyi index 4c24d45..5da90c4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_futures.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/base_futures.pyi @@ -1,6 +1,6 @@ from collections.abc import Callable, Sequence from contextvars import Context -from typing import Any, Literal +from typing import Any, Final from . import futures @@ -11,9 +11,9 @@ __all__ = () # That's why the import order is reversed. from .futures import isfuture as isfuture -_PENDING: Literal["PENDING"] # undocumented -_CANCELLED: Literal["CANCELLED"] # undocumented -_FINISHED: Literal["FINISHED"] # undocumented +_PENDING: Final = "PENDING" # undocumented +_CANCELLED: Final = "CANCELLED" # undocumented +_FINISHED: Final = "FINISHED" # undocumented def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: """helper function for Future.__repr__""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/constants.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/constants.pyi index 8ab7e18..5c6456b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/constants.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/constants.pyi @@ -1,21 +1,20 @@ import enum import sys -from typing import Literal +from typing import Final -LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5] -ACCEPT_RETRY_DELAY: Literal[1] -DEBUG_STACK_DEPTH: Literal[10] +LOG_THRESHOLD_FOR_CONNLOST_WRITES: Final = 5 +ACCEPT_RETRY_DELAY: Final = 1 +DEBUG_STACK_DEPTH: Final = 10 SSL_HANDSHAKE_TIMEOUT: float -SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144] +SENDFILE_FALLBACK_READBUFFER_SIZE: Final = 262144 if sys.version_info >= (3, 11): 
SSL_SHUTDOWN_TIMEOUT: float - FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256] - FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512] + FLOW_CONTROL_HIGH_WATER_SSL_READ: Final = 256 + FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Final = 512 if sys.version_info >= (3, 12): - THREAD_JOIN_TIMEOUT: Literal[300] + THREAD_JOIN_TIMEOUT: Final = 300 class _SendfileMode(enum.Enum): - """An enumeration.""" UNSUPPORTED = 1 TRY_NATIVE = 2 FALLBACK = 3 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/coroutines.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/coroutines.pyi index 5f4db0e..fa7a9ea 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/coroutines.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/coroutines.pyi @@ -13,14 +13,7 @@ _FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any]) _P = ParamSpec("_P") if sys.version_info < (3, 11): - def coroutine(func: _FunctionT) -> _FunctionT: - """ - Decorator to mark coroutines. - - If the coroutine is not yielded from before it is destroyed, - an error message is logged. - """ - ... + def coroutine(func: _FunctionT) -> _FunctionT: ... 
@overload def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/events.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/events.pyi index 1e61212..8bd1772 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/events.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/events.pyi @@ -18,23 +18,40 @@ from .tasks import Task from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from .unix_events import AbstractChildWatcher -__all__ = ( - "AbstractEventLoopPolicy", - "AbstractEventLoop", - "AbstractServer", - "Handle", - "TimerHandle", - "get_event_loop_policy", - "set_event_loop_policy", - "get_event_loop", - "set_event_loop", - "new_event_loop", - "get_child_watcher", - "set_child_watcher", - "_set_running_loop", - "get_running_loop", - "_get_running_loop", -) +if sys.version_info >= (3, 14): + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) +else: + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) _T = TypeVar("_T") _Ts = TypeVarTuple("_Ts") @@ -91,6 +108,16 @@ class AbstractServer: def close(self) -> None: """Stop serving. This leaves existing connections open.""" ... 
+ if sys.version_info >= (3, 13): + @abstractmethod + def close_clients(self) -> None: + """Close all active connections.""" + ... + @abstractmethod + def abort_clients(self) -> None: + """Close all active connections immediately.""" + ... + async def __aenter__(self) -> Self: ... async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod @@ -320,7 +347,9 @@ class AbstractEventLoop: happy_eyeballs_delay: float | None = None, interleave: int | None = None, ) -> tuple[Transport, _ProtocolT]: ... - if sys.version_info >= (3, 11): + + if sys.version_info >= (3, 13): + # 3.13 added `keep_alive`. @overload @abstractmethod async def create_server( @@ -336,6 +365,7 @@ class AbstractEventLoop: ssl: _SSLContext = None, reuse_address: bool | None = None, reuse_port: bool | None = None, + keep_alive: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, @@ -376,6 +406,9 @@ class AbstractEventLoop: they all set this flag when being created. This option is not supported on Windows. + keep_alive set to True keeps connections active by enabling the + periodic transmission of messages. + ssl_handshake_timeout is the time in seconds that an SSL server will wait for completion of the SSL handshake before aborting the connection. Default is 60s. @@ -405,6 +438,7 @@ class AbstractEventLoop: ssl: _SSLContext = None, reuse_address: bool | None = None, reuse_port: bool | None = None, + keep_alive: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, @@ -445,6 +479,9 @@ class AbstractEventLoop: they all set this flag when being created. This option is not supported on Windows. + keep_alive set to True keeps connections active by enabling the + periodic transmission of messages. + ssl_handshake_timeout is the time in seconds that an SSL server will wait for completion of the SSL handshake before aborting the connection. 
Default is 60s. @@ -459,67 +496,45 @@ class AbstractEventLoop: to make the server to start accepting connections. """ ... + elif sys.version_info >= (3, 11): + @overload @abstractmethod - async def start_tls( + async def create_server( self, - transport: WriteTransport, - protocol: BaseProtocol, - sslcontext: ssl.SSLContext, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., *, - server_side: bool = False, - server_hostname: str | None = None, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: - """ - Upgrade a transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - ... - async def create_unix_server( + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( self, protocol_factory: _ProtocolFactory, - path: StrPath | None = None, + host: None = None, + port: None = None, *, - sock: socket | None = None, + family: int = ..., + flags: int = ..., + sock: socket = ..., backlog: int = 100, ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """ - A coroutine which creates a UNIX Domain Socket server. - - The return value is a Server object, which can be used to stop - the service. - - path is a str, representing a file system path to bind the - server socket to. - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). 
- - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for the SSL handshake to complete (defaults to 60s). - - ssl_shutdown_timeout is the time in seconds that an SSL server - will wait for the SSL shutdown to finish (defaults to 30s). - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - ... + ) -> Server: ... else: @overload @abstractmethod @@ -538,53 +553,7 @@ class AbstractEventLoop: reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """ - A coroutine which creates a TCP server bound to host and port. - - The return value is a Server object which can be used to stop - the service. - - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. - - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). - - flags is a bitmask for getaddrinfo(). - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. 
- - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - ... + ) -> Server: ... @overload @abstractmethod async def create_server( @@ -602,63 +571,20 @@ class AbstractEventLoop: reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """ - A coroutine which creates a TCP server bound to host and port. - - The return value is a Server object which can be used to stop - the service. - - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. - - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). - - flags is a bitmask for getaddrinfo(). - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. + ) -> Server: ... - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. 
- - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - ... + if sys.version_info >= (3, 11): @abstractmethod async def start_tls( self, - transport: BaseTransport, + transport: WriteTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, ) -> Transport | None: """ Upgrade a transport to TLS. @@ -676,6 +602,7 @@ class AbstractEventLoop: backlog: int = 100, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, start_serving: bool = True, ) -> Server: """ @@ -699,12 +626,39 @@ class AbstractEventLoop: ssl_handshake_timeout is the time in seconds that an SSL server will wait for the SSL handshake to complete (defaults to 60s). + ssl_shutdown_timeout is the time in seconds that an SSL server + will wait for the SSL shutdown to finish (defaults to 30s). + start_serving set to True (default) causes the created server to start accepting connections immediately. When set to False, the user should await Server.start_serving() or Server.serve_forever() to make the server to start accepting connections. """ ... 
+ else: + @abstractmethod + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + if sys.version_info >= (3, 11): async def connect_accepted_socket( self, @@ -733,17 +687,7 @@ class AbstractEventLoop: *, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: - """ - Handle an accepted connection. - - This is used by servers that accept connections outside of - asyncio, but use asyncio to handle connections. - - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ - ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -964,26 +908,23 @@ class AbstractEventLoopPolicy: """ ... # Child processes handling (Unix only). - if sys.version_info >= (3, 12): - @abstractmethod - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def get_child_watcher(self) -> AbstractChildWatcher: - """Get the watcher for child processes.""" - ... - @abstractmethod - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: - """Set the watcher for child processes.""" - ... - else: - @abstractmethod - def get_child_watcher(self) -> AbstractChildWatcher: - """Get the watcher for child processes.""" - ... 
- @abstractmethod - def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: - """Set the watcher for child processes.""" - ... + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @abstractmethod + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def get_child_watcher(self) -> AbstractChildWatcher: + """Get the watcher for child processes.""" + ... + @abstractmethod + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: + """Set the watcher for child processes.""" + ... + else: + @abstractmethod + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): """ @@ -1046,29 +987,23 @@ def new_event_loop() -> AbstractEventLoop: """Equivalent to calling get_event_loop_policy().new_event_loop().""" ... -if sys.version_info >= (3, 12): - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher().""" - ... - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def set_child_watcher(watcher: AbstractChildWatcher) -> None: - """ - Equivalent to calling - get_event_loop_policy().set_child_watcher(watcher). - """ - ... +if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def get_child_watcher() -> AbstractChildWatcher: + """Equivalent to calling get_event_loop_policy().get_child_watcher().""" + ... 
+ @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def set_child_watcher(watcher: AbstractChildWatcher) -> None: + """ + Equivalent to calling + get_event_loop_policy().set_child_watcher(watcher). + """ + ... -else: - def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher().""" - ... - def set_child_watcher(watcher: AbstractChildWatcher) -> None: - """ - Equivalent to calling - get_event_loop_policy().set_child_watcher(watcher). - """ - ... + else: + def get_child_watcher() -> AbstractChildWatcher: ... + def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/exceptions.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/exceptions.pyi index 04e5e53..49c2200 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/exceptions.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/exceptions.pyi @@ -29,9 +29,7 @@ class CancelledError(BaseException): if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Exception): - """The operation exceeded the given deadline.""" - ... + class TimeoutError(Exception): ... 
class InvalidStateError(Exception): """The operation is not allowed in this state.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/format_helpers.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/format_helpers.pyi index b80dde8..e492469 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/format_helpers.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/format_helpers.pyi @@ -1,4 +1,5 @@ import functools +import sys import traceback from collections.abc import Iterable from types import FrameType, FunctionType @@ -14,15 +15,29 @@ _FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | fun def _get_function_source(func: _FuncType) -> tuple[str, int]: ... @overload def _get_function_source(func: object) -> tuple[str, int] | None: ... -def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... -def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: - """ - Format function arguments and keyword arguments. - Special case for a single parameter: ('hello',) is formatted as ('hello'). - """ - ... -def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... +if sys.version_info >= (3, 13): + def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: + """ + Format function arguments and keyword arguments. + + Special case for a single parameter: ('hello',) is formatted as ('hello'). + + Note that this function only returns argument details when + debug=True is specified, as arguments may contain sensitive + information. + """ + ... + def _format_callback( + func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "" + ) -> str: ... 
+ +else: + def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... + def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... + def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: """ Replacement for traceback.extract_stack() that only does the diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/futures.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/futures.pyi index 8ea9971..6ce6c29 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/futures.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/futures.pyi @@ -54,7 +54,9 @@ class Future(Awaitable[_T], Iterable[_T]): def _log_traceback(self, val: Literal[False]) -> None: ... _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... - def __del__(self) -> None: ... + def __del__(self) -> None: + """Called when the instance is about to be destroyed.""" + ... def get_loop(self) -> AbstractEventLoop: """Return the event loop the Future is bound to.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/locks.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/locks.pyi index 8454c70..930f8bf 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/locks.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/locks.pyi @@ -53,25 +53,23 @@ class Lock(_ContextManagerMixin, _LoopBoundMixin): Primitive lock objects. A primitive lock is a synchronization primitive that is not owned - by a particular coroutine when locked. A primitive lock is in one + by a particular task when locked. 
A primitive lock is in one of two states, 'locked' or 'unlocked'. It is created in the unlocked state. It has two basic methods, acquire() and release(). When the state is unlocked, acquire() changes the state to locked and returns immediately. When the state is locked, acquire() blocks until a call to release() in - another coroutine changes it to unlocked, then the acquire() call + another task changes it to unlocked, then the acquire() call resets it to locked and returns. The release() method should only be called in the locked state; it changes the state to unlocked and returns immediately. If an attempt is made to release an unlocked lock, a RuntimeError will be raised. - When more than one coroutine is blocked in acquire() waiting for - the state to turn to unlocked, only one coroutine proceeds when a - release() call resets the state to unlocked; first coroutine which - is blocked in acquire() is being processed. - - acquire() is a coroutine and should be called with 'await'. + When more than one task is blocked in acquire() waiting for + the state to turn to unlocked, only one task proceeds when a + release() call resets the state to unlocked; successive release() + calls will unblock tasks in FIFO order. Locks also support the asynchronous context management protocol. 'async with lock' statement should be used. @@ -123,7 +121,7 @@ class Lock(_ContextManagerMixin, _LoopBoundMixin): Release a lock. When the lock is locked, reset it to unlocked, and return. - If any other coroutines are blocked waiting for the lock to become + If any other tasks are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. When invoked on an unlocked lock, a RuntimeError is raised. @@ -152,14 +150,14 @@ class Event(_LoopBoundMixin): ... def set(self) -> None: """ - Set the internal flag to true. All coroutines waiting for it to - become true are awakened. Coroutine that call wait() once the flag is + Set the internal flag to true. 
All tasks waiting for it to + become true are awakened. Tasks that call wait() once the flag is true will not block at all. """ ... def clear(self) -> None: """ - Reset the internal flag to false. Subsequently, coroutines calling + Reset the internal flag to false. Subsequently, tasks calling wait() will block until set() is called to set the internal flag to true again. """ @@ -169,7 +167,7 @@ class Event(_LoopBoundMixin): Block until the internal flag is true. If the internal flag is true on entry, return True - immediately. Otherwise, block until another coroutine calls + immediately. Otherwise, block until another task calls set() to set the flag to true, then return True. """ ... @@ -179,8 +177,8 @@ class Condition(_ContextManagerMixin, _LoopBoundMixin): Asynchronous equivalent to threading.Condition. This class implements condition variable objects. A condition variable - allows one or more coroutines to wait until they are notified by another - coroutine. + allows one or more tasks to wait until they are notified by another + task. A new Lock object is created and used as the underlying lock. """ @@ -197,34 +195,39 @@ class Condition(_ContextManagerMixin, _LoopBoundMixin): """ Wait until notified. - If the calling coroutine has not acquired the lock when this + If the calling task has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notify_all() call for - the same condition variable in another coroutine. Once + the same condition variable in another task. Once awakened, it re-acquires the lock and returns True. + + This method may return spuriously, + which is why the caller should always + re-check the state and be prepared to wait() again. """ ... async def wait_for(self, predicate: Callable[[], _T]) -> _T: """ Wait until a predicate becomes true. 
- The predicate should be a callable which result will be - interpreted as a boolean value. The final predicate value is + The predicate should be a callable whose result will be + interpreted as a boolean value. The method will repeatedly + wait() until it evaluates to true. The final predicate value is the return value. """ ... def notify(self, n: int = 1) -> None: """ - By default, wake up one coroutine waiting on this condition, if any. - If the calling coroutine has not acquired the lock when this method + By default, wake up one task waiting on this condition, if any. + If the calling task has not acquired the lock when this method is called, a RuntimeError is raised. - This method wakes up at most n of the coroutines waiting for the - condition variable; it is a no-op if no coroutines are waiting. + This method wakes up n of the tasks waiting for the condition + variable; if fewer than n are waiting, they are all awoken. - Note: an awakened coroutine does not actually return from its + Note: an awakened task does not actually return from its wait() call until it can reacquire the lock. Since notify() does not release the lock, its caller should. """ @@ -269,7 +272,7 @@ class Semaphore(_ContextManagerMixin, _LoopBoundMixin): If the internal counter is larger than zero on entry, decrement it by one and return True immediately. If it is - zero on entry, block, waiting until some other coroutine has + zero on entry, block, waiting until some other task has called release() to make it larger than 0, and then return True. """ @@ -278,8 +281,8 @@ class Semaphore(_ContextManagerMixin, _LoopBoundMixin): """ Release a semaphore, incrementing the internal counter by one. - When it was zero on entry and another coroutine is waiting for it to - become larger than zero again, wake up that coroutine. + When it was zero on entry and another task is waiting for it to + become larger than zero again, wake up that task. """ ... 
def _wake_up_next(self) -> None: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/queues.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/queues.pyi index f11d4c3..0cdbc19 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/queues.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/queues.pyi @@ -10,8 +10,6 @@ if sys.version_info >= (3, 10): else: _LoopBoundMixin = object -__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") - class QueueEmpty(Exception): """Raised when Queue.get_nowait() is called on an empty Queue.""" ... @@ -19,8 +17,19 @@ class QueueFull(Exception): """Raised when the Queue.put_nowait() method is called on a full Queue.""" ... +if sys.version_info >= (3, 13): + __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty", "QueueShutDown") + +else: + __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") + _T = TypeVar("_T") +if sys.version_info >= (3, 13): + class QueueShutDown(Exception): + """Raised when putting on to or getting from a shut-down Queue.""" + ... + # If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy. # We can remove the noqa pragma when dropping 3.9 support. class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 @@ -68,6 +77,8 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 Put an item into the queue. If the queue is full, wait until a free slot is available before adding item. + + Raises QueueShutDown if the queue has been shut down. """ ... def put_nowait(self, item: _T) -> None: @@ -75,6 +86,8 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 Put an item into the queue without blocking. If no free slot is immediately available, raise QueueFull. + + Raises QueueShutDown if the queue has been shut down. """ ... 
async def get(self) -> _T: @@ -82,6 +95,9 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 Remove and return an item from the queue. If queue is empty, wait until an item is available. + + Raises QueueShutDown if the queue has been shut down and is empty, or + if the queue has been shut down immediately. """ ... def get_nowait(self) -> _T: @@ -89,6 +105,9 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 Remove and return an item from the queue. Return an item if one is immediately available, else raise QueueEmpty. + + Raises QueueShutDown if the queue has been shut down and is empty, or + if the queue has been shut down immediately. """ ... async def join(self) -> None: @@ -113,6 +132,9 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 been processed (meaning that a task_done() call was received for every item that had been put() into the queue). + shutdown(immediate=True) calls task_done() for each remaining item in + the queue. + Raises ValueError if called more times than there were items placed in the queue. """ @@ -125,6 +147,19 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). """ ... + if sys.version_info >= (3, 13): + def shutdown(self, immediate: bool = False) -> None: + """ + Shut-down the queue, making queue gets and puts raise QueueShutDown. + + By default, gets will only raise once the queue is empty. Set + 'immediate' to True to make gets raise immediately instead. + + All blocked callers of put() and get() will be unblocked. If + 'immediate', a task is marked as done for each item remaining in + the queue, which may unblock callers of join(). + """ + ... 
class PriorityQueue(Queue[_T]): """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/runners.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/runners.pyi index e24f4fb..a879f27 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/runners.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/runners.pyi @@ -88,29 +88,4 @@ if sys.version_info >= (3, 12): ... else: - def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: - """ - Execute the coroutine and return the result. - - This function runs the passed coroutine, taking care of - managing the asyncio event loop and finalizing asynchronous - generators. - - This function cannot be called when another asyncio event loop is - running in the same thread. - - If debug is True, the event loop will be run in debug mode. - - This function always creates a new event loop and closes it at the end. - It should be used as a main entry point for asyncio programs, and should - ideally only be called once. - - Example: - - async def main(): - await asyncio.sleep(1) - print('hello') - - asyncio.run(main()) - """ - ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/sslproto.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/sslproto.pyi index b8feeb9..6a46ef0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/sslproto.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/sslproto.pyi @@ -3,7 +3,7 @@ import sys from collections import deque from collections.abc import Callable from enum import Enum -from typing import Any, ClassVar, Literal +from typing import Any, ClassVar, Final, Literal from typing_extensions import TypeAlias from . 
import constants, events, futures, protocols, transports @@ -29,30 +29,13 @@ if sys.version_info >= (3, 11): def add_flowcontrol_defaults(high: int | None, low: int | None, kb: int) -> tuple[int, int]: ... else: - _UNWRAPPED: Literal["UNWRAPPED"] - _DO_HANDSHAKE: Literal["DO_HANDSHAKE"] - _WRAPPED: Literal["WRAPPED"] - _SHUTDOWN: Literal["SHUTDOWN"] + _UNWRAPPED: Final = "UNWRAPPED" + _DO_HANDSHAKE: Final = "DO_HANDSHAKE" + _WRAPPED: Final = "WRAPPED" + _SHUTDOWN: Final = "SHUTDOWN" if sys.version_info < (3, 11): class _SSLPipe: - """ - An SSL "Pipe". - - An SSL pipe allows you to communicate with an SSL/TLS protocol instance - through memory buffers. It can be used to implement a security layer for an - existing connection where you don't have access to the connection's file - descriptor, or for some reason you don't want to use it. - - An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, - data is passed through untransformed. In wrapped mode, application level - data is encrypted to SSL record level data and vice versa. The SSL record - level is the lowest level in the SSL protocol suite and is what travels - as-is over the wire. - - An SslPipe initially is in "unwrapped" mode. To start SSL, call - do_handshake(). To shutdown SSL again, call unwrap(). - """ max_size: ClassVar[int] _context: ssl.SSLContext @@ -65,109 +48,20 @@ if sys.version_info < (3, 11): _need_ssldata: bool _handshake_cb: Callable[[BaseException | None], None] | None _shutdown_cb: Callable[[], None] | None - def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: - """ - The *context* argument specifies the ssl.SSLContext to use. - - The *server_side* argument indicates whether this is a server side or - client side transport. - - The optional *server_hostname* argument can be used to specify the - hostname you are connecting to. 
You may only specify this parameter if - the _ssl module supports Server Name Indication (SNI). - """ - ... + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... @property - def context(self) -> ssl.SSLContext: - """The SSL context passed to the constructor.""" - ... + def context(self) -> ssl.SSLContext: ... @property - def ssl_object(self) -> ssl.SSLObject | None: - """ - The internal ssl.SSLObject instance. - - Return None if the pipe is not wrapped. - """ - ... + def ssl_object(self) -> ssl.SSLObject | None: ... @property - def need_ssldata(self) -> bool: - """ - Whether more record level data is needed to complete a handshake - that is currently in progress. - """ - ... + def need_ssldata(self) -> bool: ... @property - def wrapped(self) -> bool: - """ - Whether a security layer is currently in effect. - - Return False during handshake. - """ - ... - def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: - """ - Start the SSL handshake. - - Return a list of ssldata. A ssldata element is a list of buffers - - The optional *callback* argument can be used to install a callback that - will be called when the handshake is complete. The callback will be - called with None if successful, else an exception instance. - """ - ... - def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: - """ - Start the SSL shutdown sequence. - - Return a list of ssldata. A ssldata element is a list of buffers - - The optional *callback* argument can be used to install a callback that - will be called when the shutdown is complete. The callback will be - called without arguments. - """ - ... - def feed_eof(self) -> None: - """ - Send a potentially "ragged" EOF. - - This method will raise an SSL_ERROR_EOF exception if the EOF is - unexpected. - """ - ... 
- def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: - """ - Feed SSL record level data into the pipe. - - The data must be a bytes instance. It is OK to send an empty bytes - instance. This can be used to get ssldata for a handshake initiated by - this endpoint. - - Return a (ssldata, appdata) tuple. The ssldata element is a list of - buffers containing SSL data that needs to be sent to the remote SSL. - - The appdata element is a list of buffers containing plaintext data that - needs to be forwarded to the application. The appdata list may contain - an empty buffer indicating an SSL "close_notify" alert. This alert must - be acknowledged by calling shutdown(). - """ - ... - def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: - """ - Feed plaintext data into the pipe. - - Return an (ssldata, offset) tuple. The ssldata element is a list of - buffers containing record level data that needs to be sent to the - remote SSL instance. The offset is the number of plaintext bytes that - were processed, which may be less than the length of data. - - NOTE: In case of short writes, this call MUST be retried with the SAME - buffer passed into the *data* argument (i.e. the id() must be the - same). This is an OpenSSL requirement. A further particularity is that - a short write will always have offset == 0, because the _ssl module - does not enable partial writes. And even though the offset is zero, - there will still be encrypted data in ssldata. - """ - ... + def wrapped(self) -> bool: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... + def feed_eof(self) -> None: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... 
+ def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _sendfile_compatible: ClassVar[constants._SendfileMode] @@ -231,12 +125,6 @@ else: _SSLProtocolBase: TypeAlias = protocols.Protocol class SSLProtocol(_SSLProtocolBase): - """ - SSL protocol. - - Implementation of SSL on top of a socket using incoming and outgoing - buffers which are ssl.MemoryBIO objects. - """ _server_side: bool _server_hostname: str | None _sslcontext: ssl.SSLContext diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/streams.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/streams.pyi index 3cf6b8d..f3dcc39 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/streams.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/streams.pyi @@ -2,6 +2,7 @@ import ssl import sys from _typeshed import ReadableBuffer, StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence, Sized +from types import ModuleType from typing import Any, Protocol, SupportsIndex from typing_extensions import Self, TypeAlias @@ -212,7 +213,10 @@ class StreamWriter: async def start_tls( self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None ) -> None: ... - if sys.version_info >= (3, 11): + + if sys.version_info >= (3, 13): + def __del__(self, warnings: ModuleType = ...) -> None: ... + elif sys.version_info >= (3, 11): def __del__(self) -> None: ... class StreamReader(AsyncIterator[bytes]): @@ -246,9 +250,7 @@ class StreamReader(AsyncIterator[bytes]): """ ... if sys.version_info >= (3, 13): - async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: ... 
- else: - async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: + async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: """ Read data from the stream until ``separator`` is found. @@ -268,8 +270,16 @@ class StreamReader(AsyncIterator[bytes]): If the data cannot be read because of over limit, a LimitOverrunError exception will be raised, and the data will be left in the internal buffer, so it can be read again. + + The ``separator`` may also be a tuple of separators. In this + case the return value will be the shortest possible that has any + separator as the suffix. For the purposes of LimitOverrunError, + the shortest possible separator is considered to be the one that + matched. """ ... + else: + async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: ... async def read(self, n: int = -1) -> bytes: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/tasks.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/tasks.pyi index 11d9525..1dffb98 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/tasks.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/tasks.pyi @@ -72,7 +72,10 @@ _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") _T6 = TypeVar("_T6") _FT = TypeVar("_FT", bound=Future[Any]) -_FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] +if sys.version_info >= (3, 12): + _FutureLike: TypeAlias = Future[_T] | Awaitable[_T] +else: + _FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] _TaskYieldType: TypeAlias = Future[object] | None FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED @@ -82,22 +85,50 @@ ALL_COMPLETED = concurrent.futures.ALL_COMPLETED if sys.version_info >= (3, 10): def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: """ - Return an iterator whose 
values are coroutines. - - When waiting for the yielded coroutines you'll get the results (or - exceptions!) of the original Futures (or coroutines), in the order - in which and as soon as they complete. - - This differs from PEP 3148; the proper way to use this is: - - for f in as_completed(fs): - result = await f # The 'await' may raise. - # Use result. - - If a timeout is specified, the 'await' will raise - TimeoutError when the timeout occurs before all Futures are done. - - Note: The futures 'f' are not necessarily members of fs. + Create an iterator of awaitables or their results in completion order. + + Run the supplied awaitables concurrently. The returned object can be + iterated to obtain the results of the awaitables as they finish. + + The object returned can be iterated as an asynchronous iterator or a plain + iterator. When asynchronous iteration is used, the originally-supplied + awaitables are yielded if they are tasks or futures. This makes it easy to + correlate previously-scheduled tasks with their results: + + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] + + async for earliest_connect in as_completed(tasks): + # earliest_connect is done. The result can be obtained by + # awaiting it or calling earliest_connect.result() + reader, writer = await earliest_connect + + if earliest_connect is ipv6_connect: + print("IPv6 connection established.") + else: + print("IPv4 connection established.") + + During asynchronous iteration, implicitly-created tasks will be yielded for + supplied awaitables that aren't tasks or futures. + + When used as a plain iterator, each iteration yields a new coroutine that + returns the result or raises the exception of the next completed awaitable. 
+ This pattern is compatible with Python versions older than 3.13: + + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] + + for next_connect in as_completed(tasks): + # next_connect is not one of the original task objects. It must be + # awaited to obtain the result value or raise the exception of the + # awaitable that finishes next. + reader, writer = await next_connect + + A TimeoutError is raised if the timeout occurs before all awaitables are + done. This is raised by the async for loop during asynchronous iteration or + by the coroutines yielded during plain iteration. """ ... @@ -431,7 +462,7 @@ if sys.version_info >= (3, 10): """ ... @overload - def gather( # type: ignore[overload-overlap] + def gather( coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: bool ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: """ @@ -466,7 +497,7 @@ if sys.version_info >= (3, 10): """ ... @overload - def gather( # type: ignore[overload-overlap] + def gather( coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], coro_or_future3: _FutureLike[_T3], @@ -506,7 +537,7 @@ if sys.version_info >= (3, 10): """ ... @overload - def gather( # type: ignore[overload-overlap] + def gather( coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], coro_or_future3: _FutureLike[_T3], @@ -547,7 +578,7 @@ if sys.version_info >= (3, 10): """ ... @overload - def gather( # type: ignore[overload-overlap] + def gather( coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], coro_or_future3: _FutureLike[_T3], @@ -591,7 +622,7 @@ if sys.version_info >= (3, 10): """ ... 
@overload - def gather( # type: ignore[overload-overlap] + def gather( coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], coro_or_future3: _FutureLike[_T3], @@ -890,8 +921,6 @@ if sys.version_info >= (3, 11): The fs iterable must not be empty. - Coroutines will be wrapped in Tasks. - Returns two sets of Future: (done, pending). Usage: @@ -911,8 +940,6 @@ if sys.version_info >= (3, 11): The fs iterable must not be empty. - Coroutines will be wrapped in Tasks. - Returns two sets of Future: (done, pending). Usage: @@ -928,45 +955,11 @@ elif sys.version_info >= (3, 10): @overload async def wait( # type: ignore[overload-overlap] fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[_FT], set[_FT]]: - """ - Wait for the Futures and coroutines given by fs to complete. - - The fs iterable must not be empty. - - Coroutines will be wrapped in Tasks. - - Returns two sets of Future: (done, pending). - - Usage: - - done, pending = await asyncio.wait(fs) - - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ - ... + ) -> tuple[set[_FT], set[_FT]]: ... @overload async def wait( fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[Task[_T]], set[Task[_T]]]: - """ - Wait for the Futures and coroutines given by fs to complete. - - The fs iterable must not be empty. - - Coroutines will be wrapped in Tasks. - - Returns two sets of Future: (done, pending). - - Usage: - - done, pending = await asyncio.wait(fs) - - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ - ... + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... 
else: @overload @@ -1023,7 +1016,11 @@ class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportIn self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., name: str | None = ... ) -> None: ... - def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ... + if sys.version_info >= (3, 12): + def get_coro(self) -> _TaskCompatibleCoro[_T_co] | None: ... + else: + def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ... + def get_name(self) -> str: ... def set_name(self, value: object, /) -> None: ... if sys.version_info >= (3, 12): @@ -1106,13 +1103,7 @@ if sys.version_info >= (3, 11): ... else: - def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: - """ - Schedule the execution of a coroutine object in a spawn task. - - Return a Task object. - """ - ... + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: """Return a currently executed task.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/transports.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/transports.pyi index 3a34c2e..90664af 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/transports.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/transports.pyi @@ -160,6 +160,8 @@ class DatagramTransport(BaseTransport): to be sent out asynchronously. addr is target socket address. If addr is None use target address pointed on transport creation. + If data is an empty bytes object a zero-length datagram will be + sent. """ ... 
def abort(self) -> None: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/unix_events.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/unix_events.pyi index 96f6978..b4b7d66 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/unix_events.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/unix_events.pyi @@ -2,133 +2,35 @@ import sys import types +from _typeshed import StrPath from abc import ABCMeta, abstractmethod from collections.abc import Callable +from socket import socket from typing import Literal from typing_extensions import Self, TypeVarTuple, Unpack, deprecated +from .base_events import Server, _ProtocolFactory, _SSLContext from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy from .selector_events import BaseSelectorEventLoop _Ts = TypeVarTuple("_Ts") -# This is also technically not available on Win, -# but other parts of typeshed need this definition. -# So, it is special cased. -if sys.version_info >= (3, 12): - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - class AbstractChildWatcher: - @abstractmethod - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - @abstractmethod - def remove_child_handler(self, pid: int) -> bool: ... - @abstractmethod - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - @abstractmethod - def close(self) -> None: ... - @abstractmethod - def __enter__(self) -> Self: ... - @abstractmethod - def __exit__( - self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: ... - @abstractmethod - def is_active(self) -> bool: ... - -else: - class AbstractChildWatcher: - """ - Abstract base class for monitoring child processes. 
- - Objects derived from this class monitor a collection of subprocesses and - report their termination or interruption by a signal. - - New callbacks are registered with .add_child_handler(). Starting a new - process must be done within a 'with' block to allow the watcher to suspend - its activity until the new process if fully registered (this is needed to - prevent a race condition in some implementations). - - Example: - with watcher: - proc = subprocess.Popen("sleep 1") - watcher.add_child_handler(proc.pid, callback) - - Notes: - Implementations of this class must be thread-safe. - - Since child watcher objects may catch the SIGCHLD signal and call - waitpid(-1), there should be only one active object per process. - """ - @abstractmethod - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: - """ - Register a new child handler. - - Arrange for callback(pid, returncode, *args) to be called when - process 'pid' terminates. Specifying another callback for the same - process replaces the previous handler. - - Note: callback() must be thread-safe. - """ - ... - @abstractmethod - def remove_child_handler(self, pid: int) -> bool: - """ - Removes the handler for process 'pid'. - - The function returns True if the handler was successfully removed, - False if there was nothing to remove. - """ - ... - @abstractmethod - def attach_loop(self, loop: AbstractEventLoop | None) -> None: - """ - Attach the watcher to an event loop. - - If the watcher was previously attached to an event loop, then it is - first detached before attaching to the new loop. - - Note: loop may be None. - """ - ... - @abstractmethod - def close(self) -> None: - """ - Close the watcher. - - This must be called to make sure that any underlying resource is freed. - """ - ... 
- @abstractmethod - def __enter__(self) -> Self: - """ - Enter the watcher's context and allow starting new processes - - This function must return self - """ - ... - @abstractmethod - def __exit__( - self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: - """Exit the watcher's context""" - ... - @abstractmethod - def is_active(self) -> bool: - """ - Return ``True`` if the watcher is active and is used by the event loop. - - Return True if the watcher is installed and ready to handle process exit - notifications. - """ - ... - if sys.platform != "win32": - if sys.version_info >= (3, 9): + if sys.version_info >= (3, 14): + __all__ = ("SelectorEventLoop", "DefaultEventLoopPolicy", "EventLoop") + elif sys.version_info >= (3, 13): + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "PidfdChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + "EventLoop", + ) + elif sys.version_info >= (3, 9): __all__ = ( "SelectorEventLoop", "AbstractChildWatcher", @@ -150,68 +52,201 @@ if sys.platform != "win32": "DefaultEventLoopPolicy", ) - # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. - # See discussion in #7412 - class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): - def close(self) -> None: ... - def is_active(self) -> bool: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - +# This is also technically not available on Win, +# but other parts of typeshed need this definition. +# So, it is special cased. +if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - class SafeChildWatcher(BaseChildWatcher): - def __enter__(self) -> Self: ... 
- def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... + class AbstractChildWatcher: + """ + Abstract base class for monitoring child processes. - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - class FastChildWatcher(BaseChildWatcher): - def __enter__(self) -> Self: ... - def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... + Objects derived from this class monitor a collection of subprocesses and + report their termination or interruption by a signal. - else: - class SafeChildWatcher(BaseChildWatcher): - """ - 'Safe' child watcher implementation. + New callbacks are registered with .add_child_handler(). Starting a new + process must be done within a 'with' block to allow the watcher to suspend + its activity until the new process if fully registered (this is needed to + prevent a race condition in some implementations). - This implementation avoids disrupting other code spawning processes by - polling explicitly each process in the SIGCHLD handler instead of calling - os.waitpid(-1). + Example: + with watcher: + proc = subprocess.Popen("sleep 1") + watcher.add_child_handler(proc.pid, callback) - This is a safe solution but it has a significant overhead when handling a - big number of children (O(n) each time SIGCHLD is raised) + Notes: + Implementations of this class must be thread-safe. + + Since child watcher objects may catch the SIGCHLD signal and call + waitpid(-1), there should be only one active object per process. 
""" - def __enter__(self) -> Self: ... - def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... + ) -> None: + """ + Register a new child handler. - class FastChildWatcher(BaseChildWatcher): - """ - 'Fast' child watcher implementation. + Arrange for callback(pid, returncode, *args) to be called when + process 'pid' terminates. Specifying another callback for the same + process replaces the previous handler. - This implementation reaps every terminated processes by calling - os.waitpid(-1) directly, possibly breaking other code spawning processes - and waiting for their termination. + Note: callback() must be thread-safe. + """ + ... + @abstractmethod + def remove_child_handler(self, pid: int) -> bool: + """ + Removes the handler for process 'pid'. - There is no noticeable overhead when handling a big number of children - (O(1) each time a child terminates). - """ - def __enter__(self) -> Self: ... - def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + The function returns True if the handler was successfully removed, + False if there was nothing to remove. + """ + ... + @abstractmethod + def attach_loop(self, loop: AbstractEventLoop | None) -> None: + """ + Attach the watcher to an event loop. + + If the watcher was previously attached to an event loop, then it is + first detached before attaching to the new loop. + + Note: loop may be None. + """ + ... + @abstractmethod + def close(self) -> None: + """ + Close the watcher. + + This must be called to make sure that any underlying resource is freed. + """ + ... 
+ @abstractmethod + def __enter__(self) -> Self: + """ + Enter the watcher's context and allow starting new processes + + This function must return self + """ + ... + @abstractmethod + def __exit__( + self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None + ) -> None: + """Exit the watcher's context""" + ... + @abstractmethod + def is_active(self) -> bool: + """ + Return ``True`` if the watcher is active and is used by the event loop. + + Return True if the watcher is installed and ready to handle process exit + notifications. + """ + ... + + else: + class AbstractChildWatcher: + @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: ... + @abstractmethod def remove_child_handler(self, pid: int) -> bool: ... + @abstractmethod + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + def __enter__(self) -> Self: ... + @abstractmethod + def __exit__( + self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None + ) -> None: ... + @abstractmethod + def is_active(self) -> bool: ... + +if sys.platform != "win32": + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + def is_active(self) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + class SafeChildWatcher(BaseChildWatcher): + """ + 'Safe' child watcher implementation. 
+ + This implementation avoids disrupting other code spawning processes by + polling explicitly each process in the SIGCHLD handler instead of calling + os.waitpid(-1). + + This is a safe solution but it has a significant overhead when handling a + big number of children (O(n) each time SIGCHLD is raised) + """ + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + class FastChildWatcher(BaseChildWatcher): + """ + 'Fast' child watcher implementation. + + This implementation reaps every terminated processes by calling + os.waitpid(-1) directly, possibly breaking other code spawning processes + and waiting for their termination. + + There is no noticeable overhead when handling a big number of children + (O(1) each time a child terminates). + """ + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + else: + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + def is_active(self) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + class SafeChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... 
+ def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class FastChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... class _UnixSelectorEventLoop(BaseSelectorEventLoop): """ @@ -219,118 +254,137 @@ if sys.platform != "win32": Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. """ - ... + if sys.version_info >= (3, 13): + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + cleanup_socket: bool = True, + ) -> Server: ... class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): """UNIX event loop policy with a watcher for child processes.""" - if sys.version_info >= (3, 12): - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def get_child_watcher(self) -> AbstractChildWatcher: ... - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... - else: - def get_child_watcher(self) -> AbstractChildWatcher: - """ - Get the watcher for child processes. - - If not yet set, a ThreadedChildWatcher object is automatically created. - """ - ... 
- def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: - """Set the watcher for child processes.""" - ... + if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def get_child_watcher(self) -> AbstractChildWatcher: + """ + Get the watcher for child processes. + + If not yet set, a ThreadedChildWatcher object is automatically created. + """ + ... + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: + """Set the watcher for child processes.""" + ... + else: + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... SelectorEventLoop = _UnixSelectorEventLoop DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy - if sys.version_info >= (3, 12): - @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") - class MultiLoopChildWatcher(AbstractChildWatcher): - def is_active(self) -> bool: ... - def close(self) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None - ) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + if sys.version_info >= (3, 13): + EventLoop = SelectorEventLoop - else: - class MultiLoopChildWatcher(AbstractChildWatcher): - """ - A watcher that doesn't require running loop in the main thread. 
+ if sys.version_info < (3, 14): + if sys.version_info >= (3, 12): + @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") + class MultiLoopChildWatcher(AbstractChildWatcher): + """ + A watcher that doesn't require running loop in the main thread. - This implementation registers a SIGCHLD signal handler on - instantiation (which may conflict with other code that - install own handler for this signal). + This implementation registers a SIGCHLD signal handler on + instantiation (which may conflict with other code that + install own handler for this signal). - The solution is safe but it has a significant overhead when - handling a big number of processes (*O(n)* each time a - SIGCHLD is received). - """ - def is_active(self) -> bool: ... - def close(self) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None - ) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + The solution is safe but it has a significant overhead when + handling a big number of processes (*O(n)* each time a + SIGCHLD is received). + """ + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - class ThreadedChildWatcher(AbstractChildWatcher): - """ - Threaded child watcher implementation. 
+ else: + class MultiLoopChildWatcher(AbstractChildWatcher): + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + if sys.version_info < (3, 14): + class ThreadedChildWatcher(AbstractChildWatcher): + """ + Threaded child watcher implementation. - The watcher uses a thread per process - for waiting for the process finish. + The watcher uses a thread per process + for waiting for the process finish. - It doesn't require subscription on POSIX signal - but a thread creation is not free. + It doesn't require subscription on POSIX signal + but a thread creation is not free. - The watcher has O(1) complexity, its performance doesn't depend - on amount of spawn processes. - """ - def is_active(self) -> Literal[True]: ... - def close(self) -> None: ... - def __enter__(self) -> Self: ... - def __exit__( - self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None - ) -> None: ... - def __del__(self) -> None: ... - def add_child_handler( - self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... - def remove_child_handler(self, pid: int) -> bool: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... - - if sys.version_info >= (3, 9): - class PidfdChildWatcher(AbstractChildWatcher): - """ - Child watcher implementation using Linux's pid file descriptors. - - This child watcher polls process file descriptors (pidfds) to await child - process termination. 
In some respects, PidfdChildWatcher is a "Goldilocks" - child watcher implementation. It doesn't require signals or threads, doesn't - interfere with any processes launched outside the event loop, and scales - linearly with the number of subprocesses launched by the event loop. The - main disadvantage is that pidfds are specific to Linux, and only work on - recent (5.3+) kernels. + The watcher has O(1) complexity, its performance doesn't depend + on amount of spawn processes. """ + def is_active(self) -> Literal[True]: ... + def close(self) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... - def is_active(self) -> bool: ... - def close(self) -> None: ... - def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + def __del__(self) -> None: ... def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: ... def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + if sys.version_info >= (3, 9): + class PidfdChildWatcher(AbstractChildWatcher): + """ + Child watcher implementation using Linux's pid file descriptors. + + This child watcher polls process file descriptors (pidfds) to await child + process termination. In some respects, PidfdChildWatcher is a "Goldilocks" + child watcher implementation. It doesn't require signals or threads, doesn't + interfere with any processes launched outside the event loop, and scales + linearly with the number of subprocesses launched by the event loop. The + main disadvantage is that pidfds are specific to Linux, and only work on + recent (5.3+) kernels. + """ + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... 
+ def is_active(self) -> bool: ... + def close(self) -> None: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + def add_child_handler( + self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] + ) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_events.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_events.pyi index b08190f..1fbcefe 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_events.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_events.pyi @@ -4,24 +4,36 @@ import socket import sys from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer from collections.abc import Callable -from typing import IO, Any, ClassVar, Literal, NoReturn +from typing import IO, Any, ClassVar, Final, NoReturn from . import events, futures, proactor_events, selector_events, streams, windows_utils if sys.platform == "win32": - __all__ = ( - "SelectorEventLoop", - "ProactorEventLoop", - "IocpProactor", - "DefaultEventLoopPolicy", - "WindowsSelectorEventLoopPolicy", - "WindowsProactorEventLoopPolicy", - ) + if sys.version_info >= (3, 13): + # 3.13 added `EventLoop`. 
+ __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + "EventLoop", + ) + else: + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + ) - NULL: Literal[0] - INFINITE: Literal[0xFFFFFFFF] - ERROR_CONNECTION_REFUSED: Literal[1225] - ERROR_CONNECTION_ABORTED: Literal[1236] + NULL: Final = 0 + INFINITE: Final = 0xFFFFFFFF + ERROR_CONNECTION_REFUSED: Final = 1225 + ERROR_CONNECTION_ABORTED: Final = 1236 CONNECT_PIPE_INIT_DELAY: float CONNECT_PIPE_MAX_DELAY: float @@ -76,8 +88,9 @@ if sys.platform == "win32": class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[SelectorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... + if sys.version_info < (3, 14): + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[ProactorEventLoop]] @@ -85,3 +98,5 @@ if sys.platform == "win32": def set_child_watcher(self, watcher: Any) -> NoReturn: ... 
DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy + if sys.version_info >= (3, 13): + EventLoop = ProactorEventLoop diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_utils.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_utils.pyi index cd58f8f..e738abc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_utils.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncio/windows_utils.pyi @@ -4,13 +4,13 @@ import subprocess import sys from collections.abc import Callable from types import TracebackType -from typing import Any, AnyStr, Literal +from typing import Any, AnyStr, Final from typing_extensions import Self if sys.platform == "win32": __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") - BUFSIZE: Literal[8192] + BUFSIZE: Final = 8192 PIPE = subprocess.PIPE STDOUT = subprocess.STDOUT def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncore.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncore.pyi index c1b8ac0..36d1862 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncore.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/asyncore.pyi @@ -1,25 +1,3 @@ -""" -Basic infrastructure for asynchronous socket service clients and servers. - -There are only two ways to have a program on a single processor do "more -than one thing at a time". Multi-threaded programming is the simplest and -most popular way to do it, but there is another very different technique, -that lets you have nearly all the advantages of multi-threading, without -actually using multiple threads. it's really only practical if your program -is largely I/O bound. 
If your program is CPU bound, then pre-emptive -scheduled threads are probably what you really need. Network servers are -rarely CPU-bound, however. - -If your operating system supports the select() system call in its I/O -library (and nearly all do), then you can use it to juggle multiple -communication channels at once; doing other work while your I/O is taking -place in the "background." Although this strategy can seem strange and -complex, especially at first, it is in many ways easier to understand and -control than multi-threaded programming. The module documented here solves -many of the difficult problems for you, making the task of building -sophisticated high-performance network servers and clients a snap. -""" - import sys from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/atexit.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/atexit.pyi index 84881fe..b613243 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/atexit.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/atexit.pyi @@ -13,23 +13,13 @@ _T = TypeVar("_T") _P = ParamSpec("_P") def _clear() -> None: - """ - _clear() -> None - - Clear the list of previously registered exit functions. - """ + """Clear the list of previously registered exit functions.""" ... def _ncallbacks() -> int: - """ - _ncallbacks() -> int - - Return the number of registered exit functions. - """ + """Return the number of registered exit functions.""" ... def _run_exitfuncs() -> None: """ - _run_exitfuncs() -> None - Run all registered exit functions. If a callback raises an exception, it is logged with sys.unraisablehook. @@ -37,21 +27,17 @@ def _run_exitfuncs() -> None: ... 
def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: """ - register(func, *args, **kwargs) -> func - Register a function to be executed upon normal program termination - func - function to be called at exit - args - optional arguments to pass to func - kwargs - optional keyword arguments to pass to func + func - function to be called at exit + args - optional arguments to pass to func + kwargs - optional keyword arguments to pass to func - func is returned to facilitate usage as a decorator. + func is returned to facilitate usage as a decorator. """ ... def unregister(func: Callable[..., object], /) -> None: """ - unregister(func) -> None - Unregister an exit function which was previously registered using atexit.register diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/audioop.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/audioop.pyi index 6850f1e..f3ce78c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/audioop.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/audioop.pyi @@ -5,66 +5,26 @@ _RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] class error(Exception): ... -def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: - """Return a fragment which is the addition of the two samples passed as parameters.""" - ... -def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Decode an Intel/DVI ADPCM coded fragment to a linear fragment.""" - ... -def alaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in a-LAW encoding to linearly encoded sound fragments.""" - ... -def avg(fragment: Buffer, width: int, /) -> int: - """Return the average over all samples in the fragment.""" - ... -def avgpp(fragment: Buffer, width: int, /) -> int: - """Return the average peak-peak value over all samples in the fragment.""" - ... 
-def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: - """Return a fragment that is the original fragment with a bias added to each sample.""" - ... -def byteswap(fragment: Buffer, width: int, /) -> bytes: - """Convert big-endian samples to little-endian and vice versa.""" - ... -def cross(fragment: Buffer, width: int, /) -> int: - """Return the number of zero crossings in the fragment passed as an argument.""" - ... -def findfactor(fragment: Buffer, reference: Buffer, /) -> float: - """Return a factor F such that rms(add(fragment, mul(reference, -F))) is minimal.""" - ... -def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: - """Try to match reference as well as possible to a portion of fragment.""" - ... -def findmax(fragment: Buffer, length: int, /) -> int: - """Search fragment for a slice of specified number of samples with maximum energy.""" - ... -def getsample(fragment: Buffer, width: int, index: int, /) -> int: - """Return the value of sample index from the fragment.""" - ... -def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Convert samples to 4 bit Intel/DVI ADPCM encoding.""" - ... -def lin2alaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to a-LAW encoding.""" - ... -def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: - """Convert samples between 1-, 2-, 3- and 4-byte formats.""" - ... -def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to u-LAW encoding.""" - ... -def max(fragment: Buffer, width: int, /) -> int: - """Return the maximum of the absolute value of all samples in a fragment.""" - ... -def maxpp(fragment: Buffer, width: int, /) -> int: - """Return the maximum peak-peak value in the sound fragment.""" - ... 
-def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: - """Return the minimum and maximum values of all samples in the sound fragment.""" - ... -def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: - """Return a fragment that has all samples in the original fragment multiplied by the floating-point value factor.""" - ... +def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: ... +def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def alaw2lin(fragment: Buffer, width: int, /) -> bytes: ... +def avg(fragment: Buffer, width: int, /) -> int: ... +def avgpp(fragment: Buffer, width: int, /) -> int: ... +def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: ... +def byteswap(fragment: Buffer, width: int, /) -> bytes: ... +def cross(fragment: Buffer, width: int, /) -> int: ... +def findfactor(fragment: Buffer, reference: Buffer, /) -> float: ... +def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: ... +def findmax(fragment: Buffer, length: int, /) -> int: ... +def getsample(fragment: Buffer, width: int, index: int, /) -> int: ... +def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def lin2alaw(fragment: Buffer, width: int, /) -> bytes: ... +def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: ... +def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: ... +def max(fragment: Buffer, width: int, /) -> int: ... +def maxpp(fragment: Buffer, width: int, /) -> int: ... +def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: ... +def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: ... def ratecv( fragment: Buffer, width: int, @@ -75,21 +35,9 @@ def ratecv( weightA: int = 1, weightB: int = 0, /, -) -> tuple[bytes, _RatecvState]: - """Convert the frame rate of the input fragment.""" - ... 
-def reverse(fragment: Buffer, width: int, /) -> bytes: - """Reverse the samples in a fragment and returns the modified fragment.""" - ... -def rms(fragment: Buffer, width: int, /) -> int: - """Return the root-mean-square of the fragment, i.e. sqrt(sum(S_i^2)/n).""" - ... -def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Convert a stereo fragment to a mono fragment.""" - ... -def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Generate a stereo fragment from a mono fragment.""" - ... -def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in u-LAW encoding to linearly encoded sound fragments.""" - ... +) -> tuple[bytes, _RatecvState]: ... +def reverse(fragment: Buffer, width: int, /) -> bytes: ... +def rms(fragment: Buffer, width: int, /) -> int: ... +def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/base64.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/base64.pyi index 1a05105..4c4dae7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/base64.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/base64.pyi @@ -234,5 +234,13 @@ if sys.version_info < (3, 9): def decodestring(s: ReadableBuffer) -> bytes: ... if sys.version_info >= (3, 13): - def z85encode(s: ReadableBuffer) -> bytes: ... - def z85decode(s: str | ReadableBuffer) -> bytes: ... + def z85encode(s: ReadableBuffer) -> bytes: + """Encode bytes-like object b in z85 format and return a bytes object.""" + ... + def z85decode(s: str | ReadableBuffer) -> bytes: + """ + Decode the z85-encoded bytes-like object or ASCII string b + + The result is returned as a bytes object. 
+ """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/bdb.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/bdb.pyi index 6badabf..a4d0614 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/bdb.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/bdb.pyi @@ -1,10 +1,10 @@ """Debugger basics""" import sys -from _typeshed import ExcInfo, TraceFunction +from _typeshed import ExcInfo, TraceFunction, Unused from collections.abc import Callable, Iterable, Mapping from types import CodeType, FrameType, TracebackType -from typing import IO, Any, Literal, SupportsInt, TypeVar +from typing import IO, Any, Final, SupportsInt, TypeVar from typing_extensions import ParamSpec __all__ = ["BdbQuit", "Bdb", "Breakpoint"] @@ -12,7 +12,10 @@ __all__ = ["BdbQuit", "Bdb", "Breakpoint"] _T = TypeVar("_T") _P = ParamSpec("_P") -GENERATOR_AND_COROUTINE_FLAGS: Literal[672] +# A union of code-object flags at runtime. +# The exact values of code-object flags are implementation details, +# so we don't include the value of this constant in the stubs. +GENERATOR_AND_COROUTINE_FLAGS: Final[int] class BdbQuit(Exception): """Exception to give up completely.""" @@ -117,6 +120,16 @@ class Bdb: Return self.trace_dispatch to continue tracing in this scope. """ ... + if sys.version_info >= (3, 13): + def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: + """ + Invoke user function and return trace function for opcode event. + If the debugger stops on the current opcode, invoke + self.user_opcode(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. + """ + ... + def is_skipped_module(self, module_name: str) -> bool: """Return True if module_name matches any skip pattern.""" ... @@ -162,9 +175,19 @@ class Bdb: reached or when returning from current frame. """ ... 
+ if sys.version_info >= (3, 13): + def user_opcode(self, frame: FrameType) -> None: + """Called when we are about to execute an opcode.""" + ... + def set_step(self) -> None: """Stop after one line of code.""" ... + if sys.version_info >= (3, 13): + def set_stepinstr(self) -> None: + """Stop before the next instruction.""" + ... + def set_next(self, frame: FrameType) -> None: """Stop on the next line in or below the given frame.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binascii.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binascii.pyi index 8dd0180..baae1ea 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binascii.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binascii.pyi @@ -27,9 +27,7 @@ if sys.version_info >= (3, 11): ... else: - def a2b_base64(data: _AsciiBuffer, /) -> bytes: - """Decode a line of base64 data.""" - ... + def a2b_base64(data: _AsciiBuffer, /) -> bytes: ... def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: """Base64-code line of data.""" @@ -48,18 +46,10 @@ def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, h ... if sys.version_info < (3, 11): - def a2b_hqx(data: _AsciiBuffer, /) -> bytes: - """Decode .hqx coding.""" - ... - def rledecode_hqx(data: ReadableBuffer, /) -> bytes: - """Decode hexbin RLE-coded string.""" - ... - def rlecode_hqx(data: ReadableBuffer, /) -> bytes: - """Binhex RLE-code binary data.""" - ... - def b2a_hqx(data: ReadableBuffer, /) -> bytes: - """Encode .hqx data.""" - ... + def a2b_hqx(data: _AsciiBuffer, /) -> bytes: ... + def rledecode_hqx(data: ReadableBuffer, /) -> bytes: ... + def rlecode_hqx(data: ReadableBuffer, /) -> bytes: ... + def b2a_hqx(data: ReadableBuffer, /) -> bytes: ... 
def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: """Compute CRC-CCITT incrementally.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binhex.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binhex.pyi index 27e2d58..bdead92 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binhex.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/binhex.pyi @@ -1,22 +1,14 @@ -""" -Macintosh binhex compression/decompression. - -easy interface: -binhex(inputfilename, outputfilename) -hexbin(inputfilename, outputfilename) -""" - from _typeshed import SizedBuffer -from typing import IO, Any, Literal +from typing import IO, Any, Final from typing_extensions import TypeAlias __all__ = ["binhex", "hexbin", "Error"] class Error(Exception): ... -REASONABLY_LARGE: Literal[32768] -LINELEN: Literal[64] -RUNCHAR: Literal[b"\x90"] +REASONABLY_LARGE: Final = 32768 +LINELEN: Final = 64 +RUNCHAR: Final = b"\x90" class FInfo: Type: str @@ -41,9 +33,7 @@ class BinHex: def write_rsrc(self, data: SizedBuffer) -> None: ... def close(self) -> None: ... -def binhex(inp: str, out: str) -> None: - """binhex(infilename, outfilename): create binhex-encoded copy of a file""" - ... +def binhex(inp: str, out: str) -> None: ... class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... @@ -52,6 +42,4 @@ class HexBin: def read_rsrc(self, *n: int) -> bytes: ... def close(self) -> None: ... -def hexbin(inp: str, out: str) -> None: - """hexbin(infilename, outfilename) - Decode binhexed file""" - ... +def hexbin(inp: str, out: str) -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/builtins.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/builtins.pyi index ea7ce55..6355074 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/builtins.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/builtins.pyi @@ -1,3 +1,5 @@ +# ruff: noqa: PYI036 # This is the module declaring BaseException + """ Built-in functions, types, exceptions, and other objects. @@ -46,7 +48,8 @@ from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from types import CellType, CodeType, TracebackType -# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi +# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} +# are imported from collections.abc in builtins.pyi from typing import ( # noqa: Y022 IO, Any, @@ -88,6 +91,7 @@ if sys.version_info >= (3, 9): from types import GenericAlias _T = TypeVar("_T") +_I = TypeVar("_I", default=int) _T_co = TypeVar("_T_co", covariant=True) _T_contra = TypeVar("_T_contra", contravariant=True) _R_co = TypeVar("_R_co", covariant=True) @@ -204,8 +208,6 @@ class object: class staticmethod(Generic[_P, _R_co]): """ - staticmethod(function) -> method - Convert a function to be a static method. A static method does not receive an implicit first argument. @@ -247,8 +249,6 @@ class staticmethod(Generic[_P, _R_co]): class classmethod(Generic[_T, _P, _R_co]): """ - classmethod(function) -> method - Convert a function to be a class method. A class method receives the class as implicit first argument, @@ -344,10 +344,7 @@ class type: ... 
@classmethod def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: - """ - __prepare__() -> dict - used to create the namespace for the class statement - """ + """Create the namespace for the class statement""" ... if sys.version_info >= (3, 10): def __or__(self, value: Any, /) -> types.UnionType: @@ -392,7 +389,7 @@ class int: int(x, base=10) -> integer Convert a number or string to an integer, or return 0 if no arguments - are given. If x is a number, return x.__int__(). For floating point + are given. If x is a number, return x.__int__(). For floating-point numbers, this truncates towards zero. If x is not a number or if base is given, then x must be a string, @@ -480,7 +477,7 @@ class int: the most significant byte is at the beginning of the byte array. If byteorder is 'little', the most significant byte is at the end of the byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. Default is to use 'big'. + sys.byteorder as the byte order value. Default is to use 'big'. signed Determines whether two's complement is used to represent the integer. If signed is False and a negative integer is given, an OverflowError @@ -508,31 +505,13 @@ class int: the most significant byte is at the beginning of the byte array. If byteorder is 'little', the most significant byte is at the end of the byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. Default is to use 'big'. + sys.byteorder as the byte order value. Default is to use 'big'. signed Indicates whether two's complement is used to represent the integer. """ ... else: - def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: - """ - Return an array of bytes representing an integer. - - length - Length of bytes object to use. 
An OverflowError is raised if the - integer is not representable with the given number of bytes. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Determines whether two's complement is used to represent the integer. - If signed is False and a negative integer is given, an OverflowError - is raised. - """ - ... + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... @classmethod def from_bytes( cls, @@ -540,25 +519,7 @@ class int: byteorder: Literal["little", "big"], *, signed: bool = False, - ) -> Self: - """ - Return the integer represented by the given array of bytes. - - bytes - Holds the array of bytes to convert. The argument must either - support the buffer protocol or be an iterable object producing bytes. - Bytes and bytearray are examples of built-in objects that support the - buffer protocol. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Indicates whether two's complement is used to represent the integer. - """ - ... + ) -> Self: ... if sys.version_info >= (3, 12): def is_integer(self) -> Literal[True]: @@ -730,7 +691,7 @@ class int: ... class float: - """Convert a string or number to a floating point number, if possible.""" + """Convert a string or number to a floating-point number, if possible.""" def __new__(cls, x: ConvertibleToFloat = ..., /) -> Self: ... 
def as_integer_ratio(self) -> tuple[int, int]: """ @@ -1016,7 +977,7 @@ class str(Sequence[str]): that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). - encoding defaults to sys.getdefaultencoding(). + encoding defaults to 'utf-8'. errors defaults to 'strict'. """ @overload @@ -1067,11 +1028,9 @@ class str(Sequence[str]): ... def count(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: """ - S.count(sub[, start[, end]]) -> int + Return the number of non-overlapping occurrences of substring sub in string S[start:end]. - Return the number of non-overlapping occurrences of substring sub in - string S[start:end]. Optional arguments start and end are - interpreted as in slice notation. + Optional arguments start and end are interpreted as in slice notation. """ ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: @@ -1092,12 +1051,14 @@ class str(Sequence[str]): self, suffix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> bool: """ - S.endswith(suffix[, start[, end]]) -> bool + Return True if the string ends with the specified suffix, False otherwise. - Return True if S ends with the specified suffix, False otherwise. - With optional start, test S beginning at that position. - With optional end, stop comparing S at that position. - suffix can also be a tuple of strings to try. + suffix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. """ ... @overload @@ -1118,49 +1079,37 @@ class str(Sequence[str]): ... 
def find(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: """ - S.find(sub[, start[, end]]) -> int - - Return the lowest index in S where substring sub is found, - such that sub is contained within S[start:end]. Optional - arguments start and end are interpreted as in slice notation. + Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. + Optional arguments start and end are interpreted as in slice notation. Return -1 on failure. """ ... @overload def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: """ - S.format(*args, **kwargs) -> str - - Return a formatted version of S, using substitutions from args and kwargs. + Return a formatted version of the string, using substitutions from args and kwargs. The substitutions are identified by braces ('{' and '}'). """ ... @overload def format(self, *args: object, **kwargs: object) -> str: """ - S.format(*args, **kwargs) -> str - - Return a formatted version of S, using substitutions from args and kwargs. + Return a formatted version of the string, using substitutions from args and kwargs. The substitutions are identified by braces ('{' and '}'). """ ... def format_map(self, mapping: _FormatMapMapping, /) -> str: """ - S.format_map(mapping) -> str - - Return a formatted version of S, using substitutions from mapping. + Return a formatted version of the string, using substitutions from mapping. The substitutions are identified by braces ('{' and '}'). """ ... def index(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: """ - S.index(sub[, start[, end]]) -> int - - Return the lowest index in S where substring sub is found, - such that sub is contained within S[start:end]. Optional - arguments start and end are interpreted as in slice notation. 
+ Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. + Optional arguments start and end are interpreted as in slice notation. Raises ValueError when the substring is not found. """ ... @@ -1352,13 +1301,6 @@ class str(Sequence[str]): @overload def replace( self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1 - ) -> LiteralString: ... - @overload - def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] - else: - @overload - def replace( - self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / ) -> LiteralString: """ Return a copy with all occurrences of substring old replaced by new. @@ -1372,7 +1314,7 @@ class str(Sequence[str]): """ ... @overload - def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: + def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: """ Return a copy with all occurrences of substring old replaced by new. @@ -1384,6 +1326,13 @@ class str(Sequence[str]): replaced. """ ... + else: + @overload + def replace( + self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / + ) -> LiteralString: ... + @overload + def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): @overload def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: @@ -1426,23 +1375,17 @@ class str(Sequence[str]): def rfind(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: """ - S.rfind(sub[, start[, end]]) -> int - - Return the highest index in S where substring sub is found, - such that sub is contained within S[start:end]. Optional - arguments start and end are interpreted as in slice notation. 
+ Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. + Optional arguments start and end are interpreted as in slice notation. Return -1 on failure. """ ... def rindex(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: """ - S.rindex(sub[, start[, end]]) -> int - - Return the highest index in S where substring sub is found, - such that sub is contained within S[start:end]. Optional - arguments start and end are interpreted as in slice notation. + Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. + Optional arguments start and end are interpreted as in slice notation. Raises ValueError when the substring is not found. """ ... @@ -1606,12 +1549,14 @@ class str(Sequence[str]): self, prefix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> bool: """ - S.startswith(prefix[, start[, end]]) -> bool + Return True if the string starts with the specified prefix, False otherwise. - Return True if S starts with the specified prefix, False otherwise. - With optional start, test S beginning at that position. - With optional end, stop comparing S at that position. - prefix can also be a tuple of strings to try. + prefix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. """ ... @overload @@ -1756,6 +1701,11 @@ class str(Sequence[str]): def __ge__(self, value: str, /) -> bool: """Return self>=value.""" ... + @overload + def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: + """Return self[key].""" + ... + @overload def __getitem__(self, key: SupportsIndex | slice, /) -> str: """Return self[key].""" ... 
@@ -1850,11 +1800,12 @@ class bytes(Sequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.count(sub[, start[, end]]) -> int + Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - Return the number of non-overlapping occurrences of subsection sub in - bytes B[start:end]. Optional arguments start and end are interpreted - as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. """ ... def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: @@ -1879,12 +1830,14 @@ class bytes(Sequence[int]): /, ) -> bool: """ - B.endswith(suffix[, start[, end]]) -> bool + Return True if the bytes ends with the specified suffix, False otherwise. - Return True if B ends with the specified suffix, False otherwise. - With optional start, test B beginning at that position. - With optional end, stop comparing B at that position. - suffix can also be a tuple of bytes to try. + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. """ ... def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: @@ -1898,11 +1851,12 @@ class bytes(Sequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.find(sub[, start[, end]]) -> int + Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - Return the lowest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. Return -1 on failure. 
""" @@ -1933,13 +1887,14 @@ class bytes(Sequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.index(sub[, start[, end]]) -> int + Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - Return the lowest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. - Raises ValueError when the subsection is not found. + Raise ValueError if the subsection is not found. """ ... def isalnum(self) -> bool: @@ -2087,11 +2042,12 @@ class bytes(Sequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.rfind(sub[, start[, end]]) -> int + Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - Return the highest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. Return -1 on failure. """ @@ -2100,13 +2056,14 @@ class bytes(Sequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.rindex(sub[, start[, end]]) -> int + Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - Return the highest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. 
+ start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. - Raise ValueError when the subsection is not found. + Raise ValueError if the subsection is not found. """ ... def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: @@ -2179,12 +2136,14 @@ class bytes(Sequence[int]): /, ) -> bool: """ - B.startswith(prefix[, start[, end]]) -> bool + Return True if the bytes starts with the specified prefix, False otherwise. - Return True if B starts with the specified prefix, False otherwise. - With optional start, test B beginning at that position. - With optional end, stop comparing B at that position. - prefix can also be a tuple of bytes to try. + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. """ ... def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: @@ -2247,7 +2206,7 @@ class bytes(Sequence[int]): @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: """ - Return a translation table useable for the bytes or bytearray translate method. + Return a translation table usable for the bytes or bytearray translate method. The returned table will be one where each byte in frm is mapped to the byte at the same position in to. @@ -2364,11 +2323,12 @@ class bytearray(MutableSequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.count(sub[, start[, end]]) -> int + Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - Return the number of non-overlapping occurrences of subsection sub in - bytes B[start:end]. Optional arguments start and end are interpreted - as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. 
Default: end of the bytes. """ ... def copy(self) -> bytearray: @@ -2396,12 +2356,14 @@ class bytearray(MutableSequence[int]): /, ) -> bool: """ - B.endswith(suffix[, start[, end]]) -> bool + Return True if the bytearray ends with the specified suffix, False otherwise. - Return True if B ends with the specified suffix, False otherwise. - With optional start, test B beginning at that position. - With optional end, stop comparing B at that position. - suffix can also be a tuple of bytes to try. + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. """ ... def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: @@ -2423,11 +2385,12 @@ class bytearray(MutableSequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.find(sub[, start[, end]]) -> int + Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - Return the lowest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. Return -1 on failure. """ @@ -2458,13 +2421,14 @@ class bytearray(MutableSequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.index(sub[, start[, end]]) -> int + Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - Return the lowest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. 
Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. - Raises ValueError when the subsection is not found. + Raise ValueError if the subsection is not found. """ ... def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: @@ -2640,11 +2604,12 @@ class bytearray(MutableSequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.rfind(sub[, start[, end]]) -> int + Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - Return the highest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. Return -1 on failure. """ @@ -2653,13 +2618,14 @@ class bytearray(MutableSequence[int]): self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / ) -> int: """ - B.rindex(sub[, start[, end]]) -> int + Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - Return the highest index in B where subsection sub is found, - such that sub is contained within B[start,end]. Optional - arguments start and end are interpreted as in slice notation. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. - Raise ValueError when the subsection is not found. + Raise ValueError if the subsection is not found. """ ... 
def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: @@ -2733,12 +2699,14 @@ class bytearray(MutableSequence[int]): /, ) -> bool: """ - B.startswith(prefix[, start[, end]]) -> bool + Return True if the bytearray starts with the specified prefix, False otherwise. - Return True if B starts with the specified prefix, False otherwise. - With optional start, test B beginning at that position. - With optional end, stop comparing B at that position. - prefix can also be a tuple of bytes to try. + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. """ ... def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: @@ -2801,7 +2769,7 @@ class bytearray(MutableSequence[int]): @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: """ - Return a translation table useable for the bytes or bytearray translate method. + Return a translation table usable for the bytes or bytearray translate method. The returned table will be one where each byte in frm is mapped to the byte at the same position in to. @@ -2890,8 +2858,12 @@ class bytearray(MutableSequence[int]): """Release the buffer object that exposes the underlying memory of the object.""" ... +_IntegerFormats: TypeAlias = Literal[ + "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P" +] + @final -class memoryview(Sequence[int]): +class memoryview(Sequence[_I]): """Create a new memoryview object which references the given object.""" @property def format(self) -> str: @@ -2960,20 +2932,35 @@ class memoryview(Sequence[int]): def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, / - ) -> None: ... - def cast(self, format: str, shape: list[int] | tuple[int, ...] = ...) 
-> memoryview: + ) -> None: + """Release the underlying buffer exposed by the memoryview object.""" + ... + @overload + def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: + """Cast a memoryview to a new format or shape.""" + ... + @overload + def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: + """Cast a memoryview to a new format or shape.""" + ... + @overload + def cast(self, format: Literal["?"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bool]: """Cast a memoryview to a new format or shape.""" ... @overload - def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> int: + def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: + """Cast a memoryview to a new format or shape.""" + ... + @overload + def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: """Return self[key].""" ... @overload - def __getitem__(self, key: slice, /) -> memoryview: + def __getitem__(self, key: slice, /) -> memoryview[_I]: """Return self[key].""" ... def __contains__(self, x: object, /) -> bool: ... - def __iter__(self) -> Iterator[int]: + def __iter__(self) -> Iterator[_I]: """Implement iter(self).""" ... def __len__(self) -> int: @@ -3049,9 +3036,7 @@ class memoryview(Sequence[int]): @final class bool(int): """ - bool(x) -> bool - - Returns True when the argument x is true, False otherwise. + Returns True when the argument is true, False otherwise. The builtins True and False are the only two instances of the class bool. The class bool is a subclass of the class int, and cannot be subclassed. """ @@ -3219,7 +3204,9 @@ class tuple(Sequence[_T_co]): """See PEP 585""" ... -# Doesn't exist at runtime, but deleting this breaks mypy. See #2999 +# Doesn't exist at runtime, but deleting this breaks mypy and pyright. 
See: +# https://github.com/python/typeshed/issues/7580 +# https://github.com/python/mypy/issues/8240 @final @type_check_only class function: @@ -3444,18 +3431,19 @@ class dict(MutableMapping[_KT, _VT]): def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: - """D.copy() -> a shallow copy of D""" + """Return a shallow copy of the dict.""" ... def keys(self) -> dict_keys[_KT, _VT]: - """D.keys() -> a set-like object providing a view on D's keys""" + """Return a set-like object providing a view on the dict's keys.""" ... def values(self) -> dict_values[_KT, _VT]: - """D.values() -> an object providing a view on D's values""" + """Return an object providing a view on the dict's values.""" ... def items(self) -> dict_items[_KT, _VT]: - """D.items() -> a set-like object providing a view on D's items""" + """Return a set-like object providing a view on the dict's items.""" ... - # Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` + # Signature of `dict.fromkeys` should be kept identical to + # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @@ -3561,12 +3549,7 @@ class dict(MutableMapping[_KT, _VT]): ... class set(MutableSet[_T]): - """ - set() -> new empty set object - set(iterable) -> new set object - - Build an unordered collection of unique elements. - """ + """Build an unordered collection of unique elements.""" @overload def __init__(self) -> None: ... @overload @@ -3582,14 +3565,10 @@ class set(MutableSet[_T]): """Return a shallow copy of a set.""" ... 
def difference(self, *s: Iterable[Any]) -> set[_T]: - """ - Return the difference of two or more sets as a new set. - - (i.e. all elements that are in this set but not the others.) - """ + """Return a new set with elements in the set that are not in the others.""" ... def difference_update(self, *s: Iterable[Any]) -> None: - """Remove all elements of another set from this set.""" + """Update the set, removing elements found in others.""" ... def discard(self, element: _T, /) -> None: """ @@ -3600,23 +3579,19 @@ class set(MutableSet[_T]): """ ... def intersection(self, *s: Iterable[Any]) -> set[_T]: - """ - Return the intersection of two sets as a new set. - - (i.e. all elements that are in both sets.) - """ + """Return a new set with elements common to the set and all others.""" ... def intersection_update(self, *s: Iterable[Any]) -> None: - """Update a set with the intersection of itself and another.""" + """Update the set, keeping only elements found in it and all others.""" ... def isdisjoint(self, s: Iterable[Any], /) -> bool: """Return True if two sets have a null intersection.""" ... def issubset(self, s: Iterable[Any], /) -> bool: - """Test whether every element in the set is in other.""" + """Report whether another set contains this set.""" ... def issuperset(self, s: Iterable[Any], /) -> bool: - """Test whether every element in other is in the set.""" + """Report whether this set contains another set.""" ... def remove(self, element: _T, /) -> None: """ @@ -3626,24 +3601,16 @@ class set(MutableSet[_T]): """ ... def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: - """ - Return the symmetric difference of two sets as a new set. - - (i.e. all elements that are in exactly one of the sets.) - """ + """Return a new set with elements in either the set or other but not both.""" ... 
def symmetric_difference_update(self, s: Iterable[_T], /) -> None: - """Update a set with the symmetric difference of itself and another.""" + """Update the set, keeping only elements found in either set, but not in both.""" ... def union(self, *s: Iterable[_S]) -> set[_T | _S]: - """ - Return the union of sets as a new set. - - (i.e. all elements that are in either set.) - """ + """Return a new set with elements from the set and all others.""" ... def update(self, *s: Iterable[_T]) -> None: - """Update a set with the union of itself and others.""" + """Update the set, adding elements from all others.""" ... def __len__(self) -> int: """Return len(self).""" @@ -3700,12 +3667,7 @@ class set(MutableSet[_T]): ... class frozenset(AbstractSet[_T_co]): - """ - frozenset() -> empty frozenset object - frozenset(iterable) -> frozenset object - - Build an immutable unordered collection of unique elements. - """ + """Build an immutable unordered collection of unique elements.""" @overload def __new__(cls) -> Self: ... @overload @@ -3714,41 +3676,25 @@ class frozenset(AbstractSet[_T_co]): """Return a shallow copy of a set.""" ... def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: - """ - Return the difference of two or more sets as a new set. - - (i.e. all elements that are in this set but not the others.) - """ + """Return a new set with elements in the set that are not in the others.""" ... def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: - """ - Return the intersection of two sets as a new set. - - (i.e. all elements that are in both sets.) - """ + """Return a new set with elements common to the set and all others.""" ... def isdisjoint(self, s: Iterable[_T_co], /) -> bool: """Return True if two sets have a null intersection.""" ... def issubset(self, s: Iterable[object], /) -> bool: - """Test whether every element in the set is in other.""" + """Report whether another set contains this set.""" ... 
def issuperset(self, s: Iterable[object], /) -> bool: - """Test whether every element in other is in the set.""" + """Report whether this set contains another set.""" ... def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: - """ - Return the symmetric difference of two sets as a new set. - - (i.e. all elements that are in exactly one of the sets.) - """ + """Return a new set with elements in either the set or other but not both.""" ... def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: - """ - Return the union of sets as a new set. - - (i.e. all elements that are in either set.) - """ + """Return a new set with elements from the set and all others.""" ... def __len__(self) -> int: """Return len(self).""" @@ -3990,8 +3936,6 @@ def bin(number: int | SupportsIndex, /) -> str: ... def breakpoint(*args: Any, **kws: Any) -> None: """ - breakpoint(*args, **kws) - Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept whatever arguments are passed. @@ -4029,19 +3973,19 @@ if sys.version_info >= (3, 10): # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: """ - async anext(aiterator[, default]) + Return the next item from the async iterator. - Return the next item from the async iterator. If default is given and the async - iterator is exhausted, it is returned instead of raising StopAsyncIteration. + If default is given and the async iterator is exhausted, + it is returned instead of raising StopAsyncIteration. """ ... @overload async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT: """ - async anext(aiterator[, default]) + Return the next item from the async iterator. - Return the next item from the async iterator. 
If default is given and the async - iterator is exhausted, it is returned instead of raising StopAsyncIteration. + If default is given and the async iterator is exhausted, + it is returned instead of raising StopAsyncIteration. """ ... @@ -4162,7 +4106,7 @@ def delattr(obj: object, name: str, /) -> None: ... def dir(o: object = ..., /) -> list[str]: """ - Show attributes of an object. + dir([object]) -> list of strings If called without an argument, return the names in the current scope. Else, return an alphabetized list of names comprising (some of) the attributes @@ -4193,14 +4137,6 @@ if sys.version_info >= (3, 13): /, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, - ) -> Any: ... - -else: - def eval( - source: str | ReadableBuffer | CodeType, - globals: dict[str, Any] | None = None, - locals: Mapping[str, object] | None = None, - /, ) -> Any: """ Evaluate the given source in the context of globals and locals. @@ -4213,23 +4149,21 @@ else: """ ... -# Comment above regarding `eval` applies to `exec` as well -if sys.version_info >= (3, 13): - def exec( +else: + def eval( source: str | ReadableBuffer | CodeType, - /, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, - *, - closure: tuple[CellType, ...] | None = None, - ) -> None: ... + /, + ) -> Any: ... -elif sys.version_info >= (3, 11): +# Comment above regarding `eval` applies to `exec` as well +if sys.version_info >= (3, 13): def exec( source: str | ReadableBuffer | CodeType, + /, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, - /, *, closure: tuple[CellType, ...] | None = None, ) -> None: @@ -4246,30 +4180,28 @@ elif sys.version_info >= (3, 11): """ ... 
-else: +elif sys.version_info >= (3, 11): def exec( source: str | ReadableBuffer | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, /, - ) -> None: - """ - Execute the given source in the context of globals and locals. + *, + closure: tuple[CellType, ...] | None = None, + ) -> None: ... - The source may be a string representing one or more Python statements - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - """ - ... +else: + def exec( + source: str | ReadableBuffer | CodeType, + globals: dict[str, Any] | None = None, + locals: Mapping[str, object] | None = None, + /, + ) -> None: ... def exit(code: sys._ExitCode = None) -> NoReturn: ... class filter(Iterator[_T]): """ - filter(function or None, iterable) --> filter object - Return an iterator yielding those items of iterable for which function(item) is true. If function is None, return the items that are true. """ @@ -4303,9 +4235,9 @@ def format(value: object, format_spec: str = "", /) -> str: @overload def getattr(o: object, name: str, /) -> Any: """ - Get a named attribute from an object. + getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4317,9 +4249,9 @@ def getattr(o: object, name: str, /) -> Any: @overload def getattr(o: object, name: str, default: None, /) -> Any | None: """ - Get a named attribute from an object. + getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. 
When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4327,9 +4259,9 @@ def getattr(o: object, name: str, default: None, /) -> Any | None: @overload def getattr(o: object, name: str, default: bool, /) -> Any | bool: """ - Get a named attribute from an object. + getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4337,9 +4269,9 @@ def getattr(o: object, name: str, default: bool, /) -> Any | bool: @overload def getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]: """ - Get a named attribute from an object. + getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4347,9 +4279,9 @@ def getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]: @overload def getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, Any]: """ - Get a named attribute from an object. + getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4357,9 +4289,9 @@ def getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, @overload def getattr(o: object, name: str, default: _T, /) -> Any | _T: """ - Get a named attribute from an object. 
+ getattr(object, name[, default]) -> value - getattr(x, 'y') is equivalent to x.y + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. When a default argument is given, it is returned when the attribute doesn't exist; without it, an exception is raised in that case. """ @@ -4422,36 +4354,44 @@ class _GetItemIterable(Protocol[_T_co]): @overload def iter(object: SupportsIter[_SupportsNextT], /) -> _SupportsNextT: """ - Get an iterator from an object. + iter(iterable) -> iterator + iter(callable, sentinel) -> iterator - In the first form, the argument must supply its own iterator, or be a sequence. + Get an iterator from an object. In the first form, the argument must + supply its own iterator, or be a sequence. In the second form, the callable is called until it returns the sentinel. """ ... @overload def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: """ - Get an iterator from an object. + iter(iterable) -> iterator + iter(callable, sentinel) -> iterator - In the first form, the argument must supply its own iterator, or be a sequence. + Get an iterator from an object. In the first form, the argument must + supply its own iterator, or be a sequence. In the second form, the callable is called until it returns the sentinel. """ ... @overload def iter(object: Callable[[], _T | None], sentinel: None, /) -> Iterator[_T]: """ - Get an iterator from an object. + iter(iterable) -> iterator + iter(callable, sentinel) -> iterator - In the first form, the argument must supply its own iterator, or be a sequence. + Get an iterator from an object. In the first form, the argument must + supply its own iterator, or be a sequence. In the second form, the callable is called until it returns the sentinel. """ ... @overload def iter(object: Callable[[], _T], sentinel: object, /) -> Iterator[_T]: """ - Get an iterator from an object. 
+ iter(iterable) -> iterator + iter(callable, sentinel) -> iterator - In the first form, the argument must supply its own iterator, or be a sequence. + Get an iterator from an object. In the first form, the argument must + supply its own iterator, or be a sequence. In the second form, the callable is called until it returns the sentinel. """ ... @@ -4496,8 +4436,6 @@ def locals() -> dict[str, Any]: class map(Iterator[_S]): """ - map(func, *iterables) --> map object - Make an iterator that computes the function using arguments from each of the iterables. Stops when the shortest iterable is exhausted. """ @@ -4561,7 +4499,7 @@ def max( With a single iterable argument, return its biggest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4573,7 +4511,7 @@ def max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichCompa With a single iterable argument, return its biggest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4585,7 +4523,7 @@ def max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> With a single iterable argument, return its biggest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4597,7 +4535,7 @@ def max(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison With a single iterable argument, return its biggest item. 
The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4609,7 +4547,7 @@ def max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, def With a single iterable argument, return its biggest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4621,7 +4559,7 @@ def max(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparis With a single iterable argument, return its biggest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the largest argument. + With two or more positional arguments, return the largest argument. """ ... @overload @@ -4635,7 +4573,7 @@ def min( With a single iterable argument, return its smallest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload @@ -4647,7 +4585,7 @@ def min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichCompa With a single iterable argument, return its smallest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload @@ -4659,7 +4597,7 @@ def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> With a single iterable argument, return its smallest item. 
The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload @@ -4671,7 +4609,7 @@ def min(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison With a single iterable argument, return its smallest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload @@ -4683,7 +4621,7 @@ def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, def With a single iterable argument, return its smallest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload @@ -4695,25 +4633,25 @@ def min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparis With a single iterable argument, return its smallest item. The default keyword-only argument specifies an object to return if the provided iterable is empty. - With two or more arguments, return the smallest argument. + With two or more positional arguments, return the smallest argument. """ ... @overload def next(i: SupportsNext[_T], /) -> _T: """ - Return the next item from the iterator. + next(iterator[, default]) - If default is given and the iterator is exhausted, - it is returned instead of raising StopIteration. + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. """ ... @overload def next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT: """ - Return the next item from the iterator. 
+ next(iterator[, default]) - If default is given and the iterator is exhausted, - it is returned instead of raising StopIteration. + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. """ ... def oct(number: int | SupportsIndex, /) -> str: @@ -5959,11 +5897,10 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A # The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) -# Use a type: ignore to make complaints about overlapping overloads go away @overload def vars(object: type, /) -> types.MappingProxyType[str, Any]: """ - Show vars. + vars([object]) -> dictionary Without arguments, equivalent to locals(). With an argument, equivalent to object.__dict__. @@ -5972,7 +5909,7 @@ def vars(object: type, /) -> types.MappingProxyType[str, Any]: @overload def vars(object: Any = ..., /) -> dict[str, Any]: """ - Show vars. + vars([object]) -> dictionary Without arguments, equivalent to locals(). With an argument, equivalent to object.__dict__. @@ -5981,11 +5918,6 @@ def vars(object: Any = ..., /) -> dict[str, Any]: class zip(Iterator[_T_co]): """ - zip(*iterables, strict=False) --> Yield tuples until an input is exhausted. - - >>> list(zip('abcdefg', range(3), range(4))) - [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] - The zip object yields n-length tuples, where n is the number of iterables passed as positional arguments to zip(). The i-th element in every tuple comes from the i-th iterable argument to zip(). This continues until the @@ -5993,6 +5925,9 @@ class zip(Iterator[_T_co]): If strict is true and one of the arguments is exhausted before the others, raise a ValueError. 
+ + >>> list(zip('abcdefg', range(3), range(4))) + [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] """ if sys.version_info >= (3, 10): @overload @@ -6132,6 +6067,7 @@ class BaseException: __suppress_context__: bool __traceback__: TracebackType | None def __init__(self, *args: object) -> None: ... + def __new__(cls, *args: Any, **kwds: Any) -> Self: ... def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... def with_traceback(self, tb: TracebackType | None, /) -> Self: """ @@ -6263,7 +6199,7 @@ class ValueError(Exception): """Inappropriate argument value (of correct type).""" ... class FloatingPointError(ArithmeticError): - """Floating point operation failed.""" + """Floating-point operation failed.""" ... class OverflowError(ArithmeticError): """Result too large to be represented.""" @@ -6425,9 +6361,9 @@ if sys.version_info >= (3, 10): ... if sys.version_info >= (3, 11): - _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True) + _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException) _BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException) - _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) + _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True, default=Exception) _ExceptionT = TypeVar("_ExceptionT", bound=Exception) # See `check_exception_group.py` for use-cases and comments. @@ -6500,5 +6436,6 @@ if sys.version_info >= (3, 11): ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... if sys.version_info >= (3, 13): - class IncompleteInputError(SyntaxError): ... - class PythonFinalizationError(RuntimeError): ... + class PythonFinalizationError(RuntimeError): + """Operation blocked during Python finalization.""" + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cProfile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cProfile.pyi index d4056f7..5d02451 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cProfile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cProfile.pyi @@ -5,7 +5,7 @@ Compatible with the 'profile' module. import _lsprof from _typeshed import StrOrBytesPath, Unused -from collections.abc import Callable +from collections.abc import Callable, Mapping from types import CodeType from typing import Any, TypeVar from typing_extensions import ParamSpec, Self, TypeAlias @@ -26,7 +26,7 @@ def run(statement: str, filename: str | None = None, sort: str | int = -1) -> No """ ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 + statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: """ Run statement under profiler, supplying your own globals and locals, @@ -56,7 +56,7 @@ class Profile(_lsprof.Profiler): def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... def run(self, cmd: str) -> Self: ... - def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> Self: ... def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... def __enter__(self) -> Self: ... def __exit__(self, *exc_info: Unused) -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/calendar.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/calendar.pyi index 426df37..c1eac5d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/calendar.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/calendar.pyi @@ -85,8 +85,8 @@ def weekday(year: int, month: int, day: int) -> int: ... def monthrange(year: int, month: int) -> tuple[int, int]: """ - Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for - year, month. + Return weekday of first day of month (0-6 ~ Mon-Sun) + and number of days (28-31) for year, month. """ ... @@ -144,7 +144,7 @@ class Calendar: Each row represents a week; days outside this month are zero. """ ... - def yeardatescalendar(self, year: int, width: int = 3) -> list[list[int]]: + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: """ Return the data for the specified year ready for formatting. The return value is a list of month rows. Each month row contains up to width months. @@ -152,7 +152,7 @@ class Calendar: days. Days are datetime.date objects. """ ... - def yeardays2calendar(self, year: int, width: int = 3) -> list[list[tuple[int, int]]]: + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: """ Return the data for the specified year ready for formatting (similar to yeardatescalendar()). Entries in the week lists are @@ -160,7 +160,7 @@ class Calendar: zero. """ ... - def yeardayscalendar(self, year: int, width: int = 3) -> list[list[int]]: + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: """ Return the data for the specified year ready for formatting (similar to yeardatescalendar()). Entries in the week lists are day numbers. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgi.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgi.pyi index 2f00128..3a2e2a9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgi.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgi.pyi @@ -1,15 +1,3 @@ -""" -Support module for CGI (Common Gateway Interface) scripts. - -This module defines a number of utilities for use by CGI scripts -written in Python. - -The global variable maxlen can be set to an integer indicating the maximum size -of a POST request. POST requests larger than this size will result in a -ValueError being raised during parsing. The default value of this variable is 0, -meaning the request size is unlimited. -""" - from _typeshed import SupportsContainsAndGetItem, SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type from collections.abc import Iterable, Iterator, Mapping @@ -39,83 +27,23 @@ def parse( keep_blank_values: bool = ..., strict_parsing: bool = ..., separator: str = "&", -) -> dict[str, list[str]]: - """ - Parse a query in the environment or from a file (default stdin) - - Arguments, all optional: - - fp : file pointer; default: sys.stdin.buffer - - environ : environment dictionary; default: os.environ - - keep_blank_values: flag indicating whether blank values in - percent-encoded forms should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - separator: str. The symbol to use for separating the query arguments. - Defaults to &. - """ - ... +) -> dict[str, list[str]]: ... 
def parse_multipart( fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" -) -> dict[str, list[Any]]: - """ - Parse multipart input. - - Arguments: - fp : input file - pdict: dictionary containing other parameters of content-type header - encoding, errors: request encoding and error handler, passed to - FieldStorage - - Returns a dictionary just like parse_qs(): keys are the field names, each - value is a list of values for that field. For non-file fields, the value - is a list of strings. - """ - ... +) -> dict[str, list[Any]]: ... class _Environ(Protocol): def __getitem__(self, k: str, /) -> str: ... def keys(self) -> Iterable[str]: ... -def parse_header(line: str) -> tuple[str, dict[str, str]]: - """ - Parse a Content-type like header. - - Return the main content-type and a dictionary of options. - """ - ... -def test(environ: _Environ = ...) -> None: - """ - Robust test CGI script, usable as main program. - - Write minimal HTTP headers and dump all information provided to - the script in HTML form. - """ - ... -def print_environ(environ: _Environ = ...) -> None: - """Dump the shell environment as HTML.""" - ... -def print_form(form: dict[str, Any]) -> None: - """Dump the contents of a form as HTML.""" - ... -def print_directory() -> None: - """Dump the current directory as HTML.""" - ... -def print_environ_usage() -> None: - """Dump a list of environment variables used by CGI as HTML.""" - ... +def parse_header(line: str) -> tuple[str, dict[str, str]]: ... +def test(environ: _Environ = ...) -> None: ... +def print_environ(environ: _Environ = ...) -> None: ... +def print_form(form: dict[str, Any]) -> None: ... +def print_directory() -> None: ... +def print_environ_usage() -> None: ... 
class MiniFieldStorage: - """Like FieldStorage, for use when no file uploads are possible.""" # The first five "Any" attributes here are always None, but mypy doesn't support that filename: Any list: Any @@ -127,52 +55,9 @@ class MiniFieldStorage: headers: dict[Any, Any] name: Any value: Any - def __init__(self, name: Any, value: Any) -> None: - """Constructor from field name and value.""" - ... + def __init__(self, name: Any, value: Any) -> None: ... class FieldStorage: - """ - Store a sequence of fields, reading multipart/form-data. - - This class provides naming, typing, files stored on disk, and - more. At the top level, it is accessible like a dictionary, whose - keys are the field names. (Note: None can occur as a field name.) - The items are either a Python list (if there's multiple values) or - another FieldStorage or MiniFieldStorage object. If it's a single - object, it has the following attributes: - - name: the field name, if specified; otherwise None - - filename: the filename, if specified; otherwise None; this is the - client side filename, *not* the file name on which it is - stored (that's a temporary file you don't deal with) - - value: the value as a *string*; for file uploads, this - transparently reads the file every time you request the value - and returns *bytes* - - file: the file(-like) object from which you can read the data *as - bytes* ; None if the data is stored a simple string - - type: the content-type, or None if not specified - - type_options: dictionary of options specified on the content-type - line - - disposition: content-disposition, or None if not specified - - disposition_options: dictionary of corresponding options - - headers: a dictionary(-like) object (sometimes email.message.Message or a - subclass thereof) containing *all* headers - - The class is subclassable, mostly for the purpose of overriding - the make_file() method, which is called internally to come up with - a file open for reading and writing. 
This makes it possible to - override the default choice of storing all files in a temporary - directory and unlinking them as soon as they have been opened. - """ FieldStorageClass: _type | None keep_blank_values: int strict_parsing: int @@ -208,103 +93,21 @@ class FieldStorage: errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", - ) -> None: - """ - Constructor. Read multipart/* until last part. - - Arguments, all optional: - - fp : file pointer; default: sys.stdin.buffer - (not used when the request method is GET) - Can be : - 1. a TextIOWrapper object - 2. an object whose read() and readline() methods return bytes - - headers : header dictionary-like object; default: - taken from environ as per CGI spec - - outerboundary : terminating multipart boundary - (for internal use only) - - environ : environment dictionary; default: os.environ - - keep_blank_values: flag indicating whether blank values in - percent-encoded forms should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - limit : used internally to read parts of multipart/form-data forms, - to exit from the reading loop when reached. It is the difference - between the form content-length and the number of bytes already - read - - encoding, errors : the encoding and error handler used to decode the - binary stream to strings. Must be the same as the charset defined - for the page sending the form (content-type : meta http-equiv or - header) - - max_num_fields: int. If set, then __init__ throws a ValueError - if there are more than n fields read by parse_qsl(). - """ - ... + ) -> None: ... def __enter__(self) -> Self: ... 
def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... - def __getitem__(self, key: str) -> Any: - """Dictionary style indexing.""" - ... - def getvalue(self, key: str, default: Any = None) -> Any: - """Dictionary style get() method, including 'value' lookup.""" - ... - def getfirst(self, key: str, default: Any = None) -> Any: - """Return the first value received.""" - ... - def getlist(self, key: str) -> _list[Any]: - """Return list of received values.""" - ... - def keys(self) -> _list[str]: - """Dictionary style keys() method.""" - ... - def __contains__(self, key: str) -> bool: - """Dictionary style __contains__ method.""" - ... - def __len__(self) -> int: - """Dictionary style len(x) support.""" - ... + def __getitem__(self, key: str) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... + def getlist(self, key: str) -> _list[Any]: ... + def keys(self) -> _list[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... def __bool__(self) -> bool: ... def __del__(self) -> None: ... # Returns bytes or str IO depending on an internal flag - def make_file(self) -> IO[Any]: - """ - Overridable: return a readable & writable file. - - The file will be used as follows: - - data is written to it - - seek(0) - - data is read from it - - The file is opened in binary mode for files, in text mode - for other fields - - This version opens a temporary file for reading and writing, - and immediately deletes (unlinks) it. The trick (on Unix!) is - that the file can still be used, but it can't be opened by - another process, and it will automatically be deleted when it - is closed or when the current process terminates. - - If you want a more permanent file, you derive a class which - overrides this method. 
If you want a visible temporary file - that is nevertheless automatically deleted when the script - terminates, try defining a __del__ method in a derived class - which unlinks the temporary files you have created. - """ - ... + def make_file(self) -> IO[Any]: ... def print_exception( type: type[BaseException] | None = None, diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgitb.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgitb.pyi index e728e0d..5657258 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgitb.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cgitb.pyi @@ -1,27 +1,3 @@ -""" -More comprehensive traceback formatting for Python scripts. - -To enable this module, do: - - import cgitb; cgitb.enable() - -at the top of your script. The optional arguments to enable() are: - - display - if true, tracebacks are displayed in the web browser - logdir - if set, tracebacks are written to files in this directory - context - number of lines of source code to show for each stack frame - format - 'text' or 'html' controls the output format - -By default, tracebacks are displayed but not saved, the context is 5 lines -and the output format is 'html' (for backwards compatibility with the -original use of this module) - -Alternatively, if you have caught an exception and want cgitb to display it -for you, call cgitb.handler(). The optional argument to handler() is a -3-item tuple (etype, evalue, etb) just like the value of sys.exc_info(). -The default handler displays output as HTML. -""" - from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType @@ -29,29 +5,18 @@ from typing import IO, Any, Final __UNDEF__: Final[object] # undocumented sentinel -def reset() -> str: - """Return a string that resets the CGI and browser to a known state.""" - ... +def reset() -> str: ... 
# undocumented def small(text: str) -> str: ... # undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... # undocumented -def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: - """Find the value for a given name in the given environment.""" - ... +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] -) -> list[tuple[str, str | None, Any]]: - """Scan one logical line of Python and look up values of variables used.""" - ... -def html(einfo: OptExcInfo, context: int = 5) -> str: - """Return a nice HTML document describing a given traceback.""" - ... -def text(einfo: OptExcInfo, context: int = 5) -> str: - """Return a plain text document describing a given traceback.""" - ... +) -> list[tuple[str, str | None, Any]]: ... # undocumented +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... class Hook: # undocumented - """A hook to replace sys.excepthook that shows tracebacks in HTML.""" def __init__( self, display: int = 1, @@ -64,12 +29,4 @@ class Hook: # undocumented def handle(self, info: OptExcInfo | None = None) -> None: ... def handler(info: OptExcInfo | None = None) -> None: ... -def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: - """ - Install an exception handler that formats tracebacks as HTML. - - The optional argument 'display' can be set to 0 to suppress sending the - traceback to the browser, and 'logdir' can be set to a directory to cause - tracebacks to be written to files there. - """ - ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/chunk.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/chunk.pyi index fd0e706..9788d35 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/chunk.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/chunk.pyi @@ -1,54 +1,3 @@ -""" -Simple class to read IFF chunks. - -An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File -Format)) has the following structure: - -+----------------+ -| ID (4 bytes) | -+----------------+ -| size (4 bytes) | -+----------------+ -| data | -| ... | -+----------------+ - -The ID is a 4-byte string which identifies the type of chunk. - -The size field (a 32-bit value, encoded using big-endian byte order) -gives the size of the whole chunk, including the 8-byte header. - -Usually an IFF-type file consists of one or more chunks. The proposed -usage of the Chunk class defined here is to instantiate an instance at -the start of each chunk and read from the instance until it reaches -the end, after which a new instance can be instantiated. At the end -of the file, creating a new instance will fail with an EOFError -exception. - -Usage: -while True: - try: - chunk = Chunk(file) - except EOFError: - break - chunktype = chunk.getname() - while True: - data = chunk.read(nbytes) - if not data: - pass - # do something with data - -The interface is file-like. The implemented methods are: -read, close, seek, tell, isatty. -Extra methods are: skip() (called by close, skips to the end of the chunk), -getname() (returns the name (ID) of the chunk) - -The __init__ method has one required argument, a file-like object -(including a chunk instance), and one optional argument, a flag which -specifies whether or not chunks are aligned on 2-byte boundaries. The -default is 1, i.e. aligned. 
-""" - from typing import IO class Chunk: @@ -61,34 +10,11 @@ class Chunk: offset: int seekable: bool def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... - def getname(self) -> bytes: - """Return the name (ID) of the current chunk.""" - ... - def getsize(self) -> int: - """Return the size of the current chunk.""" - ... + def getname(self) -> bytes: ... + def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... - def seek(self, pos: int, whence: int = 0) -> None: - """ - Seek to specified position into the chunk. - Default position is 0 (start of chunk). - If the file is not seekable, this will result in an error. - """ - ... + def seek(self, pos: int, whence: int = 0) -> None: ... def tell(self) -> int: ... - def read(self, size: int = -1) -> bytes: - """ - Read at most size bytes from the chunk. - If size is omitted or negative, read until the end - of the chunk. - """ - ... - def skip(self) -> None: - """ - Skip the rest of the chunk. - If you are not interested in the contents of the chunk, - this method should be called so that the file points to - the start of the next chunk. - """ - ... + def read(self, size: int = -1) -> bytes: ... + def skip(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cmd.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cmd.pyi index 71243a5..e9cb642 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cmd.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/cmd.pyi @@ -44,12 +44,13 @@ functions respectively. 
""" from collections.abc import Callable -from typing import IO, Any, Literal +from typing import IO, Any, Final +from typing_extensions import LiteralString __all__ = ["Cmd"] -PROMPT: Literal["(Cmd) "] -IDENTCHARS: str # Too big to be `Literal` +PROMPT: Final = "(Cmd) " +IDENTCHARS: Final[LiteralString] # Too big to be `Literal` class Cmd: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/code.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/code.pyi index 12e5ab1..b21fe30 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/code.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/code.pyi @@ -22,10 +22,10 @@ class InteractiveInterpreter: """ Constructor. - The optional 'locals' argument specifies the dictionary in - which code will be executed; it defaults to a newly created - dictionary with key "__name__" set to "__console__" and key - "__doc__" set to None. + The optional 'locals' argument specifies a mapping to use as the + namespace in which code will be executed; it defaults to a newly + created dictionary with key "__name__" set to "__console__" and + key "__doc__" set to None. """ ... def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: @@ -109,10 +109,7 @@ class InteractiveConsole(InteractiveInterpreter): if sys.version_info >= (3, 13): def __init__( self, locals: Mapping[str, Any] | None = None, filename: str = "", *, local_exit: bool = False - ) -> None: ... - def push(self, line: str, filename: str | None = None) -> bool: ... - else: - def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "") -> None: + ) -> None: """ Constructor. @@ -123,7 +120,7 @@ class InteractiveConsole(InteractiveInterpreter): of the input stream; it will show up in tracebacks. """ ... - def push(self, line: str) -> bool: + def push(self, line: str, filename: str | None = None) -> bool: """ Push a line to the interpreter. 
@@ -138,6 +135,9 @@ class InteractiveConsole(InteractiveInterpreter): with in some way (this is the same as runsource()). """ ... + else: + def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "") -> None: ... + def push(self, line: str) -> bool: ... def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: """ @@ -179,14 +179,6 @@ if sys.version_info >= (3, 13): local: Mapping[str, Any] | None = None, exitmsg: str | None = None, local_exit: bool = False, - ) -> None: ... - -else: - def interact( - banner: str | None = None, - readfunc: Callable[[str], str] | None = None, - local: Mapping[str, Any] | None = None, - exitmsg: str | None = None, ) -> None: """ Closely emulate the interactive Python interpreter. @@ -201,9 +193,18 @@ else: readfunc -- if not None, replaces InteractiveConsole.raw_input() local -- passed to InteractiveInterpreter.__init__() exitmsg -- passed to InteractiveConsole.interact() + local_exit -- passed to InteractiveConsole.__init__() """ ... +else: + def interact( + banner: str | None = None, + readfunc: Callable[[str], str] | None = None, + local: Mapping[str, Any] | None = None, + exitmsg: str | None = None, + ) -> None: ... + def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: r""" Compile a command and determine whether it is incomplete. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/codecs.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/codecs.pyi index a3387b5..d8913f0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/codecs.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/codecs.pyi @@ -12,7 +12,7 @@ from _codecs import * from _typeshed import ReadableBuffer from abc import abstractmethod from collections.abc import Callable, Generator, Iterable -from typing import Any, BinaryIO, Literal, Protocol, TextIO +from typing import Any, BinaryIO, Final, Literal, Protocol, TextIO from typing_extensions import Self __all__ = [ @@ -62,10 +62,10 @@ __all__ = [ "lookup_error", ] -BOM32_BE: Literal[b"\xfe\xff"] -BOM32_LE: Literal[b"\xff\xfe"] -BOM64_BE: Literal[b"\x00\x00\xfe\xff"] -BOM64_LE: Literal[b"\xff\xfe\x00\x00"] +BOM32_BE: Final = b"\xfe\xff" +BOM32_LE: Final = b"\xff\xfe" +BOM64_BE: Final = b"\x00\x00\xfe\xff" +BOM64_LE: Final = b"\xff\xfe\x00\x00" class _WritableStream(Protocol): def write(self, data: bytes, /) -> object: ... @@ -89,7 +89,7 @@ class _Encoder(Protocol): def __call__(self, input: str, errors: str = ..., /) -> tuple[bytes, int]: ... # signature of Codec().encode class _Decoder(Protocol): - def __call__(self, input: bytes, errors: str = ..., /) -> tuple[str, int]: ... # signature of Codec().decode + def __call__(self, input: ReadableBuffer, errors: str = ..., /) -> tuple[str, int]: ... # signature of Codec().decode class _StreamReader(Protocol): def __call__(self, stream: _ReadableStream, errors: str = ..., /) -> StreamReader: ... @@ -261,33 +261,33 @@ def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") """ ... 
-BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` -BOM_BE: Literal[b"\xfe\xff"] -BOM_LE: Literal[b"\xff\xfe"] -BOM_UTF8: Literal[b"\xef\xbb\xbf"] -BOM_UTF16: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` -BOM_UTF16_BE: Literal[b"\xfe\xff"] -BOM_UTF16_LE: Literal[b"\xff\xfe"] -BOM_UTF32: Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"] # depends on `sys.byteorder` -BOM_UTF32_BE: Literal[b"\x00\x00\xfe\xff"] -BOM_UTF32_LE: Literal[b"\xff\xfe\x00\x00"] - -def strict_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +BOM: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` +BOM_BE: Final = b"\xfe\xff" +BOM_LE: Final = b"\xff\xfe" +BOM_UTF8: Final = b"\xef\xbb\xbf" +BOM_UTF16: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` +BOM_UTF16_BE: Final = b"\xfe\xff" +BOM_UTF16_LE: Final = b"\xff\xfe" +BOM_UTF32: Final[Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"]] # depends on `sys.byteorder` +BOM_UTF32_BE: Final = b"\x00\x00\xfe\xff" +BOM_UTF32_LE: Final = b"\xff\xfe\x00\x00" + +def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: """Implements the 'strict' error handling, which raises a UnicodeError on coding errors.""" ... -def replace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: """Implements the 'replace' error handling, which replaces malformed data with a replacement marker.""" ... -def ignore_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: """Implements the 'ignore' error handling, which ignores malformed data and continues.""" ... 
-def xmlcharrefreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: """Implements the 'xmlcharrefreplace' error handling, which replaces an unencodable character with the appropriate XML character reference.""" ... -def backslashreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: """Implements the 'backslashreplace' error handling, which replaces malformed data with a backslashed escape sequence.""" ... -def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: +def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: r"""Implements the 'namereplace' error handling, which replaces an unencodable character with a \N{...} escape sequence.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/__init__.pyi index f722ba7..04e23a8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/__init__.pyi @@ -275,11 +275,7 @@ class UserString(Sequence[UserString]): def zfill(self, width: int) -> Self: ... class deque(MutableSequence[_T]): - """ - deque([iterable[, maxlen]]) --> deque object - - A list-like sequence optimized for data accesses near its endpoints. - """ + """A list-like sequence optimized for data accesses near its endpoints.""" @property def maxlen(self) -> int | None: """maximum size of a deque or None if unbounded""" @@ -298,20 +294,21 @@ class deque(MutableSequence[_T]): """Return a shallow copy of a deque.""" ... def count(self, x: _T, /) -> int: - """D.count(value) -- return number of occurrences of value""" + """Return number of occurrences of value.""" ... 
def extend(self, iterable: Iterable[_T], /) -> None: - """Extend the right side of the deque with elements from the iterable""" + """Extend the right side of the deque with elements from the iterable.""" ... def extendleft(self, iterable: Iterable[_T], /) -> None: - """Extend the left side of the deque with elements from the iterable""" + """Extend the left side of the deque with elements from the iterable.""" ... def insert(self, i: int, x: _T, /) -> None: - """D.insert(index, object) -- insert object before index""" + """Insert value before index.""" ... def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: """ - D.index(value, [start, [stop]]) -- return first index of value. + Return first index of value. + Raises ValueError if the value is not present. """ ... @@ -322,10 +319,10 @@ class deque(MutableSequence[_T]): """Remove and return the leftmost element.""" ... def remove(self, value: _T, /) -> None: - """D.remove(value) -- remove first occurrence of value.""" + """Remove first occurrence of value.""" ... def rotate(self, n: int = 1, /) -> None: - """Rotate the deque n steps to the right (default n=1). If n is negative, rotates left.""" + """Rotate the deque n steps to the right. If n is negative, rotates left.""" ... def __copy__(self) -> Self: """Return a shallow copy of a deque.""" @@ -744,15 +741,15 @@ class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): # but they are not exposed anywhere) # pyright doesn't have a specific error code for subclassing error! @final -class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] def __reversed__(self) -> Iterator[_KT_co]: ... 
@final -class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @final -class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] def __reversed__(self) -> Iterator[_VT_co]: ... class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): @@ -1003,22 +1000,23 @@ class ChainMap(MutableMapping[_KT, _VT]): """New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]""" ... __copy__ = copy - # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. + # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, + # so the signature should be kept in line with `dict.fromkeys`. @classmethod @overload def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: - """Create a ChainMap with a single dict created from the iterable.""" + """Create a new ChainMap with keys from iterable and values set to value.""" ... @classmethod @overload # Special-case None: the user probably wants to add non-None values later. def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: - """Create a ChainMap with a single dict created from the iterable.""" + """Create a new ChainMap with keys from iterable and values set to value.""" ... 
@classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: - """Create a ChainMap with a single dict created from the iterable.""" + """Create a new ChainMap with keys from iterable and values set to value.""" ... if sys.version_info >= (3, 9): @overload diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/abc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/abc.pyi index 3df2a1d..85d3648 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/abc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/collections/abc.pyi @@ -1,2 +1,8 @@ +""" +Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" + from _collections_abc import * from _collections_abc import __all__ as __all__ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi index 41eb828..8a9cdb3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi @@ -1,5 +1,7 @@ """Execute computations asynchronously using threads or processes.""" +import sys + from ._base import ( ALL_COMPLETED as ALL_COMPLETED, FIRST_COMPLETED as FIRST_COMPLETED, @@ -16,19 +18,36 @@ from ._base import ( from .process import ProcessPoolExecutor as ProcessPoolExecutor from .thread import ThreadPoolExecutor as ThreadPoolExecutor -__all__ = ( - "FIRST_COMPLETED", - "FIRST_EXCEPTION", - "ALL_COMPLETED", - "CancelledError", - "TimeoutError", - "BrokenExecutor", - "Future", - "Executor", - "wait", - "as_completed", - "ProcessPoolExecutor", - "ThreadPoolExecutor", -) +if sys.version_info >= (3, 13): + __all__ = ( + "FIRST_COMPLETED", + 
"FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "InvalidStateError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + ) +else: + __all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + ) def __dir__() -> tuple[str, ...]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/_base.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/_base.pyi index 5be7b68..b480745 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/_base.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/_base.pyi @@ -4,20 +4,20 @@ from _typeshed import Unused from collections.abc import Callable, Collection, Iterable, Iterator from logging import Logger from types import TracebackType -from typing import Any, Generic, Literal, NamedTuple, Protocol, TypeVar +from typing import Any, Final, Generic, NamedTuple, Protocol, TypeVar from typing_extensions import ParamSpec, Self if sys.version_info >= (3, 9): from types import GenericAlias -FIRST_COMPLETED: Literal["FIRST_COMPLETED"] -FIRST_EXCEPTION: Literal["FIRST_EXCEPTION"] -ALL_COMPLETED: Literal["ALL_COMPLETED"] -PENDING: Literal["PENDING"] -RUNNING: Literal["RUNNING"] -CANCELLED: Literal["CANCELLED"] -CANCELLED_AND_NOTIFIED: Literal["CANCELLED_AND_NOTIFIED"] -FINISHED: Literal["FINISHED"] +FIRST_COMPLETED: Final = "FIRST_COMPLETED" +FIRST_EXCEPTION: Final = "FIRST_EXCEPTION" +ALL_COMPLETED: Final = "ALL_COMPLETED" +PENDING: Final = "PENDING" +RUNNING: Final = "RUNNING" +CANCELLED: Final = "CANCELLED" +CANCELLED_AND_NOTIFIED: Final = "CANCELLED_AND_NOTIFIED" +FINISHED: Final = "FINISHED" _FUTURE_STATES: 
list[str] _STATE_TO_DESCRIPTION_MAP: dict[str, str] LOGGER: Logger @@ -32,9 +32,7 @@ class CancelledError(Error): if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Error): - """The operation exceeded the given deadline.""" - ... + class TimeoutError(Error): ... class InvalidStateError(Error): """The operation is not allowed in this state.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/process.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/process.pyi index 8ad2d88..64e23dc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/process.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/concurrent/futures/process.pyi @@ -62,8 +62,9 @@ _global_shutdown: bool class _ThreadWakeup: _closed: bool - _reader: Connection - _writer: Connection + # Any: Unused send and recv methods + _reader: Connection[Any, Any] + _writer: Connection[Any, Any] def close(self) -> None: ... def wakeup(self) -> None: ... def clear(self) -> None: ... @@ -134,9 +135,7 @@ class _SafeQueue(Queue[Future[Any]]): def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... -def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: - """Iterates over zip()ed iterables in chunks. """ - ... +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: """ Processes a chunk of an iterable passed to map. @@ -162,9 +161,7 @@ if sys.version_info >= (3, 11): else: def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None - ) -> None: - """Safely send back the given result or exception""" - ... + ) -> None: ... 
if sys.version_info >= (3, 11): def _process_worker( @@ -195,21 +192,7 @@ else: result_queue: SimpleQueue[_ResultItem], initializer: Callable[[Unpack[_Ts]], object] | None, initargs: tuple[Unpack[_Ts]], - ) -> None: - """ - Evaluates calls from call_queue and places the results in result_queue. - - This worker is run in a separate process. - - Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. - initializer: A callable initializer, or None - initargs: A tuple of args for the initializer - """ - ... + ) -> None: ... if sys.version_info >= (3, 9): class _ExecutorManagerThread(Thread): @@ -378,20 +361,7 @@ class ProcessPoolExecutor(Executor): mp_context: BaseContext | None = None, initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: - """ - Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - """ - ... + ) -> None: ... @overload def __init__( self, @@ -400,20 +370,7 @@ class ProcessPoolExecutor(Executor): *, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]], - ) -> None: - """ - Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. 
This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - """ - ... + ) -> None: ... @overload def __init__( self, @@ -421,20 +378,7 @@ class ProcessPoolExecutor(Executor): mp_context: BaseContext | None, initializer: Callable[[Unpack[_Ts]], object], initargs: tuple[Unpack[_Ts]], - ) -> None: - """ - Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - """ - ... + ) -> None: ... if sys.version_info >= (3, 9): def _start_executor_manager_thread(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/configparser.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/configparser.pyi index 747ee36..41766f6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/configparser.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/configparser.pyi @@ -19,8 +19,8 @@ ConfigParser -- responsible for parsing a list of delimiters=('=', ':'), comment_prefixes=('#', ';'), inline_comment_prefixes=None, strict=True, empty_lines_in_values=True, default_section='DEFAULT', - interpolation=, converters=): - + interpolation=, converters=, + allow_unnamed_section=False): Create the parser. When `defaults` is given, it is initialized into the dictionary or intrinsic defaults. The keys must be strings, the values must be appropriate for %()s string interpolation. 
@@ -69,6 +69,10 @@ ConfigParser -- responsible for parsing a list of converter gets its corresponding get*() method on the parser object and section proxies. + When `allow_unnamed_section` is True (default: False), options + without section are accepted: the section for these is + ``configparser.UNNAMED_SECTION``. + sections() Return all the configuration section names, sans DEFAULT. @@ -144,7 +148,7 @@ import sys from _typeshed import StrOrBytesPath, SupportsWrite from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence from re import Pattern -from typing import Any, ClassVar, Literal, TypeVar, overload +from typing import Any, ClassVar, Final, Literal, TypeVar, overload from typing_extensions import TypeAlias if sys.version_info >= (3, 13): @@ -225,8 +229,8 @@ _ConverterCallback: TypeAlias = Callable[[str], Any] _ConvertersMap: TypeAlias = dict[str, _ConverterCallback] _T = TypeVar("_T") -DEFAULTSECT: Literal["DEFAULT"] -MAX_INTERPOLATION_DEPTH: Literal[10] +DEFAULTSECT: Final = "DEFAULT" +MAX_INTERPOLATION_DEPTH: Final = 10 class Interpolation: """Dummy interpolation that passes the value through with no changes.""" @@ -261,10 +265,6 @@ class ExtendedInterpolation(Interpolation): if sys.version_info < (3, 13): class LegacyInterpolation(Interpolation): - """ - Deprecated interpolation used in old versions of ConfigParser. - Use BasicInterpolation or ExtendedInterpolation instead. - """ def before_get(self, parser: _Parser, section: str, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): @@ -460,9 +460,7 @@ class RawConfigParser(_Parser): """ ... if sys.version_info < (3, 12): - def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: - """Deprecated, use read_file instead.""" - ... + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... 
# These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload @@ -623,9 +621,7 @@ class ConfigParser(RawConfigParser): ... if sys.version_info < (3, 12): - class SafeConfigParser(ConfigParser): - """ConfigParser alias for backwards compatibility purposes.""" - ... + class SafeConfigParser(ConfigParser): ... # deprecated alias class SectionProxy(MutableMapping[str, str]): """A proxy for a single section from a parser.""" @@ -783,6 +779,7 @@ if sys.version_info >= (3, 13): UNNAMED_SECTION: _UNNAMED_SECTION class MultilineContinuationError(ParsingError): + """Raised when a key without value is followed by continuation line""" lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/contextlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/contextlib.pyi index 5c1a6ba..3ef398b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/contextlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/contextlib.pyi @@ -68,6 +68,18 @@ class AbstractAsyncContextManager(Protocol[_T_co, _ExitT_co]): class ContextDecorator: """A base class or mixin that enables context managers to work as decorators.""" + def _recreate_cm(self) -> Self: + """ + Return a recreated instance of self. + + Allows an otherwise one-shot context manager like + _GeneratorContextManager to support use as + a decorator via implicit recreation. + + This is a private interface just for _GeneratorContextManager. + See issue #11647 for details. + """ + ... def __call__(self, func: _F) -> _F: ... 
class _GeneratorContextManager(AbstractContextManager[_T_co, bool | None], ContextDecorator): @@ -123,6 +135,12 @@ if sys.version_info >= (3, 10): class AsyncContextDecorator: """A base class or mixin that enables async context managers to work as decorators.""" + def _recreate_cm(self) -> Self: + """ + Return a recreated instance of self. + + """ + ... def __call__(self, func: _AF) -> _AF: ... class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator): @@ -271,19 +289,8 @@ class redirect_stderr(_RedirectStream[_T_io]): """Context manager for temporarily redirecting stderr to another file.""" ... -# In reality this is a subclass of `AbstractContextManager`; -# see #7961 for why we don't do that in the stub -class ExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): - """ - Context manager for dynamic management of a stack of exit callbacks. - - For example: - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception. - """ +class _BaseExitStack(Generic[_ExitT_co]): + """A base class for ExitStack and AsyncExitStack.""" def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: """ Enters the supplied context manager. @@ -311,6 +318,20 @@ class ExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): def pop_all(self) -> Self: """Preserve the context stack by transferring it to a new instance.""" ... + +# In reality this is a subclass of `AbstractContextManager`; +# see #7961 for why we don't do that in the stub +class ExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): + """ + Context manager for dynamic management of a stack of exit callbacks. 
+ + For example: + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception. + """ def close(self) -> None: """Immediately unwind the context stack.""" ... @@ -326,7 +347,7 @@ _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any, Any] | _Exit # In reality this is a subclass of `AbstractAsyncContextManager`; # see #7961 for why we don't do that in the stub -class AsyncExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): +class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): """ Async context manager for dynamic management of a stack of exit callbacks. @@ -339,14 +360,6 @@ class AsyncExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): # end of the async with statement, even if attempts to open a # connection later in the list raise an exception. """ - def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: - """ - Enters the supplied context manager. - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - ... async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: """ Enters the supplied async context manager. @@ -355,15 +368,6 @@ class AsyncExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): returns the result of the __aenter__ method. """ ... - def push(self, exit: _CM_EF) -> _CM_EF: - """ - Registers a callback with the standard __exit__ method signature. - - Can suppress exceptions the same way __exit__ method can. - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself). - """ - ... 
def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: """ Registers a coroutine function with the standard __aexit__ method @@ -374,13 +378,6 @@ class AsyncExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): to the method instead of the object itself). """ ... - def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: - """ - Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. - """ - ... def push_async_callback( self, callback: Callable[_P, Awaitable[_T]], /, *args: _P.args, **kwds: _P.kwargs ) -> Callable[_P, Awaitable[_T]]: @@ -390,9 +387,6 @@ class AsyncExitStack(Generic[_ExitT_co], metaclass=abc.ABCMeta): Cannot suppress exceptions. """ ... - def pop_all(self) -> Self: - """Preserve the context stack by transferring it to a new instance.""" - ... async def aclose(self) -> None: """Immediately unwind the context stack.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/copy.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/copy.pyi index e69ea94..c943017 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/copy.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/copy.pyi @@ -5,8 +5,9 @@ Interface summary: import copy - x = copy.copy(y) # make a shallow copy of y - x = copy.deepcopy(y) # make a deep copy of y + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` For module specific errors, copy.Error is raised. @@ -49,11 +50,18 @@ __getstate__() and __setstate__(). See the documentation for module "pickle" for information on these methods. 
""" -from typing import Any, TypeVar +import sys +from typing import Any, Protocol, TypeVar +from typing_extensions import Self __all__ = ["Error", "copy", "deepcopy"] _T = TypeVar("_T") +_SR = TypeVar("_SR", bound=_SupportsReplace) + +class _SupportsReplace(Protocol): + # In reality doesn't support args, but there's no other great way to express this. + def __replace__(self, *args: Any, **kwargs: Any) -> Self: ... # None in CPython but non-None in Jython PyStringMap: Any @@ -74,6 +82,17 @@ def copy(x: _T) -> _T: """ ... +if sys.version_info >= (3, 13): + __all__ += ["replace"] + def replace(obj: _SR, /, **changes: Any) -> _SR: + """ + Return a new object replacing specified fields with new values. + + This is especially useful for immutable objects, like named tuples or + frozen dataclasses. + """ + ... + class Error(Exception): ... error = Error diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/crypt.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/crypt.pyi index 6ca1cb0..2940038 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/crypt.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/crypt.pyi @@ -1,35 +1,13 @@ -"""Wrapper to the POSIX crypt library call and associated functionality.""" - import sys +from typing import Final if sys.platform != "win32": - class _Method: - """ - Class representing a salt method per the Modular Crypt Format or the - legacy 2-character crypt method. - """ - ... - METHOD_CRYPT: _Method - METHOD_MD5: _Method - METHOD_SHA256: _Method - METHOD_SHA512: _Method - METHOD_BLOWFISH: _Method + class _Method: ... + METHOD_CRYPT: Final[_Method] + METHOD_MD5: Final[_Method] + METHOD_SHA256: Final[_Method] + METHOD_SHA512: Final[_Method] + METHOD_BLOWFISH: Final[_Method] methods: list[_Method] - def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: - """ - Generate a salt for the specified method. 
- - If not specified, the strongest available method will be used. - """ - ... - def crypt(word: str, salt: str | _Method | None = None) -> str: - """ - Return a string representing the one-way hash of a password, with a salt - prepended. - - If ``salt`` is not specified or is ``None``, the strongest - available method will be selected and a salt generated. Otherwise, - ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as - returned by ``crypt.mksalt()``. - """ - ... + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/csv.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/csv.pyi index 7a70349..554f822 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/csv.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/csv.pyi @@ -37,7 +37,7 @@ SETTINGS: immediately follow a delimiter. It defaults to False, which means that spaces immediately following a delimiter is part of the following field. - * lineterminator - specifies the character sequence which should + * lineterminator - specifies the character sequence which should terminate rows. * quoting - controls when quotes should be generated by the writer. It can take on any of the following module constants: @@ -46,7 +46,7 @@ SETTINGS: field contains either the quotechar or the delimiter csv.QUOTE_ALL means that quotes are always placed around fields. csv.QUOTE_NONNUMERIC means that quotes are always placed around - fields which do not parse as integers or floating point + fields which do not parse as integers or floating-point numbers. csv.QUOTE_STRINGS means that quotes are always placed around fields which are strings. 
Note that the Python value None @@ -63,14 +63,12 @@ SETTINGS: """ import sys - -# actually csv.Dialect is a different class to _csv.Dialect at runtime, but for typing purposes, they're identical from _csv import ( QUOTE_ALL as QUOTE_ALL, QUOTE_MINIMAL as QUOTE_MINIMAL, QUOTE_NONE as QUOTE_NONE, QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, - Dialect as Dialect, + Dialect as _Dialect, Error as Error, __version__ as __version__, _DialectLike, @@ -125,6 +123,16 @@ if sys.version_info < (3, 13): _T = TypeVar("_T") +class Dialect(_Dialect): + """ + Describe a CSV dialect. + + This must be subclassed (see csv.excel). Valid attributes are: + delimiter, quotechar, escapechar, doublequote, skipinitialspace, + lineterminator, quoting. + """ + def __init__(self) -> None: ... + class excel(Dialect): """Describe the usual properties of Excel-generated CSV files.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/__init__.pyi index 945bfde..4b3e366 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/__init__.pyi @@ -254,3 +254,8 @@ if sys.version_info >= (3, 12): c_time_t: type[c_int32 | c_int64] # alias for one or the other at runtime class py_object(_CanCastTo, _SimpleCData[_T]): ... + +if sys.version_info >= (3, 14): + class c_float_complex(_SimpleCData[complex]): ... + class c_double_complex(_SimpleCData[complex]): ... + class c_longdouble_complex(_SimpleCData[complex]): ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/_endian.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/_endian.pyi index b9ec6de..5e55f48 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/_endian.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ctypes/_endian.pyi @@ -1,12 +1,5 @@ import sys -from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL, Structure, Union -from ctypes import DEFAULT_MODE as DEFAULT_MODE, cdll as cdll, pydll as pydll, pythonapi as pythonapi - -if sys.version_info >= (3, 12): - from _ctypes import SIZEOF_TIME_T as SIZEOF_TIME_T - -if sys.platform == "win32": - from ctypes import oledll as oledll, windll as windll +from ctypes import Structure, Union # At runtime, the native endianness is an alias for Structure, # while the other is a subclass with a metaclass added in. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/__init__.pyi index f671943..e89a575 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/__init__.pyi @@ -11,7 +11,7 @@ the package, and perhaps a particular module inside it. 
""" from _curses import * -from _curses import _CursesWindow as _CursesWindow +from _curses import window as window from collections.abc import Callable from typing import TypeVar from typing_extensions import Concatenate, ParamSpec @@ -31,7 +31,7 @@ COLS: int COLORS: int COLOR_PAIRS: int -def wrapper(func: Callable[Concatenate[_CursesWindow, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: +def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: """ Wrapper function that initializes curses and calls another function, restoring normal keyboard/screen behavior on error. @@ -40,3 +40,8 @@ def wrapper(func: Callable[Concatenate[_CursesWindow, _P], _T], /, *arg: _P.args wrapper(). """ ... + +# typeshed used the name _CursesWindow for the underlying C class before +# it was mapped to the name 'window' in 3.8. +# Kept here as a legacy alias in case any third-party code is relying on it. +_CursesWindow = window diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/panel.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/panel.pyi index 13b9715..c6d5345 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/panel.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/panel.pyi @@ -4,7 +4,7 @@ curses.panel Module for using panels with curses. """ -from _curses import _CursesWindow +from _curses import window version: str @@ -15,17 +15,17 @@ class _Curses_Panel: # type is (note the s def hidden(self) -> bool: ... def hide(self) -> None: ... def move(self, y: int, x: int) -> None: ... - def replace(self, win: _CursesWindow) -> None: ... + def replace(self, win: window) -> None: ... def set_userptr(self, obj: object) -> None: ... def show(self) -> None: ... def top(self) -> None: ... def userptr(self) -> object: ... - def window(self) -> _CursesWindow: ... + def window(self) -> window: ... 
def bottom_panel() -> _Curses_Panel: """Return the bottom panel in the panel stack.""" ... -def new_panel(win: _CursesWindow, /) -> _Curses_Panel: +def new_panel(win: window, /) -> _Curses_Panel: """Return a panel object, associating it with the given window win.""" ... def top_panel() -> _Curses_Panel: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/textpad.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/textpad.pyi index 924e2a9..dff1e9d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/textpad.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/curses/textpad.pyi @@ -1,9 +1,9 @@ """Simple textbox editing widget with Emacs-like keybindings.""" -from _curses import _CursesWindow +from _curses import window from collections.abc import Callable -def rectangle(win: _CursesWindow, uly: int, ulx: int, lry: int, lrx: int) -> None: +def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: """ Draw a rectangle with corners at the provided upper-left and lower-right coordinates. @@ -36,7 +36,7 @@ class Textbox: KEY_BACKSPACE = Ctrl-h """ stripspaces: bool - def __init__(self, win: _CursesWindow, insert_mode: bool = False) -> None: ... + def __init__(self, win: window, insert_mode: bool = False) -> None: ... def edit(self, validate: Callable[[int], int] | None = None) -> str: """Edit in the widget window and collect the results.""" ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dataclasses.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dataclasses.pyi index ac3c9b9..17e0074 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dataclasses.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dataclasses.pyi @@ -5,7 +5,7 @@ from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping from typing import Any, Generic, Literal, Protocol, TypeVar, overload -from typing_extensions import TypeAlias, TypeIs +from typing_extensions import Never, TypeAlias, TypeIs if sys.version_info >= (3, 9): from types import GenericAlias @@ -211,23 +211,7 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> Callable[[type[_T]], type[_T]]: - """ - Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. If match_args is true, - the __match_args__ tuple is added. If kw_only is true, then by - default all fields are keyword-only. If slots is true, an - __slots__ attribute is added. - """ - ... + ) -> Callable[[type[_T]], type[_T]]: ... 
else: @overload @@ -247,7 +231,7 @@ class _DefaultFactory(Protocol[_T_co]): class Field(Generic[_T]): name: str - type: Type[_T] + type: Type[_T] | str | Any default: _T | Literal[_MISSING_TYPE.MISSING] default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] repr: bool @@ -416,6 +400,15 @@ def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tu type Field. """ ... + +# HACK: `obj: Never` typing matches if object argument is using `Any` type. +@overload +def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: + """ + Returns True if obj is a dataclass or an instance of a + dataclass. + """ + ... @overload def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: """ @@ -438,18 +431,17 @@ if sys.version_info >= (3, 9): else: class _InitVarMeta(type): # Not used, instead `InitVar.__class_getitem__` is called. - # pyright ignore is needed because pyright (not unreasonably) thinks this - # is an invalid use of InitVar. - def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore + # pyright (not unreasonably) thinks this is an invalid use of InitVar. + def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm] class InitVar(Generic[_T], metaclass=_InitVarMeta): type: Type[_T] def __init__(self, type: Type[_T]) -> None: ... if sys.version_info >= (3, 9): @overload - def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore + def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore[reportInvalidTypeForm] @overload - def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore + def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... 
# pyright: ignore[reportInvalidTypeForm] if sys.version_info >= (3, 12): def make_dataclass( @@ -533,31 +525,7 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> type: - """ - Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ - ... + ) -> type: ... else: def make_dataclass( diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/datetime.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/datetime.pyi index b5bc81c..c75b22f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/datetime.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/datetime.pyi @@ -3,7 +3,7 @@ import sys from abc import abstractmethod from time import struct_time -from typing import ClassVar, Literal, NamedTuple, NoReturn, SupportsIndex, final, overload +from typing import ClassVar, Final, NamedTuple, NoReturn, SupportsIndex, final, overload from typing_extensions import Self, TypeAlias, deprecated if sys.version_info >= (3, 11): @@ -11,8 +11,8 @@ if sys.version_info >= (3, 11): elif sys.version_info >= (3, 9): __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR") -MINYEAR: Literal[1] -MAXYEAR: Literal[9999] +MINYEAR: Final = 1 +MAXYEAR: Final = 9999 class tzinfo: """Abstract 
base class for time zone info objects.""" @@ -119,9 +119,7 @@ class date: """format -> strftime() style string.""" ... else: - def strftime(self, format: str, /) -> str: - """format -> strftime() style string.""" - ... + def strftime(self, format: str, /) -> str: ... def __format__(self, fmt: str, /) -> str: """Formats self with strftime.""" @@ -136,7 +134,9 @@ class date: """Return proleptic Gregorian ordinal. January 1 of year 1 is day 1.""" ... if sys.version_info >= (3, 13): - def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: + """The same as replace().""" + ... def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: """Return date with new specified fields.""" @@ -267,9 +267,7 @@ class time: """format -> strftime() style string.""" ... else: - def strftime(self, format: str, /) -> str: - """format -> strftime() style string.""" - ... + def strftime(self, format: str, /) -> str: ... def __format__(self, fmt: str, /) -> str: """Formats self with strftime.""" @@ -294,7 +292,9 @@ class time: microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: ... + ) -> Self: + """The same as replace().""" + ... def replace( self, @@ -464,12 +464,10 @@ class datetime(date): ... else: @classmethod - def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = ...) -> Self: - """timestamp[, tz] -> tz's local time from POSIX timestamp.""" - ... + def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = ...) -> Self: ... @classmethod - @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.UTC)") + @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. 
by calling .fromtimestamp(datetime.timezone.utc)") def utcfromtimestamp(cls, t: float, /) -> Self: """Construct a naive UTC datetime from a POSIX timestamp.""" ... @@ -485,7 +483,7 @@ class datetime(date): """ ... @classmethod - @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.UTC)") + @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)") def utcnow(cls) -> Self: """Return a new datetime representing UTC day and time.""" ... @@ -522,7 +520,9 @@ class datetime(date): microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: ... + ) -> Self: + """The same as replace().""" + ... def replace( self, diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/__init__.pyi index a8c0f26..ab07b0a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/__init__.pyi @@ -6,7 +6,7 @@ Use import dbm d = dbm.open(file, 'w', 0o666) -The returned object is a dbm.gnu, dbm.ndbm or dbm.dumb object, dependent on the +The returned object is a dbm.sqlite3, dbm.gnu, dbm.ndbm or dbm.dumb database object, dependent on the type of database being opened (determined by the whichdb function) in the case of an existing dbm. If the dbm does not exist and the create or new flag ('c' or 'n') was specified, the dbm type will be determined by the availability of @@ -153,30 +153,5 @@ if sys.version_info >= (3, 11): ... else: - def whichdb(filename: str) -> str | None: - """ - Guess which db package to use to open a db file. - - Return values: - - - None if the database file can't be read; - - empty string if the file can be read but can't be recognized - - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. 
- - Importing the given module may still fail, and opening the - database using that module may still fail. - """ - ... - def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: - """ - Open or create database at path given by *file*. - - Optional argument *flag* can be 'r' (default) for read-only access, 'w' - for read-write access of an existing database, 'c' for read-write access - to a new or existing database, and 'n' for read-write access to a new - database. - - Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it - only if it doesn't exist; and 'n' always creates a new database. - """ - ... + def whichdb(filename: str) -> str | None: ... + def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/dumb.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/dumb.pyi index 1cab0f5..0c2d83f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/dumb.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/dumb.pyi @@ -71,18 +71,4 @@ if sys.version_info >= (3, 11): ... else: - def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: - """ - Open the database file, filename, and return corresponding object. - - The flag argument, used to control how the database is opened in the - other DBM implementations, supports only the semantics of 'c' and 'n' - values. Other values will default to the semantics of 'c' value: - the database will always opened for update and will be created if it - does not exist. - - The optional mode argument is the UNIX mode of the file, used only when - the database has to be created. It defaults to octal code 0o666 (and - will be modified by the prevailing umask). - """ - ... + def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/gnu.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/gnu.pyi index 9f2b927..1d1d541 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/gnu.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/gnu.pyi @@ -1,5 +1,3 @@ -"""Provide the _gdbm module as a dbm submodule.""" - import sys from _typeshed import ReadOnlyBuffer, StrOrBytesPath from types import TracebackType @@ -21,6 +19,9 @@ if sys.platform != "win32": def reorganize(self) -> None: ... def sync(self) -> None: ... def close(self) -> None: ... + if sys.version_info >= (3, 13): + def clear(self) -> None: ... + def __getitem__(self, item: _KeyType) -> bytes: ... def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... def __delitem__(self, key: _KeyType) -> None: ... @@ -43,29 +44,4 @@ if sys.platform != "win32": if sys.version_info >= (3, 11): def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... else: - def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: - """ - Open a dbm database and return a dbm object. - - The filename argument is the name of the database file. - - The optional flags argument can be 'r' (to open an existing database - for reading only -- default), 'w' (to open an existing database for - reading and writing), 'c' (which creates the database if it doesn't - exist), or 'n' (which always creates a new empty database). - - Some versions of gdbm support additional flags which must be - appended to one of the flags described above. The module constant - 'open_flags' is a string of valid additional flags. The 'f' flag - opens the database in fast mode; altered data will not automatically - be written to the disk after every change. This results in faster - writes to the database, but may result in an inconsistent database - if the program crashes while the database is still open. 
Use the - sync() method to force any unwritten data to be written to the disk. - The 's' flag causes all database operations to be synchronized to - disk. The 'u' flag disables locking of the database file. - - The optional mode argument is the Unix mode of the file, used only - when the database has to be created. It defaults to octal 0o666. - """ - ... + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/ndbm.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/ndbm.pyi index ecb8184..5e9ff7c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/ndbm.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/ndbm.pyi @@ -17,6 +17,9 @@ if sys.platform != "win32": # Actual typename dbm, not exposed by the implementation class _dbm: def close(self) -> None: ... + if sys.version_info >= (3, 13): + def clear(self) -> None: ... + def __getitem__(self, item: _KeyType) -> bytes: ... def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... def __delitem__(self, key: _KeyType) -> None: ... @@ -37,9 +40,7 @@ if sys.platform != "win32": __init__: None # type: ignore[assignment] if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... - else: - def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: """ Return a database object. @@ -52,3 +53,5 @@ if sys.platform != "win32": (e.g. os.O_RDWR). """ ... + else: + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/sqlite3.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/sqlite3.pyi new file mode 100644 index 0000000..6f55f53 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dbm/sqlite3.pyi @@ -0,0 +1,44 @@ +from _typeshed import ReadableBuffer, StrOrBytesPath, Unused +from collections.abc import Generator, MutableMapping +from typing import Final, Literal +from typing_extensions import LiteralString, Self, TypeAlias + +BUILD_TABLE: Final[LiteralString] +GET_SIZE: Final[LiteralString] +LOOKUP_KEY: Final[LiteralString] +STORE_KV: Final[LiteralString] +DELETE_KEY: Final[LiteralString] +ITER_KEYS: Final[LiteralString] + +_SqliteData: TypeAlias = str | ReadableBuffer | int | float + +class error(OSError): ... + +class _Database(MutableMapping[bytes, bytes]): + def __init__(self, path: StrOrBytesPath, /, *, flag: Literal["r", "w", "c", "n"], mode: int) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _SqliteData) -> bytes: ... + def __setitem__(self, key: _SqliteData, value: _SqliteData) -> None: ... + def __delitem__(self, key: _SqliteData) -> None: ... + def __iter__(self) -> Generator[bytes]: ... + def close(self) -> None: ... + def keys(self) -> list[bytes]: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +def open(filename: StrOrBytesPath, /, flag: Literal["r", "w,", "c", "n"] = "r", mode: int = 0o666) -> _Database: + """ + Open a dbm.sqlite3 database and return the dbm object. + + The 'filename' parameter is the name of the database file. 
+ + The optional 'flag' parameter can be one of ...: + 'r' (default): open an existing database for read only access + 'w': open an existing database for read/write access + 'c': create a database if it does not exist; open for read/write access + 'n': always create a new, empty database; open for read/write access + + The optional 'mode' parameter is the Unix file access mode of the database; + only used when creating a new database. Default: 0o666. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/decimal.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/decimal.pyi index 7cc6805..747cd69 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/decimal.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/decimal.pyi @@ -1,7 +1,7 @@ """ -Decimal fixed point and floating point arithmetic. +Decimal fixed-point and floating-point arithmetic. -This is an implementation of decimal floating point arithmetic based on +This is an implementation of decimal floating-point arithmetic based on the General Decimal Arithmetic Specification: http://speleotrove.com/decimal/decarith.html @@ -99,5 +99,861 @@ NaN >>> """ -from _decimal import * -from _decimal import __libmpdec_version__ as __libmpdec_version__, __version__ as __version__ +import numbers +from _decimal import ( + HAVE_CONTEXTVAR as HAVE_CONTEXTVAR, + HAVE_THREADS as HAVE_THREADS, + MAX_EMAX as MAX_EMAX, + MAX_PREC as MAX_PREC, + MIN_EMIN as MIN_EMIN, + MIN_ETINY as MIN_ETINY, + ROUND_05UP as ROUND_05UP, + ROUND_CEILING as ROUND_CEILING, + ROUND_DOWN as ROUND_DOWN, + ROUND_FLOOR as ROUND_FLOOR, + ROUND_HALF_DOWN as ROUND_HALF_DOWN, + ROUND_HALF_EVEN as ROUND_HALF_EVEN, + ROUND_HALF_UP as ROUND_HALF_UP, + ROUND_UP as ROUND_UP, + BasicContext as BasicContext, + DefaultContext as DefaultContext, + ExtendedContext as ExtendedContext, + __libmpdec_version__ as __libmpdec_version__, + __version__ as __version__, + getcontext as 
getcontext, + localcontext as localcontext, + setcontext as setcontext, +) +from collections.abc import Container, Sequence +from typing import Any, ClassVar, Literal, NamedTuple, overload +from typing_extensions import Self, TypeAlias + +_Decimal: TypeAlias = Decimal | int +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] +_ComparableNum: TypeAlias = Decimal | float | numbers.Rational +_TrapType: TypeAlias = type[DecimalException] + +# At runtime, these classes are implemented in C as part of "_decimal". +# However, they consider themselves to live in "decimal", so we'll put them here. + +class DecimalTuple(NamedTuple): + """DecimalTuple(sign, digits, exponent)""" + sign: int + digits: tuple[int, ...] + exponent: int | Literal["n", "N", "F"] + +class DecimalException(ArithmeticError): ... +class Clamped(DecimalException): ... +class InvalidOperation(DecimalException): ... +class ConversionSyntax(InvalidOperation): ... +class DivisionByZero(DecimalException, ZeroDivisionError): ... +class DivisionImpossible(InvalidOperation): ... +class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... +class Inexact(DecimalException): ... +class InvalidContext(InvalidOperation): ... +class Rounded(DecimalException): ... +class Subnormal(DecimalException): ... +class Overflow(Inexact, Rounded): ... +class Underflow(Inexact, Rounded, Subnormal): ... +class FloatOperation(DecimalException, TypeError): ... + +class Decimal: + """ + Construct a new Decimal object. 'value' can be an integer, string, tuple, + or another Decimal object. If no value is given, return Decimal('0'). The + context does not affect the conversion and is only passed to determine if + the InvalidOperation trap is active. + """ + def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... + @classmethod + def from_float(cls, f: float, /) -> Self: + """ + Class method that converts a float to a decimal number, exactly. 
+ Since 0.1 is not exactly representable in binary floating point, + Decimal.from_float(0.1) is not the same as Decimal('0.1'). + + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(float('-inf')) + Decimal('-Infinity') + """ + ... + def __bool__(self) -> bool: + """True if self else False""" + ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Compare self to other. Return a decimal value: + + a or b is a NaN ==> Decimal('NaN') + a < b ==> Decimal('-1') + a == b ==> Decimal('0') + a > b ==> Decimal('1') + """ + ... + def __hash__(self) -> int: + """Return hash(self).""" + ... + def as_tuple(self) -> DecimalTuple: + """Return a tuple representation of the number.""" + ... + def as_integer_ratio(self) -> tuple[int, int]: + """ + Decimal.as_integer_ratio() -> (int, int) + + Return a pair of integers, whose ratio is exactly equal to the original + Decimal and with a positive denominator. The ratio is in lowest terms. + Raise OverflowError on infinities and a ValueError on NaNs. + """ + ... + def to_eng_string(self, context: Context | None = None) -> str: + """ + Convert to an engineering-type string. Engineering notation has an exponent + which is a multiple of 3, so there are up to 3 digits left of the decimal + place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). + + The value of context.capitals determines whether the exponent sign is lower + or upper case. Otherwise, the context does not affect the operation. + """ + ... + def __abs__(self) -> Decimal: + """abs(self)""" + ... + def __add__(self, value: _Decimal, /) -> Decimal: + """Return self+value.""" + ... + def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return divmod(self, value).""" + ... 
+ def __eq__(self, value: object, /) -> bool: + """Return self==value.""" + ... + def __floordiv__(self, value: _Decimal, /) -> Decimal: + """Return self//value.""" + ... + def __ge__(self, value: _ComparableNum, /) -> bool: + """Return self>=value.""" + ... + def __gt__(self, value: _ComparableNum, /) -> bool: + """Return self>value.""" + ... + def __le__(self, value: _ComparableNum, /) -> bool: + """Return self<=value.""" + ... + def __lt__(self, value: _ComparableNum, /) -> bool: + """Return self Decimal: + """Return self%value.""" + ... + def __mul__(self, value: _Decimal, /) -> Decimal: + """Return self*value.""" + ... + def __neg__(self) -> Decimal: + """-self""" + ... + def __pos__(self) -> Decimal: + """+self""" + ... + def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: + """Return pow(self, value, mod).""" + ... + def __radd__(self, value: _Decimal, /) -> Decimal: + """Return value+self.""" + ... + def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return divmod(value, self).""" + ... + def __rfloordiv__(self, value: _Decimal, /) -> Decimal: + """Return value//self.""" + ... + def __rmod__(self, value: _Decimal, /) -> Decimal: + """Return value%self.""" + ... + def __rmul__(self, value: _Decimal, /) -> Decimal: + """Return value*self.""" + ... + def __rsub__(self, value: _Decimal, /) -> Decimal: + """Return value-self.""" + ... + def __rtruediv__(self, value: _Decimal, /) -> Decimal: + """Return value/self.""" + ... + def __sub__(self, value: _Decimal, /) -> Decimal: + """Return self-value.""" + ... + def __truediv__(self, value: _Decimal, /) -> Decimal: + """Return self/value.""" + ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Return the remainder from dividing self by other. This differs from + self % other in that the sign of the remainder is chosen so as to minimize + its absolute value. 
More precisely, the return value is self - n * other + where n is the integer nearest to the exact value of self / other, and + if two integers are equally near then the even one is chosen. + + If the result is zero then its sign will be the sign of self. + """ + ... + def __float__(self) -> float: + """float(self)""" + ... + def __int__(self) -> int: + """int(self)""" + ... + def __trunc__(self) -> int: ... + @property + def real(self) -> Decimal: ... + @property + def imag(self) -> Decimal: ... + def conjugate(self) -> Decimal: + """Return self.""" + ... + def __complex__(self) -> complex: ... + @overload + def __round__(self) -> int: ... + @overload + def __round__(self, ndigits: int, /) -> Decimal: ... + def __floor__(self) -> int: ... + def __ceil__(self) -> int: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: + """ + Fused multiply-add. Return self*other+third with no rounding of the + intermediate product self*other. + + >>> Decimal(2).fma(3, 5) + Decimal('11') + """ + ... + def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: + """Return pow(value, self, mod).""" + ... + def normalize(self, context: Context | None = None) -> Decimal: + """ + Normalize the number by stripping the rightmost trailing zeros and + converting any result equal to Decimal('0') to Decimal('0e0'). Used + for producing canonical values for members of an equivalence class. + For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize + to the equivalent value Decimal('32.1'). + """ + ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: + """ + Return a value equal to the first operand after rounding and having the + exponent of the second operand. 
+ + >>> Decimal('1.41421356').quantize(Decimal('1.000')) + Decimal('1.414') + + Unlike other operations, if the length of the coefficient after the quantize + operation would be greater than precision, then an InvalidOperation is signaled. + This guarantees that, unless there is an error condition, the quantized exponent + is always equal to that of the right-hand operand. + + Also unlike other operations, quantize never signals Underflow, even if the + result is subnormal and inexact. + + If the exponent of the second operand is larger than that of the first, then + rounding may be necessary. In this case, the rounding mode is determined by the + rounding argument if given, else by the given context argument; if neither + argument is given, the rounding mode of the current thread's context is used. + """ + ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: + """ + Test whether self and other have the same exponent or whether both are NaN. + + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + """ + ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """ + Round to the nearest integer, signaling Inexact or Rounded as appropriate if + rounding occurs. The rounding mode is determined by the rounding parameter + if given, else by the given context. If neither parameter is given, then the + rounding mode of the current default context is used. + """ + ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """ + Round to the nearest integer without signaling Inexact or Rounded. The + rounding mode is determined by the rounding parameter if given, else by + the given context. 
If neither parameter is given, then the rounding mode + of the current default context is used. + """ + ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """ + Identical to the to_integral_value() method. The to_integral() name has been + kept for compatibility with older versions. + """ + ... + def sqrt(self, context: Context | None = None) -> Decimal: + """ + Return the square root of the argument to full precision. The result is + correctly rounded using the ROUND_HALF_EVEN rounding mode. + """ + ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Maximum of self and other. If one operand is a quiet NaN and the other is + numeric, the numeric operand is returned. + """ + ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Minimum of self and other. If one operand is a quiet NaN and the other is + numeric, the numeric operand is returned. + """ + ... + def adjusted(self) -> int: + """Return the adjusted exponent of the number. Defined as exp + digits - 1.""" + ... + def canonical(self) -> Decimal: + """ + Return the canonical encoding of the argument. Currently, the encoding + of a Decimal instance is always canonical, so this operation returns its + argument unchanged. + """ + ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Identical to compare, except that all NaNs signal.""" + ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Compare two operands using their abstract representation rather than + their numerical value. Similar to the compare() method, but the result + gives a total ordering on Decimal instances. 
Two Decimal instances with + the same numeric value but different representations compare unequal + in this ordering: + + >>> Decimal('12.0').compare_total(Decimal('12')) + Decimal('-1') + + Quiet and signaling NaNs are also included in the total ordering. The result + of this function is Decimal('0') if both operands have the same representation, + Decimal('-1') if the first operand is lower in the total order than the second, + and Decimal('1') if the first operand is higher in the total order than the + second operand. See the specification for details of the total order. + + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + """ + ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Compare two operands using their abstract representation rather than their + value as in compare_total(), but ignoring the sign of each operand. + + x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). + + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + """ + ... + def copy_abs(self) -> Decimal: + """ + Return the absolute value of the argument. This operation is unaffected by + context and is quiet: no flags are changed and no rounding is performed. + """ + ... + def copy_negate(self) -> Decimal: + """ + Return the negation of the argument. This operation is unaffected by context + and is quiet: no flags are changed and no rounding is performed. + """ + ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Return a copy of the first operand with the sign set to be the same as the + sign of the second operand. 
For example: + + >>> Decimal('2.3').copy_sign(Decimal('-1.5')) + Decimal('-2.3') + + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + """ + ... + def exp(self, context: Context | None = None) -> Decimal: + """ + Return the value of the (natural) exponential function e**x at the given + number. The function always uses the ROUND_HALF_EVEN mode and the result + is correctly rounded. + """ + ... + def is_canonical(self) -> bool: + """ + Return True if the argument is canonical and False otherwise. Currently, + a Decimal instance is always canonical, so this operation always returns + True. + """ + ... + def is_finite(self) -> bool: + """ + Return True if the argument is a finite number, and False if the argument + is infinite or a NaN. + """ + ... + def is_infinite(self) -> bool: + """ + Return True if the argument is either positive or negative infinity and + False otherwise. + """ + ... + def is_nan(self) -> bool: + """ + Return True if the argument is a (quiet or signaling) NaN and False + otherwise. + """ + ... + def is_normal(self, context: Context | None = None) -> bool: + """ + Return True if the argument is a normal finite non-zero number with an + adjusted exponent greater than or equal to Emin. Return False if the + argument is zero, subnormal, infinite or a NaN. + """ + ... + def is_qnan(self) -> bool: + """Return True if the argument is a quiet NaN, and False otherwise.""" + ... + def is_signed(self) -> bool: + """ + Return True if the argument has a negative sign and False otherwise. + Note that both zeros and NaNs can carry signs. + """ + ... + def is_snan(self) -> bool: + """Return True if the argument is a signaling NaN and False otherwise.""" + ... + def is_subnormal(self, context: Context | None = None) -> bool: + """ + Return True if the argument is subnormal, and False otherwise. 
A number is + subnormal if it is non-zero, finite, and has an adjusted exponent less + than Emin. + """ + ... + def is_zero(self) -> bool: + """ + Return True if the argument is a (positive or negative) zero and False + otherwise. + """ + ... + def ln(self, context: Context | None = None) -> Decimal: + """ + Return the natural (base e) logarithm of the operand. The function always + uses the ROUND_HALF_EVEN mode and the result is correctly rounded. + """ + ... + def log10(self, context: Context | None = None) -> Decimal: + """ + Return the base ten logarithm of the operand. The function always uses the + ROUND_HALF_EVEN mode and the result is correctly rounded. + """ + ... + def logb(self, context: Context | None = None) -> Decimal: + """ + For a non-zero number, return the adjusted exponent of the operand as a + Decimal instance. If the operand is a zero, then Decimal('-Infinity') is + returned and the DivisionByZero condition is raised. If the operand is + an infinity then Decimal('Infinity') is returned. + """ + ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'and' of the two (logical) operands.""" + ... + def logical_invert(self, context: Context | None = None) -> Decimal: + """Return the digit-wise inversion of the (logical) operand.""" + ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'or' of the two (logical) operands.""" + ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'exclusive or' of the two (logical) operands.""" + ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Similar to the max() method, but the comparison is done using the absolute + values of the operands. + """ + ... 
+ def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Similar to the min() method, but the comparison is done using the absolute + values of the operands. + """ + ... + def next_minus(self, context: Context | None = None) -> Decimal: + """ + Return the largest number representable in the given context (or in the + current default context if no context is given) that is smaller than the + given operand. + """ + ... + def next_plus(self, context: Context | None = None) -> Decimal: + """ + Return the smallest number representable in the given context (or in the + current default context if no context is given) that is larger than the + given operand. + """ + ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + If the two operands are unequal, return the number closest to the first + operand in the direction of the second operand. If both operands are + numerically equal, return a copy of the first operand with the sign set + to be the same as the sign of the second operand. + """ + ... + def number_class(self, context: Context | None = None) -> str: + """ + Return a string describing the class of the operand. The returned value + is one of the following ten strings: + + * '-Infinity', indicating that the operand is negative infinity. + * '-Normal', indicating that the operand is a negative normal number. + * '-Subnormal', indicating that the operand is negative and subnormal. + * '-Zero', indicating that the operand is a negative zero. + * '+Zero', indicating that the operand is a positive zero. + * '+Subnormal', indicating that the operand is positive and subnormal. + * '+Normal', indicating that the operand is a positive normal number. + * '+Infinity', indicating that the operand is positive infinity. + * 'NaN', indicating that the operand is a quiet NaN (Not a Number). + * 'sNaN', indicating that the operand is a signaling NaN. + """ + ... 
+ def radix(self) -> Decimal: + """ + Return Decimal(10), the radix (base) in which the Decimal class does + all its arithmetic. Included for compatibility with the specification. + """ + ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Return the result of rotating the digits of the first operand by an amount + specified by the second operand. The second operand must be an integer in + the range -precision through precision. The absolute value of the second + operand gives the number of places to rotate. If the second operand is + positive then rotation is to the left; otherwise rotation is to the right. + The coefficient of the first operand is padded on the left with zeros to + length precision if necessary. The sign and exponent of the first operand are + unchanged. + """ + ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Return the first operand with the exponent adjusted the second. Equivalently, + return the first operand multiplied by 10**other. The second operand must be + an integer. + """ + ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: + """ + Return the result of shifting the digits of the first operand by an amount + specified by the second operand. The second operand must be an integer in + the range -precision through precision. The absolute value of the second + operand gives the number of places to shift. If the second operand is + positive, then the shift is to the left; otherwise the shift is to the + right. Digits shifted into the coefficient are zeros. The sign and exponent + of the first operand are unchanged. + """ + ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any, /) -> Self: ... + def __format__(self, specifier: str, context: Context | None = ..., /) -> str: ... 
+ +class Context: + """ + The context affects almost all operations and controls rounding, + Over/Underflow, raising of exceptions and much more. A new context + can be constructed as follows: + + >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, + ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, + ... traps=[InvalidOperation, DivisionByZero, Overflow], + ... flags=[]) + >>> + """ + # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, + # even settable attributes like `prec` and `rounding`, + # but that's inexpressable in the stub. + # Type checkers either ignore it or misinterpret it + # if you add a `def __delattr__(self, name: str, /) -> NoReturn` method to the stub + prec: int + rounding: str + Emin: int + Emax: int + capitals: int + clamp: int + traps: dict[_TrapType, bool] + flags: dict[_TrapType, bool] + def __init__( + self, + prec: int | None = ..., + rounding: str | None = ..., + Emin: int | None = ..., + Emax: int | None = ..., + capitals: int | None = ..., + clamp: int | None = ..., + flags: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + _ignored_flags: list[_TrapType] | None = ..., + ) -> None: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... + def clear_flags(self) -> None: + """Reset all flags to False.""" + ... + def clear_traps(self) -> None: + """Set all traps to False.""" + ... + def copy(self) -> Context: + """Return a duplicate of the context with all flags cleared.""" + ... + def __copy__(self) -> Context: ... + # see https://github.com/python/cpython/issues/94107 + __hash__: ClassVar[None] # type: ignore[assignment] + def Etiny(self) -> int: + """ + Return a value equal to Emin - prec + 1, which is the minimum exponent value + for subnormal results. When underflow occurs, the exponent is set to Etiny. + """ + ... 
+ def Etop(self) -> int: + """ + Return a value equal to Emax - prec + 1. This is the maximum exponent + if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() + must not be negative. + """ + ... + def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: + """ + Create a new Decimal instance from num, using self as the context. Unlike the + Decimal constructor, this function observes the context limits. + """ + ... + def create_decimal_from_float(self, f: float, /) -> Decimal: + """ + Create a new Decimal instance from float f. Unlike the Decimal.from_float() + class method, this function observes the context limits. + """ + ... + def abs(self, x: _Decimal, /) -> Decimal: + """Return the absolute value of x.""" + ... + def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the sum of x and y.""" + ... + def canonical(self, x: Decimal, /) -> Decimal: + """Return a new instance of x.""" + ... + def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y numerically.""" + ... + def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y numerically. All NaNs signal.""" + ... + def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y using their abstract representation.""" + ... + def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y using their abstract representation, ignoring sign.""" + ... + def copy_abs(self, x: _Decimal, /) -> Decimal: + """Return a copy of x with the sign set to 0.""" + ... + def copy_decimal(self, x: _Decimal, /) -> Decimal: + """Return a copy of Decimal x.""" + ... + def copy_negate(self, x: _Decimal, /) -> Decimal: + """Return a copy of x with the sign inverted.""" + ... + def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Copy the sign from y to x.""" + ... + def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return x divided by y.""" + ... 
+ def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return x divided by y, truncated to an integer.""" + ... + def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return quotient and remainder of the division x / y.""" + ... + def exp(self, x: _Decimal, /) -> Decimal: + """Return e ** x.""" + ... + def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: + """Return x multiplied by y, plus z.""" + ... + def is_canonical(self, x: _Decimal, /) -> bool: + """Return True if x is canonical, False otherwise.""" + ... + def is_finite(self, x: _Decimal, /) -> bool: + """Return True if x is finite, False otherwise.""" + ... + def is_infinite(self, x: _Decimal, /) -> bool: + """Return True if x is infinite, False otherwise.""" + ... + def is_nan(self, x: _Decimal, /) -> bool: + """Return True if x is a qNaN or sNaN, False otherwise.""" + ... + def is_normal(self, x: _Decimal, /) -> bool: + """Return True if x is a normal number, False otherwise.""" + ... + def is_qnan(self, x: _Decimal, /) -> bool: + """Return True if x is a quiet NaN, False otherwise.""" + ... + def is_signed(self, x: _Decimal, /) -> bool: + """Return True if x is negative, False otherwise.""" + ... + def is_snan(self, x: _Decimal, /) -> bool: + """Return True if x is a signaling NaN, False otherwise.""" + ... + def is_subnormal(self, x: _Decimal, /) -> bool: + """Return True if x is subnormal, False otherwise.""" + ... + def is_zero(self, x: _Decimal, /) -> bool: + """Return True if x is a zero, False otherwise.""" + ... + def ln(self, x: _Decimal, /) -> Decimal: + """Return the natural (base e) logarithm of x.""" + ... + def log10(self, x: _Decimal, /) -> Decimal: + """Return the base 10 logarithm of x.""" + ... + def logb(self, x: _Decimal, /) -> Decimal: + """Return the exponent of the magnitude of the operand's MSD.""" + ... + def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise and of x and y.""" + ... 
+ def logical_invert(self, x: _Decimal, /) -> Decimal: + """Invert all digits of x.""" + ... + def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise or of x and y.""" + ... + def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise xor of x and y.""" + ... + def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically and return the maximum.""" + ... + def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically with their sign ignored.""" + ... + def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically and return the minimum.""" + ... + def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically with their sign ignored.""" + ... + def minus(self, x: _Decimal, /) -> Decimal: + """ + Minus corresponds to the unary prefix minus operator in Python, but applies + the context to the result. + """ + ... + def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the product of x and y.""" + ... + def next_minus(self, x: _Decimal, /) -> Decimal: + """Return the largest representable number smaller than x.""" + ... + def next_plus(self, x: _Decimal, /) -> Decimal: + """Return the smallest representable number larger than x.""" + ... + def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the number closest to x, in the direction towards y.""" + ... + def normalize(self, x: _Decimal, /) -> Decimal: + """Reduce x to its simplest form. Alias for reduce(x).""" + ... + def number_class(self, x: _Decimal, /) -> str: + """Return an indication of the class of x.""" + ... + def plus(self, x: _Decimal, /) -> Decimal: + """ + Plus corresponds to the unary prefix plus operator in Python, but applies + the context to the result. + """ + ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: + """ + Compute a**b. 
If 'a' is negative, then 'b' must be integral. The result + will be inexact unless 'a' is integral and the result is finite and can + be expressed exactly in 'precision' digits. In the Python version the + result is always correctly rounded, in the C version the result is almost + always correctly rounded. + + If modulo is given, compute (a**b) % modulo. The following restrictions + hold: + + * all three arguments must be integral + * 'b' must be nonnegative + * at least one of 'a' or 'b' must be nonzero + * modulo must be nonzero and less than 10**prec in absolute value + """ + ... + def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a value equal to x (rounded), having the exponent of y.""" + ... + def radix(self) -> Decimal: + """Return 10.""" + ... + def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """ + Return the remainder from integer division. The sign of the result, + if non-zero, is the same as that of the original dividend. + """ + ... + def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """ + Return x - y * n, where n is the integer nearest the exact value of x / y + (if the result is 0 then its sign will be the sign of x). + """ + ... + def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a copy of x, rotated by y places.""" + ... + def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: + """Return True if the two operands have the same exponent.""" + ... + def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the first operand after adding the second value to its exp.""" + ... + def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a copy of x, shifted by y places.""" + ... + def sqrt(self, x: _Decimal, /) -> Decimal: + """Square root of a non-negative number to context precision.""" + ... + def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the difference between x and y.""" + ... 
+ def to_eng_string(self, x: _Decimal, /) -> str: + """Convert a number to a string, using engineering notation.""" + ... + def to_sci_string(self, x: _Decimal, /) -> str: + """Convert a number to a string using scientific notation.""" + ... + def to_integral_exact(self, x: _Decimal, /) -> Decimal: + """Round to an integer. Signal if the result is rounded or inexact.""" + ... + def to_integral_value(self, x: _Decimal, /) -> Decimal: + """Round to an integer.""" + ... + def to_integral(self, x: _Decimal, /) -> Decimal: + """Identical to to_integral_value(x).""" + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dis.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dis.pyi index 4ccb8c0..429dbcb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dis.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/dis.pyi @@ -33,6 +33,9 @@ __all__ = [ "EXTENDED_ARG", "stack_effect", ] +if sys.version_info >= (3, 13): + __all__ += ["hasjump"] + if sys.version_info >= (3, 12): __all__ += ["hasarg", "hasexc"] else: @@ -52,6 +55,7 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 13): class _Instruction(NamedTuple): + """_Instruction(opname, opcode, arg, argval, argrepr, offset, start_offset, starts_line, line_number, label, positions, cache_info)""" opname: str opcode: int arg: int | None @@ -67,7 +71,6 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 11): class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target, positions)""" opname: str opcode: int arg: int | None @@ -80,7 +83,6 @@ elif sys.version_info >= (3, 11): else: class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target)""" opname: str opcode: int arg: int | None @@ -92,7 +94,7 @@ else: class Instruction(_Instruction): """ - Details for a bytecode operation + Details for a 
bytecode operation. Defined fields: opname - human readable name for operation @@ -101,20 +103,59 @@ class Instruction(_Instruction): argval - resolved arg value (if known), otherwise same as arg argrepr - human readable description of operation argument offset - start index of operation within bytecode sequence - starts_line - line started by this opcode (if any), otherwise None - is_jump_target - True if other code jumps to here, otherwise False + start_offset - start index of operation within bytecode sequence including extended args if present; + otherwise equal to Instruction.offset + starts_line - True if this opcode starts a source line, otherwise False + line_number - source line number associated with this opcode (if any), otherwise None + label - A label if this instruction is a jump target, otherwise None positions - Optional dis.Positions object holding the span of source code covered by this instruction + cache_info - information about the format and content of the instruction's cache + entries (if any) """ - def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: - """ - Format instruction details for inclusion in disassembly output + if sys.version_info < (3, 13): + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... + if sys.version_info >= (3, 13): + @property + def oparg(self) -> int: + """Alias for Instruction.arg.""" + ... + @property + def baseopcode(self) -> int: + """ + Numeric code for the base operation if operation is specialized. - *lineno_width* sets the width of the line number field (0 omits it) - *mark_as_current* inserts a '-->' marker arrow as part of the line - *offset_width* sets the width of the instruction offset field - """ - ... + Otherwise equal to Instruction.opcode. + """ + ... + @property + def baseopname(self) -> str: + """ + Human readable name for the base operation if operation is specialized. 
+ + Otherwise equal to Instruction.opname. + """ + ... + @property + def cache_offset(self) -> int: + """Start index of the cache entries following the operation.""" + ... + @property + def end_offset(self) -> int: + """End index of the cache entries following the operation.""" + ... + @property + def jump_target(self) -> int: + """ + Bytecode index of the jump target if this is a jump operation. + + Otherwise return None. + """ + ... + @property + def is_jump_target(self) -> bool: + """True if other code jumps to here, otherwise False""" + ... class Bytecode: """ @@ -127,7 +168,20 @@ class Bytecode: """ codeobj: types.CodeType first_line: int - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 13): + show_offsets: bool + # 3.13 added `show_offsets` + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): def __init__( self, x: _HaveCodeType | str, @@ -137,18 +191,19 @@ class Bytecode: show_caches: bool = False, adaptive: bool = False, ) -> None: ... - @classmethod - def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: - """Construct a Bytecode from the given traceback """ - ... else: def __init__( self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None ) -> None: ... + + if sys.version_info >= (3, 11): @classmethod - def from_traceback(cls, tb: types.TracebackType) -> Self: + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: """Construct a Bytecode from the given traceback """ ... + else: + @classmethod + def from_traceback(cls, tb: types.TracebackType) -> Self: ... def __iter__(self) -> Iterator[Instruction]: ... 
def info(self) -> str: @@ -172,6 +227,7 @@ def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: Find the offsets in a byte code which are start of lines in the source. Generate pairs (offset, lineno) + lineno will be an integer or None the offset does not have a source line. """ ... def pretty_flags(flags: int) -> str: @@ -181,7 +237,8 @@ def code_info(x: _HaveCodeType | str) -> str: """Formatted details of methods, functions, or code.""" ... -if sys.version_info >= (3, 11): +if sys.version_info >= (3, 13): + # 3.13 added `show_offsets` def dis( x: _HaveCodeType | str | bytes | bytearray | None = None, *, @@ -189,6 +246,7 @@ if sys.version_info >= (3, 11): depth: int | None = None, show_caches: bool = False, adaptive: bool = False, + show_offsets: bool = False, ) -> None: """ Disassemble classes, methods, functions, and other compiled objects. @@ -200,40 +258,30 @@ if sys.version_info >= (3, 11): in a special attribute. """ ... - -else: - def dis( - x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None - ) -> None: - """ - Disassemble classes, methods, functions, and other compiled objects. - - With no argument, disassemble the last traceback. - - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ - ... - -if sys.version_info >= (3, 11): def disassemble( - co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False - ) -> None: - """Disassemble a code object.""" - ... - def disco( - co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + co: _HaveCodeType, + lasti: int = -1, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, ) -> None: """Disassemble a code object.""" ... 
def distb( - tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + tb: types.TracebackType | None = None, + *, + file: IO[str] | None = None, + show_caches: bool = False, + adaptive: bool = False, + show_offsets: bool = False, ) -> None: """Disassemble a traceback (default: last traceback).""" ... + # 3.13 made `show_cache` `None` by default def get_instructions( - x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False ) -> Iterator[Instruction]: """ Iterator for the opcodes in methods, functions or code @@ -248,29 +296,33 @@ if sys.version_info >= (3, 11): """ ... -else: - def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: - """Disassemble a code object.""" - ... - def disco(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: - """Disassemble a code object.""" - ... - def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: - """Disassemble a traceback (default: last traceback).""" - ... - def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: - """ - Iterator for the opcodes in methods, functions or code - - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. +elif sys.version_info >= (3, 11): + # 3.11 added `show_caches` and `adaptive` + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + ) -> None: ... + def disassemble( + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... 
+ def distb( + tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False + ) -> Iterator[Instruction]: ... - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ - ... +else: + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None + ) -> None: ... + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: """ @@ -279,3 +331,5 @@ def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: If *file* is not provided, the output is printed on stdout. """ ... + +disco = disassemble diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/__init__.pyi index 2dccd2d..328a5b7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/__init__.pyi @@ -3,15 +3,3 @@ # - Most users already do not use stdlib distutils, due to setuptools monkeypatching # - We have very little quality assurance on these stubs, since due to the two above issues # we allowlist all distutils errors in stubtest. 
- -""" -distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) -""" - diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/archive_util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/archive_util.pyi index 360b6ba..16684ff 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/archive_util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/archive_util.pyi @@ -1,10 +1,3 @@ -""" -distutils.archive_util - -Utility functions for creating archive files (tarballs, zip files, -that sort of thing). -""" - from _typeshed import StrOrBytesPath, StrPath from typing import Literal, overload @@ -18,25 +11,7 @@ def make_archive( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: - """ - Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. - """ - ... +) -> str: ... @overload def make_archive( base_name: StrPath, @@ -47,25 +22,7 @@ def make_archive( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: - """ - Create an archive file (eg. zip or tar). 
- - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. - """ - ... +) -> str: ... def make_tarball( base_name: str, base_dir: StrPath, @@ -74,32 +31,5 @@ def make_tarball( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: - """ - Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or - None. ("compress" will be deprecated in Python 3.2) - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). - - Returns the output filename. - """ - ... -def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: - """ - Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises DistutilsExecError. Returns the name of the output zip - file. - """ - ... 
+) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/bcppcompiler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/bcppcompiler.pyi index be0a87a..3e432f9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/bcppcompiler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/bcppcompiler.pyi @@ -1,15 +1,3 @@ -""" -distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" - from distutils.ccompiler import CCompiler -class BCPPCompiler(CCompiler): - """ - Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ - ... +class BCPPCompiler(CCompiler): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/ccompiler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/ccompiler.pyi index edb3450..e0f33f4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/ccompiler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/ccompiler.pyi @@ -1,93 +1,27 @@ -""" -distutils.ccompiler - -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model. 
-""" - -from _typeshed import BytesPath, StrPath +from _typeshed import BytesPath, StrPath, Unused from collections.abc import Callable, Iterable from distutils.file_util import _BytesPathT, _StrPathT -from typing import Any, Literal, overload -from typing_extensions import TypeAlias +from typing import Literal, overload +from typing_extensions import TypeAlias, TypeVarTuple, Unpack _Macro: TypeAlias = tuple[str] | tuple[str, str | None] +_Ts = TypeVarTuple("_Ts") def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] -) -> list[str]: - """ - Generate linker options for searching library directories and - linking with specific libraries. 'libraries' and 'library_dirs' are, - respectively, lists of library names (not filenames!) and search - directories. Returns a list of command-line options suitable for use - with some compiler (depending on the two format strings passed in). - """ - ... -def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: - """ - Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - ... -def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: - """ - Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. 
- - The default values are os.name and sys.platform in case the - parameters are not given. - """ - ... +) -> list[str]: ... +def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... def new_compiler( plat: str | None = None, compiler: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0, -) -> CCompiler: - """ - Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - ... -def show_compilers() -> None: - """ - Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). - """ - ... +) -> CCompiler: ... +def show_compilers() -> None: ... class CCompiler: - """ - Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. 
- """ dry_run: bool force: bool verbose: bool @@ -101,128 +35,20 @@ class CCompiler: def __init__( self, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0 ) -> None: ... - def add_include_dir(self, dir: str) -> None: - """ - Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. - """ - ... - def set_include_dirs(self, dirs: list[str]) -> None: - """ - Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - ... - def add_library(self, libname: str) -> None: - """ - Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). - - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - ... - def set_libraries(self, libnames: list[str]) -> None: - """ - Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. - """ - ... 
- def add_library_dir(self, dir: str) -> None: - """ - Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - ... - def set_library_dirs(self, dirs: list[str]) -> None: - """ - Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - ... - def add_runtime_library_dir(self, dir: str) -> None: - """ - Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - ... - def set_runtime_library_dirs(self, dirs: list[str]) -> None: - """ - Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - ... - def define_macro(self, name: str, value: str | None = None) -> None: - """ - Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - ... - def undefine_macro(self, name: str) -> None: - """ - Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - ... 
- def add_link_object(self, object: str) -> None: - """ - Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. - """ - ... - def set_link_objects(self, objects: list[str]) -> None: - """ - Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). - """ - ... - def detect_language(self, sources: str | list[str]) -> str | None: - """ - Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. - """ - ... - def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: - """ - Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - ... + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... 
+ def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: ... def has_function( self, funcname: str, @@ -230,50 +56,11 @@ class CCompiler: include_dirs: list[str] | None = None, libraries: list[str] | None = None, library_dirs: list[str] | None = None, - ) -> bool: - """ - Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - ... - def library_dir_option(self, dir: str) -> str: - """ - Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - ... - def library_option(self, lib: str) -> str: - """ - Return the compiler option to add 'lib' to the list of libraries - linked into the shared library or executable. - """ - ... - def runtime_library_dir_option(self, dir: str) -> str: - """ - Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - ... - def set_executables(self, **args: str) -> None: - """ - Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command-line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - ... + ) -> bool: ... + def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... 
+ def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **args: str) -> None: ... def compile( self, sources: list[str], @@ -284,57 +71,7 @@ class CCompiler: extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, depends: list[str] | None = None, - ) -> list[str]: - """ - Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command-line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepend/append to the compiler command - line. 
On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - ... + ) -> list[str]: ... def create_static_lib( self, objects: list[str], @@ -342,31 +79,7 @@ class CCompiler: output_dir: str | None = None, debug: bool | Literal[0, 1] = 0, target_lang: str | None = None, - ) -> None: - """ - Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). - - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. - - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - ... + ) -> None: ... def link( self, target_desc: str, @@ -382,52 +95,7 @@ class CCompiler: extra_postargs: list[str] | None = None, build_temp: str | None = None, target_lang: str | None = None, - ) -> None: - """ - Link a bunch of stuff together to create an executable or - shared library file. 
- - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) - - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - ... + ) -> None: ... 
def link_executable( self, objects: list[str], @@ -479,18 +147,7 @@ class CCompiler: include_dirs: list[str] | None = None, extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, - ) -> None: - """ - Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. - """ - ... + ) -> None: ... @overload def executable_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... @overload @@ -505,7 +162,9 @@ class CCompiler: def shared_object_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... @overload def shared_object_filename(self, basename: StrPath, strip_dir: Literal[1, True], output_dir: StrPath = "") -> str: ... - def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, level: int = 1) -> None: ... + def execute( + self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 + ) -> None: ... def spawn(self, cmd: list[str]) -> None: ... def mkpath(self, name: str, mode: int = 0o777) -> None: ... 
@overload diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cmd.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cmd.pyi index 2e35f2d..dcb423a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cmd.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cmd.pyi @@ -1,158 +1,165 @@ -""" -distutils.cmd - -Provides the Command class, the base class for the command classes -in the distutils.command package. -""" - from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused from abc import abstractmethod from collections.abc import Callable, Iterable +from distutils.command.bdist import bdist +from distutils.command.bdist_dumb import bdist_dumb +from distutils.command.bdist_rpm import bdist_rpm +from distutils.command.build import build +from distutils.command.build_clib import build_clib +from distutils.command.build_ext import build_ext +from distutils.command.build_py import build_py +from distutils.command.build_scripts import build_scripts +from distutils.command.check import check +from distutils.command.clean import clean +from distutils.command.config import config +from distutils.command.install import install +from distutils.command.install_data import install_data +from distutils.command.install_egg_info import install_egg_info +from distutils.command.install_headers import install_headers +from distutils.command.install_lib import install_lib +from distutils.command.install_scripts import install_scripts +from distutils.command.register import register +from distutils.command.sdist import sdist +from distutils.command.upload import upload from distutils.dist import Distribution from distutils.file_util import _BytesPathT, _StrPathT -from typing import Any, ClassVar, Literal, overload +from typing import Any, ClassVar, Literal, TypeVar, overload +from typing_extensions import TypeVarTuple, Unpack + +_CommandT = 
TypeVar("_CommandT", bound=Command) +_Ts = TypeVarTuple("_Ts") class Command: - """ - Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ + dry_run: Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run distribution: Distribution # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] - def __init__(self, dist: Distribution) -> None: - """ - Create and initialize a new Command object. Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being - instantiated. - """ - ... + def __init__(self, dist: Distribution) -> None: ... @abstractmethod - def initialize_options(self) -> None: - """ - Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command-line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - ... 
+ def initialize_options(self) -> None: ... @abstractmethod - def finalize_options(self) -> None: - """ - Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command-line or from other commands have been - done. Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - ... + def finalize_options(self) -> None: ... @abstractmethod - def run(self) -> None: - """ - A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - ... - def announce(self, msg: str, level: int = 1) -> None: - """ - If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - ... - def debug_print(self, msg: str) -> None: - """ - Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - ... - def ensure_string(self, option: str, default: str | None = None) -> None: - """ - Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - ... - def ensure_string_list(self, option: str | list[str]) -> None: - r""" - Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - ... 
- def ensure_filename(self, option: str) -> None: - """Ensure that 'option' is the name of an existing file.""" - ... + def run(self) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def debug_print(self, msg: str) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... + def ensure_string_list(self, option: str) -> None: ... + def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... - def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: - """ - Set the values of any "undefined" options from corresponding - option values in some other command object. "Undefined" here means - "is None", which is the convention used to indicate that an option - has not been changed between 'initialize_options()' and - 'finalize_options()'. Usually called from 'finalize_options()' for - options that depend on some other command rather than another - option of the same command. 'src_cmd' is the other command from - which option values will be taken (a command object will be created - for it if necessary); the remaining arguments are - '(src_option,dst_option)' tuples which mean "take the value of - 'src_option' in the 'src_cmd' command object, and copy it to - 'dst_option' in the current command object". - """ - ... - def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: - """ - Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - ... - def reinitialize_command(self, command: Command | str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ... 
- def run_command(self, command: str) -> None: - """ - Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. - """ - ... - def get_sub_commands(self) -> list[str]: - """ - Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... + # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. + @overload + def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ... + @overload + def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ... + @overload + def get_finalized_command(self, command: Literal["bdist_rpm"], create: bool | Literal[0, 1] = 1) -> bdist_rpm: ... + @overload + def get_finalized_command(self, command: Literal["build"], create: bool | Literal[0, 1] = 1) -> build: ... + @overload + def get_finalized_command(self, command: Literal["build_clib"], create: bool | Literal[0, 1] = 1) -> build_clib: ... + @overload + def get_finalized_command(self, command: Literal["build_ext"], create: bool | Literal[0, 1] = 1) -> build_ext: ... + @overload + def get_finalized_command(self, command: Literal["build_py"], create: bool | Literal[0, 1] = 1) -> build_py: ... + @overload + def get_finalized_command(self, command: Literal["build_scripts"], create: bool | Literal[0, 1] = 1) -> build_scripts: ... + @overload + def get_finalized_command(self, command: Literal["check"], create: bool | Literal[0, 1] = 1) -> check: ... 
+ @overload + def get_finalized_command(self, command: Literal["clean"], create: bool | Literal[0, 1] = 1) -> clean: ... + @overload + def get_finalized_command(self, command: Literal["config"], create: bool | Literal[0, 1] = 1) -> config: ... + @overload + def get_finalized_command(self, command: Literal["install"], create: bool | Literal[0, 1] = 1) -> install: ... + @overload + def get_finalized_command(self, command: Literal["install_data"], create: bool | Literal[0, 1] = 1) -> install_data: ... + @overload + def get_finalized_command( + self, command: Literal["install_egg_info"], create: bool | Literal[0, 1] = 1 + ) -> install_egg_info: ... + @overload + def get_finalized_command(self, command: Literal["install_headers"], create: bool | Literal[0, 1] = 1) -> install_headers: ... + @overload + def get_finalized_command(self, command: Literal["install_lib"], create: bool | Literal[0, 1] = 1) -> install_lib: ... + @overload + def get_finalized_command(self, command: Literal["install_scripts"], create: bool | Literal[0, 1] = 1) -> install_scripts: ... + @overload + def get_finalized_command(self, command: Literal["register"], create: bool | Literal[0, 1] = 1) -> register: ... + @overload + def get_finalized_command(self, command: Literal["sdist"], create: bool | Literal[0, 1] = 1) -> sdist: ... + @overload + def get_finalized_command(self, command: Literal["upload"], create: bool | Literal[0, 1] = 1) -> upload: ... + @overload + def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ... + @overload + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist: ... + @overload + def reinitialize_command( + self, command: Literal["bdist_dumb"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> bdist_dumb: ... + @overload + def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist_rpm: ... 
+ @overload + def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build: ... + @overload + def reinitialize_command( + self, command: Literal["build_clib"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> build_clib: ... + @overload + def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_ext: ... + @overload + def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_py: ... + @overload + def reinitialize_command( + self, command: Literal["build_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> build_scripts: ... + @overload + def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool | Literal[0, 1] = 0) -> check: ... + @overload + def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool | Literal[0, 1] = 0) -> clean: ... + @overload + def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool | Literal[0, 1] = 0) -> config: ... + @overload + def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool | Literal[0, 1] = 0) -> install: ... + @overload + def reinitialize_command( + self, command: Literal["install_data"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_data: ... + @overload + def reinitialize_command( + self, command: Literal["install_egg_info"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_egg_info: ... + @overload + def reinitialize_command( + self, command: Literal["install_headers"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_headers: ... + @overload + def reinitialize_command( + self, command: Literal["install_lib"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_lib: ... 
+ @overload + def reinitialize_command( + self, command: Literal["install_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0 + ) -> install_scripts: ... + @overload + def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool | Literal[0, 1] = 0) -> register: ... + @overload + def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> sdist: ... + @overload + def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool | Literal[0, 1] = 0) -> upload: ... + @overload + def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ... + @overload + def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ... + def run_command(self, command: str) -> None: ... + def get_sub_commands(self) -> list[str]: ... def warn(self, msg: str) -> None: ... - def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ... + def execute( + self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 + ) -> None: ... def mkpath(self, name: str, mode: int = 0o777) -> None: ... @overload def copy_file( @@ -163,13 +170,7 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, link: str | None = None, level: Unused = 1, - ) -> tuple[_StrPathT | str, bool]: - """ - Copy a file respecting verbose, dry-run and force flags. (The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.) - """ - ... + ) -> tuple[_StrPathT | str, bool]: ... @overload def copy_file( self, @@ -179,13 +180,7 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, link: str | None = None, level: Unused = 1, - ) -> tuple[_BytesPathT | bytes, bool]: - """ - Copy a file respecting verbose, dry-run and force flags. 
(The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.) - """ - ... + ) -> tuple[_BytesPathT | bytes, bool]: ... def copy_tree( self, infile: StrPath, @@ -194,23 +189,12 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, preserve_symlinks: bool | Literal[0, 1] = 0, level: Unused = 1, - ) -> list[str]: - """ - Copy an entire directory tree respecting verbose, dry-run, - and force flags. - """ - ... - @overload - def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: - """Move a file respecting dry-run flag.""" - ... - @overload - def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: - """Move a file respecting dry-run flag.""" - ... - def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: - """Spawn an external command respecting dry-run flag.""" - ... + ) -> list[str]: ... + @overload + def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ... + @overload + def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... + def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ... @overload def make_archive( self, @@ -235,21 +219,11 @@ class Command: self, infiles: str | list[str] | tuple[str, ...], outfile: StrOrBytesPath, - func: Callable[..., object], - args: list[Any], + func: Callable[[Unpack[_Ts]], Unused], + args: tuple[Unpack[_Ts]], exec_msg: str | None = None, skip_msg: str | None = None, level: Unused = 1, - ) -> None: - """ - Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. 
If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. - """ - ... + ) -> None: ... def ensure_finalized(self) -> None: ... def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/__init__.pyi index ae6bd60..4d73728 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/__init__.pyi @@ -1,6 +1,48 @@ -""" -distutils.command +import sys -Package containing implementation of all the standard Distutils -commands. -""" +from . import ( + bdist, + bdist_dumb, + bdist_rpm, + build, + build_clib, + build_ext, + build_py, + build_scripts, + check, + clean, + install, + install_data, + install_headers, + install_lib, + install_scripts, + register, + sdist, + upload, +) + +__all__ = [ + "build", + "build_py", + "build_ext", + "build_clib", + "build_scripts", + "clean", + "install", + "install_lib", + "install_headers", + "install_scripts", + "install_data", + "sdist", + "register", + "bdist", + "bdist_dumb", + "bdist_rpm", + "check", + "upload", +] + +if sys.version_info < (3, 10): + from . 
import bdist_wininst + + __all__ += ["bdist_wininst"] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist.pyi index 6dd8e8f..43d7708 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist.pyi @@ -1,30 +1,20 @@ -""" -distutils.command.bdist - -Implements the Distutils 'bdist' command (create a built [binary] -distribution). -""" - -from typing import Any +from _typeshed import Unused +from collections.abc import Callable +from typing import Any, ClassVar from ..cmd import Command -def show_formats() -> None: - """ - Print list of available formats (arguments to "--format" option). - - """ - ... +def show_formats() -> None: ... class bdist(Command): description: str - user_options: Any - boolean_options: Any - help_options: Any - no_format_option: Any - default_format: Any - format_commands: Any - format_command: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + no_format_option: ClassVar[tuple[str, ...]] + default_format: ClassVar[dict[str, str]] + format_commands: ClassVar[list[str]] + format_command: ClassVar[dict[str, tuple[str, str]]] bdist_base: Any plat_name: Any formats: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_dumb.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_dumb.pyi index 0f4fd51..1999788 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_dumb.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_dumb.pyi @@ -1,20 +1,12 @@ -""" 
-distutils.command.bdist_dumb - -Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix). -""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class bdist_dumb(Command): description: str - user_options: Any - boolean_options: Any - default_format: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + default_format: ClassVar[dict[str, str]] bdist_dir: Any plat_name: Any format: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi index d1eb374..d0eac1a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi @@ -1,5 +1,5 @@ import sys -from typing import Any, Literal +from typing import Any, ClassVar, Literal from ..cmd import Command @@ -16,8 +16,8 @@ if sys.platform == "win32": class bdist_msi(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] all_versions: Any other_version: str if sys.version_info >= (3, 9): diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_rpm.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_rpm.pyi index 2806c23..89c43e1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_rpm.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_rpm.pyi @@ -1,19 +1,12 @@ -""" -distutils.command.bdist_rpm - -Implements the Distutils 'bdist_rpm' command (create RPM source and binary 
-distributions). -""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class bdist_rpm(Command): description: str - user_options: Any - boolean_options: Any - negative_opt: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] bdist_base: Any rpm_base: Any dist_dir: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi index 8491d31..cf333bc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi @@ -1,10 +1,10 @@ from _typeshed import StrOrBytesPath from distutils.cmd import Command -from typing import Any, ClassVar +from typing import ClassVar class bdist_wininst(Command): description: ClassVar[str] - user_options: ClassVar[list[tuple[Any, ...]]] + user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] def initialize_options(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build.pyi index 7306204..78ba6b7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build.pyi @@ -1,9 +1,4 @@ -""" -distutils.command.build - -Implements the Distutils 'build' command. -""" - +from _typeshed import Unused from collections.abc import Callable from typing import Any, ClassVar @@ -13,9 +8,9 @@ def show_compilers() -> None: ... 
class build(Command): description: str - user_options: Any - boolean_options: Any - help_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] build_base: str build_purelib: Any build_platlib: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_clib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_clib.pyi index 277f106..1f66e2e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_clib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_clib.pyi @@ -1,12 +1,6 @@ -""" -distutils.command.build_clib - -Implements the Distutils 'build_clib' command, to build a C/C++ library -that is included in the module distribution and needed by an extension -module. -""" - -from typing import Any +from _typeshed import Unused +from collections.abc import Callable +from typing import Any, ClassVar from ..cmd import Command @@ -14,9 +8,9 @@ def show_compilers() -> None: ... class build_clib(Command): description: str - user_options: Any - boolean_options: Any - help_options: Any + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] build_clib: Any build_temp: Any libraries: Any @@ -29,18 +23,7 @@ class build_clib(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_library_list(self, libraries) -> None: - """ - Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). 
- - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - ... + def check_library_list(self, libraries) -> None: ... def get_library_names(self): ... def get_source_files(self): ... def build_libraries(self, libraries) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_ext.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_ext.pyi index ec14d3f..a0813c3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_ext.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_ext.pyi @@ -1,12 +1,6 @@ -""" -distutils.command.build_ext - -Implements the Distutils 'build_ext' command, for building extension -modules (currently limited to C extensions, should accommodate C++ -extensions ASAP). -""" - -from typing import Any +from _typeshed import Unused +from collections.abc import Callable +from typing import Any, ClassVar from ..cmd import Command @@ -17,9 +11,9 @@ def show_compilers() -> None: ... class build_ext(Command): description: str sep_by: Any - user_options: Any - boolean_options: Any - help_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] extensions: Any build_lib: Any plat_name: Any @@ -44,71 +38,15 @@ class build_ext(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_extensions_list(self, extensions) -> None: - """ - Ensure that the list of extensions (presumably provided as a - command option 'extensions') is valid, i.e. it is a list of - Extension objects. We also support the old-style list of 2-tuples, - where the tuples are (ext_name, build_info), which are converted to - Extension instances here. 
- - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - ... + def check_extensions_list(self, extensions) -> None: ... def get_source_files(self): ... def get_outputs(self): ... def build_extensions(self) -> None: ... def build_extension(self, ext) -> None: ... - def swig_sources(self, sources, extension): - """ - Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - ... - def find_swig(self): - """ - Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. - """ - ... - def get_ext_fullpath(self, ext_name: str) -> str: - """ - Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - ... - def get_ext_fullname(self, ext_name: str) -> str: - """ - Returns the fullname of a given extension name. - - Adds the `package.` prefix - """ - ... - def get_ext_filename(self, ext_name: str) -> str: - r""" - Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\bar.pyd"). - """ - ... - def get_export_symbols(self, ext): - """ - Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "PyInit_" + module_name. Only relevant on Windows, where - the .pyd file (DLL) must export the module "PyInit_" function. - """ - ... - def get_libraries(self, ext): - """ - Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows, we add the Python library (eg. python20.dll). - """ - ... + def swig_sources(self, sources, extension): ... + def find_swig(self): ... 
+ def get_ext_fullpath(self, ext_name: str) -> str: ... + def get_ext_fullname(self, ext_name: str) -> str: ... + def get_ext_filename(self, ext_name: str) -> str: ... + def get_export_symbols(self, ext): ... + def get_libraries(self, ext): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_py.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_py.pyi index 1f7ecd3..90f0675 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_py.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_py.pyi @@ -1,19 +1,13 @@ -""" -distutils.command.build_py - -Implements the Distutils 'build_py' command. -""" - -from typing import Any, Literal +from typing import Any, ClassVar, Literal from ..cmd import Command from ..util import Mixin2to3 as Mixin2to3 class build_py(Command): description: str - user_options: Any - boolean_options: Any - negative_opt: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] build_lib: Any py_modules: Any package: Any @@ -27,45 +21,15 @@ class build_py(Command): data_files: Any def finalize_options(self) -> None: ... def run(self) -> None: ... - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - ... - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - ... - def build_package_data(self) -> None: - """Copy data files into build directory""" - ... - def get_package_dir(self, package): - """ - Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any). - """ - ... + def get_data_files(self): ... + def find_data_files(self, package, src_dir): ... 
+ def build_package_data(self) -> None: ... + def get_package_dir(self, package): ... def check_package(self, package, package_dir): ... def check_module(self, module, module_file): ... def find_package_modules(self, package, package_dir): ... - def find_modules(self): - """ - Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. - """ - ... - def find_all_modules(self): - """ - Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do. - """ - ... + def find_modules(self): ... + def find_all_modules(self): ... def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... def get_outputs(self, include_bytecode: bool | Literal[0, 1] = 1): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_scripts.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_scripts.pyi index 716e7c0..7871bb8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_scripts.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/build_scripts.pyi @@ -1,10 +1,4 @@ -""" -distutils.command.build_scripts - -Implements the Distutils 'build_scripts' command. 
-""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command from ..util import Mixin2to3 as Mixin2to3 @@ -13,8 +7,8 @@ first_line_re: Any class build_scripts(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] build_dir: Any scripts: Any force: Any @@ -24,14 +18,7 @@ class build_scripts(Command): def finalize_options(self) -> None: ... def get_source_files(self): ... def run(self) -> None: ... - def copy_scripts(self): - r""" - Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. - """ - ... + def copy_scripts(self): ... class build_scripts_2to3(build_scripts, Mixin2to3): def copy_scripts(self): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/check.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/check.pyi index 7b5b6a7..e69627d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/check.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/check.pyi @@ -1,10 +1,4 @@ -""" -distutils.command.check - -Implements the Distutils 'check' command. -""" - -from typing import Any, Literal +from typing import Any, ClassVar, Final, Literal from typing_extensions import TypeAlias from ..cmd import Command @@ -28,42 +22,18 @@ class SilentReporter(_Reporter): ) -> None: ... def system_message(self, level, message, *children, **kwargs): ... -HAS_DOCUTILS: bool +HAS_DOCUTILS: Final[bool] class check(Command): - """ - This command checks the meta-data of the package. 
- - """ description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] restructuredtext: int metadata: int strict: int - def initialize_options(self) -> None: - """Sets default values for options.""" - ... + def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... - def warn(self, msg): - """Counts the number of warnings that occurs.""" - ... - def run(self) -> None: - """Runs the command.""" - ... - def check_metadata(self) -> None: - """ - Ensures that all required elements of meta-data are supplied. - - Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email) - - Warns if any are missing. - """ - ... - def check_restructuredtext(self) -> None: - """Checks if the long string fields are reST-compliant.""" - ... + def warn(self, msg): ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def check_restructuredtext(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/clean.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/clean.pyi index 6556f8d..55f0a0e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/clean.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/clean.pyi @@ -1,17 +1,11 @@ -""" -distutils.command.clean - -Implements the Distutils 'clean' command. 
-""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class clean(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] build_base: Any build_lib: Any build_temp: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/config.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/config.pyi index 3776063..b091009 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/config.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/config.pyi @@ -1,29 +1,17 @@ -""" -distutils.command.config - -Implements the Distutils 'config' command, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". 
-""" - from _typeshed import StrOrBytesPath from collections.abc import Sequence from re import Pattern -from typing import Any, Literal +from typing import Any, ClassVar, Final, Literal from ..ccompiler import CCompiler from ..cmd import Command -LANG_EXT: dict[str, str] +LANG_EXT: Final[dict[str, str]] class config(Command): description: str # Tuple is full name, short name, description - user_options: Sequence[tuple[str, str | None, str]] + user_options: ClassVar[list[tuple[str, str | None, str]]] compiler: str | CCompiler cc: str | None include_dirs: Sequence[str] | None @@ -41,15 +29,7 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """ - Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - ... + ) -> bool: ... def search_cpp( self, pattern: Pattern[str] | str, @@ -57,24 +37,10 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """ - Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 'pattern' should either be a compiled regex object or a - string containing a regex. If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - ... + ) -> bool: ... def try_compile( self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: - """ - Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - ... + ) -> bool: ... 
def try_link( self, body: str, @@ -83,13 +49,7 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """ - Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - ... + ) -> bool: ... def try_run( self, body: str, @@ -98,13 +58,7 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """ - Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. - """ - ... + ) -> bool: ... def check_func( self, func: str, @@ -114,22 +68,7 @@ class config(Command): library_dirs: Sequence[str] | None = None, decl: bool | Literal[0, 1] = 0, call: bool | Literal[0, 1] = 0, - ) -> bool: - """ - Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. - """ - ... + ) -> bool: ... def check_lib( self, library: str, @@ -137,31 +76,9 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, other_libraries: list[str] = [], - ) -> bool: - """ - Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 
'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - ... + ) -> bool: ... def check_header( self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: - """ - Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - ... - -def dump_file(filename: StrOrBytesPath, head: Any | None = None) -> None: - """ - Dumps a file content into log.info. + ) -> bool: ... - If head is not None, will be dumped before the file content. - """ - ... +def dump_file(filename: StrOrBytesPath, head: Any | None = None) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install.pyi index 6130d38..24a4eff 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install.pyi @@ -1,23 +1,22 @@ -""" -distutils.command.install - -Implements the Distutils 'install' command. -""" - +import sys from collections.abc import Callable -from typing import Any, ClassVar +from typing import Any, ClassVar, Final, Literal from ..cmd import Command -HAS_USER_SITE: bool -SCHEME_KEYS: tuple[str, ...] 
-INSTALL_SCHEMES: dict[str, dict[Any, Any]] +HAS_USER_SITE: Final[bool] + +SCHEME_KEYS: Final[tuple[Literal["purelib"], Literal["platlib"], Literal["headers"], Literal["scripts"], Literal["data"]]] +INSTALL_SCHEMES: Final[dict[str, dict[str, str]]] + +if sys.version_info < (3, 10): + WINDOWS_SCHEME: Final[dict[str, str]] class install(Command): description: str - user_options: Any - boolean_options: Any - negative_opt: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] prefix: str | None exec_prefix: Any home: str | None @@ -43,84 +42,29 @@ class install(Command): build_base: Any build_lib: Any record: Any - def initialize_options(self) -> None: - """Initializes options.""" - ... + def initialize_options(self) -> None: ... config_vars: Any install_libbase: Any - def finalize_options(self) -> None: - """Finalizes options.""" - ... - def dump_dirs(self, msg) -> None: - """Dumps the list of user options.""" - ... - def finalize_unix(self) -> None: - """Finalizes options for posix platforms.""" - ... - def finalize_other(self) -> None: - """Finalizes options for non-posix platforms""" - ... - def select_scheme(self, name) -> None: - """Sets the install directories by applying the install schemes.""" - ... - def expand_basedirs(self) -> None: - """ - Calls `os.path.expanduser` on install_base, install_platbase and - root. - """ - ... - def expand_dirs(self) -> None: - """Calls `os.path.expanduser` on install dirs.""" - ... - def convert_paths(self, *names) -> None: - """Call `convert_path` over `names`.""" - ... + def finalize_options(self) -> None: ... + def dump_dirs(self, msg) -> None: ... + def finalize_unix(self) -> None: ... + def finalize_other(self) -> None: ... + def select_scheme(self, name) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def convert_paths(self, *names) -> None: ... 
path_file: Any extra_dirs: Any - def handle_extra_path(self) -> None: - """Set `path_file` and `extra_dirs` using `extra_path`.""" - ... - def change_roots(self, *names) -> None: - """Change the install directories pointed by name using root.""" - ... - def create_home_path(self) -> None: - """Create directories under ~.""" - ... - def run(self) -> None: - """Runs the command.""" - ... - def create_path_file(self) -> None: - """Creates the .pth file""" - ... - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - ... - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - ... - def has_lib(self): - """ - Returns true if the current distribution has any Python - modules to install. - """ - ... - def has_headers(self): - """ - Returns true if the current distribution has any headers to - install. - """ - ... - def has_scripts(self): - """ - Returns true if the current distribution has any scripts to. - install. - """ - ... - def has_data(self): - """ - Returns true if the current distribution has any data to. - install. - """ - ... + def handle_extra_path(self) -> None: ... + def change_roots(self, *names) -> None: ... + def create_home_path(self) -> None: ... + def run(self) -> None: ... + def create_path_file(self) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... + def has_lib(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data(self): ... 
# Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_data.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_data.pyi index 90d38b8..342c7a7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_data.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_data.pyi @@ -1,18 +1,11 @@ -""" -distutils.command.install_data - -Implements the Distutils 'install_data' command, for installing -platform-independent data files. -""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class install_data(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] install_dir: Any outfiles: Any root: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_egg_info.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_egg_info.pyi index 1aa7de4..3fd5498 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_egg_info.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_egg_info.pyi @@ -1,18 +1,10 @@ -""" -distutils.command.install_egg_info - -Implements the Distutils 'install_egg_info' command, for installing -a package's PKG-INFO metadata. -""" - from typing import Any, ClassVar from ..cmd import Command class install_egg_info(Command): - """Install an .egg-info file for the package""" description: ClassVar[str] - user_options: ClassVar[list[tuple[str, str | None, str]]] + user_options: ClassVar[list[tuple[str, str, str]]] install_dir: Any def initialize_options(self) -> None: ... 
target: Any @@ -21,25 +13,6 @@ class install_egg_info(Command): def run(self) -> None: ... def get_outputs(self) -> list[str]: ... -def safe_name(name): - """ - Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - ... -def safe_version(version): - """ - Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - ... -def to_filename(name): - """ - Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. - """ - ... +def safe_name(name): ... +def safe_version(version): ... +def to_filename(name): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_headers.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_headers.pyi index 40f34a1..7854d23 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_headers.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_headers.pyi @@ -1,18 +1,11 @@ -""" -distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory. 
-""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class install_headers(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str, str]]] + boolean_options: ClassVar[list[str]] install_dir: Any force: int outfiles: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_lib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_lib.pyi index 57e9ab9..149ecae 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_lib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_lib.pyi @@ -1,21 +1,14 @@ -""" -distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules). -""" - -from typing import Any +from typing import Any, ClassVar, Final from ..cmd import Command -PYTHON_SOURCE_EXTENSION: str +PYTHON_SOURCE_EXTENSION: Final = ".py" class install_lib(Command): description: str - user_options: Any - boolean_options: Any - negative_opt: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] install_dir: Any build_dir: Any force: int @@ -28,18 +21,5 @@ class install_lib(Command): def build(self) -> None: ... def install(self): ... def byte_compile(self, files) -> None: ... - def get_outputs(self): - """ - Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - ... - def get_inputs(self): - """ - Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. - """ - ... 
+ def get_outputs(self): ... + def get_inputs(self): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_scripts.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_scripts.pyi index 9382acd..5ee5589 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_scripts.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/install_scripts.pyi @@ -1,18 +1,11 @@ -""" -distutils.command.install_scripts - -Implements the Distutils 'install_scripts' command, for installing -Python scripts. -""" - -from typing import Any +from typing import Any, ClassVar from ..cmd import Command class install_scripts(Command): description: str - user_options: Any - boolean_options: Any + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] install_dir: Any force: int build_dir: Any diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/register.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/register.pyi index cb13202..a5e251d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/register.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/register.pyi @@ -1,9 +1,3 @@ -""" -distutils.command.register - -Implements the Distutils 'register' command (register with the repository). -""" - from collections.abc import Callable from typing import Any, ClassVar @@ -18,54 +12,9 @@ class register(PyPIRCCommand): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_metadata(self) -> None: - """Deprecated API.""" - ... - def classifiers(self) -> None: - """ - Fetch the list of classifiers from the server. - - """ - ... 
- def verify_metadata(self) -> None: - """ - Send the metadata to the package index server to be checked. - - """ - ... - def send_metadata(self) -> None: - """ - Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. - """ - ... + def check_metadata(self) -> None: ... + def classifiers(self) -> None: ... + def verify_metadata(self) -> None: ... + def send_metadata(self) -> None: ... def build_post_data(self, action): ... - def post_to_server(self, data, auth: Any | None = None): - """ - Post a query to the server, and return a string response. - - """ - ... + def post_to_server(self, data, auth: Any | None = None): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/sdist.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/sdist.pyi index 75471b7..5b7fe24 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/sdist.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/sdist.pyi @@ -1,37 +1,21 @@ -""" -distutils.command.sdist - -Implements the Distutils 'sdist' command (create a source distribution). 
-""" - +from _typeshed import Unused from collections.abc import Callable from typing import Any, ClassVar from ..cmd import Command -def show_formats() -> None: - """ - Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ - ... +def show_formats() -> None: ... class sdist(Command): description: str - def checking_metadata(self): - """ - Callable used for the check sub-command. - - Placed here so user_options can view it - """ - ... - user_options: Any - boolean_options: Any - help_options: Any - negative_opt: Any + def checking_metadata(self): ... + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] + negative_opt: ClassVar[dict[str, str]] # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] - READMES: Any + READMES: ClassVar[tuple[str, ...]] template: Any manifest: Any use_defaults: int @@ -49,90 +33,13 @@ class sdist(Command): def finalize_options(self) -> None: ... filelist: Any def run(self) -> None: ... - def check_metadata(self) -> None: - """Deprecated API.""" - ... - def get_file_list(self) -> None: - """ - Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. - """ - ... - def add_defaults(self) -> None: - """ - Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. 
- - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. - """ - ... - def read_template(self) -> None: - """ - Read and parse manifest template file named by self.template. - - (usually "MANIFEST.in") The parsing and processing is done by - 'self.filelist', which updates itself accordingly. - """ - ... - def prune_file_list(self) -> None: - """ - Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - ... - def write_manifest(self) -> None: - """ - Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - ... - def read_manifest(self) -> None: - """ - Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - ... - def make_release_tree(self, base_dir, files) -> None: - """ - Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - ... - def make_distribution(self) -> None: - """ - Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. 
- Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - ... - def get_archive_files(self): - """ - Return the list of archive files created when the command - was run, or None if the command hasn't run yet. - """ - ... + def check_metadata(self) -> None: ... + def get_file_list(self) -> None: ... + def add_defaults(self) -> None: ... + def read_template(self) -> None: ... + def prune_file_list(self) -> None: ... + def write_manifest(self) -> None: ... + def read_manifest(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def make_distribution(self) -> None: ... + def get_archive_files(self): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/upload.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/upload.pyi index 5c96379..e6b7782 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/upload.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/command/upload.pyi @@ -1,10 +1,3 @@ -""" -distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to a package -index). -""" - from typing import Any, ClassVar from ..config import PyPIRCCommand diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/config.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/config.pyi index 5792e2e..5814a82 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/config.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/config.pyi @@ -1,10 +1,3 @@ -""" -distutils.pypirc - -Provides the PyPIRCCommand class, the base class for the command classes -that uses .pypirc in the distutils.command package. 
-""" - from abc import abstractmethod from distutils.cmd import Command from typing import ClassVar @@ -12,32 +5,13 @@ from typing import ClassVar DEFAULT_PYPIRC: str class PyPIRCCommand(Command): - """ - Base command that knows how to handle the .pypirc file - - """ DEFAULT_REPOSITORY: ClassVar[str] DEFAULT_REALM: ClassVar[str] repository: None realm: None user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] - def initialize_options(self) -> None: - """Initialize options.""" - ... - def finalize_options(self) -> None: - """Finalizes options.""" - ... + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... @abstractmethod - def run(self) -> None: - """ - A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - ... + def run(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/core.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/core.pyi index 89019fe..a4d21f8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/core.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/core.pyi @@ -1,20 +1,11 @@ -""" -distutils.core - -The only module that needs to be imported to use the Distutils; provides -the 'setup' function (which is to be called from the setup script). Also -indirectly provides the Distribution and Command classes, although they are -really defined in distutils.dist and distutils.cmd. 
-""" - from _typeshed import StrOrBytesPath from collections.abc import Mapping from distutils.cmd import Command as Command from distutils.dist import Distribution as Distribution from distutils.extension import Extension as Extension -from typing import Any, Literal +from typing import Any, Final, Literal -USAGE: str +USAGE: Final[str] def gen_usage(script_name: StrOrBytesPath) -> str: ... @@ -62,70 +53,5 @@ def setup( password: str = ..., fullname: str = ..., **attrs: Any, -) -> Distribution: - """ - The gateway to the Distutils: do everything your setup script needs - to do, in a highly flexible and user-driven way. Briefly: create a - Distribution instance; find and parse config files; parse the command - line; run each Distutils command found there, customized by the options - supplied to 'setup()' (as keyword arguments), in config files, and on - the command line. - - The Distribution instance might be an instance of a class supplied via - the 'distclass' keyword argument to 'setup'; if no such class is - supplied, then the Distribution class (in dist.py) is instantiated. - All other arguments to 'setup' (except for 'cmdclass') are used to set - attributes of the Distribution instance. - - The 'cmdclass' argument, if supplied, is a dictionary mapping command - names to command classes. Each command encountered on the command line - will be turned into a command class, which is in turn instantiated; any - class found in 'cmdclass' is used in place of the default, which is - (for command 'foo_bar') class 'foo_bar' in module - 'distutils.command.foo_bar'. The command class must provide a - 'user_options' attribute which is a list of option specifiers for - 'distutils.fancy_getopt'. Any command-line options between the current - and the next command are used to set attributes of the current command - object. - - When the entire command-line has been successfully parsed, calls the - 'run()' method on each command object in turn. 
This method will be - driven entirely by the Distribution object (which each command object - has a reference to, thanks to its constructor), and the - command-specific options that became attributes of each command - object. - """ - ... -def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: - """ - Run a setup script in a somewhat controlled environment, and - return the Distribution instance that drives things. This is useful - if you need to find out the distribution meta-data (passed as - keyword args from 'script' to 'setup()', or the contents of the - config files or command-line. - - 'script_name' is a file that will be read and run with 'exec()'; - 'sys.argv[0]' will be replaced with 'script' for the duration of the - call. 'script_args' is a list of strings; if supplied, - 'sys.argv[1:]' will be replaced by 'script_args' for the duration of - the call. - - 'stop_after' tells 'setup()' when to stop processing; possible - values: - init - stop after the Distribution instance has been created and - populated with the keyword arguments to 'setup()' - config - stop after config files have been parsed (and their data - stored in the Distribution instance) - commandline - stop after the command-line ('sys.argv[1:]' or 'script_args') - have been parsed (and the data stored in the Distribution) - run [default] - stop after all commands have been run (the same as if 'setup()' - had been called in the usual way - - Returns the Distribution instance, which provides all information - used to drive the Distutils. - """ - ... +) -> Distribution: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cygwinccompiler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cygwinccompiler.pyi index 9e577cb..80924d6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cygwinccompiler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/cygwinccompiler.pyi @@ -1,70 +1,20 @@ -""" -distutils.cygwinccompiler - -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). -""" - from distutils.unixccompiler import UnixCCompiler from distutils.version import LooseVersion from re import Pattern -from typing import Literal - -def get_msvcr() -> list[str] | None: - """ - Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - ... - -class CygwinCCompiler(UnixCCompiler): - """ - Handles the Cygwin port of the GNU C compiler to Windows. - - """ - ... -class Mingw32CCompiler(CygwinCCompiler): - """ - Handles the Mingw32 port of the GNU C compiler to Windows. - - """ - ... - -CONFIG_H_OK: str -CONFIG_H_NOTOK: str -CONFIG_H_UNCERTAIN: str - -def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: - """ - Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: +from typing import Final, Literal - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h +def get_msvcr() -> list[str] | None: ... - 'details' is a human-readable string explaining the situation. +class CygwinCCompiler(UnixCCompiler): ... +class Mingw32CCompiler(CygwinCCompiler): ... 
- Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - ... +CONFIG_H_OK: Final = "ok" +CONFIG_H_NOTOK: Final = "not ok" +CONFIG_H_UNCERTAIN: Final = "uncertain" -RE_VERSION: Pattern[bytes] +def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ... -def get_versions() -> tuple[LooseVersion | None, ...]: - """ - Try to find out the versions of gcc, ld and dllwrap. +RE_VERSION: Final[Pattern[bytes]] - If not possible it returns None for it. - """ - ... -def is_cygwingcc() -> bool: - """Try to determine if the gcc that would be used is from cygwin.""" - ... +def get_versions() -> tuple[LooseVersion | None, ...]: ... +def is_cygwingcc() -> bool: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/debug.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/debug.pyi index 11f28a8..3009588 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/debug.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/debug.pyi @@ -1 +1,3 @@ -DEBUG: bool | None +from typing import Final + +DEBUG: Final[str | None] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dep_util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dep_util.pyi index 34818c8..058377a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dep_util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dep_util.pyi @@ -1,11 +1,3 @@ -""" -distutils.dep_util - -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis. 
-""" - from _typeshed import StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Iterable from typing import Literal, TypeVar @@ -13,38 +5,10 @@ from typing import Literal, TypeVar _SourcesT = TypeVar("_SourcesT", bound=StrOrBytesPath) _TargetsT = TypeVar("_TargetsT", bound=StrOrBytesPath) -def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: - """ - Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. - """ - ... +def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: ... def newer_pairwise( sources: SupportsLenAndGetItem[_SourcesT], targets: SupportsLenAndGetItem[_TargetsT] -) -> tuple[list[_SourcesT], list[_TargetsT]]: - """ - Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. Return a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - ... +) -> tuple[list[_SourcesT], list[_TargetsT]]: ... def newer_group( sources: Iterable[StrOrBytesPath], target: StrOrBytesPath, missing: Literal["error", "ignore", "newer"] = "error" -) -> Literal[0, 1]: - """ - Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. 
- 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). - """ - ... +) -> Literal[0, 1]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dir_util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dir_util.pyi index 4211f23..23e2c3b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dir_util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dir_util.pyi @@ -1,43 +1,15 @@ -""" -distutils.dir_util - -Utility functions for manipulating directories and directory trees. -""" - from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal -def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: - """ - Create a directory and any missing ancestor directories. - - If the directory already exists (or if 'name' is the empty string, which - means the current directory, which of course exists), then do nothing. - Raise DistutilsFileError if unable to create some directory along the way - (eg. some sub-path exists, but is a file rather than a directory). - If 'verbose' is true, print a one-line summary of each mkdir to stdout. - Return the list of directories actually created. - """ - ... +def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: ... 
def create_tree( base_dir: StrPath, files: Iterable[StrPath], mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> None: - """ - Create all the empty directories under 'base_dir' needed to put 'files' - there. - - 'base_dir' is just the name of a directory which doesn't necessarily - exist yet; 'files' is a list of filenames to be interpreted relative to - 'base_dir'. 'base_dir' + the directory portion of every file in 'files' - will be created if it doesn't already exist. 'mode', 'verbose' and - 'dry_run' flags are as for 'mkpath()'. - """ - ... +) -> None: ... def copy_tree( src: StrPath, dst: str, @@ -47,33 +19,5 @@ def copy_tree( update: bool | Literal[0, 1] = 0, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> list[str]: - """ - Copy an entire directory tree 'src' to a new location 'dst'. - - Both 'src' and 'dst' must be directory names. If 'src' is not a - directory, raise DistutilsFileError. If 'dst' does not exist, it is - created with 'mkpath()'. The end result of the copy is that every - file in 'src' is copied to 'dst', and directories under 'src' are - recursively copied to 'dst'. Return the list of files that were - copied or might have been copied, using their output name. The - return value is unaffected by 'update' or 'dry_run': it is simply - the list of all files under 'src', with the names changed to be - under 'dst'. - - 'preserve_mode' and 'preserve_times' are the same as for - 'copy_file'; note that they only apply to regular files, not to - directories. If 'preserve_symlinks' is true, symlinks will be - copied as symlinks (on platforms that support them!); otherwise - (the default), the destination of the symlink will be copied. - 'update' and 'verbose' are the same as for 'copy_file'. - """ - ... -def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: - """ - Recursively remove an entire directory tree. 
- - Any errors are ignored (apart from being reported to stdout if 'verbose' - is true). - """ - ... +) -> list[str]: ... +def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dist.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dist.pyi index ee72921..75fc7db 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dist.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/dist.pyi @@ -1,15 +1,28 @@ -""" -distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite -from collections.abc import Iterable, Mapping +from collections.abc import Iterable, MutableMapping from distutils.cmd import Command +from distutils.command.bdist import bdist +from distutils.command.bdist_dumb import bdist_dumb +from distutils.command.bdist_rpm import bdist_rpm +from distutils.command.build import build +from distutils.command.build_clib import build_clib +from distutils.command.build_ext import build_ext +from distutils.command.build_py import build_py +from distutils.command.build_scripts import build_scripts +from distutils.command.check import check +from distutils.command.clean import clean +from distutils.command.config import config +from distutils.command.install import install +from distutils.command.install_data import install_data +from distutils.command.install_egg_info import install_egg_info +from distutils.command.install_headers import install_headers +from distutils.command.install_lib import install_lib +from distutils.command.install_scripts import install_scripts +from distutils.command.register import register +from distutils.command.sdist import sdist +from 
distutils.command.upload import upload from re import Pattern -from typing import IO, Any, ClassVar, Literal, TypeVar, overload +from typing import IO, ClassVar, Literal, TypeVar, overload from typing_extensions import TypeAlias command_re: Pattern[str] @@ -18,10 +31,6 @@ _OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str _CommandT = TypeVar("_CommandT", bound=Command) class DistributionMetadata: - """ - Dummy class to hold the distribution meta-data: name, version, - author, and so forth. - """ def __init__(self, path: StrOrBytesPath | None = None) -> None: ... name: str | None version: str | None @@ -40,21 +49,9 @@ class DistributionMetadata: provides: list[str] | None requires: list[str] | None obsoletes: list[str] | None - def read_pkg_file(self, file: IO[str]) -> None: - """Reads the metadata values from a file object.""" - ... - def write_pkg_info(self, base_dir: StrPath) -> None: - """ - Write the PKG-INFO file into the release tree. - - """ - ... - def write_pkg_file(self, file: SupportsWrite[str]) -> None: - """ - Write the PKG-INFO format data to a file object. - - """ - ... + def read_pkg_file(self, file: IO[str]) -> None: ... + def write_pkg_info(self, base_dir: StrPath) -> None: ... + def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... def get_name(self) -> str: ... def get_version(self) -> str: ... def get_fullname(self) -> str: ... @@ -81,68 +78,19 @@ class DistributionMetadata: def set_obsoletes(self, value: Iterable[str]) -> None: ... class Distribution: - """ - The core of the Distutils. Most of the work hiding behind 'setup' - is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. - - Setup scripts will almost never instantiate Distribution directly, - unless the 'setup()' function is totally inadequate to their needs. 
- However, it is conceivable that a setup script might wish to subclass - Distribution for some specialized purpose, and then pass the subclass - to 'setup()' as the 'distclass' keyword argument. If so, it is - necessary to respect the expectations that 'setup' has of Distribution. - See the code for 'setup()', in core.py, for details. - """ cmdclass: dict[str, type[Command]] metadata: DistributionMetadata - def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: - """ - Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - ... - def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: - """ - Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - ... + def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... - @overload - def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: - """ - Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. - """ - ... 
- @overload - def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: - """ - Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. - """ - ... global_options: ClassVar[_OptionsList] common_usage: ClassVar[str] display_options: ClassVar[_OptionsList] display_option_names: ClassVar[list[str]] negative_opt: ClassVar[dict[str, str]] - verbose: int - dry_run: int - help: int + verbose: Literal[0, 1] + dry_run: Literal[0, 1] + help: Literal[0, 1] command_packages: list[str] | None script_name: str | None script_args: list[str] | None @@ -167,164 +115,152 @@ class Distribution: def dump_option_dicts( self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = "" ) -> None: ... - def find_config_files(self): - """ - Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. - """ - ... + def find_config_files(self): ... commands: Incomplete - def parse_command_line(self): - """ - Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in core.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. 
Then, it is alternately scanned for Distutils commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command-line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. Return - true if command-line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). - """ - ... - def finalize_options(self) -> None: - """ - Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - ... - def handle_display_options(self, option_order): - """ - If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - ... - def print_command_list(self, commands, header, max_length) -> None: - """ - Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - ... - def print_commands(self) -> None: - """ - Print out a help message listing all available commands with a - description of each. The list is divided into "standard commands" - (listed in distutils.command.__all__) and "extra commands" - (mentioned in self.cmdclass, but not a standard command). The - descriptions come from the command class attribute - 'description'. - """ - ... - def get_command_list(self): - """ - Get a list of (command, description) tuples. 
- The list is divided into "standard commands" (listed in - distutils.command.__all__) and "extra commands" (mentioned in - self.cmdclass, but not a standard command). The descriptions come - from the command class attribute 'description'. - """ - ... - def get_command_packages(self): - """Return a list of packages from which commands are loaded.""" - ... - def get_command_class(self, command: str) -> type[Command]: - """ - Return the class that implements the Distutils command named by - 'command'. First we check the 'cmdclass' dictionary; if the - command is mentioned there, we fetch the class object from the - dictionary and return it. Otherwise we load the command module - ("distutils.command." + command) and fetch the command class from - the module. The loaded class is also stored in 'cmdclass' - to speed future calls to 'get_command_class()'. - - Raises DistutilsModuleError if the expected module could not be - found, or if that module does not define the expected class. - """ - ... - @overload - def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: - """ - Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. - """ - ... 
- @overload - def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: - """ - Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. - """ - ... + def parse_command_line(self): ... + def finalize_options(self) -> None: ... + def handle_display_options(self, option_order): ... + def print_command_list(self, commands, header, max_length) -> None: ... + def print_commands(self) -> None: ... + def get_command_list(self): ... + def get_command_packages(self): ... + # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. + @overload + def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ... + @overload + def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ... + @overload + def get_command_obj(self, command: Literal["bdist_rpm"], create: Literal[1, True] = 1) -> bdist_rpm: ... + @overload + def get_command_obj(self, command: Literal["build"], create: Literal[1, True] = 1) -> build: ... + @overload + def get_command_obj(self, command: Literal["build_clib"], create: Literal[1, True] = 1) -> build_clib: ... 
+ @overload + def get_command_obj(self, command: Literal["build_ext"], create: Literal[1, True] = 1) -> build_ext: ... + @overload + def get_command_obj(self, command: Literal["build_py"], create: Literal[1, True] = 1) -> build_py: ... + @overload + def get_command_obj(self, command: Literal["build_scripts"], create: Literal[1, True] = 1) -> build_scripts: ... + @overload + def get_command_obj(self, command: Literal["check"], create: Literal[1, True] = 1) -> check: ... + @overload + def get_command_obj(self, command: Literal["clean"], create: Literal[1, True] = 1) -> clean: ... + @overload + def get_command_obj(self, command: Literal["config"], create: Literal[1, True] = 1) -> config: ... + @overload + def get_command_obj(self, command: Literal["install"], create: Literal[1, True] = 1) -> install: ... + @overload + def get_command_obj(self, command: Literal["install_data"], create: Literal[1, True] = 1) -> install_data: ... + @overload + def get_command_obj(self, command: Literal["install_egg_info"], create: Literal[1, True] = 1) -> install_egg_info: ... + @overload + def get_command_obj(self, command: Literal["install_headers"], create: Literal[1, True] = 1) -> install_headers: ... + @overload + def get_command_obj(self, command: Literal["install_lib"], create: Literal[1, True] = 1) -> install_lib: ... + @overload + def get_command_obj(self, command: Literal["install_scripts"], create: Literal[1, True] = 1) -> install_scripts: ... + @overload + def get_command_obj(self, command: Literal["register"], create: Literal[1, True] = 1) -> register: ... + @overload + def get_command_obj(self, command: Literal["sdist"], create: Literal[1, True] = 1) -> sdist: ... + @overload + def get_command_obj(self, command: Literal["upload"], create: Literal[1, True] = 1) -> upload: ... + @overload + def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ... 
+ # Not replicating the overloads for "Command | None", user may use "isinstance" + @overload + def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ... + @overload + def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ... + @overload + def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ... + @overload + def get_command_class(self, command: Literal["bdist_rpm"]) -> type[bdist_rpm]: ... + @overload + def get_command_class(self, command: Literal["build"]) -> type[build]: ... + @overload + def get_command_class(self, command: Literal["build_clib"]) -> type[build_clib]: ... + @overload + def get_command_class(self, command: Literal["build_ext"]) -> type[build_ext]: ... + @overload + def get_command_class(self, command: Literal["build_py"]) -> type[build_py]: ... + @overload + def get_command_class(self, command: Literal["build_scripts"]) -> type[build_scripts]: ... + @overload + def get_command_class(self, command: Literal["check"]) -> type[check]: ... + @overload + def get_command_class(self, command: Literal["clean"]) -> type[clean]: ... + @overload + def get_command_class(self, command: Literal["config"]) -> type[config]: ... + @overload + def get_command_class(self, command: Literal["install"]) -> type[install]: ... + @overload + def get_command_class(self, command: Literal["install_data"]) -> type[install_data]: ... + @overload + def get_command_class(self, command: Literal["install_egg_info"]) -> type[install_egg_info]: ... + @overload + def get_command_class(self, command: Literal["install_headers"]) -> type[install_headers]: ... + @overload + def get_command_class(self, command: Literal["install_lib"]) -> type[install_lib]: ... + @overload + def get_command_class(self, command: Literal["install_scripts"]) -> type[install_scripts]: ... + @overload + def get_command_class(self, command: Literal["register"]) -> type[register]: ... 
+ @overload + def get_command_class(self, command: Literal["sdist"]) -> type[sdist]: ... + @overload + def get_command_class(self, command: Literal["upload"]) -> type[upload]: ... + @overload + def get_command_class(self, command: str) -> type[Command]: ... + @overload + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ... + @overload + def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ... + @overload + def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool = False) -> bdist_rpm: ... + @overload + def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool = False) -> build: ... + @overload + def reinitialize_command(self, command: Literal["build_clib"], reinit_subcommands: bool = False) -> build_clib: ... + @overload + def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool = False) -> build_ext: ... + @overload + def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool = False) -> build_py: ... + @overload + def reinitialize_command(self, command: Literal["build_scripts"], reinit_subcommands: bool = False) -> build_scripts: ... + @overload + def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool = False) -> check: ... + @overload + def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool = False) -> clean: ... + @overload + def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool = False) -> config: ... + @overload + def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool = False) -> install: ... + @overload + def reinitialize_command(self, command: Literal["install_data"], reinit_subcommands: bool = False) -> install_data: ... 
+ @overload + def reinitialize_command( + self, command: Literal["install_egg_info"], reinit_subcommands: bool = False + ) -> install_egg_info: ... + @overload + def reinitialize_command(self, command: Literal["install_headers"], reinit_subcommands: bool = False) -> install_headers: ... + @overload + def reinitialize_command(self, command: Literal["install_lib"], reinit_subcommands: bool = False) -> install_lib: ... + @overload + def reinitialize_command(self, command: Literal["install_scripts"], reinit_subcommands: bool = False) -> install_scripts: ... + @overload + def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool = False) -> register: ... + @overload + def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool = False) -> sdist: ... + @overload + def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool = False) -> upload: ... + @overload + def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ... + @overload + def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ... def announce(self, msg, level: int = 2) -> None: ... - def run_commands(self) -> None: - """ - Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - ... - def run_command(self, command: str) -> None: - """ - Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - ... + def run_commands(self) -> None: ... + def run_command(self, command: str) -> None: ... 
def has_pure_modules(self) -> bool: ... def has_ext_modules(self) -> bool: ... def has_c_libraries(self) -> bool: ... @@ -334,7 +270,7 @@ class Distribution: def has_data_files(self) -> bool: ... def is_pure(self) -> bool: ... - # Getter methods generated in __init__ + # Default getter methods generated in __init__ from self.metadata._METHOD_BASENAMES def get_name(self) -> str: ... def get_version(self) -> str: ... def get_fullname(self) -> str: ... @@ -356,3 +292,26 @@ class Distribution: def get_requires(self) -> list[str]: ... def get_provides(self) -> list[str]: ... def get_obsoletes(self) -> list[str]: ... + + # Default attributes generated in __init__ from self.display_option_names + help_commands: bool | Literal[0] + name: str | Literal[0] + version: str | Literal[0] + fullname: str | Literal[0] + author: str | Literal[0] + author_email: str | Literal[0] + maintainer: str | Literal[0] + maintainer_email: str | Literal[0] + contact: str | Literal[0] + contact_email: str | Literal[0] + url: str | Literal[0] + license: str | Literal[0] + licence: str | Literal[0] + description: str | Literal[0] + long_description: str | Literal[0] + platforms: str | list[str] | Literal[0] + classifiers: str | list[str] | Literal[0] + keywords: str | list[str] | Literal[0] + provides: list[str] | Literal[0] + requires: list[str] | Literal[0] + obsoletes: list[str] | Literal[0] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/errors.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/errors.pyi index 3b7b448..e483362 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/errors.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/errors.pyi @@ -1,110 +1,19 @@ -""" -distutils.errors - -Provides exceptions used by the Distutils modules. 
Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). - -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error". -""" - -class DistutilsError(Exception): - """The root of all Distutils evil.""" - ... -class DistutilsModuleError(DistutilsError): - """ - Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes). - """ - ... -class DistutilsClassError(DistutilsError): - """ - Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. implementing some part of the - "command "interface. - """ - ... -class DistutilsGetoptError(DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus.""" - ... -class DistutilsArgError(DistutilsError): - """ - Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage. - """ - ... -class DistutilsFileError(DistutilsError): - """ - Any problems in the filesystem: expected file not found, etc. - Typically this is for problems that we detect before OSError - could be raised. - """ - ... -class DistutilsOptionError(DistutilsError): - """ - Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead. - """ - ... -class DistutilsSetupError(DistutilsError): - """ - For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'. - """ - ... 
-class DistutilsPlatformError(DistutilsError): - """ - We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. trying to compile - C files on a platform not supported by a CCompiler subclass. - """ - ... -class DistutilsExecError(DistutilsError): - """ - Any problems executing an external program (such as the C - compiler, when compiling C files). - """ - ... -class DistutilsInternalError(DistutilsError): - """ - Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!). - """ - ... -class DistutilsTemplateError(DistutilsError): - """Syntax error in a file list template.""" - ... -class DistutilsByteCompileError(DistutilsError): - """Byte compile error.""" - ... -class CCompilerError(Exception): - """Some compile/link operation failed.""" - ... -class PreprocessError(CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - ... -class CompileError(CCompilerError): - """Failure to compile one or more C/C++ source files.""" - ... -class LibError(CCompilerError): - """ - Failure to create a static library from one or more C/C++ object - files. - """ - ... -class LinkError(CCompilerError): - """ - Failure to link one or more C/C++ object files into an executable - or shared library file. - """ - ... -class UnknownFileError(CCompilerError): - """Attempt to process an unknown file type.""" - ... +class DistutilsError(Exception): ... +class DistutilsModuleError(DistutilsError): ... +class DistutilsClassError(DistutilsError): ... +class DistutilsGetoptError(DistutilsError): ... +class DistutilsArgError(DistutilsError): ... +class DistutilsFileError(DistutilsError): ... +class DistutilsOptionError(DistutilsError): ... +class DistutilsSetupError(DistutilsError): ... +class DistutilsPlatformError(DistutilsError): ... +class DistutilsExecError(DistutilsError): ... +class DistutilsInternalError(DistutilsError): ... 
+class DistutilsTemplateError(DistutilsError): ... +class DistutilsByteCompileError(DistutilsError): ... +class CCompilerError(Exception): ... +class PreprocessError(CCompilerError): ... +class CompileError(CCompilerError): ... +class LibError(CCompilerError): ... +class LinkError(CCompilerError): ... +class UnknownFileError(CCompilerError): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/extension.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/extension.pyi index 9ca1228..789bbf6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/extension.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/extension.pyi @@ -1,75 +1,4 @@ -""" -distutils.extension - -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts. -""" - class Extension: - """ - Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. - *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. 
- include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. 
- depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. - """ name: str sources: list[str] include_dirs: list[str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi index 0fa6e25..c4d3741 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi @@ -1,112 +1,34 @@ -""" -distutils.fancy_getopt - -Wrapper around the standard getopt module that provides the following -additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object -""" - from collections.abc import Iterable, Mapping from re import Pattern -from typing import Any, overload +from typing import Any, Final, overload from typing_extensions import TypeAlias _Option: TypeAlias = tuple[str, str | None, str] _GR: TypeAlias = tuple[list[str], OptionDummy] -longopt_pat: str -longopt_re: Pattern[str] -neg_alias_re: Pattern[str] -longopt_xlate: dict[int, int] +longopt_pat: Final = r"[a-zA-Z](?:[a-zA-Z0-9-]*)" +longopt_re: Final[Pattern[str]] +neg_alias_re: Final[Pattern[str]] +longopt_xlate: Final[dict[int, int]] class FancyGetopt: - """ - Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - 
* boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO kinda wrong, `getopt(object=object())` is invalid @overload - def getopt(self, args: list[str] | None = None) -> _GR: - """ - Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - ... + def getopt(self, args: list[str] | None = None) -> _GR: ... @overload - def getopt(self, args: list[str] | None, object: Any) -> list[str]: - """ - Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - ... - def get_option_order(self) -> list[tuple[str, str]]: - """ - Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - ... - def generate_help(self, header: str | None = None) -> list[str]: - """ - Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ - ... 
+ def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... + def get_option_order(self) -> list[tuple[str, str]]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... def fancy_getopt( options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None ) -> list[str] | _GR: ... -WS_TRANS: dict[int, str] - -def wrap_text(text: str, width: int) -> list[str]: - """ - wrap_text(text : string, width : int) -> [string] +WS_TRANS: Final[dict[int, str]] - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - ... -def translate_longopt(opt: str) -> str: - """ - Convert a long option name to a valid Python identifier by - changing "-" to "_". - """ - ... +def wrap_text(text: str, width: int) -> list[str]: ... +def translate_longopt(opt: str) -> str: ... class OptionDummy: - """ - Dummy class just used as a place to hold command-line option - values as instance attributes. - """ - def __init__(self, options: Iterable[str] = []) -> None: - """ - Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None. - """ - ... + def __init__(self, options: Iterable[str] = []) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/file_util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/file_util.pyi index cca9107..873d23e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/file_util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/file_util.pyi @@ -1,9 +1,3 @@ -""" -distutils.file_util - -Utility functions for operating on single files. 
-""" - from _typeshed import BytesPath, StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal, TypeVar, overload @@ -21,33 +15,7 @@ def copy_file( link: str | None = None, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> tuple[_StrPathT | str, bool]: - """ - Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. (If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - ... +) -> tuple[_StrPathT | str, bool]: ... @overload def copy_file( src: BytesPath, @@ -58,62 +26,13 @@ def copy_file( link: str | None = None, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> tuple[_BytesPathT | bytes, bool]: - """ - Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. 
(If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - ... +) -> tuple[_BytesPathT | bytes, bool]: ... @overload def move_file( src: StrPath, dst: _StrPathT, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0 -) -> _StrPathT | str: - """ - Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? - """ - ... +) -> _StrPathT | str: ... @overload def move_file( src: BytesPath, dst: _BytesPathT, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0 -) -> _BytesPathT | bytes: - """ - Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. 
Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? - """ - ... -def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: - """ - Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - ... +) -> _BytesPathT | bytes: ... +def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/filelist.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/filelist.pyi index 0240889..607a78a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/filelist.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/filelist.pyi @@ -1,41 +1,15 @@ -""" -distutils.filelist - -Provides the FileList class, used for poking about the filesystem -and building lists of files. -""" - from collections.abc import Iterable from re import Pattern from typing import Literal, overload # class is entirely undocumented class FileList: - """ - A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - - Instance attributes: - dir - directory from which files will be taken -- only used if - 'allfiles' not supplied to constructor - files - list of filenames currently being built/filtered/manipulated - allfiles - complete list of files under consideration (ie. without any - filtering applied) - """ allfiles: Iterable[str] | None files: list[str] def __init__(self, warn: None = None, debug_print: None = None) -> None: ... def set_allfiles(self, allfiles: Iterable[str]) -> None: ... def findall(self, dir: str = ".") -> None: ... - def debug_print(self, msg: str) -> None: - """ - Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - ... 
+ def debug_print(self, msg: str) -> None: ... def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... def sort(self) -> None: ... @@ -44,61 +18,9 @@ class FileList: @overload def include_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: - """ - Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - ... + ) -> bool: ... @overload - def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: - """ - Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. 
- - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - ... + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def include_pattern( self, @@ -106,55 +28,13 @@ class FileList: anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: bool | Literal[0, 1] = 0, - ) -> bool: - """ - Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. 
- - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - ... + ) -> bool: ... @overload def exclude_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: - """ - Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - ... + ) -> bool: ... @overload - def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: - """ - Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - ... + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def exclude_pattern( self, @@ -162,58 +42,17 @@ class FileList: anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: bool | Literal[0, 1] = 0, - ) -> bool: - """ - Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - ... + ) -> bool: ... -def findall(dir: str = ".") -> list[str]: - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. - """ - ... -def glob_to_re(pattern: str) -> str: - """ - Translate a shell-like glob pattern to a regular expression; return - a string containing the regex. Differs from 'fnmatch.translate()' in - that '*' does not match "special characters" (which are - platform-specific). - """ - ... 
+def findall(dir: str = ".") -> list[str]: ... +def glob_to_re(pattern: str) -> str: ... @overload def translate_pattern( pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 -) -> Pattern[str]: - """ - Translate a shell-like wildcard pattern to a compiled regular - expression. Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). - """ - ... +) -> Pattern[str]: ... @overload -def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: - """ - Translate a shell-like wildcard pattern to a compiled regular - expression. Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). - """ - ... +def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload def translate_pattern( pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: bool | Literal[0, 1] = 0 -) -> Pattern[str]: - """ - Translate a shell-like wildcard pattern to a compiled regular - expression. Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). - """ - ... +) -> Pattern[str]: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/log.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/log.pyi index 3df7934..0ea135c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/log.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/log.pyi @@ -1,12 +1,10 @@ -"""A simple log mechanism styled after PEP 282.""" +from typing import Any, Final -from typing import Any - -DEBUG: int -INFO: int -WARN: int -ERROR: int -FATAL: int +DEBUG: Final = 1 +INFO: Final = 2 +WARN: Final = 3 +ERROR: Final = 4 +FATAL: Final = 5 class Log: def __init__(self, threshold: int = 3) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/msvccompiler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/msvccompiler.pyi index c5945b6..80872a6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/msvccompiler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/msvccompiler.pyi @@ -1,15 +1,3 @@ -""" -distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. -""" - from distutils.ccompiler import CCompiler -class MSVCCompiler(CCompiler): - """ - Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class. - """ - ... +class MSVCCompiler(CCompiler): ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/spawn.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/spawn.pyi index f647ecf..50d89ae 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/spawn.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/spawn.pyi @@ -1,39 +1,6 @@ -""" -distutils.spawn - -Provides the 'spawn()' function, a front-end to various platform- -specific functions for launching another program in a sub-process. -Also provides the 'find_executable()' to search the path for a given -executable name. -""" - from typing import Literal def spawn( cmd: list[str], search_path: bool | Literal[0, 1] = 1, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0 -) -> None: - """ - Run another program, specified as a command list 'cmd', in a new process. - - 'cmd' is just the argument list for the new process, ie. - cmd[0] is the program to run and cmd[1:] are the rest of its arguments. - There is no way to run a program with a name different from that of its - executable. - - If 'search_path' is true (the default), the system's executable - search path will be used to find the program; otherwise, cmd[0] - must be the exact path to the executable. If 'dry_run' is true, - the command will not actually be run. - - Raise DistutilsExecError if running the program fails in any way; just - return on success. - """ - ... -def find_executable(executable: str, path: str | None = None) -> str | None: - """ - Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. - """ - ... +) -> None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/sysconfig.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/sysconfig.pyi index a8668f1..4a9c45e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/sysconfig.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/sysconfig.pyi @@ -1,128 +1,33 @@ -""" -Provide access to Python's configuration information. The specific -configuration variables available depend heavily on the platform and -configuration. The values may be retrieved using -get_config_var(name), and the list of variables is available via -get_config_vars().keys(). Additional convenience functions are also -available. - -Written by: Fred L. Drake, Jr. -Email: -""" - import sys from collections.abc import Mapping from distutils.ccompiler import CCompiler -from typing import Literal, overload +from typing import Final, Literal, overload from typing_extensions import deprecated -PREFIX: str -EXEC_PREFIX: str -BASE_PREFIX: str -BASE_EXEC_PREFIX: str -project_base: str -python_build: bool +PREFIX: Final[str] +EXEC_PREFIX: Final[str] +BASE_PREFIX: Final[str] +BASE_EXEC_PREFIX: Final[str] +project_base: Final[str] +python_build: Final[bool] -def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: - """ - Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in - 'string' according to 'vars' (a dictionary mapping variable names to - values). Variables not present in 'vars' are silently expanded to the - empty string. The variable values in 'vars' should not contain further - variable expansions; if 'vars' is the output of 'parse_makefile()', - you're fine. Returns a variable-expanded version of 's'. - """ - ... +def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ... @overload @deprecated("SO is deprecated, use EXT_SUFFIX. 
Support is removed in Python 3.11") -def get_config_var(name: Literal["SO"]) -> int | str | None: - """ - Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - ... +def get_config_var(name: Literal["SO"]) -> int | str | None: ... @overload -def get_config_var(name: str) -> int | str | None: - """ - Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - ... +def get_config_var(name: str) -> int | str | None: ... @overload -def get_config_vars() -> dict[str, str | int]: - """ - With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - ... +def get_config_vars() -> dict[str, str | int]: ... @overload -def get_config_vars(arg: str, /, *args: str) -> list[str | int]: - """ - With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - ... -def get_config_h_filename() -> str: - """Return the path of pyconfig.h.""" - ... -def get_makefile_filename() -> str: - """Return the path of the Makefile.""" - ... -def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: - """ - Return the directory containing installed Python header files. - - If 'plat_specific' is false (the default), this is the path to the - non-platform-specific header files, i.e. 
Python.h and so on; - otherwise, this is the path to platform-specific header files - (namely pyconfig.h). - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - ... +def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... +def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: ... def get_python_lib( plat_specific: bool | Literal[0, 1] = 0, standard_lib: bool | Literal[0, 1] = 0, prefix: str | None = None -) -> str: - """ - Return the directory containing the Python library (standard or - site additions). - - If 'plat_specific' is true, return the directory containing - platform-specific modules, i.e. any module from a non-pure-Python - module distribution; otherwise, return the platform-shared library - directory. If 'standard_lib' is true, return the directory - containing standard Python library modules; otherwise, return the - directory for site-specific modules. - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - ... -def customize_compiler(compiler: CCompiler) -> None: - """ - Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. - """ - ... +) -> str: ... +def customize_compiler(compiler: CCompiler) -> None: ... if sys.version_info < (3, 10): def get_python_version() -> str: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/text_file.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/text_file.pyi index b975fc0..54951af 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/text_file.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/text_file.pyi @@ -1,73 +1,6 @@ -""" -text_file - -provides the TextFile class, which gives an interface to text files -that (optionally) takes care of stripping comments, ignoring blank -lines, and joining lines with backslashes. -""" - from typing import IO, Literal class TextFile: - """ - Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. 
- - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not. - """ def __init__( self, filename: str | None = None, @@ -79,60 +12,10 @@ class TextFile: skip_blanks: bool | Literal[0, 1] = ..., join_lines: bool | Literal[0, 1] = ..., collapse_join: bool | Literal[0, 1] = ..., - ) -> None: - """ - Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. 
- They keyword argument options are described above and affect - the values returned by 'readline()'. - """ - ... - def open(self, filename: str) -> None: - """ - Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor. - """ - ... - def close(self) -> None: - """ - Close the current file and forget everything we know about it - (filename, current line number). - """ - ... - def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: - """ - Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line. - """ - ... - def readline(self) -> str | None: - """ - Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not. - """ - ... - def readlines(self) -> list[str]: - """ - Read and return the list of all logical lines remaining in the - current file. - """ - ... - def unreadline(self, line: str) -> str: - """ - Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead. - """ - ... + ) -> None: ... + def open(self, filename: str) -> None: ... + def close(self) -> None: ... 
+ def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... + def readline(self) -> str | None: ... + def readlines(self) -> list[str]: ... + def unreadline(self, line: str) -> str: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/unixccompiler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/unixccompiler.pyi index 6c200ce..e1d4434 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/unixccompiler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/unixccompiler.pyi @@ -1,19 +1,3 @@ -""" -distutils.unixccompiler - -Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/util.pyi index ccb4e7c..0e1bb41 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/util.pyi @@ -1,114 +1,25 @@ -""" -distutils.util - -Miscellaneous utility functions -- anything that doesn't fit into -one of the other *util.py modules. 
-""" - from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any, Literal +from typing_extensions import TypeVarTuple, Unpack -def get_host_platform() -> str: - """ - Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u +_Ts = TypeVarTuple("_Ts") - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - """ - ... +def get_host_platform() -> str: ... def get_platform() -> str: ... -def convert_path(pathname: str) -> str: - """ - Return 'pathname' as a name that will work on the native filesystem, - i.e. split it on '/' and put it back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - ... -def change_root(new_root: StrPath, pathname: StrPath) -> str: - """ - Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. - """ - ... 
-def check_environ() -> None: - """ - Ensure that 'os.environ' has all the environment variables we - guarantee that users can use in config files, command-line options, - etc. Currently this includes: - HOME - user's home directory (Unix only) - PLAT - description of the current platform, including hardware - and OS (see 'get_platform()') - """ - ... -def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: - """ - Perform shell/Perl-style variable substitution on 'string'. Every - occurrence of '$' followed by a name is considered a variable, and - variable is substituted by the value found in the 'local_vars' - dictionary, or in 'os.environ' if it's not in 'local_vars'. - 'os.environ' is first checked/augmented to guarantee that it contains - certain values: see 'check_environ()'. Raise ValueError for any - variables not found in either 'local_vars' or 'os.environ'. - """ - ... -def split_quoted(s: str) -> list[str]: - """ - Split a string up according to Unix shell-like rules for quotes and - backslashes. In short: words are delimited by spaces, as long as those - spaces are not escaped by a backslash, or inside a quoted string. - Single and double quotes are equivalent, and the quote characters can - be backslash-escaped. The backslash is stripped from any two-character - escape sequence, leaving only the escaped character. The quote - characters are stripped from any quoted string. Returns a list of - words. - """ - ... +def convert_path(pathname: str) -> str: ... +def change_root(new_root: StrPath, pathname: StrPath) -> str: ... +def check_environ() -> None: ... +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... +def split_quoted(s: str) -> list[str]: ... 
def execute( - func: Callable[..., object], - args: tuple[Any, ...], + func: Callable[[Unpack[_Ts]], Unused], + args: tuple[Unpack[_Ts]], msg: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, -) -> None: - """ - Perform some action that affects the outside world (eg. by - writing to the filesystem). Such actions are special because they - are disabled by the 'dry_run' flag. This method takes care of all - that bureaucracy for you; all you have to do is supply the - function to call and an argument tuple for it (to embody the - "external action" being performed), and an optional message to - print. - """ - ... -def strtobool(val: str) -> Literal[0, 1]: - """ - Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - ... +) -> None: ... +def strtobool(val: str) -> Literal[0, 1]: ... def byte_compile( py_files: list[str], optimize: int = 0, @@ -118,57 +29,14 @@ def byte_compile( verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, direct: bool | None = None, -) -> None: - """ - Byte-compile a collection of Python source files to .pyc - files in a __pycache__ subdirectory. 'py_files' is a list - of files to compile; any files that don't end in ".py" are silently - skipped. 'optimize' must be one of the following: - 0 - don't optimize - 1 - normal optimization (like "python -O") - 2 - extra optimization (like "python -OO") - If 'force' is true, all files are recompiled regardless of - timestamps. - - The source filename encoded in each bytecode file defaults to the - filenames listed in 'py_files'; you can modify these with 'prefix' and - 'basedir'. 'prefix' is a string that will be stripped off of each - source filename, and 'base_dir' is a directory name that will be - prepended (after 'prefix' is stripped). 
You can supply either or both - (or neither) of 'prefix' and 'base_dir', as you wish. - - If 'dry_run' is true, doesn't actually do anything that would - affect the filesystem. - - Byte-compilation is either done directly in this interpreter process - with the standard py_compile module, or indirectly by writing a - temporary script and executing it. Normally, you should let - 'byte_compile()' figure out to use direct compilation or not (see - the source for details). The 'direct' flag is used by the script - generated in indirect mode; unless you know what you're doing, leave - it set to None. - """ - ... -def rfc822_escape(header: str) -> str: - """ - Return a version of the string escaped for inclusion in an - RFC-822 header, by ensuring there are 8 spaces space after each newline. - """ - ... +) -> None: ... +def rfc822_escape(header: str) -> str: ... def run_2to3( files: Iterable[str], fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Unused = None, -) -> None: - """ - Invoke 2to3 on a list of Python files. - The files should all come from the build area, as the - modification is done in-place. To reduce the build time, - only files modified since the last invocation of this - function should be passed in the files argument. - """ - ... +) -> None: ... def copydir_run_2to3( src: StrPath, dest: StrPath, @@ -176,22 +44,9 @@ def copydir_run_2to3( fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None, -) -> list[str]: - """ - Recursively copy a directory, only copying new and changed files, - running run_2to3 over all newly copied Python modules afterward. - - If you give a template string, it's parsed like a MANIFEST.in. - """ - ... +) -> list[str]: ... class Mixin2to3: - """ - Mixin class for commands that run 2to3. 
- To configure 2to3, setup scripts may either change - the class variables, or inherit from individual commands - to override how 2to3 is invoked. - """ fixer_names: Iterable[str] | None options: Mapping[str, Any] | None explicit: Container[str] | None diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/version.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/version.pyi index 4a1ba66..47da65e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/version.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/distutils/version.pyi @@ -1,34 +1,8 @@ -""" -Provides classes to represent module version numbers (one class for -each style of version numbering). There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" - from abc import abstractmethod from re import Pattern from typing_extensions import Self class Version: - """ - Abstract base class for version numbering classes. Just provides - constructor (__init__) and reproducer (__repr__), because those - seem to be the same for all version numbering classes; and route - rich comparisons to _cmp. 
- """ def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self | str) -> bool: ... def __le__(self, other: Self | str) -> bool: ... @@ -39,48 +13,11 @@ class Version: @abstractmethod def parse(self, vstring: str) -> Self: ... @abstractmethod - def __str__(self) -> str: - """Return str(self).""" - ... + def __str__(self) -> str: ... @abstractmethod def _cmp(self, other: Self | str) -> bool: ... class StrictVersion(Version): - """ - Version numbering for anal retentives and software idealists. - Implements the standard interface for version number classes as - described above. A version number consists of two or three - dot-separated numeric components, with an optional "pre-release" tag - on the end. The pre-release tag consists of the letter 'a' or 'b' - followed by a number. If the numeric components of two version - numbers are equal, then one with a pre-release tag will always - be deemed earlier (lesser) than one without. - - The following are valid version numbers (shown in the order that - would be obtained by sorting according to the supplied cmp function): - - 0.4 0.4.0 (these two are equivalent) - 0.4.1 - 0.5a1 - 0.5b3 - 0.5 - 0.9.6 - 1.0 - 1.0.4a3 - 1.0.4b1 - 1.0.4 - - The following are examples of invalid version numbers: - - 1 - 2.7.2.2 - 1.3.a4 - 1.3pl1 - 1.3c4 - - The rationale for this version numbering system will be explained - in the distutils documentation. - """ version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None @@ -90,37 +27,6 @@ class StrictVersion(Version): def _cmp(self, other: Self | str) -> bool: ... class LooseVersion(Version): - """ - Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. 
When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). - """ component_re: Pattern[str] vstring: str version: tuple[str | int, ...] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/doctest.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/doctest.pyi index d6befb7..81cb7b7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/doctest.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/doctest.pyi @@ -38,12 +38,13 @@ of doctest's default behaviors. See the Library Reference Manual for details. """ +import sys import types import unittest from _typeshed import ExcInfo from collections.abc import Callable -from typing import Any, NamedTuple -from typing_extensions import TypeAlias +from typing import Any, ClassVar, NamedTuple +from typing_extensions import Self, TypeAlias __all__ = [ "register_optionflag", @@ -81,10 +82,22 @@ __all__ = [ "debug", ] -class TestResults(NamedTuple): - """TestResults(failed, attempted)""" - failed: int - attempted: int +# MyPy errors on conditionals within named tuples. + +if sys.version_info >= (3, 13): + class TestResults(NamedTuple): + def __new__(cls, failed: int, attempted: int, *, skipped: int = 0) -> Self: ... 
# type: ignore[misc] + skipped: int + failed: int + attempted: int + _fields: ClassVar = ("failed", "attempted") # type: ignore[misc] + __match_args__ = ("failed", "attempted") # type: ignore[misc] + __doc__: None # type: ignore[misc] + +else: + class TestResults(NamedTuple): + failed: int + attempted: int OPTIONFLAGS_BY_NAME: dict[str, int] @@ -321,8 +334,10 @@ class DocTestRunner: """ A class used to run DocTest test cases, and accumulate statistics. The `run` method is used to process a single DocTest case. It - returns a tuple `(f, t)`, where `t` is the number of test cases - tried, and `f` is the number of test cases that failed. + returns a TestResults instance. + + >>> save_colorize = _colorize.COLORIZE + >>> _colorize.COLORIZE = False >>> tests = DocTestFinder().find(_TestClass) >>> runner = DocTestRunner(verbose=False) @@ -335,27 +350,29 @@ class DocTestRunner: _TestClass.square -> TestResults(failed=0, attempted=1) The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated `(f, t)` - tuple: + have been run by the runner, and returns an aggregated TestResults + instance: >>> runner.summarize(verbose=1) 4 items passed all tests: 2 tests in _TestClass 2 tests in _TestClass.__init__ 2 tests in _TestClass.get - 1 tests in _TestClass.square + 1 test in _TestClass.square 7 tests in 4 items. - 7 passed and 0 failed. + 7 passed. Test passed. TestResults(failed=0, attempted=7) - The aggregated number of tried examples and failed examples is - also available via the `tries` and `failures` attributes: + The aggregated number of tried examples and failed examples is also + available via the `tries`, `failures` and `skips` attributes: >>> runner.tries 7 >>> runner.failures 0 + >>> runner.skips + 0 The comparison between expected outputs and actual outputs is done by an `OutputChecker`. 
This comparison may be customized with a @@ -372,12 +389,16 @@ class DocTestRunner: can be also customized by subclassing DocTestRunner, and overriding the methods `report_start`, `report_success`, `report_unexpected_exception`, and `report_failure`. + + >>> _colorize.COLORIZE = save_colorize """ DIVIDER: str optionflags: int original_optionflags: int tries: int failures: int + if sys.version_info >= (3, 13): + skips: int test: DocTest def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: """ @@ -441,9 +462,7 @@ class DocTestRunner: def summarize(self, verbose: bool | None = None) -> TestResults: """ Print a summary of all the test cases that have been run by - this DocTestRunner, and return a tuple `(f, t)`, where `f` is - the total number of failed examples, and `t` is the total - number of tried examples. + this DocTestRunner, and return a TestResults instance. The optional `verbose` argument controls how detailed the summary is. If the verbosity is not specified, then the @@ -454,7 +473,7 @@ class DocTestRunner: class OutputChecker: """ - A class used to check the whether the actual output from a doctest + A class used to check whether the actual output from a doctest example matches the expected output. 
`OutputChecker` defines two methods: `check_output`, which compares a given pair of outputs, and returns true if they match; and `output_difference`, which diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_header_value_parser.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_header_value_parser.pyi index 870834c..1deaafd 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_header_value_parser.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_header_value_parser.pyi @@ -86,6 +86,10 @@ TOKEN_ENDS: Final[set[str]] ASPECIALS: Final[set[str]] ATTRIBUTE_ENDS: Final[set[str]] EXTENDED_ATTRIBUTE_ENDS: Final[set[str]] +# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +NLSET: Final[set[str]] +# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +SPECIALSNL: Final[set[str]] def quote_string(value: Any) -> str: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_policybase.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_policybase.pyi index 8bb607a..742c23c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_policybase.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/_policybase.pyi @@ -5,14 +5,22 @@ Allows fine grained feature control of how the package parses and emits data. """ from abc import ABCMeta, abstractmethod -from collections.abc import Callable from email.errors import MessageDefect from email.header import Header from email.message import Message -from typing import Any +from typing import Generic, Protocol, TypeVar, type_check_only from typing_extensions import Self -class _PolicyBase: +_MessageT = TypeVar("_MessageT", bound=Message, default=Message) + +@type_check_only +class _MessageFactory(Protocol[_MessageT]): + def __call__(self, policy: Policy[_MessageT]) -> _MessageT: ... 
+ +# Policy below is the only known direct subclass of _PolicyBase. We therefore +# assume that the __init__ arguments and attributes of _PolicyBase are +# the same as those of Policy. +class _PolicyBase(Generic[_MessageT]): """ Policy Object basic framework. @@ -33,14 +41,45 @@ class _PolicyBase: if and only if the repr of the values can be used to reconstruct those values. """ - def __add__(self, other: Any) -> Self: + max_line_length: int | None + linesep: str + cte_type: str + raise_on_defect: bool + mangle_from_: bool + message_factory: _MessageFactory[_MessageT] | None + # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool + + def __init__( + self, + *, + max_line_length: int | None = 78, + linesep: str = "\n", + cte_type: str = "8bit", + raise_on_defect: bool = False, + mangle_from_: bool = ..., # default depends on sub-class + message_factory: _MessageFactory[_MessageT] | None = None, + # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = True, + ) -> None: """ - Non-default values from right operand override those from left. + Create new Policy, possibly overriding some defaults. - The object returned is a new instance of the subclass. + See class docstring for a list of overridable attributes. """ ... - def clone(self, **kw: Any) -> Self: + def clone( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: _MessageFactory[_MessageT] | None = ..., + # Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 + verify_generated_headers: bool = ..., + ) -> Self: """ Return a new instance with specified attributes changed. @@ -48,8 +87,15 @@ class _PolicyBase: except for the changes passed in as keyword arguments. """ ... + def __add__(self, other: Policy) -> Self: + """ + Non-default values from right operand override those from left. 
+ + The object returned is a new instance of the subclass. + """ + ... -class Policy(_PolicyBase, metaclass=ABCMeta): +class Policy(_PolicyBase[_MessageT], metaclass=ABCMeta): r""" Controls for how messages are interpreted and formatted. @@ -96,30 +142,16 @@ class Policy(_PolicyBase, metaclass=ABCMeta): message_factory -- the class to use to create new message objects. If the value is None, the default is Message. - """ - max_line_length: int | None - linesep: str - cte_type: str - raise_on_defect: bool - mangle_from_: bool - message_factory: Callable[[Policy], Message] | None - def __init__( - self, - *, - max_line_length: int | None = 78, - linesep: str = "\n", - cte_type: str = "8bit", - raise_on_defect: bool = False, - mangle_from_: bool = False, - message_factory: Callable[[Policy], Message] | None = None, - ) -> None: - """ - Create new Policy, possibly overriding some defaults. - See class docstring for a list of overridable attributes. - """ - ... - def handle_defect(self, obj: Message, defect: MessageDefect) -> None: + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. + """ + def handle_defect(self, obj: _MessageT, defect: MessageDefect) -> None: """ Based on policy, either raise defect or call register_defect. @@ -135,7 +167,7 @@ class Policy(_PolicyBase, metaclass=ABCMeta): The email package parsers always call it with Defect instances. """ ... - def register_defect(self, obj: Message, defect: MessageDefect) -> None: + def register_defect(self, obj: _MessageT, defect: MessageDefect) -> None: """ Record 'defect' on 'obj'. @@ -214,7 +246,7 @@ class Policy(_PolicyBase, metaclass=ABCMeta): """ ... -class Compat32(Policy): +class Compat32(Policy[_MessageT]): r""" Controls for how messages are interpreted and formatted. 
@@ -262,6 +294,13 @@ class Compat32(Policy): message_factory -- the class to use to create new message objects. If the value is None, the default is Message. + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. This particular policy is the backward compatibility Policy. It replicates the behavior of the email package version 5.1. """ @@ -327,4 +366,4 @@ class Compat32(Policy): """ ... -compat32: Compat32 +compat32: Compat32[Message] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/charset.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/charset.pyi index 6a66ccc..b17da68 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/charset.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/charset.pyi @@ -1,12 +1,12 @@ from collections.abc import Callable, Iterator from email.message import Message -from typing import overload +from typing import Final, overload __all__ = ["Charset", "add_alias", "add_charset", "add_codec"] -QP: int # undocumented -BASE64: int # undocumented -SHORTEST: int # undocumented +QP: Final[int] # undocumented +BASE64: Final[int] # undocumented +SHORTEST: Final[int] # undocumented class Charset: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/errors.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/errors.pyi index c16afeb..8f37942 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/errors.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/errors.pyi @@ -21,6 +21,11 @@ class CharsetError(MessageError): """An illegal charset was given.""" ... 
+# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +class HeaderWriteError(MessageError): + """Error while writing headers.""" + ... + class MessageDefect(ValueError): """Base class for a message defect.""" def __init__(self, line: str | None = None) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/feedparser.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/feedparser.pyi index fd718db..fd614d9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/feedparser.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/feedparser.pyi @@ -23,12 +23,12 @@ from typing import Generic, TypeVar, overload __all__ = ["FeedParser", "BytesFeedParser"] -_MessageT = TypeVar("_MessageT", bound=Message) +_MessageT = TypeVar("_MessageT", bound=Message, default=Message) class FeedParser(Generic[_MessageT]): """A feed-style parser of email.""" @overload - def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: """ _factory is called with no arguments to create a new message obj @@ -38,7 +38,7 @@ class FeedParser(Generic[_MessageT]): """ ... @overload - def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: """ _factory is called with no arguments to create a new message obj @@ -57,7 +57,7 @@ class FeedParser(Generic[_MessageT]): class BytesFeedParser(FeedParser[_MessageT]): """Like FeedParser, but feed accepts bytes.""" @overload - def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) 
-> None: """ _factory is called with no arguments to create a new message obj @@ -67,7 +67,7 @@ class BytesFeedParser(FeedParser[_MessageT]): """ ... @overload - def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: """ _factory is called with no arguments to create a new message obj diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/generator.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/generator.pyi index c7c80ff..0c01c54 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/generator.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/generator.pyi @@ -3,28 +3,63 @@ from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy +from typing import Any, Generic, TypeVar, overload from typing_extensions import Self __all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] -class Generator: +# By default, generators do not have a message policy. +_MessageT = TypeVar("_MessageT", bound=Message, default=Any) + +class Generator(Generic[_MessageT]): """ Generates output from a Message object tree. This basic generator writes the message to the given file object as plain text. """ - def clone(self, fp: SupportsWrite[str]) -> Self: - """Clone this generator with the exact same options.""" + maxheaderlen: int | None + policy: Policy[_MessageT] | None + @overload + def __init__( + self: Generator[Any], # The Policy of the message is used. + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: None = None, + ) -> None: + """ + Create the generator for message flattening. + + outfp is the output file-like object for writing the message to. It + must have a write() method. 
+ + Optional mangle_from_ is a flag that, when True (the default if policy + is not set), escapes From_ lines in the body of the message by putting + a `>' in front of them. + + Optional maxheaderlen specifies the longest length for a non-continued + header. When a header line is longer (in characters, with tabs + expanded to 8 spaces) than maxheaderlen, the header will split as + defined in the Header class. Set maxheaderlen to zero to disable + header wrapping. The default is 78, as recommended (but not required) + by RFC 2822. + + The policy keyword specifies a policy object that controls a number of + aspects of the generator's operation. If no policy is specified, + the policy associated with the Message object passed to the + flatten method is used. + """ ... - def write(self, s: str) -> None: ... + @overload def __init__( self, outfp: SupportsWrite[str], mangle_from_: bool | None = None, maxheaderlen: int | None = None, *, - policy: Policy | None = None, + policy: Policy[_MessageT], ) -> None: """ Create the generator for message flattening. @@ -49,7 +84,8 @@ class Generator: flatten method is used. """ ... - def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: + def write(self, s: str) -> None: ... + def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: """ Print the message object tree rooted at msg to the output file specified when the Generator instance was created. @@ -67,8 +103,11 @@ class Generator: from the policy associated with the msg. """ ... + def clone(self, fp: SupportsWrite[str]) -> Self: + """Clone this generator with the exact same options.""" + ... -class BytesGenerator(Generator): +class BytesGenerator(Generator[_MessageT]): """ Generates a bytes version of a Message object tree. @@ -81,13 +120,46 @@ class BytesGenerator(Generator): The outfp object must accept bytes in its write method. 
""" + @overload + def __init__( + self: BytesGenerator[Any], # The Policy of the message is used. + outfp: SupportsWrite[bytes], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: None = None, + ) -> None: + """ + Create the generator for message flattening. + + outfp is the output file-like object for writing the message to. It + must have a write() method. + + Optional mangle_from_ is a flag that, when True (the default if policy + is not set), escapes From_ lines in the body of the message by putting + a `>' in front of them. + + Optional maxheaderlen specifies the longest length for a non-continued + header. When a header line is longer (in characters, with tabs + expanded to 8 spaces) than maxheaderlen, the header will split as + defined in the Header class. Set maxheaderlen to zero to disable + header wrapping. The default is 78, as recommended (but not required) + by RFC 2822. + + The policy keyword specifies a policy object that controls a number of + aspects of the generator's operation. If no policy is specified, + the policy associated with the Message object passed to the + flatten method is used. + """ + ... + @overload def __init__( self, outfp: SupportsWrite[bytes], mangle_from_: bool | None = None, maxheaderlen: int | None = None, *, - policy: Policy | None = None, + policy: Policy[_MessageT], ) -> None: """ Create the generator for message flattening. @@ -113,13 +185,47 @@ class BytesGenerator(Generator): """ ... -class DecodedGenerator(Generator): +class DecodedGenerator(Generator[_MessageT]): """ Generates a text representation of a message. Like the Generator base class, except that non-text parts are substituted with a format string representing the part. """ + @overload + def __init__( + self: DecodedGenerator[Any], # The Policy of the message is used. 
+ outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, + *, + policy: None = None, + ) -> None: + """ + Like Generator.__init__() except that an additional optional + argument is allowed. + + Walks through all subparts of a message. If the subpart is of main + type `text', then it prints the decoded payload of the subpart. + + Otherwise, fmt is a format string that is used instead of the message + payload. fmt is expanded with the following keywords (in + %(keyword)s format): + + type : Full MIME type of the non-text part + maintype : Main MIME type of the non-text part + subtype : Sub-MIME type of the non-text part + filename : Filename of the non-text part + description: Description associated with the non-text part + encoding : Content transfer encoding of the non-text part + + The default value for fmt is None, meaning + + [Non-text (%(type)s) part of message omitted, filename %(filename)s] + """ + ... + @overload def __init__( self, outfp: SupportsWrite[str], @@ -127,7 +233,7 @@ class DecodedGenerator(Generator): maxheaderlen: int | None = None, fmt: str | None = None, *, - policy: Policy | None = None, + policy: Policy[_MessageT], ) -> None: """ Like Generator.__init__() except that an additional optional diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/message.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/message.pyi index 0b5e360..20c7a68 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/message.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/message.pyi @@ -47,10 +47,13 @@ class Message(Generic[_HeaderT, _HeaderParamT]): you must use the explicit API to set or get all the headers. Not all of the mapping methods are implemented. 
""" - policy: Policy # undocumented + # The policy attributes and arguments in this class and its subclasses + # would ideally use Policy[Self], but this is not possible. + policy: Policy[Any] # undocumented preamble: str | None epilogue: str | None defects: list[MessageDefect] + def __init__(self, policy: Policy[Any] = ...) -> None: ... def is_multipart(self) -> bool: """Return True if the message consists of multiple parts.""" ... @@ -182,7 +185,8 @@ class Message(Generic[_HeaderT, _HeaderParamT]): is returned. """ ... - # If `charset=None` and payload supports both `encode` AND `decode`, then an invalid payload could be passed, but this is unlikely + # If `charset=None` and payload supports both `encode` AND `decode`, + # then an invalid payload could be passed, but this is unlikely # Not[_SupportsEncodeToPayload] @overload def set_payload( @@ -656,7 +660,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]): according to the rfc2183. """ ... - def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: """ Return the entire formatted message as a string. @@ -672,7 +676,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]): unicode "unknown character" code points. """ ... - def as_bytes(self, unixfrom: bool = False, policy: Policy | None = None) -> bytes: + def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: """ Return the entire formatted message as a bytes object. @@ -716,7 +720,6 @@ class Message(Generic[_HeaderT, _HeaderParamT]): to the empty string. Both charset and language should be strings. """ ... - def __init__(self, policy: Policy = ...) -> None: ... 
# The following two methods are undocumented, but a source code comment states that they are public API def set_raw(self, name: str, value: _HeaderParamT) -> None: """ @@ -734,7 +737,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]): ... class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]): - def __init__(self, policy: Policy | None = None) -> None: ... + def __init__(self, policy: Policy[Any] | None = None) -> None: ... def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT] | None: """ Return best candidate mime part for display as 'body' of message. @@ -747,7 +750,19 @@ class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]): match. Ignore parts with 'Content-Disposition: attachment'. """ ... - def iter_attachments(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: + def attach(self, payload: Self) -> None: + """ + Add the given payload to the current payload. + + The current payload will always be a list of objects after this method + is called. If you want to set the payload to a scalar object, use + set_payload() instead. + """ + ... + # The attachments are created via type(self) in the attach method. It's theoretically + # possible to sneak other attachment types into a MIMEPart instance, but could cause + # cause unforseen consequences. + def iter_attachments(self) -> Iterator[Self]: """ Return an iterator over the non-main parts of a multipart. @@ -777,7 +792,7 @@ class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]): def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... 
- def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy | None = None) -> str: + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: """ Return the entire formatted message as a string. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/parser.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/parser.pyi index 0a23d7a..760c391 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/parser.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/parser.pyi @@ -14,7 +14,7 @@ _MessageT = TypeVar("_MessageT", bound=Message, default=Message) class Parser(Generic[_MessageT]): @overload - def __init__(self: Parser[Message[str, str]], _class: None = None, *, policy: Policy = ...) -> None: + def __init__(self: Parser[Message[str, str]], _class: None = None, *, policy: Policy[Message[str, str]] = ...) -> None: """ Parser of RFC 2822 and MIME email messages. @@ -37,7 +37,7 @@ class Parser(Generic[_MessageT]): """ ... @overload - def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy = ...) -> None: + def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: """ Parser of RFC 2822 and MIME email messages. @@ -87,7 +87,9 @@ class HeaderParser(Parser[_MessageT]): class BytesParser(Generic[_MessageT]): parser: Parser[_MessageT] @overload - def __init__(self: BytesParser[Message[str, str]], _class: None = None, *, policy: Policy = ...) -> None: + def __init__( + self: BytesParser[Message[str, str]], _class: None = None, *, policy: Policy[Message[str, str]] = ... + ) -> None: """ Parser of binary RFC 2822 and MIME email messages. @@ -106,7 +108,7 @@ class BytesParser(Generic[_MessageT]): """ ... @overload - def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy = ...) 
-> None: + def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: """ Parser of binary RFC 2822 and MIME email messages. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/policy.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/policy.pyi index 3ede4c0..8b02d30 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/policy.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/policy.pyi @@ -4,14 +4,16 @@ code that adds all the email6 features. """ from collections.abc import Callable -from email._policybase import Compat32 as Compat32, Policy as Policy, compat32 as compat32 +from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, compat32 as compat32 from email.contentmanager import ContentManager -from email.message import Message -from typing import Any +from email.message import EmailMessage, Message +from typing import Any, TypeVar, overload __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] -class EmailPolicy(Policy): +_MessageT = TypeVar("_MessageT", bound=Message, default=Message) + +class EmailPolicy(Policy[_MessageT]): r""" Controls for how messages are interpreted and formatted. @@ -59,6 +61,13 @@ class EmailPolicy(Policy): message_factory -- the class to use to create new message objects. If the value is None, the default is Message. + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. PROVISIONAL The API extensions enabled by this policy are currently provisional. 
@@ -118,6 +127,22 @@ class EmailPolicy(Policy): refold_source: str header_factory: Callable[[str, Any], Any] content_manager: ContentManager + @overload + def __init__( + self: EmailPolicy[EmailMessage], + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: None = None, + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> None: ... + @overload def __init__( self, *, @@ -126,7 +151,7 @@ class EmailPolicy(Policy): cte_type: str = ..., raise_on_defect: bool = ..., mangle_from_: bool = ..., - message_factory: Callable[[Policy], Message] | None = ..., + message_factory: _MessageFactory[_MessageT] | None = ..., utf8: bool = ..., refold_source: str = ..., header_factory: Callable[[str, str], str] = ..., @@ -219,8 +244,8 @@ class EmailPolicy(Policy): """ ... -default: EmailPolicy -SMTP: EmailPolicy -SMTPUTF8: EmailPolicy -HTTP: EmailPolicy -strict: EmailPolicy +default: EmailPolicy[EmailMessage] +SMTP: EmailPolicy[EmailMessage] +SMTPUTF8: EmailPolicy[EmailMessage] +HTTP: EmailPolicy[EmailMessage] +strict: EmailPolicy[EmailMessage] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/utils.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/utils.pyi index 584a0e8..6366c1d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/utils.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/email/utils.pyi @@ -3,6 +3,7 @@ import datetime import sys from _typeshed import Unused +from collections.abc import Iterable from email import _ParamType from email.charset import Charset from typing import overload @@ -40,12 +41,16 @@ def quote(str: str) -> str: def unquote(str: str) -> str: """Remove quotes from a string.""" ... 
-def parseaddr(addr: str | None) -> tuple[str, str]: + +# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: """ Parse addr into its constituent realname and email address parts. Return a tuple of realname and email address, unless the parse fails, in which case return a 2-tuple of ('', ''). + + If strict is True, use a strict parser which rejects malformed inputs. """ ... def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: @@ -63,8 +68,17 @@ def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") - 'utf-8'. """ ... -def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: - """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" + +# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5 +def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: + """ + Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. + + When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in + its place. + + If strict is true, use a strict parser which rejects malformed inputs. + """ ... @overload def parsedate(data: None) -> None: @@ -109,7 +123,7 @@ def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bo Fri, 09 Nov 2001 01:08:47 -0000 - Optional timeval if given is a floating point time value as accepted by + Optional timeval if given is a floating-point time value as accepted by gmtime() and localtime(), otherwise the current time is used. Optional localtime is a flag that when True, interprets timeval, and @@ -131,7 +145,10 @@ def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: """ ... -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 14): + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... 
+ +elif sys.version_info >= (3, 12): @overload def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: """ @@ -159,21 +176,7 @@ if sys.version_info >= (3, 12): ... else: - def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: - """ - Return local time as an aware datetime object. - - If called without arguments, return current time. Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - In this case, a positive or zero value for *isdst* causes localtime to - presume initially that summer time (for example, Daylight Saving Time) - is or is not (respectively) in effect for the specified time. A - negative value for *isdst* causes the localtime() function to attempt - to divine whether summer time is in effect for the specified time. - """ - ... + def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/enum.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/enum.pyi index 8c81f08..938c5e9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/enum.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/enum.pyi @@ -254,32 +254,7 @@ class EnumMeta(type): qualname: str | None = None, type: type | None = None, start: int = 1, - ) -> type[Enum]: - """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - - When used for the functional API: - - `value` will be the name of the new class. 
- - `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. - - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. - - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. - - `type`, if set, will be mixed in as the first base class. - """ - ... + ) -> type[Enum]: ... # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) # @@ -415,9 +390,7 @@ class Enum(metaclass=EnumMeta): """Returns public methods and other interesting attributes.""" ... def __hash__(self) -> int: ... - def __format__(self, format_spec: str) -> str: - """Returns format using actual value type unless __str__ has been overridden.""" - ... + def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... @@ -558,7 +531,6 @@ if sys.version_info >= (3, 11): else: class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases - """Support for integer-based Flags""" def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/faulthandler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/faulthandler.pyi index a3e9d8a..2e9c298 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/faulthandler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/faulthandler.pyi @@ -27,8 +27,8 @@ def is_enabled() -> bool: if sys.platform != "win32": def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: - """register(signum, file=sys.stderr, all_threads=True, chain=False): register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file""" + """Register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file.""" ... def unregister(signum: int, /) -> None: - """unregister(signum): unregister the handler of the signal 'signum' registered by register()""" + """Unregister the handler of the signal 'signum' registered by register().""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fcntl.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fcntl.pyi index f3eab3e..301ef0d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fcntl.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fcntl.pyi @@ -7,7 +7,7 @@ a file or socket object. 
import sys from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer -from typing import Any, Literal, overload +from typing import Any, Final, Literal, overload from typing_extensions import Buffer if sys.platform != "win32": @@ -51,9 +51,10 @@ if sys.platform != "win32": F_SEAL_SHRINK: int F_SEAL_WRITE: int if sys.version_info >= (3, 9): - F_OFD_GETLK: int - F_OFD_SETLK: int - F_OFD_SETLKW: int + F_OFD_GETLK: Final[int] + F_OFD_SETLK: Final[int] + F_OFD_SETLKW: Final[int] + if sys.version_info >= (3, 10): F_GETPIPE_SZ: int F_SETPIPE_SZ: int @@ -112,6 +113,36 @@ if sys.platform != "win32": FICLONE: int FICLONERANGE: int + if sys.version_info >= (3, 13) and sys.platform == "linux": + F_OWNER_TID: Final = 0 + F_OWNER_PID: Final = 1 + F_OWNER_PGRP: Final = 2 + F_SETOWN_EX: Final = 15 + F_GETOWN_EX: Final = 16 + F_SEAL_FUTURE_WRITE: Final = 16 + F_GET_RW_HINT: Final = 1035 + F_SET_RW_HINT: Final = 1036 + F_GET_FILE_RW_HINT: Final = 1037 + F_SET_FILE_RW_HINT: Final = 1038 + RWH_WRITE_LIFE_NOT_SET: Final = 0 + RWH_WRITE_LIFE_NONE: Final = 1 + RWH_WRITE_LIFE_SHORT: Final = 2 + RWH_WRITE_LIFE_MEDIUM: Final = 3 + RWH_WRITE_LIFE_LONG: Final = 4 + RWH_WRITE_LIFE_EXTREME: Final = 5 + + if sys.version_info >= (3, 11) and sys.platform == "darwin": + F_OFD_SETLK: Final = 90 + F_OFD_SETLKW: Final = 91 + F_OFD_GETLK: Final = 92 + + if sys.version_info >= (3, 13) and sys.platform != "linux": + # OSx and NetBSD + F_GETNOSIGPIPE: Final[int] + F_SETNOSIGPIPE: Final[int] + # OSx and FreeBSD + F_RDAHEAD: Final[int] + @overload def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/filecmp.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/filecmp.pyi index 0a533e5..2b95bd5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/filecmp.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/filecmp.pyi @@ -13,7 +13,7 @@ 
Functions: import sys from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence -from typing import Any, AnyStr, Generic, Literal +from typing import Any, AnyStr, Final, Generic, Literal if sys.version_info >= (3, 9): from types import GenericAlias @@ -21,7 +21,7 @@ if sys.version_info >= (3, 9): __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: list[str] -BUFSIZE: Literal[8192] +BUFSIZE: Final = 8192 def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: """ @@ -67,12 +67,15 @@ class dircmp(Generic[AnyStr]): """ A class that manages the comparison of 2 directories. - dircmp(a, b, ignore=None, hide=None) + dircmp(a, b, ignore=None, hide=None, *, shallow=True) A and B are directories. IGNORE is a list of names to ignore, defaults to DEFAULT_IGNORES. HIDE is a list of names to hide, defaults to [os.curdir, os.pardir]. + SHALLOW specifies whether to just check the stat signature (do not read + the files). + defaults to True. High level usage: x = dircmp(dir1, dir2) @@ -100,13 +103,24 @@ class dircmp(Generic[AnyStr]): in common_dirs. """ - def __init__( - self, - a: GenericPath[AnyStr], - b: GenericPath[AnyStr], - ignore: Sequence[AnyStr] | None = None, - hide: Sequence[AnyStr] | None = None, - ) -> None: ... + if sys.version_info >= (3, 13): + def __init__( + self, + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, + *, + shallow: bool = True, + ) -> None: ... + else: + def __init__( + self, + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, + ) -> None: ... 
left: AnyStr right: AnyStr hide: Sequence[AnyStr] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fileinput.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fileinput.pyi index 3a0c8b5..53860c9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fileinput.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/fileinput.pyi @@ -54,7 +54,7 @@ __getitem__() method which implements the sequence behavior. The sequence must be accessed in strictly sequential order; sequence access and readline() cannot be mixed. -Optional in-place filtering: if the keyword argument inplace=1 is +Optional in-place filtering: if the keyword argument inplace=True is passed to input() or to the FileInput constructor, the file is moved to a backup file and standard output is directed to the input file. This makes it possible to write a filter that rewrites its input file diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ftplib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ftplib.pyi index 9ffe1f9..840e667 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ftplib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ftplib.pyi @@ -35,16 +35,16 @@ from collections.abc import Callable, Iterable, Iterator from socket import socket from ssl import SSLContext from types import TracebackType -from typing import Any, Literal, TextIO +from typing import Any, Final, Literal, TextIO from typing_extensions import Self __all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] -MSG_OOB: Literal[1] -FTP_PORT: Literal[21] -MAXLINE: Literal[8192] -CRLF: Literal["\r\n"] -B_CRLF: Literal[b"\r\n"] +MSG_OOB: Final = 1 +FTP_PORT: Final = 21 +MAXLINE: Final = 8192 +CRLF: Final = "\r\n" +B_CRLF: Final = b"\r\n" class Error(Exception): ... class error_reply(Error): ... 
@@ -209,7 +209,7 @@ class FTP: def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: """Login, default anonymous.""" ... - # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. + # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: """ Initiate a transfer over the data connection. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/functools.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/functools.pyi index 2f4dda8..14267ed 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/functools.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/functools.pyi @@ -40,7 +40,7 @@ _RWrapper = TypeVar("_RWrapper") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T, /) -> _T: """ - reduce(function, iterable[, initial]) -> value + reduce(function, iterable[, initial], /) -> value Apply a function of two arguments cumulatively to the items of a sequence or iterable, from left to right, so as to reduce the iterable to a single @@ -53,7 +53,7 @@ def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T @overload def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T], /) -> _T: """ - reduce(function, iterable[, initial]) -> value + reduce(function, iterable[, initial], /) -> value Apply a function of two arguments cumulatively to the items of a sequence or iterable, from left to right, so as to reduce the iterable to a single @@ -118,8 +118,9 @@ def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Calla can grow without bound. If *typed* is True, arguments of different types will be cached separately. 
- For example, f(3.0) and f(3) will be treated as distinct calls with - distinct results. + For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as + distinct calls with distinct results. Some types such as str and int may + be cached separately even when typed is false. Arguments to the cached function must be hashable. @@ -139,8 +140,9 @@ def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wra can grow without bound. If *typed* is True, arguments of different types will be cached separately. - For example, f(3.0) and f(3) will be treated as distinct calls with - distinct results. + For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as + distinct calls with distinct results. Some types such as str and int may + be cached separately even when typed is false. Arguments to the cached function must be hashable. @@ -219,35 +221,12 @@ else: wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: - """ - Update a wrapper function to look like the wrapped function - - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) - """ - ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... 
def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: - """ - Decorator factory to apply update_wrapper() to a wrapper function - - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). - """ - ... + ) -> _Wrapper[_PWrapped, _RWrapped]: ... def total_ordering(cls: type[_T]) -> type[_T]: """Class decorator that fills in missing ordering methods""" @@ -263,8 +242,8 @@ def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComp class partial(Generic[_T]): """ - partial(func, *args, **keywords) - new function with partial application - of the given arguments and keywords. + Create a new function with partial application of the given arguments + and keywords. """ @property def func(self) -> Callable[..., _T]: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gc.pyi index 4162274..bbc311e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gc.pyi @@ -23,14 +23,14 @@ get_freeze_count() -- Return the number of objects in the permanent generation. 
import sys from collections.abc import Callable -from typing import Any, Literal +from typing import Any, Final, Literal from typing_extensions import TypeAlias -DEBUG_COLLECTABLE: Literal[2] -DEBUG_LEAK: Literal[38] -DEBUG_SAVEALL: Literal[32] -DEBUG_STATS: Literal[1] -DEBUG_UNCOLLECTABLE: Literal[4] +DEBUG_COLLECTABLE: Final = 2 +DEBUG_LEAK: Final = 38 +DEBUG_SAVEALL: Final = 32 +DEBUG_STATS: Final = 1 +DEBUG_UNCOLLECTABLE: Final = 4 _CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], object] @@ -91,16 +91,10 @@ def get_freeze_count() -> int: """Return the number of objects in the permanent generation.""" ... def get_referents(*objs: Any) -> list[Any]: - """ - get_referents(*objs) -> list - Return the list of objects that are directly referred to by objs. - """ + """Return the list of objects that are directly referred to by 'objs'.""" ... def get_referrers(*objs: Any) -> list[Any]: - """ - get_referrers(*objs) -> list - Return the list of objects that directly refer to any of objs. - """ + """Return the list of objects that directly refer to any of 'objs'.""" ... def get_stats() -> list[dict[str, Any]]: """Return a list of dictionaries containing per-generation statistics.""" @@ -140,11 +134,11 @@ def set_debug(flags: int, /) -> None: Debugging information is written to sys.stderr. """ ... -def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: +def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: """ - set_threshold(threshold0, [threshold1, threshold2]) -> None + set_threshold(threshold0, [threshold1, [threshold2]]) + Set the collection thresholds (the collection frequency). - Sets the collection thresholds. Setting threshold0 to zero disables - collection. + Setting 'threshold0' to zero disables collection. """ ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/genericpath.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/genericpath.pyi index ff2ae57..8f6818b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/genericpath.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/genericpath.pyi @@ -97,6 +97,18 @@ def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... if sys.version_info >= (3, 13): - def isjunction(path: StrOrBytesPath) -> bool: ... - def isdevdrive(path: StrOrBytesPath) -> bool: ... - def lexists(path: StrOrBytesPath) -> bool: ... + def isjunction(path: StrOrBytesPath) -> bool: + """ + Test whether a path is a junction + Junctions are not supported on the current platform + """ + ... + def isdevdrive(path: StrOrBytesPath) -> bool: + """ + Determines whether the specified path is on a Windows Dev Drive. + Dev Drives are not supported on the current platform + """ + ... + def lexists(path: StrOrBytesPath) -> bool: + """Test whether a path exists. Returns True for broken symbolic links""" + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/getpass.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/getpass.pyi index 28a8392..60f40cd 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/getpass.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/getpass.pyi @@ -23,6 +23,11 @@ def getuser() -> str: First try various environment variables, then the password database. This works on Windows as long as USERNAME is set. + Any failure to find a username raises OSError. + + .. versionchanged:: 3.13 + Previously, various exceptions beyond just :exc:`OSError` + were raised. """ ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/glob.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/glob.pyi index 64bb19f..3448b33 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/glob.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/glob.pyi @@ -61,34 +61,10 @@ if sys.version_info >= (3, 11): elif sys.version_info >= (3, 10): def glob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> list[AnyStr]: - """ - Return a list of paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - ... + ) -> list[AnyStr]: ... def iglob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> Iterator[AnyStr]: - """ - Return an iterator which yields the paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - ... + ) -> Iterator[AnyStr]: ... else: def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... @@ -105,4 +81,18 @@ def has_magic(s: str | bytes) -> bool: ... # undocumented if sys.version_info >= (3, 13): def translate( pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None - ) -> str: ... + ) -> str: + """ + Translate a pathname with shell wildcards to a regular expression. 
+ + If `recursive` is true, the pattern segment '**' will match any number of + path segments. + + If `include_hidden` is true, wildcards can match path segments beginning + with a dot ('.'). + + If a sequence of separator characters is given to `seps`, they will be + used to split the pattern into segments and match path separators. If not + given, os.path.sep and os.path.altsep (where available) are used. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gzip.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gzip.pyi index 6c8bc2c..3381407 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gzip.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/gzip.pyi @@ -10,7 +10,7 @@ import sys import zlib from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath from io import FileIO -from typing import Literal, Protocol, TextIO, overload +from typing import Final, Literal, Protocol, TextIO, overload from typing_extensions import TypeAlias __all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"] @@ -19,14 +19,14 @@ _ReadBinaryMode: TypeAlias = Literal["r", "rb"] _WriteBinaryMode: TypeAlias = Literal["a", "ab", "w", "wb", "x", "xb"] _OpenTextMode: TypeAlias = Literal["rt", "at", "wt", "xt"] -READ: object # undocumented -WRITE: object # undocumented +READ: Final[object] # undocumented +WRITE: Final[object] # undocumented -FTEXT: int # actually Literal[1] # undocumented -FHCRC: int # actually Literal[2] # undocumented -FEXTRA: int # actually Literal[4] # undocumented -FNAME: int # actually Literal[8] # undocumented -FCOMMENT: int # actually Literal[16] # undocumented +FTEXT: Final[int] # actually Literal[1] # undocumented +FHCRC: Final[int] # actually Literal[2] # undocumented +FEXTRA: Final[int] # actually Literal[4] # undocumented +FNAME: Final[int] # actually Literal[8] # undocumented +FCOMMENT: Final[int] # actually Literal[16] # undocumented class 
_ReadableFileobj(Protocol): def read(self, n: int, /) -> bytes: ... @@ -224,9 +224,10 @@ class GzipFile(_compression.BaseStream): and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. """ ... @overload @@ -266,9 +267,10 @@ class GzipFile(_compression.BaseStream): and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. """ ... @overload @@ -308,9 +310,10 @@ class GzipFile(_compression.BaseStream): and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. 
Use mtime = 0 + to generate a compressed stream that does not depend on creation time. """ ... @overload @@ -350,9 +353,10 @@ class GzipFile(_compression.BaseStream): and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. """ ... @overload @@ -392,9 +396,10 @@ class GzipFile(_compression.BaseStream): and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. """ ... 
if sys.version_info < (3, 12): diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/__init__.pyi index e15e503..a2d5782 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/__init__.pyi @@ -15,7 +15,7 @@ class HTTPStatus(IntEnum): Status codes from the following RFCs are all observed: - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 * RFC 6585: Additional HTTP Status Codes * RFC 3229: Delta encoding in HTTP * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 @@ -123,7 +123,7 @@ if sys.version_info >= (3, 11): Methods from the following RFCs are all observed: - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 * RFC 5789: PATCH Method for HTTP """ @property diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/client.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/client.pyi index 652c868..5a666a2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/client.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/client.pyi @@ -105,6 +105,7 @@ __all__ = [ _DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer _T = TypeVar("_T") _MessageT = TypeVar("_MessageT", bound=email.message.Message) +_HeaderValue: TypeAlias = ReadableBuffer | str | int HTTP_PORT: int HTTPS_PORT: int @@ -328,7 +329,7 @@ class HTTPConnection: method: str, url: str, body: _DataType | str | None = None, - headers: Mapping[str, str] = {}, + headers: Mapping[str, _HeaderValue] = {}, *, encode_chunked: bool = False, ) -> None: @@ -401,7 +402,7 @@ class HTTPConnection:
'Accept-Encoding:' header """ ... - def putheader(self, header: str | bytes, *argument: str | bytes) -> None: + def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: """ Send a request header line to the server. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/cookiejar.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/cookiejar.pyi index 45cf92c..e9419f5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/cookiejar.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/cookiejar.pyi @@ -121,7 +121,7 @@ class CookieJar(Iterable[Cookie]): class FileCookieJar(CookieJar): """CookieJar that can be loaded from and saved to a file.""" - filename: str + filename: str | None delayload: bool def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/server.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/server.pyi index 6054544..5c41768 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/server.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/http/server.pyi @@ -3,18 +3,18 @@ HTTP server classes. Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and CGIHTTPRequestHandler for CGI scripts. +and (deprecated) CGIHTTPRequestHandler for CGI scripts. -It does, however, optionally implement HTTP/1.1 persistent connections, -as of version 0.3. +It does, however, optionally implement HTTP/1.1 persistent connections. Notes on CGIHTTPRequestHandler ------------------------------ -This class implements GET and POST requests to cgi-bin scripts. +This class is deprecated. It implements GET and POST requests to cgi-bin scripts. 
-If the os.fork() function is not present (e.g. on Windows), -subprocess.Popen() is used as a fallback, with slightly altered semantics. +If the os.fork() function is not present (Windows), subprocess.Popen() is used, +with slightly altered but never documented semantics. Use from a threaded +process is likely to trigger a warning at os.fork() time. In all cases, the implementation is intentionally naive -- all requests are executed synchronously. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imghdr.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imghdr.pyi index c6d1f2c..6e1b858 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imghdr.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imghdr.pyi @@ -1,5 +1,3 @@ -"""Recognize image file formats based on their first few bytes.""" - from _typeshed import StrPath from collections.abc import Callable from typing import Any, BinaryIO, Protocol, overload @@ -12,12 +10,8 @@ class _ReadableBinary(Protocol): def seek(self, offset: int, /) -> Any: ... @overload -def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: - """Return the type of image contained in a file or byte stream.""" - ... +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... @overload -def what(file: Any, h: bytes) -> str | None: - """Return the type of image contained in a file or byte stream.""" - ... +def what(file: Any, h: bytes) -> str | None: ... 
tests: list[Callable[[bytes, BinaryIO | None], str | None]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imp.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imp.pyi index 5b2397f..ee5a0cd 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imp.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/imp.pyi @@ -1,11 +1,3 @@ -""" -This module provides the components needed to build your own __import__ -function. Undocumented functions are obsolete. - -In most cases it is preferred you consider using the importlib module's -functionality over this module. -""" - import types from _imp import ( acquire_lock as acquire_lock, @@ -34,66 +26,16 @@ PY_FROZEN: int PY_CODERESOURCE: int IMP_HOOK: int -def new_module(name: str) -> types.ModuleType: - """ - **DEPRECATED** - - Create a new module. - - The module is not entered into sys.modules. - """ - ... -def get_magic() -> bytes: - """ - **DEPRECATED** - - Return the magic number for .pyc files. - """ - ... -def get_tag() -> str: - """Return the magic tag for .pyc files.""" - ... -def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: - """ - **DEPRECATED** - - Given the path to a .py file, return the path to its .pyc file. - - The .py file does not need to exist; this simply returns the path to the - .pyc file calculated as if the .py file were imported. - - If debug_override is not None, then it must be a boolean and is used in - place of sys.flags.optimize. - - If sys.implementation.cache_tag is None then NotImplementedError is raised. - """ - ... -def source_from_cache(path: StrPath) -> str: - """ - **DEPRECATED** - - Given the path to a .pyc. file, return the path to its .py file. - - The .pyc file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc file. If path does - not conform to PEP 3147 format, ValueError will be raised. 
If - sys.implementation.cache_tag is None then NotImplementedError is raised. - """ - ... -def get_suffixes() -> list[tuple[str, str, int]]: - """**DEPRECATED**""" - ... +def new_module(name: str) -> types.ModuleType: ... +def get_magic() -> bytes: ... +def get_tag() -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def get_suffixes() -> list[tuple[str, str, int]]: ... class NullImporter: - """ - **DEPRECATED** - - Null import object. - """ def __init__(self, path: StrPath) -> None: ... - def find_module(self, fullname: Any) -> None: - """Always returns None.""" - ... + def find_module(self, fullname: Any) -> None: ... # Technically, a text file has to support a slightly different set of operations than a binary file, # but we ignore that here. @@ -107,58 +49,14 @@ class _FileLike(Protocol): # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... -def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: - """**DEPRECATED**""" - ... -def load_package(name: str, path: StrPath) -> types.ModuleType: - """**DEPRECATED**""" - ... -def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: - """ - **DEPRECATED** - - Load a module, given information returned by find_module(). - - The module name must include the full package name, if any. - """ - ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_package(name: str, path: StrPath) -> types.ModuleType: ... +def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. 
def find_module( name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None -) -> tuple[IO[Any], str, tuple[str, str, int]]: - """ - **DEPRECATED** - - Search for a module. - - If path is omitted or None, search for a built-in, frozen or special - module and continue search in sys.path. The module name cannot - contain '.'; to search for a submodule of a package, pass the - submodule name and the package's __path__. - """ - ... -def reload(module: types.ModuleType) -> types.ModuleType: - """ - **DEPRECATED** - - Reload the module and return it. - - The module must have been successfully imported before. - """ - ... -def init_builtin(name: str) -> types.ModuleType | None: - """ - **DEPRECATED** - - Load and return a built-in module by name, or None is such module doesn't - exist - """ - ... -def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: - """ - **DEPRECATED** - - Load an extension module. - """ - ... +) -> tuple[IO[Any], str, tuple[str, str, int]]: ... +def reload(module: types.ModuleType) -> types.ModuleType: ... +def init_builtin(name: str) -> types.ModuleType | None: ... +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... # file argument is ignored diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/__init__.pyi index 12a5f44..22c4943 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/__init__.pyi @@ -39,15 +39,7 @@ def import_module(name: str, package: str | None = None) -> ModuleType: ... if sys.version_info < (3, 12): - def find_loader(name: str, path: str | None = None) -> Loader | None: - """ - Return the loader for the specified module. - - This is a backward-compatible wrapper around find_spec(). 
- - This function is deprecated in favor of importlib.util.find_spec(). - """ - ... + def find_loader(name: str, path: str | None = None) -> Loader | None: ... def invalidate_caches() -> None: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/_abc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/_abc.pyi index b5a33e2..3c1fe88 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/_abc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/_abc.pyi @@ -23,16 +23,7 @@ if sys.version_info >= (3, 10): """ ... if sys.version_info < (3, 12): - def module_repr(self, module: types.ModuleType) -> str: - """ - Return a module's repr. - - Used by the module type when the method does not raise - NotImplementedError. - - This method is deprecated. - """ - ... + def module_repr(self, module: types.ModuleType) -> str: ... def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/abc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/abc.pyi index 7a3e0f7..f3c6890 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/abc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/abc.pyi @@ -37,18 +37,7 @@ else: def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info < (3, 12): - class Finder(metaclass=ABCMeta): - """ - Legacy abstract base class for import finders. - - It may be subclassed for compatibility with legacy third party - reimplementations of the import system. Otherwise, finder - implementations should derive from the more specific MetaPathFinder - or PathEntryFinder ABCs. - - Deprecated since Python 3.3 - """ - ... + class Finder(metaclass=ABCMeta): ... 
class ResourceLoader(Loader): """ @@ -179,18 +168,7 @@ if sys.version_info >= (3, 10): class MetaPathFinder(metaclass=ABCMeta): """Abstract base class for import finders on sys.meta_path.""" if sys.version_info < (3, 12): - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: - """ - Return a loader for the module. - - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. - """ - ... + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... def invalidate_caches(self) -> None: """ @@ -206,31 +184,8 @@ if sys.version_info >= (3, 10): class PathEntryFinder(metaclass=ABCMeta): """Abstract base class for path entry finders used by PathFinder.""" if sys.version_info < (3, 12): - def find_module(self, fullname: str) -> Loader | None: - """ - Try to find a loader for the specified module by delegating to - self.find_loader(). - - This method is deprecated in favor of finder.find_spec(). - """ - ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: - """ - Return (loader, namespace portion) for the path entry. - - The fullname is a str. The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. - - The portion will be discarded if another path entry finder - locates the module as a normal module or package. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ - ... + def find_module(self, fullname: str) -> Loader | None: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... 
def invalidate_caches(self) -> None: """ @@ -359,16 +314,14 @@ if sys.version_info >= (3, 9): ... else: @abstractmethod - def joinpath(self, child: str, /) -> Traversable: - """Return Traversable child in self""" - ... + def joinpath(self, child: str, /) -> Traversable: ... # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @overload @abstractmethod - def open(self, mode: Literal["r"] = "r", /, *, encoding: str | None = None, errors: str | None = None) -> IO[str]: + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: """ mode may be 'r' or 'rb' to open as text or binary. Return a handle suitable for reading (same as pathlib.Path.open). @@ -379,7 +332,7 @@ if sys.version_info >= (3, 9): ... @overload @abstractmethod - def open(self, mode: Literal["rb"], /) -> IO[bytes]: + def open(self, mode: Literal["rb"]) -> IO[bytes]: """ mode may be 'r' or 'rb' to open as text or binary. Return a handle suitable for reading (same as pathlib.Path.open). diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/machinery.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/machinery.pyi index 6dc2cd6..1902b7c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/machinery.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/machinery.pyi @@ -77,15 +77,7 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) # MetaPathFinder if sys.version_info < (3, 12): @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """ - Find the built-in module. - - If 'path' is ever specified then the search is considered a failure. - - This method is deprecated. Use find_spec() instead. - """ - ... 
+ def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -115,13 +107,7 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) # Loader if sys.version_info < (3, 12): @staticmethod - def module_repr(module: types.ModuleType) -> str: - """ - Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - """ - ... + def module_repr(module: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: @@ -147,13 +133,7 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder if sys.version_info < (3, 12): @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """ - Find a frozen module. - - This method is deprecated. Use find_spec() instead. - """ - ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -183,13 +163,7 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # Loader if sys.version_info < (3, 12): @staticmethod - def module_repr(m: types.ModuleType) -> str: - """ - Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - """ - ... + def module_repr(m: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: @@ -206,13 +180,7 @@ class WindowsRegistryFinder(importlib.abc.MetaPathFinder): """Meta path finder for modules declared in the Windows registry.""" if sys.version_info < (3, 12): @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """ - Find module named in the registry. - - This method is deprecated. 
Use find_spec() instead. - """ - ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -226,7 +194,7 @@ class PathFinder: def invalidate_caches() -> None: """ Call the invalidate_caches() method on all path entry finders - stored in sys.path_importer_caches (where implemented). + stored in sys.path_importer_cache (where implemented). """ ... else: @@ -260,14 +228,7 @@ class PathFinder: ... if sys.version_info < (3, 12): @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """ - find the module on sys.path or 'path' based on sys.path_hooks and - sys.path_importer_cache. - - This method is deprecated. Use find_spec() instead. - """ - ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi index 7a8a312..d903a8f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi @@ -1,6 +1,7 @@ import abc import pathlib import sys +import types from _collections_abc import dict_keys, dict_values from _typeshed import StrPath from collections.abc import Iterable, Iterator, Mapping @@ -46,11 +47,8 @@ if sys.version_info >= (3, 10): """ ... 
- if sys.version_info >= (3, 12): - # It's generic but shouldn't be - _SimplePath: TypeAlias = SimplePath[Any] - else: - _SimplePath: TypeAlias = SimplePath + _SimplePath: TypeAlias = SimplePath + else: _SimplePath: TypeAlias = Path @@ -59,20 +57,10 @@ class PackageNotFoundError(ModuleNotFoundError): @property def name(self) -> str: ... # type: ignore[override] -if sys.version_info >= (3, 11): +if sys.version_info >= (3, 13): + _EntryPointBase = object +elif sys.version_info >= (3, 11): class DeprecatedTuple: - """ - Provide subscript item access for backward compatibility. - - >>> recwarn = getfixture('recwarn') - >>> ep = EntryPoint(name='name', value='value', group='group') - >>> ep[:] - ('name', 'value', 'group') - >>> ep[0] - 'name' - >>> len(recwarn) - 1 - """ def __getitem__(self, item: int) -> str: ... _EntryPointBase = DeprecatedTuple @@ -154,18 +142,12 @@ class EntryPoint(_EntryPointBase): """ ... - def __hash__(self) -> int: - """Return hash(self).""" - ... - def __eq__(self, other: object) -> bool: - """Return self==value.""" - ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... if sys.version_info >= (3, 11): def __lt__(self, other: object) -> bool: ... if sys.version_info < (3, 12): - def __iter__(self) -> Iterator[Any]: - """Supply iter so one may construct dicts of EntryPoints by name.""" - ... + def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really if sys.version_info >= (3, 12): class EntryPoints(tuple[EntryPoint, ...]): @@ -198,45 +180,11 @@ if sys.version_info >= (3, 12): ... elif sys.version_info >= (3, 10): - class DeprecatedList(list[_T]): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. 
- - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - ... + class DeprecatedList(list[_T]): ... class EntryPoints(DeprecatedList[EntryPoint]): # use as list is deprecated since 3.10 - """An immutable collection of selectable EntryPoint objects.""" # int argument is deprecated since 3.10 - def __getitem__(self, name: int | str) -> EntryPoint: - """Get the EntryPoint in self matching name.""" - ... + def __getitem__(self, name: int | str) -> EntryPoint: ... # type: ignore[override] def select( self, *, @@ -246,50 +194,14 @@ elif sys.version_info >= (3, 10): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> EntryPoints: - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - ... + ) -> EntryPoints: ... @property - def names(self) -> set[str]: - """Return the set of all names of all entry points.""" - ... + def names(self) -> set[str]: ... @property - def groups(self) -> set[str]: - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() - """ - ... + def groups(self) -> set[str]: ... if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class Deprecated(Generic[_KT, _VT]): - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. 
- - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ def __getitem__(self, name: _KT) -> _VT: ... @overload def get(self, name: _KT) -> _VT | None: ... @@ -301,24 +213,14 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): def values(self) -> dict_values[_KT, _VT]: ... class SelectableGroups(Deprecated[str, EntryPoints], dict[str, EntryPoints]): # use as dict is deprecated since 3.10 - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ @classmethod def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property def groups(self) -> set[str]: ... @property - def names(self) -> set[str]: - """ - for coverage: - >>> SelectableGroups().names - set() - """ - ... + def names(self) -> set[str]: ... @overload - def select(self) -> Self: ... # type: ignore[misc] + def select(self) -> Self: ... @overload def select( self, @@ -355,12 +257,35 @@ else: _distribution_parent = object class Distribution(_distribution_parent): - """A Python distribution package.""" + """ + An abstract Python distribution package. + + Custom providers may derive from this class and define + the abstract methods to provide a concrete implementation + for their environment. Some providers may opt to override + the default implementation of some properties to bypass + the file-reading mechanism. + """ @abc.abstractmethod def read_text(self, filename: str) -> str | None: """ Attempt to load metadata file given by the name. + Python distribution metadata is organized by blobs of text + typically represented as "files" in the metadata directory + (e.g. package-1.0.dist-info). 
These files include things + like: + + - METADATA: The distribution metadata including fields + like Name and Version and Description. + - entry_points.txt: A series of entry points as defined in + `the entry points spec `_. + - RECORD: A record of files according to + `this recording spec `_. + + A package may provide any set of files, including those + not listed here or none at all. + :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ @@ -368,7 +293,7 @@ class Distribution(_distribution_parent): @abc.abstractmethod def locate_file(self, path: StrPath) -> _SimplePath: """ - Given a path to a file in this distribution, return a path + Given a path to a file in this distribution, return a SimplePath to it. """ ... @@ -395,7 +320,8 @@ class Distribution(_distribution_parent): a context. :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for all packages. + :return: Iterable of Distribution objects for packages matching + the context. """ ... @overload @@ -410,13 +336,14 @@ class Distribution(_distribution_parent): a context. :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for all packages. + :return: Iterable of Distribution objects for packages matching + the context. """ ... @staticmethod def at(path: StrPath) -> PathDistribution: """ - Return a Distribution for the indicated metadata path + Return a Distribution for the indicated metadata path. :param path: a string or path-like object :return: a concrete Distribution instance for the path @@ -430,11 +357,22 @@ class Distribution(_distribution_parent): Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. + metadata per the + `Core metadata specifications `_. + + Custom providers may provide the METADATA file or override this + property. """ ... 
@property - def entry_points(self) -> EntryPoints: ... + def entry_points(self) -> EntryPoints: + """ + Return EntryPoints for this distribution. + + Custom providers may provide the ``entry_points.txt`` file + or override this property. + """ + ... else: @property def metadata(self) -> Message: ... @@ -456,6 +394,10 @@ class Distribution(_distribution_parent): (i.e. RECORD for dist-info, or installed-files.txt or SOURCES.txt for egg-info) is missing. Result may be empty if the metadata exists but is empty. + + Custom providers are recommended to provide a "RECORD" file (in + ``read_text``) or override this property to allow for callers to be + able to resolve filenames provided by the package. """ ... @property @@ -467,9 +409,17 @@ class Distribution(_distribution_parent): def name(self) -> str: """Return the 'Name' metadata for the distribution package.""" ... + if sys.version_info >= (3, 13): + @property + def origin(self) -> types.SimpleNamespace: ... class DistributionFinder(MetaPathFinder): - """A MetaPathFinder capable of discovering installed distributions.""" + """ + A MetaPathFinder capable of discovering installed distributions. + + Custom providers should implement this interface in order to + supply metadata. + """ class Context: """ Keyword arguments presented by the caller to @@ -480,6 +430,17 @@ class DistributionFinder(MetaPathFinder): Each DistributionFinder may expect any parameters and should attempt to honor the canonical parameters defined below when appropriate. + + This mechanism gives a custom provider a means to + solicit additional details from the caller beyond + "name" and "path" when searching distributions. + For example, imagine a provider that exposes suites + of packages in either a "public" or "private" ``realm``. + A caller may wish to query only for distributions in + a particular realm and could call + ``distributions(realm="private")`` to signal to the + custom provider to only include distributions from that + realm. 
""" name: str | None def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... @@ -537,6 +498,21 @@ class PathDistribution(Distribution): """ Attempt to load metadata file given by the name. + Python distribution metadata is organized by blobs of text + typically represented as "files" in the metadata directory + (e.g. package-1.0.dist-info). These files include things + like: + + - METADATA: The distribution metadata including fields + like Name and Version and Description. + - entry_points.txt: A series of entry points as defined in + `the entry points spec `_. + - RECORD: A record of files according to + `this recording spec `_. + + A package may provide any set of files, including those + not listed here or none at all. + :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ @@ -600,47 +576,11 @@ if sys.version_info >= (3, 12): elif sys.version_info >= (3, 10): @overload - def entry_points() -> SelectableGroups: - """ - Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - ... + def entry_points() -> SelectableGroups: ... @overload def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... - ) -> EntryPoints: - """ - Return EntryPoint objects for all installed packages. 
- - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - ... + ) -> EntryPoints: ... else: def entry_points() -> dict[str, list[EntryPoint]]: ... @@ -666,7 +606,7 @@ def requires(distribution_name: str) -> list[str] | None: """ Return a list of requirements for the named package. - :return: An iterator of requirements, suitable for + :return: An iterable of requirements, suitable for packaging.requirement.Requirement. """ ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi index f8c57c9..be28ba1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi @@ -1,9 +1,12 @@ import sys +from _typeshed import StrPath from collections.abc import Iterator -from typing import Any, Protocol, TypeVar, overload +from os import PathLike +from typing import Any, Protocol, overload +from typing_extensions import TypeVar _T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) +_T_co = TypeVar("_T_co", covariant=True, default=Any) class PackageMetadata(Protocol): def __len__(self) -> int: ... @@ -32,9 +35,20 @@ class PackageMetadata(Protocol): """Helper for @overload to raise when called.""" ... 
-if sys.version_info >= (3, 12): +if sys.version_info >= (3, 13): + class SimplePath(Protocol): + """A minimal subset of pathlib.Path required by Distribution.""" + def joinpath(self, other: StrPath, /) -> SimplePath: ... + def __truediv__(self, other: StrPath, /) -> SimplePath: ... + # Incorrect at runtime + @property + def parent(self) -> PathLike[str]: ... + def read_text(self, encoding: str | None = None) -> str: ... + def read_bytes(self) -> bytes: ... + def exists(self) -> bool: ... + +elif sys.version_info >= (3, 12): class SimplePath(Protocol[_T_co]): - """A minimal subset of pathlib.Path required by PathDistribution.""" # At runtime this is defined as taking `str | _T`, but that causes trouble. # See #11436. def joinpath(self, other: str, /) -> _T_co: ... @@ -46,7 +60,6 @@ if sys.version_info >= (3, 12): else: class SimplePath(Protocol): - """A minimal subset of pathlib.Path required by PathDistribution.""" # Actually takes only self at runtime, but that's clearly wrong def joinpath(self, other: Any, /) -> SimplePath: ... # Not defined as a property at runtime, but it should be diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/diagnose.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/diagnose.pyi new file mode 100644 index 0000000..565872f --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/metadata/diagnose.pyi @@ -0,0 +1,2 @@ +def inspect(path: str) -> None: ... +def run() -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/__init__.pyi index 153fef7..051dbb0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/__init__.pyi @@ -9,10 +9,15 @@ from types import ModuleType from typing import Any, BinaryIO, TextIO from typing_extensions import TypeAlias +if sys.version_info >= (3, 11): + from importlib.resources._common import Package as Package +else: + Package: TypeAlias = str | ModuleType + if sys.version_info >= (3, 9): from importlib.abc import Traversable -__all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] +__all__ = ["Package", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] if sys.version_info >= (3, 9): __all__ += ["as_file", "files"] @@ -20,75 +25,48 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 10): __all__ += ["ResourceReader"] -Package: TypeAlias = str | ModuleType +if sys.version_info < (3, 13): + __all__ += ["Resource"] -if sys.version_info >= (3, 11): +if sys.version_info < (3, 11): + Resource: TypeAlias = str | os.PathLike[Any] +elif sys.version_info < (3, 13): Resource: TypeAlias = str + +if sys.version_info >= (3, 13): + from importlib.resources._common import Anchor as Anchor + + __all__ += ["Anchor"] + + from importlib.resources._functional import ( + contents as contents, + is_resource as is_resource, + open_binary as open_binary, + open_text as open_text, + path as path, + read_binary as read_binary, + read_text as read_text, + ) + else: - Resource: TypeAlias = str | os.PathLike[Any] + def open_binary(package: Package, resource: Resource) -> BinaryIO: ... 
+ def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... + def read_binary(package: Package, resource: Resource) -> bytes: ... + def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... + def is_resource(package: Package, name: str) -> bool: ... + def contents(package: Package) -> Iterator[str]: ... -def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource.""" - ... -def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: - """Return a file-like object opened for text reading of the resource.""" - ... -def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource.""" - ... -def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: - """ - Return the decoded string of the resource. - - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ - ... -def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: - """ - A context manager providing a file path object to the resource. - - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). - """ - ... -def is_resource(package: Package, name: str) -> bool: - """ - True if `name` is a resource inside `package`. - - Directories are *not* resources. - """ - ... -def contents(package: Package) -> Iterator[str]: - """ - Return an iterable of entries in `package`. - - Note that not all entries are resources. 
Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ - ... +if sys.version_info >= (3, 11): + from importlib.resources._common import as_file as as_file +elif sys.version_info >= (3, 9): + def as_file(path: Traversable) -> AbstractContextManager[Path]: ... -if sys.version_info >= (3, 9): - def as_file(path: Traversable) -> AbstractContextManager[Path]: - """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ - ... - -if sys.version_info >= (3, 12): - def files(anchor: Package | None = ...) -> Traversable: - """Get a Traversable resource for an anchor.""" - ... +if sys.version_info >= (3, 11): + from importlib.resources._common import files as files elif sys.version_info >= (3, 9): - def files(package: Package) -> Traversable: - """Get a Traversable resource from a package""" - ... + def files(package: Package) -> Traversable: ... if sys.version_info >= (3, 10): from importlib.abc import ResourceReader as ResourceReader diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_common.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_common.pyi new file mode 100644 index 0000000..6358697 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_common.pyi @@ -0,0 +1,67 @@ +import sys + +# Even though this file is 3.11+ only, Pyright will complain in stubtest for older versions. 
+if sys.version_info >= (3, 11): + import types + from collections.abc import Callable + from contextlib import AbstractContextManager + from importlib.abc import ResourceReader, Traversable + from pathlib import Path + from typing import overload + from typing_extensions import TypeAlias, deprecated + + Package: TypeAlias = str | types.ModuleType + + if sys.version_info >= (3, 12): + Anchor: TypeAlias = Package + + def package_to_anchor( + func: Callable[[Anchor | None], Traversable] + ) -> Callable[[Anchor | None, Anchor | None], Traversable]: + """ + Replace 'package' parameter as 'anchor' and warn about the change. + + Other errors should fall through. + + >>> files('a', 'b') + Traceback (most recent call last): + TypeError: files() takes from 0 to 1 positional arguments but 2 were given + + Remove this compatibility in Python 3.14. + """ + ... + @overload + def files(anchor: Anchor | None = None) -> Traversable: + """Get a Traversable resource for an anchor.""" + ... + @overload + @deprecated("First parameter to files is renamed to 'anchor'") + def files(package: Anchor | None = None) -> Traversable: + """Get a Traversable resource for an anchor.""" + ... + + else: + def files(package: Package) -> Traversable: ... + + def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: + """Return the package's loader if it's a ResourceReader.""" + ... + + if sys.version_info >= (3, 12): + def resolve(cand: Anchor | None) -> types.ModuleType: ... + + else: + def resolve(cand: Package) -> types.ModuleType: ... + + if sys.version_info < (3, 12): + def get_package(package: Package) -> types.ModuleType: ... + + def from_package(package: types.ModuleType) -> Traversable: + """Return a Traversable object for the given package.""" + ... + def as_file(path: Traversable) -> AbstractContextManager[Path]: + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_functional.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_functional.pyi new file mode 100644 index 0000000..b53f308 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/resources/_functional.pyi @@ -0,0 +1,59 @@ +"""Simplified function-based API for importlib.resources""" + +import sys + +# Even though this file is 3.13+ only, Pyright will complain in stubtest for older versions. +if sys.version_info >= (3, 13): + from _typeshed import StrPath + from collections.abc import Iterator + from contextlib import AbstractContextManager + from importlib.resources._common import Anchor + from io import TextIOWrapper + from pathlib import Path + from typing import BinaryIO, overload + from typing_extensions import Unpack + + def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: + """Open for binary reading the *resource* within *package*.""" + ... + @overload + def open_text( + anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" + ) -> TextIOWrapper: + """Open for text reading the *resource* within *package*.""" + ... + @overload + def open_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> TextIOWrapper: + """Open for text reading the *resource* within *package*.""" + ... + def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: + """Read and return contents of *resource* within *package* as bytes.""" + ... + @overload + def read_text( + anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" + ) -> str: + """Read and return contents of *resource* within *package* as str.""" + ... 
+ @overload + def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: + """Read and return contents of *resource* within *package* as str.""" + ... + def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path]: + """Return the path to the *resource* as an actual file system path.""" + ... + def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: + """ + Return ``True`` if there is a resource named *name* in the package, + + Otherwise returns ``False``. + """ + ... + def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: + """ + Return an iterable over the named resources within the package. + + The iterable returns :class:`str` resources (e.g. files). + The iterable does not recurse into subdirectories. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/util.pyi index b1608f9..14acfd5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/importlib/util.pyi @@ -13,39 +13,9 @@ from typing_extensions import ParamSpec _P = ParamSpec("_P") if sys.version_info < (3, 12): - def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """ - Decorator to handle selecting the proper module for loaders. - - The decorated function is passed the module to use instead of the module - name. The module passed in to the function is either from sys.modules if - it already exists or is a new module. If the module is new, then __name__ - is set the first argument to the method, __loader__ is set to self, and - __package__ is set accordingly (if self.is_package() is defined) will be set - before it is passed to the decorated function (if self.is_package() does - not work for the module it will be set post-load). 
- - If an exception is raised and the decorator created the module it is - subsequently removed from sys.modules. - - The decorator assumes that the decorated function takes the module name as - the second argument. - """ - ... - def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """ - Set __loader__ on the returned module. - - This function is deprecated. - """ - ... - def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """ - Set __package__ on the returned module. - - This function is deprecated. - """ - ... + def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... def resolve_name(name: str, package: str | None) -> str: """Resolve a relative module name to an absolute one.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/inspect.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/inspect.pyi index 6782598..b5d7e42 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/inspect.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/inspect.pyi @@ -56,7 +56,7 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing import Any, ClassVar, Literal, NamedTuple, Protocol, TypeVar, overload +from typing import Any, ClassVar, Final, Literal, NamedTuple, Protocol, TypeVar, overload from typing_extensions import ParamSpec, Self, TypeAlias, TypeGuard, TypeIs if sys.version_info >= (3, 11): @@ -193,34 +193,41 @@ class BlockFinder: last: int def tokeneater(self, type: int, token: str, srowcol: tuple[int, int], erowcol: tuple[int, int], line: str) -> None: ... 
-CO_OPTIMIZED: Literal[1] -CO_NEWLOCALS: Literal[2] -CO_VARARGS: Literal[4] -CO_VARKEYWORDS: Literal[8] -CO_NESTED: Literal[16] -CO_GENERATOR: Literal[32] -CO_NOFREE: Literal[64] -CO_COROUTINE: Literal[128] -CO_ITERABLE_COROUTINE: Literal[256] -CO_ASYNC_GENERATOR: Literal[512] -TPFLAGS_IS_ABSTRACT: Literal[1048576] +CO_OPTIMIZED: Final = 1 +CO_NEWLOCALS: Final = 2 +CO_VARARGS: Final = 4 +CO_VARKEYWORDS: Final = 8 +CO_NESTED: Final = 16 +CO_GENERATOR: Final = 32 +CO_NOFREE: Final = 64 +CO_COROUTINE: Final = 128 +CO_ITERABLE_COROUTINE: Final = 256 +CO_ASYNC_GENERATOR: Final = 512 +TPFLAGS_IS_ABSTRACT: Final = 1048576 modulesbyfile: dict[str, Any] _GetMembersPredicateTypeGuard: TypeAlias = Callable[[Any], TypeGuard[_T]] +_GetMembersPredicateTypeIs: TypeAlias = Callable[[Any], TypeIs[_T]] _GetMembersPredicate: TypeAlias = Callable[[Any], bool] -_GetMembersReturnTypeGuard: TypeAlias = list[tuple[str, _T]] -_GetMembersReturn: TypeAlias = list[tuple[str, Any]] +_GetMembersReturn: TypeAlias = list[tuple[str, _T]] @overload -def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: +def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: """ Return all members of an object as (name, value) pairs sorted by name. Optionally, only return members that satisfy a given predicate. """ ... @overload -def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: +def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: + """ + Return all members of an object as (name, value) pairs sorted by name. + Optionally, only return members that satisfy a given predicate. + """ + ... +@overload +def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: """ Return all members of an object as (name, value) pairs sorted by name. 
Optionally, only return members that satisfy a given predicate. @@ -229,7 +236,7 @@ def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> if sys.version_info >= (3, 11): @overload - def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: """ Return all members of an object as (name, value) pairs sorted by name without triggering dynamic lookup via the descriptor protocol, @@ -244,7 +251,22 @@ if sys.version_info >= (3, 11): """ ... @overload - def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: + """ + Return all members of an object as (name, value) pairs sorted by name + without triggering dynamic lookup via the descriptor protocol, + __getattr__ or __getattribute__. Optionally, only return members that + satisfy a given predicate. + + Note: this function may not be able to retrieve all members + that getmembers can fetch (like dynamically created attributes) + and may find members that getmembers can't (like descriptors + that raise AttributeError). It can also return descriptor objects + instead of instance members in some cases. + """ + ... + @overload + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: """ Return all members of an object as (name, value) pairs sorted by name without triggering dynamic lookup via the descriptor protocol, @@ -504,9 +526,10 @@ def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: But not if ismethod() or isclass() or isfunction() are true. This is new in Python 2.2, and, for example, is true of int.__add__. 
- An object passing this test has a __get__ attribute but not a __set__ - attribute, but beyond that the set of attributes varies. __name__ is - usually sensible, and __doc__ often is. + An object passing this test has a __get__ attribute, but not a + __set__ attribute or a __delete__ attribute. Beyond that, the set + of attributes varies; __name__ is usually sensible, and __doc__ + often is. Methods implemented via descriptors that also pass one of the other tests return false from the ismethoddescriptor() test, simply because @@ -747,7 +770,16 @@ class Signature: @classmethod def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... if sys.version_info >= (3, 13): - def format(self, *, max_width: int | None = None) -> str: ... + def format(self, *, max_width: int | None = None) -> str: + """ + Create a string representation of the Signature object. + + If *max_width* integer is passed, + signature will try to fit into the *max_width*. + If signature is longer than *max_width*, + all parameters will be on separate lines. + """ + ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -809,7 +841,6 @@ if sys.version_info >= (3, 10): # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): - """An enumeration.""" POSITIONAL_ONLY = 0 POSITIONAL_OR_KEYWORD = 1 VAR_POSITIONAL = 2 @@ -820,10 +851,10 @@ class _ParameterKind(enum.IntEnum): def description(self) -> str: ... 
if sys.version_info >= (3, 12): - AGEN_CREATED: Literal["AGEN_CREATED"] - AGEN_RUNNING: Literal["AGEN_RUNNING"] - AGEN_SUSPENDED: Literal["AGEN_SUSPENDED"] - AGEN_CLOSED: Literal["AGEN_CLOSED"] + AGEN_CREATED: Final = "AGEN_CREATED" + AGEN_RUNNING: Final = "AGEN_RUNNING" + AGEN_SUSPENDED: Final = "AGEN_SUSPENDED" + AGEN_CLOSED: Final = "AGEN_CLOSED" def getasyncgenstate( agen: AsyncGenerator[Any, Any] @@ -982,34 +1013,12 @@ def getargs(co: CodeType) -> Arguments: if sys.version_info < (3, 11): class ArgSpec(NamedTuple): - """ArgSpec(args, varargs, keywords, defaults)""" args: list[str] varargs: str | None keywords: str | None defaults: tuple[Any, ...] - def getargspec(func: object) -> ArgSpec: - """ - Get the names and default values of a function's parameters. - - A tuple of four things is returned: (args, varargs, keywords, defaults). - 'args' is a list of the argument names, including keyword-only argument names. - 'varargs' and 'keywords' are the names of the * and ** parameters or None. - 'defaults' is an n-tuple of the default values of the last n parameters. - - This function is deprecated, as it does not support annotations or - keyword-only parameters and will raise ValueError if either is present - on the supplied callable. - - For a more structured introspection API, use inspect.signature() instead. - - Alternatively, use getfullargspec() for an API with a similar namedtuple - based interface, but full support for annotations and keyword-only - parameters. - - Deprecated since Python 3.5, use `inspect.getfullargspec()`. - """ - ... + def getargspec(func: object) -> ArgSpec: ... 
class FullArgSpec(NamedTuple): """FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)""" @@ -1075,20 +1084,7 @@ if sys.version_info < (3, 11): formatvalue: Callable[[Any], str] = ..., formatreturns: Callable[[Any], str] = ..., formatannotation: Callable[[Any], str] = ..., - ) -> str: - """ - Format an argument spec from the values returned by getfullargspec. - - The first seven arguments are (args, varargs, varkw, defaults, - kwonlyargs, kwonlydefaults, annotations). The other five arguments - are the corresponding optional formatting functions that are called to - turn names and values into strings. The last argument is an optional - function to format the sequence of arguments. - - Deprecated since Python 3.5: use the `signature` function and `Signature` - objects. - """ - ... + ) -> str: ... def formatargvalues( args: list[str], @@ -1209,7 +1205,6 @@ if sys.version_info >= (3, 11): else: class Traceback(NamedTuple): - """Traceback(filename, lineno, function, code_context, index)""" filename: str lineno: int function: str @@ -1217,7 +1212,6 @@ else: index: int | None # type: ignore[assignment] class FrameInfo(NamedTuple): - """FrameInfo(frame, filename, lineno, function, code_context, index)""" frame: FrameType filename: str lineno: int @@ -1287,10 +1281,10 @@ def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: # Current State of Generators and Coroutines # -GEN_CREATED: Literal["GEN_CREATED"] -GEN_RUNNING: Literal["GEN_RUNNING"] -GEN_SUSPENDED: Literal["GEN_SUSPENDED"] -GEN_CLOSED: Literal["GEN_CLOSED"] +GEN_CREATED: Final = "GEN_CREATED" +GEN_RUNNING: Final = "GEN_RUNNING" +GEN_SUSPENDED: Final = "GEN_SUSPENDED" +GEN_CLOSED: Final = "GEN_CLOSED" def getgeneratorstate( generator: Generator[Any, Any, Any] @@ -1306,10 +1300,10 @@ def getgeneratorstate( """ ... 
-CORO_CREATED: Literal["CORO_CREATED"] -CORO_RUNNING: Literal["CORO_RUNNING"] -CORO_SUSPENDED: Literal["CORO_SUSPENDED"] -CORO_CLOSED: Literal["CORO_CLOSED"] +CORO_CREATED: Final = "CORO_CREATED" +CORO_RUNNING: Final = "CORO_RUNNING" +CORO_SUSPENDED: Final = "CORO_SUSPENDED" +CORO_CLOSED: Final = "CORO_CLOSED" def getcoroutinestate( coroutine: Coroutine[Any, Any, Any] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/io.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/io.pyi index 6135a9f..b6516c2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/io.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/io.pyi @@ -38,11 +38,11 @@ import abc import builtins import codecs import sys -from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer +from _typeshed import FileDescriptorOrPath, MaybeNone, ReadableBuffer, WriteableBuffer from collections.abc import Callable, Iterable, Iterator from os import _Opener from types import TracebackType -from typing import IO, Any, BinaryIO, Literal, Protocol, TextIO, TypeVar, overload, type_check_only +from typing import IO, Any, BinaryIO, Final, Generic, Literal, Protocol, TextIO, TypeVar, overload, type_check_only from typing_extensions import Self __all__ = [ @@ -72,11 +72,11 @@ if sys.version_info >= (3, 11): _T = TypeVar("_T") -DEFAULT_BUFFER_SIZE: Literal[8192] +DEFAULT_BUFFER_SIZE: Final = 8192 -SEEK_SET: Literal[0] -SEEK_CUR: Literal[1] -SEEK_END: Literal[2] +SEEK_SET: Final = 0 +SEEK_CUR: Final = 1 +SEEK_END: Final = 2 open = builtins.open @@ -245,7 +245,9 @@ class IOBase(metaclass=abc.ABCMeta): terminator(s) recognized. """ ... - def __del__(self) -> None: ... + def __del__(self) -> None: + """Called when the instance is about to be destroyed.""" + ... @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... 
# undocumented @@ -255,9 +257,11 @@ class RawIOBase(IOBase): def readall(self) -> bytes: """Read until EOF, using multiple read() call.""" ... - def readinto(self, buffer: WriteableBuffer, /) -> int | None: ... - def write(self, b: ReadableBuffer, /) -> int | None: ... - def read(self, size: int = -1, /) -> bytes | None: ... + # The following methods can return None if the file is in non-blocking mode + # and no data is available. + def readinto(self, buffer: WriteableBuffer, /) -> int | MaybeNone: ... + def write(self, b: ReadableBuffer, /) -> int | MaybeNone: ... + def read(self, size: int = -1, /) -> bytes | MaybeNone: ... class BufferedIOBase(IOBase): """ @@ -275,7 +279,6 @@ class BufferedIOBase(IOBase): A typical implementation should not inherit from a RawIOBase implementation, but wrap one. """ - raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. def detach(self) -> RawIOBase: """ Disconnect this buffer from its underlying raw stream and return it. @@ -354,24 +357,6 @@ class FileIO(RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definit def closefd(self) -> bool: """True if the file descriptor will be closed by close().""" ... - def write(self, b: ReadableBuffer, /) -> int: - """ - Write buffer b to file, return number of bytes written. - - Only makes one system call, so not all of the data may be written. - The number of bytes actually written is returned. In non-blocking mode, - returns None if the write would block. - """ - ... - def read(self, size: int = -1, /) -> bytes: - """ - Read at most size bytes, returned as bytes. - - Only makes one system call, so less data may be returned than requested. - In non-blocking mode, returns None if no data is available. - Return an empty bytes object at EOF. - """ - ... def __enter__(self) -> Self: ... 
class BytesIO(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes @@ -399,6 +384,7 @@ class BytesIO(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible d class BufferedReader(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes """Create a new buffered reader using the given readable raw IO object.""" + raw: RawIOBase def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def peek(self, size: int = 0, /) -> bytes: ... @@ -411,6 +397,7 @@ class BufferedWriter(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompa stream. If the buffer_size is not given, it defaults to DEFAULT_BUFFER_SIZE. """ + raw: RawIOBase def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def write(self, buffer: ReadableBuffer, /) -> int: ... @@ -526,14 +513,14 @@ class _WrappedBuffer(Protocol): def writable(self) -> bool: ... def truncate(self, size: int, /) -> int: ... def fileno(self) -> int: ... - def isatty(self) -> int: ... + def isatty(self) -> bool: ... # Optional: Only needs to be present if seekable() returns True. # def seek(self, offset: Literal[0], whence: Literal[2]) -> int: ... # def tell(self) -> int: ... -# TODO: Should be generic over the buffer type, but needs to wait for -# TypeVar defaults. -class TextIOWrapper(TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes +_BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffer, covariant=True) + +class TextIOWrapper(TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes r""" Character and line based layer over a BufferedIOBase object, buffer. 
@@ -566,7 +553,7 @@ class TextIOWrapper(TextIOBase, TextIO): # type: ignore[misc] # incompatible d """ def __init__( self, - buffer: _WrappedBuffer, + buffer: _BufferT_co, encoding: str | None = None, errors: str | None = None, newline: str | None = None, @@ -575,7 +562,7 @@ class TextIOWrapper(TextIOBase, TextIO): # type: ignore[misc] # incompatible d ) -> None: ... # Equals the "buffer" argument passed in to the constructor. @property - def buffer(self) -> BinaryIO: ... + def buffer(self) -> _BufferT_co: ... # type: ignore[override] @property def closed(self) -> bool: ... @property @@ -624,7 +611,7 @@ class TextIOWrapper(TextIOBase, TextIO): # type: ignore[misc] # incompatible d """ ... # Equals the "buffer" argument passed in to the constructor. - def detach(self) -> BinaryIO: ... + def detach(self) -> _BufferT_co: ... # type: ignore[override] # TextIOWrapper's version of seek only supports a limited subset of # operations. def seek(self, cookie: int, whence: int = 0, /) -> int: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ipaddress.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ipaddress.pyi index f945a18..df87a4a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ipaddress.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ipaddress.pyi @@ -7,12 +7,12 @@ and networks. import sys from collections.abc import Iterable, Iterator -from typing import Any, Generic, Literal, SupportsInt, TypeVar, overload +from typing import Any, Final, Generic, Literal, SupportsInt, TypeVar, overload from typing_extensions import Self, TypeAlias # Undocumented length constants -IPV4LENGTH: Literal[32] -IPV6LENGTH: Literal[128] +IPV4LENGTH: Final = 32 +IPV6LENGTH: Final = 128 _A = TypeVar("_A", IPv4Address, IPv6Address) _N = TypeVar("_N", IPv4Network, IPv6Network) @@ -145,15 +145,9 @@ class _BaseAddress(_IPAddressBase, SupportsInt): """Return a <= b. 
Computed by @total_ordering from (a < b) or (a == b).""" ... else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - ... - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - ... - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" - ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... @property def is_global(self) -> bool: ... @@ -201,15 +195,9 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" ... else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - ... - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - ... - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" - ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... def address_exclude(self, other: Self) -> Iterator[Self]: """ @@ -464,7 +452,14 @@ class IPv4Address(_BaseV4, _BaseAddress): """Represent and manipulate single IPv4 Addresses.""" if sys.version_info >= (3, 13): @property - def ipv6_mapped(self) -> IPv6Address: ... + def ipv6_mapped(self) -> IPv6Address: + """ + Return the IPv4-mapped IPv6 address. 
+ + Returns: + The IPv4-mapped IPv6 address per RFC 4291. + """ + ... class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): """ @@ -710,7 +705,7 @@ def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: [IPv4Network('192.0.2.0/24')] Args: - addresses: An iterator of IPv4Network or IPv6Network objects. + addresses: An iterable of IPv4Network or IPv6Network objects. Returns: An iterator of the collapsed IPv(4|6)Network objects. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/itertools.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/itertools.pyi index e54bceb..cb5039b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/itertools.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/itertools.pyi @@ -129,8 +129,6 @@ class accumulate(Iterator[_T]): class chain(Iterator[_T]): """ - chain(*iterables) --> chain object - Return a chain object whose .__next__() method returns elements from the first iterable until it is exhausted, then elements from the next iterable, until all of the iterables are exhausted. @@ -266,8 +264,6 @@ def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: class zip_longest(Iterator[_T_co]): """ - zip_longest(iter1 [,iter2 [...]], [fillvalue=None]) --> zip_longest object - Return a zip_longest object whose .__next__() method returns a tuple where the i-th element comes from the i-th iterable argument. The .__next__() method continues until the longest iterable in the argument sequence @@ -358,8 +354,6 @@ class zip_longest(Iterator[_T_co]): class product(Iterator[_T_co]): """ - product(*iterables, repeat=1) --> product object - Cartesian product of input iterables. Equivalent to nested for-loops. For example, product(A, B) returns the same as: ((x,y) for x in A for y in B). 
@@ -562,8 +556,15 @@ if sys.version_info >= (3, 12): ('A', 'B', 'C') ('D', 'E', 'F') ('G',) + + If "strict" is True, raises a ValueError if the final batch is shorter + than n. """ - def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + if sys.version_info >= (3, 13): + def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ... + else: + def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self) -> Self: """Implement iter(self).""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/json/encoder.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/json/encoder.pyi index 41a9386..0548409 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/json/encoder.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/json/encoder.pyi @@ -2,13 +2,13 @@ from collections.abc import Callable, Iterator from re import Pattern -from typing import Any +from typing import Any, Final -ESCAPE: Pattern[str] -ESCAPE_ASCII: Pattern[str] -HAS_UTF8: Pattern[bytes] -ESCAPE_DCT: dict[str, str] -INFINITY: float +ESCAPE: Final[Pattern[str]] +ESCAPE_ASCII: Final[Pattern[str]] +HAS_UTF8: Final[Pattern[bytes]] +ESCAPE_DCT: Final[dict[str, str]] +INFINITY: Final[float] def py_encode_basestring(s: str) -> str: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/btm_matcher.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/btm_matcher.pyi index 5fd03ea..4c87b66 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/btm_matcher.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/btm_matcher.pyi @@ -1,12 +1,3 @@ -""" -A bottom-up tree matching algorithm implementation meant to speed -up 2to3's matching process. After the tree patterns are reduced to -their rarest linear path, a linear Aho-Corasick automaton is -created. 
The linear automaton traverses the linear paths from the -leaves to the root of the AST and returns a set of nodes for further -matching. This reduces significantly the number of candidate nodes. -""" - from _typeshed import Incomplete, SupportsGetItem from collections import defaultdict from collections.abc import Iterable @@ -15,7 +6,6 @@ from .fixer_base import BaseFix from .pytree import Leaf, Node class BMNode: - """Class for a node of the Aho-Corasick automaton used in matching""" count: Incomplete transition_table: Incomplete fixers: Incomplete @@ -24,47 +14,15 @@ class BMNode: def __init__(self) -> None: ... class BottomMatcher: - """ - The main matcher class. After instantiating the patterns should - be added using the add_fixer method - """ match: Incomplete root: Incomplete nodes: Incomplete fixers: Incomplete logger: Incomplete def __init__(self) -> None: ... - def add_fixer(self, fixer: BaseFix) -> None: - """ - Reduces a fixer's pattern tree to a linear path and adds it - to the matcher(a common Aho-Corasick automaton). The fixer is - appended on the matching states and called when they are - reached - """ - ... - def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: - """Recursively adds a linear pattern to the AC automaton""" - ... - def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: - """ - The main interface with the bottom matcher. The tree is - traversed from the bottom using the constructed - automaton. Nodes are only checked once as the tree is - retraversed. When the automaton fails, we give it one more - shot(in case the above tree matches as a whole with the - rejected leaf), then we break for the next leaf. There is the - special case of multiple arguments(see code comments) where we - recheck the nodes - - Args: - The leaves of the AST tree to be matched - - Returns: - A dictionary of node matches with fixers as the keys - """ - ... 
- def print_ac(self) -> None: - """Prints a graphviz diagram of the BM automaton(for debugging)""" - ... + def add_fixer(self, fixer: BaseFix) -> None: ... + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ... + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ... + def print_ac(self) -> None: ... def type_repr(type_num: int) -> str | int: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixer_base.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixer_base.pyi index 6605ef8..06813c9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixer_base.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixer_base.pyi @@ -1,5 +1,3 @@ -"""Base class for fixers (optional, but recommended).""" - from _typeshed import Incomplete, StrPath from abc import ABCMeta, abstractmethod from collections.abc import MutableMapping @@ -10,14 +8,6 @@ from .pytree import Base, Leaf, Node _N = TypeVar("_N", bound=Base) class BaseFix: - """ - Optional base class for fixers. - - The subclass name must be FixFooBar where FooBar is the result of - removing underscores and capitalizing the words of the fix name. - For example, the class name for a fixer named 'has_key' should be - FixHasKey. - """ PATTERN: ClassVar[str | None] pattern: Incomplete | None pattern_tree: Incomplete | None @@ -32,107 +22,21 @@ class BaseFix: BM_compatible: ClassVar[bool] syms: Incomplete log: Incomplete - def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: - """ - Initializer. Subclass may override. - - Args: - options: a dict containing the options passed to RefactoringTool - that could be used to customize the fixer through the command line. - log: a list to append warnings and other messages to. - """ - ... 
- def compile_pattern(self) -> None: - """ - Compiles self.PATTERN into self.pattern. - - Subclass may override if it doesn't want to use - self.{pattern,PATTERN} in .match(). - """ - ... - def set_filename(self, filename: StrPath) -> None: - """ - Set the filename. - - The main refactoring tool should call this. - """ - ... - def match(self, node: _N) -> Literal[False] | dict[str, _N]: - """ - Returns match for a given parse tree node. - - Should return a true or false object (not necessarily a bool). - It may return a non-empty dict of matching sub-nodes as - returned by a matching pattern. - - Subclass may override. - """ - ... + def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ... + def compile_pattern(self) -> None: ... + def set_filename(self, filename: StrPath) -> None: ... + def match(self, node: _N) -> Literal[False] | dict[str, _N]: ... @abstractmethod - def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: - """ - Returns the transformation for a given parse tree node. - - Args: - node: the root of the parse tree that matched the fixer. - results: a dict mapping symbolic names to part of the match. - - Returns: - None, or a node that is a modified copy of the - argument node. The node argument may also be modified in-place to - effect the same change. - - Subclass *must* override. - """ - ... - def new_name(self, template: str = "xxx_todo_changeme") -> str: - """ - Return a string suitable for use as an identifier - - The new name is guaranteed not to conflict with other identifiers. - """ - ... + def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ... + def new_name(self, template: str = "xxx_todo_changeme") -> str: ... first_log: bool def log_message(self, message: str) -> None: ... 
- def cannot_convert(self, node: Base, reason: str | None = None) -> None: - """ - Warn the user that a given chunk of code is not valid Python 3, - but that it cannot be converted automatically. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. - """ - ... - def warning(self, node: Base, reason: str) -> None: - """ - Used for warning the user about possible uncertainty in the - translation. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. - """ - ... - def start_tree(self, tree: Node, filename: StrPath) -> None: - """ - Some fixers need to maintain tree-wide state. - This method is called once, at the start of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ - ... - def finish_tree(self, tree: Node, filename: StrPath) -> None: - """ - Some fixers need to maintain tree-wide state. - This method is called once, at the conclusion of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ - ... + def cannot_convert(self, node: Base, reason: str | None = None) -> None: ... + def warning(self, node: Base, reason: str) -> None: ... + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def finish_tree(self, tree: Node, filename: StrPath) -> None: ... class ConditionalFix(BaseFix, metaclass=ABCMeta): - """Base class for fixers which not execute if an import is found. """ skip_on: ClassVar[str | None] def start_tree(self, tree: Node, filename: StrPath, /) -> None: ... def should_skip(self, node: Base) -> bool: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_apply.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_apply.pyi index 7645661..e53e3dd 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_apply.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_apply.pyi @@ -1,9 +1,3 @@ -""" -Fixer for apply(). - -This converts apply(func, v, k) into (func)(*v, **k). -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_asserts.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_asserts.pyi index 0edbcfb..1bf7db2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_asserts.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -1,10 +1,8 @@ -"""Fixer that replaces deprecated unittest method names.""" - -from typing import ClassVar, Literal +from typing import ClassVar, Final, Literal from ..fixer_base import BaseFix -NAMES: dict[str, str] +NAMES: Final[dict[str, str]] class FixAsserts(BaseFix): BM_compatible: ClassVar[Literal[False]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_basestring.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_basestring.pyi index 49c66c8..8ed5cca 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_basestring.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -1,5 +1,3 @@ -"""Fixer for basestring -> str.""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_buffer.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_buffer.pyi index bc798b5..1efca62 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_buffer.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes buffer(...) into memoryview(...).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_dict.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_dict.pyi index a937bc0..08c54c3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_dict.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_dict.pyi @@ -1,30 +1,3 @@ -""" -Fixer for dict methods. - -d.keys() -> list(d.keys()) -d.items() -> list(d.items()) -d.values() -> list(d.values()) - -d.iterkeys() -> iter(d.keys()) -d.iteritems() -> iter(d.items()) -d.itervalues() -> iter(d.values()) - -d.viewkeys() -> d.keys() -d.viewitems() -> d.items() -d.viewvalues() -> d.values() - -Except in certain very specific contexts: the iter() can be dropped -when the context is list(), sorted(), iter() or for...in; the list() -can be dropped when the context is list() or sorted() (but not iter() -or for...in!). Special contexts that apply to both: list(), sorted(), tuple() -set(), any(), all(), sum(). - -Note: iter(d.keys()) could be written as iter(d) but since the -original d.iterkeys() was also redundant we don't fix this. And there -are (rare) contexts where it makes a difference (e.g. when passing it -as an argument to a function that introspects the argument). 
-""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_except.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_except.pyi index 7a2514d..30930a2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_except.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_except.pyi @@ -1,26 +1,3 @@ -""" -Fixer for except statements with named exceptions. - -The following cases will be converted: - -- "except E, T:" where T is a name: - - except E as T: - -- "except E, T:" where T is not a name, tuple or list: - - except E as t: - T = t - - This is done because the target of an "except" clause must be a - name. - -- "except E, T:" where T is a tuple or list literal: - - except E as t: - T = t.args -""" - from collections.abc import Generator, Iterable from typing import ClassVar, Literal, TypeVar diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exec.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exec.pyi index d557119..71e2a82 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exec.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exec.pyi @@ -1,12 +1,3 @@ -""" -Fixer for exec. - -This converts usages of the exec statement into calls to a built-in -exec() function. - -exec code in ns1, ns2 -> exec(code, ns1, ns2) -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_execfile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_execfile.pyi index b6a6232..8122a63 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_execfile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -1,10 +1,3 @@ -""" -Fixer for execfile. - -This converts usages of the execfile function into calls to the built-in -exec() function. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exitfunc.pyi index 4558eac..7fc910c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exitfunc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -1,5 +1,3 @@ -"""Convert use of sys.exitfunc to use the atexit module.""" - from _typeshed import Incomplete, StrPath from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_filter.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_filter.pyi index 88a45fc..638889b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_filter.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_filter.pyi @@ -1,16 +1,3 @@ -""" -Fixer that changes filter(F, X) into list(filter(F, X)). - -We avoid the transformation if the filter() call is directly contained -in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or -for V in <>:. 
- -NOTE: This is still not correct if the original code was depending on -filter(F, X) to return a string if X is a string and a tuple if X is a -tuple. That would require type inference, which we don't do. Let -Python 2.6 figure it out. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_funcattrs.pyi index f9d236b..60487bb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_funcattrs.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -1,5 +1,3 @@ -"""Fix function attribute names (f.func_x -> f.__x__).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_future.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_future.pyi index 18cba07..12ed93f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_future.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_future.pyi @@ -1,9 +1,3 @@ -""" -Remove __future__ imports - -from __future__ import foo is replaced with an empty line. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_getcwdu.pyi index ed17bb6..aa3ccf5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_getcwdu.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes os.getcwdu() to os.getcwd().""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_has_key.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_has_key.pyi index 0da3fe4..f6f5a07 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_has_key.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -1,32 +1,3 @@ -""" -Fixer for has_key(). - -Calls to .has_key() methods are expressed in terms of the 'in' -operator: - - d.has_key(k) -> k in d - -CAVEATS: -1) While the primary target of this fixer is dict.has_key(), the - fixer will change any has_key() method call, regardless of its - class. - -2) Cases like this will not be converted: - - m = d.has_key - if m(k): - ... - - Only *calls* to has_key() are converted. While it is possible to - convert the above to something like - - m = d.__contains__ - if m(k): - ... - - this is currently not done. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_idioms.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_idioms.pyi index 66fd490..6b2723d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_idioms.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -1,38 +1,9 @@ -""" -Adjust some old Python 2 idioms to their modern counterparts. - -* Change some type comparisons to isinstance() calls: - type(x) == T -> isinstance(x, T) - type(x) is T -> isinstance(x, T) - type(x) != T -> not isinstance(x, T) - type(x) is not T -> not isinstance(x, T) - -* Change "while 1:" into "while True:". - -* Change both - - v = list(EXPR) - v.sort() - foo(v) - -and the more general - - v = EXPR - v.sort() - foo(v) - -into - - v = sorted(EXPR) - foo(v) -""" - -from typing import ClassVar, Literal +from typing import ClassVar, Final, Literal from .. import fixer_base -CMP: str -TYPE: str +CMP: Final[str] +TYPE: Final[str] class FixIdioms(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[False]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_import.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_import.pyi index 3eaa3c9..bf4b2d0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_import.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_import.pyi @@ -1,16 +1,3 @@ -""" -Fixer for import statements. -If spam is being imported from the local directory, this import: - from spam import eggs -Becomes: - from .spam import eggs - -And this import: - import spam -Becomes: - from . 
import spam -""" - from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Literal @@ -18,9 +5,7 @@ from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Node -def traverse_imports(names) -> Generator[str, None, None]: - """Walks over all the names imported in a dotted_as_names node.""" - ... +def traverse_imports(names) -> Generator[str, None, None]: ... class FixImport(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports.pyi index 14081e7..c747af5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports.pyi @@ -1,13 +1,11 @@ -"""Fix incompatible imports and module references.""" - from _typeshed import StrPath from collections.abc import Generator -from typing import ClassVar, Literal +from typing import ClassVar, Final, Literal from .. import fixer_base from ..pytree import Node -MAPPING: dict[str, str] +MAPPING: Final[dict[str, str]] def alternates(members): ... def build_pattern(mapping=...) -> Generator[str, None, None]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports2.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports2.pyi index 736d75f..618ecd0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports2.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -1,11 +1,8 @@ -""" -Fix incompatible imports and module references that must be fixed after -fix_imports. -""" +from typing import Final from . 
import fix_imports -MAPPING: dict[str, str] +MAPPING: Final[dict[str, str]] class FixImports2(fix_imports.FixImports): mapping = MAPPING diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_input.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_input.pyi index 1105e93..fc12795 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_input.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_input.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes input(...) into eval(input(...)).""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_intern.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_intern.pyi index fb714a7..804b7b2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_intern.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_intern.pyi @@ -1,9 +1,3 @@ -""" -Fixer for intern(). - -intern(s) -> sys.intern(s) -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_isinstance.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_isinstance.pyi index ed85a7e..31eefd6 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_isinstance.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -1,12 +1,3 @@ -""" -Fixer that cleans up a tuple argument to isinstance after the tokens -in it were fixed. This is mainly used to remove double occurrences of -tokens as a leftover of the long -> int / unicode -> str conversion. - -eg. 
isinstance(x, (int, long)) -> isinstance(x, (int, int)) - -> isinstance(x, int) -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools.pyi index 6be3ccf..229d86e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -1,13 +1,3 @@ -""" -Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and -itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) - -imports from itertools are fixed in fix_itertools_import.py - -If itertools is imported as something else (ie: import itertools as it; -it.izip(spam, eggs)) method calls will not get fixed. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools_imports.pyi index 7149bab..39a4da5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools_imports.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -1,5 +1,3 @@ -"""Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """ - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_long.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_long.pyi index 5b4e000..9ccf271 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_long.pyi +++ 
b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_long.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns 'long' into 'int' everywhere.""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_map.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_map.pyi index 7543f6d..6e60282 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_map.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_map.pyi @@ -1,22 +1,3 @@ -""" -Fixer that changes map(F, ...) into list(map(F, ...)) unless there -exists a 'from future_builtins import map' statement in the top-level -namespace. - -As a special case, map(None, X) is changed into list(X). (This is -necessary because the semantics are changed in this case -- the new -map(None, X) is equivalent to [(x,) for x in X].) - -We avoid the transformation (except for the special case mentioned -above) if the map() call is directly contained in iter(<>), list(<>), -tuple(<>), sorted(<>), ...join(<>), or for V in <>:. - -NOTE: This is still not correct if the original code was depending on -map(F, X, Y, ...) to go on until the longest argument is exhausted, -substituting None for missing values -- like zip(), it now stops as -soon as the shortest argument is exhausted. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_metaclass.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_metaclass.pyi index 3b8e88a..1b1ec82 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_metaclass.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -1,56 +1,15 @@ -""" -Fixer for __metaclass__ = X -> (metaclass=X) methods. - -The various forms of classef (inherits nothing, inherits once, inherits -many) don't parse the same in the CST so we look at ALL classes for -a __metaclass__ and if we find one normalize the inherits to all be -an arglist. - -For one-liner classes ('class X: pass') there is no indent/dedent so -we normalize those into having a suite. - -Moving the __metaclass__ into the classdef can also cause the class -body to be empty so there is some special casing for that as well. - -This fixer also tries very hard to keep original indenting and spacing -in all those corner cases. -""" - from collections.abc import Generator from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Base -def has_metaclass(parent): - """ - we have to check the cls_node without changing it. - There are two possibilities: - 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') - 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') - """ - ... -def fixup_parse_tree(cls_node) -> None: - """ - one-line classes don't get a suite in the parse tree so we add - one to normalize the tree - """ - ... -def fixup_simple_stmt(parent, i, stmt_node) -> None: - """ - if there is a semi-colon all the parts count as part of the same - simple_stmt. We just want the __metaclass__ part so we move - everything after the semi-colon into its own simple_stmt node - """ - ... +def has_metaclass(parent): ... +def fixup_parse_tree(cls_node) -> None: ... 
+def fixup_simple_stmt(parent, i, stmt_node) -> None: ... def remove_trailing_newline(node) -> None: ... def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... -def fixup_indent(suite) -> None: - """ - If an INDENT is followed by a thing with a prefix then nuke the prefix - Otherwise we get in trouble when removing __metaclass__ at suite start - """ - ... +def fixup_indent(suite) -> None: ... class FixMetaclass(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_methodattrs.pyi index cd5cda5..ca9b71e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_methodattrs.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -1,10 +1,8 @@ -"""Fix bound method attributes (method.im_? -> method.__?__).""" - -from typing import ClassVar, Literal +from typing import ClassVar, Final, Literal from .. import fixer_base -MAP: dict[str, str] +MAP: Final[dict[str, str]] class FixMethodattrs(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ne.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ne.pyi index 0e85174..6ff1220 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ne.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ne.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns <> into !=.""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_next.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_next.pyi index 87a4d61..b13914a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_next.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_next.pyi @@ -1,5 +1,3 @@ -"""Fixer for it.next() -> next(it), per PEP 3114.""" - from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_nonzero.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_nonzero.pyi index 60bd7df..5c37fc1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_nonzero.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -1,5 +1,3 @@ -"""Fixer for __nonzero__ -> __bool__ methods.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_numliterals.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_numliterals.pyi index 71dc901..113145e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_numliterals.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns 1L into 1, 0755 into 0o755.""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_operator.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_operator.pyi index c30d7a2..b9863d3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_operator.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_operator.pyi @@ -1,15 +1,3 @@ -""" -Fixer for operator functions. - -operator.isCallable(obj) -> callable(obj) -operator.sequenceIncludes(obj) -> operator.contains(obj) -operator.isSequenceType(obj) -> isinstance(obj, collections.abc.Sequence) -operator.isMappingType(obj) -> isinstance(obj, collections.abc.Mapping) -operator.isNumberType(obj) -> isinstance(obj, numbers.Number) -operator.repeat(obj, n) -> operator.mul(obj, n) -operator.irepeat(obj, n) -> operator.imul(obj, n) -""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_paren.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_paren.pyi index af4a6af..237df6c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_paren.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_paren.pyi @@ -1,9 +1,3 @@ -""" -Fixer that adds parentheses where they are required - -This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_print.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_print.pyi index f89397d..e9564b0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_print.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_print.pyi @@ -1,15 +1,3 @@ -""" -Fixer for print. - -Change: - 'print' into 'print()' - 'print ...' into 'print(...)' - 'print ... ,' into 'print(..., end=" ")' - 'print >>x, ...' into 'print(..., file=x)' - -No changes are applied if print_function is imported from __future__ -""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raise.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raise.pyi index 0073338..e02c308 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raise.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raise.pyi @@ -1,27 +1,3 @@ -""" -Fixer for 'raise E, V, T' - -raise -> raise -raise E -> raise E -raise E, V -> raise E(V) -raise E, V, T -> raise E(V).with_traceback(T) -raise E, None, T -> raise E.with_traceback(T) - -raise (((E, E'), E''), E'''), V -> raise E(V) -raise "foo", V, T -> warns about string exceptions - - -CAVEATS: -1) "raise E, V" will be incorrectly translated if V is an exception - instance. The correct Python 3 idiom is - - raise E from V - - but since we can't detect instance-hood by syntax alone and since - any client code would have to be changed as well, we don't automate - this. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raw_input.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raw_input.pyi index 717649b..d1a0eb0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raw_input.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes raw_input(...) into input(...).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reduce.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reduce.pyi index a3ab936..f8ad876 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reduce.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -1,10 +1,3 @@ -""" -Fixer for reduce(). - -Makes sure reduce() is imported from the functools module if reduce is -used in that module. -""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reload.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reload.pyi index 9bed9ea..8200754 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reload.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_reload.pyi @@ -1,9 +1,3 @@ -""" -Fixer for reload(). - -reload(s) -> importlib.reload(s) -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_renames.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_renames.pyi index e94e9aa..652d8f1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_renames.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_renames.pyi @@ -1,17 +1,10 @@ -""" -Fix incompatible renames - -Fixes: - * sys.maxint -> sys.maxsize -""" - from collections.abc import Generator -from typing import ClassVar, Literal +from typing import ClassVar, Final, Literal from .. import fixer_base -MAPPING: dict[str, dict[str, str]] -LOOKUP: dict[tuple[str, str], str] +MAPPING: Final[dict[str, dict[str, str]]] +LOOKUP: Final[dict[tuple[str, str], str]] def alternates(members): ... def build_pattern() -> Generator[str, None, None]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_repr.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_repr.pyi index 84569e3..3b192d3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_repr.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_repr.pyi @@ -1,5 +1,3 @@ -"""Fixer that transforms `xyzzy` into repr(xyzzy).""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_set_literal.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_set_literal.pyi index 713cf3c..6962ff3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_set_literal.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -1,5 +1,3 @@ -"""Optional fixer to transform set() calls to set literals.""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_standarderror.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_standarderror.pyi index adb75ff..ba914bc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_standarderror.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -1,5 +1,3 @@ -"""Fixer for StandardError -> Exception.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_sys_exc.pyi index 7fd2e6c..0fa1a47 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_sys_exc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -1,11 +1,3 @@ -""" -Fixer for sys.exc_{type, value, traceback} - -sys.exc_type -> sys.exc_info()[0] -sys.exc_value -> sys.exc_info()[1] -sys.exc_traceback -> sys.exc_info()[2] -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_throw.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_throw.pyi index bdd87b9..4c99855 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_throw.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_throw.pyi @@ -1,13 +1,3 @@ -""" -Fixer for generator.throw(E, V, T). - -g.throw(E) -> g.throw(E) -g.throw(E, V) -> g.throw(E(V)) -g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) - -g.throw("foo"[, V[, T]]) will warn about string exceptions. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_tuple_params.pyi index 5a3525b..bfaa997 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_tuple_params.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -1,23 +1,3 @@ -""" -Fixer for function definitions with tuple parameters. - -def func(((a, b), c), d): - ... - - -> - -def func(x, d): - ((a, b), c) = x - ... 
- -It will also support lambdas: - - lambda (x, y): x + y -> lambda t: t[0] + t[1] - - # The parens are a syntax error in Python 3 - lambda (x): x + y -> lambda x: x + y -""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_types.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_types.pyi index fb62180..e26dbec 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_types.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_types.pyi @@ -1,21 +1,3 @@ -""" -Fixer for removing uses of the types module. - -These work for only the known names in the types module. The forms above -can include types. or not. ie, It is assumed the module is imported either as: - - import types - from types import ... # either * or specific types - -The import statements are not modified. - -There should be another fixer that handles at least the following constants: - - type([]) -> list - type(()) -> tuple - type('') -> str -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_unicode.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_unicode.pyi index 9f07c3e..85d1315 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_unicode.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -1,13 +1,3 @@ -r""" -Fixer for unicode. - -* Changes unicode to str and unichr to chr. - -* If "...\u..." is not unicode literal change it into "...\\u...". - -* Change u"..." into "...". 
-""" - from _typeshed import StrPath from typing import ClassVar, Literal @@ -16,7 +6,7 @@ from ..pytree import Node class FixUnicode(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] - PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]] # type: ignore[name-defined] # Name "STRING" is not defined + PATTERN: ClassVar[str] unicode_literals: bool def start_tree(self, tree: Node, filename: StrPath) -> None: ... def transform(self, node, results): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_urllib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_urllib.pyi index ef7393d..abdcc0f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_urllib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -1,35 +1,15 @@ -""" -Fix changes imports of urllib which are now incompatible. -This is rather similar to fix_imports, but because of the more -complex nature of the fixing for urllib, it has its own fixer. -""" - from collections.abc import Generator -from typing import Literal +from typing import Final, Literal from .fix_imports import FixImports -MAPPING: dict[str, list[tuple[Literal["urllib.request", "urllib.parse", "urllib.error"], list[str]]]] +MAPPING: Final[dict[str, list[tuple[Literal["urllib.request", "urllib.parse", "urllib.error"], list[str]]]]] def build_pattern() -> Generator[str, None, None]: ... class FixUrllib(FixImports): def build_pattern(self): ... - def transform_import(self, node, results) -> None: - """ - Transform for the basic import case. Replaces the old - import name with a comma separated list of its - replacements. - """ - ... - def transform_member(self, node, results): - """ - Transform for imports of specific module elements. Replaces - the module to be imported from with the appropriate new - module. - """ - ... 
- def transform_dot(self, node, results) -> None: - """Transform for calls to module members in code.""" - ... + def transform_import(self, node, results) -> None: ... + def transform_member(self, node, results): ... + def transform_dot(self, node, results) -> None: ... def transform(self, node, results) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ws_comma.pyi index ebef1bc..4ce5cb2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ws_comma.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -1,10 +1,3 @@ -""" -Fixer that changes 'a ,b' into 'a, b'. - -This also changes '{a :b}' into '{a: b}', but does not touch other -uses of colons. It does not touch other uses of whitespace. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xrange.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xrange.pyi index aa87720..71318b7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xrange.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes xrange(...) 
into range(...).""" - from _typeshed import Incomplete, StrPath from typing import ClassVar, Literal diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xreadlines.pyi index 52ef7a2..b479414 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xreadlines.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -1,9 +1,3 @@ -""" -Fix "for x in f.xreadlines()" -> "for x in f". - -This fixer will also convert g(f.xreadlines) into g(f.__iter__). -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_zip.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_zip.pyi index 06a30dd..805886e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_zip.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/fixes/fix_zip.pyi @@ -1,12 +1,3 @@ -""" -Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) -unless there exists a 'from future_builtins import zip' statement in the -top-level namespace. - -We avoid the transformation if the zip() call is directly contained in -iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/main.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/main.pyi index c37f3a7..5b7fdfc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/main.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/main.pyi @@ -1,5 +1,3 @@ -"""Main program for 2to3.""" - from _typeshed import FileDescriptorOrPath from collections.abc import Container, Iterable, Iterator, Mapping, Sequence from logging import _ExcInfoType @@ -7,19 +5,9 @@ from typing import AnyStr, Literal from . import refactor as refactor -def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: - """Return a unified diff of two strings.""" - ... +def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ... class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): - """ - A refactoring tool that can avoid overwriting its input files. - Prints output to stdout. - - Output files can optionally be written to a different directory and or - have an extra file suffix appended to their name for use in situations - where you do not want to replace the input files. - """ nobackups: bool show_diffs: bool def __init__( @@ -32,26 +20,7 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): input_base_dir: str = "", output_dir: str = "", append_suffix: str = "", - ) -> None: - """ - Args: - fixers: A list of fixers to import. - options: A dict with RefactoringTool configuration. - explicit: A list of fixers to run even if they are explicit. - nobackups: If true no backup '.bak' files will be created for those - files that are being refactored. - show_diffs: Should diffs of the refactoring be printed to stdout? - input_base_dir: The base directory for all input files. This class - will strip this path prefix off of filenames before substituting - it with output_dir. Only meaningful if output_dir is supplied. 
- All files processed by refactor() must start with this path. - output_dir: If supplied, all converted files will be written into - this directory tree instead of input_base_dir. - append_suffix: If supplied, all files output by this tool will have - this appended to their filename. Useful for changing .py to - .py3 for example by passing append_suffix='3'. - """ - ... + ) -> None: ... # Same as super.log_error and Logger.error def log_error( # type: ignore[override] self, @@ -70,15 +39,4 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ... # type: ignore[override] def warn(msg: object) -> None: ... -def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: - """ - Main program. - - Args: - fixer_pkg: the name of a package where the fixers are located. - args: optional; a list of command line arguments. If omitted, - sys.argv[1:] is used. - - Returns a suggested exit status (0, 1, 2). - """ - ... +def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/__init__.pyi index 8a246f1..de8a874 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/__init__.pyi @@ -1,5 +1,3 @@ -"""The pgen2 package.""" - from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi index 573a850..dea13fb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi @@ -1,9 +1,3 @@ -""" -Parser driver. - -This provides a high-level interface to parse a file into a syntax tree. -""" - from _typeshed import StrPath from collections.abc import Iterable from logging import Logger @@ -22,24 +16,12 @@ class Driver: def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... def parse_tokens( self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False - ) -> _NL: - """Parse a series of tokens and return the syntax tree.""" - ... - def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree.""" - ... - def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree.""" - ... - def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: - """Parse a file and return the syntax tree.""" - ... 
- def parse_string(self, text: str, debug: bool = False) -> _NL: - """Parse a string and return the syntax tree.""" - ... + ) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... def load_grammar( gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None -) -> Grammar: - """Load the grammar (maybe from a pickle).""" - ... +) -> Grammar: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi index 89fe0cb..bef0a79 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,14 +1,3 @@ -""" -This module defines the data structures used to represent a grammar. - -These are a bit arcane because they are derived from the data -structures used by Python's 'pgen' parser generator. - -There's also a table here mapping operators to their names in the -token module; the Python tokenize module reports all operators as the -fallback token code OP, but the parser needs the actual token code. -""" - from _typeshed import StrPath from typing_extensions import Self, TypeAlias @@ -17,58 +6,6 @@ _DFA: TypeAlias = list[list[tuple[int, int]]] _DFAS: TypeAlias = tuple[_DFA, dict[int, int]] class Grammar: - """ - Pgen parsing tables conversion class. - - Once initialized, this class supplies the grammar tables for the - parsing engine implemented by parse.py. The parsing engine - accesses the instance variables directly. 
The class here does not - provide initialization of the tables; several subclasses exist to - do this (see the conv and pgen modules). - - The load() method reads the tables from a pickle file, which is - much faster than the other ways offered by subclasses. The pickle - file is written by calling dump() (after loading the grammar - tables using a subclass). The report() method prints a readable - representation of the tables to stdout, for debugging. - - The instance variables are as follows: - - symbol2number -- a dict mapping symbol names to numbers. Symbol - numbers are always 256 or higher, to distinguish - them from token numbers, which are between 0 and - 255 (inclusive). - - number2symbol -- a dict mapping numbers to symbol names; - these two are each other's inverse. - - states -- a list of DFAs, where each DFA is a list of - states, each state is a list of arcs, and each - arc is a (i, j) pair where i is a label and j is - a state number. The DFA number is the index into - this list. (This name is slightly confusing.) - Final states are represented by a special arc of - the form (0, j) where j is its own state number. - - dfas -- a dict mapping symbol numbers to (DFA, first) - pairs, where DFA is an item from the states list - above, and first is a set of tokens that can - begin this grammar rule (represented by a dict - whose values are always 1). - - labels -- a list of (x, y) pairs where x is either a token - number or a symbol number, and y is either None - or a string; the strings are keywords. The label - number is the index in this list; label numbers - are used to mark state transitions (arcs) in the - DFAs. - - start -- the number of the grammar's start symbol. - - keywords -- a dict mapping keyword strings to arc labels. - - tokens -- a dict mapping token numbers to arc labels. 
- """ symbol2number: dict[str, int] number2symbol: dict[int, str] states: list[_DFA] @@ -78,18 +15,10 @@ class Grammar: tokens: dict[int, int] symbol2label: dict[str, int] start: int - def dump(self, filename: StrPath) -> None: - """Dump the grammar tables to a pickle file.""" - ... - def load(self, filename: StrPath) -> None: - """Load the grammar tables from a pickle file.""" - ... - def copy(self) -> Self: - """Copy the grammar.""" - ... - def report(self) -> None: - """Dump the grammar tables to standard output, for debugging.""" - ... + def dump(self, filename: StrPath) -> None: ... + def load(self, filename: StrPath) -> None: ... + def copy(self) -> Self: ... + def report(self) -> None: ... opmap_raw: str opmap: dict[str, str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/literals.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/literals.pyi index 9b991d2..c3fabe8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/literals.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/literals.pyi @@ -1,5 +1,3 @@ -"""Safely evaluate Python string literals without using eval().""" - from re import Match simple_escapes: dict[str, str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi index 3c511df..320c5f0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi @@ -1,12 +1,3 @@ -""" -Parser engine for the grammar tables generated by pgen. - -The grammar table must be loaded first. - -See Parser/parser.c in the Python distribution for additional info on -how this parsing engine works. 
-""" - from _typeshed import Incomplete from collections.abc import Sequence from typing_extensions import TypeAlias @@ -18,7 +9,6 @@ from .grammar import _DFAS, Grammar _Context: TypeAlias = Sequence[Incomplete] class ParseError(Exception): - """Exception to signal the parser is stuck.""" msg: str type: int value: str | None @@ -26,95 +16,15 @@ class ParseError(Exception): def __init__(self, msg: str, type: int, value: str | None, context: _Context) -> None: ... class Parser: - """ - Parser engine. - - The proper usage sequence is: - - p = Parser(grammar, [converter]) # create instance - p.setup([start]) # prepare for parsing - : - if p.addtoken(...): # parse a token; may raise ParseError - break - root = p.rootnode # root of abstract syntax tree - - A Parser instance may be reused by calling setup() repeatedly. - - A Parser instance contains state pertaining to the current token - sequence, and should not be used concurrently by different threads - to parse separate token sequences. - - See driver.py for how to get input tokens by tokenizing a file or - string. - - Parsing is complete when addtoken() returns True; the root of the - abstract syntax tree can then be retrieved from the rootnode - instance variable. When a syntax error occurs, addtoken() raises - the ParseError exception. There is no error recovery; the parser - cannot be used after a syntax error was reported (but it can be - reinitialized by calling setup()). - """ grammar: Grammar convert: _Convert stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None used_names: set[str] - def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: - """ - Constructor. - - The grammar argument is a grammar.Grammar instance; see the - grammar module for more information. - - The parser is not ready yet for parsing; you must call the - setup() method to get it started. - - The optional convert argument is a function mapping concrete - syntax tree nodes to abstract syntax tree nodes. 
If not - given, no conversion is done and the syntax tree produced is - the concrete syntax tree. If given, it must be a function of - two arguments, the first being the grammar (a grammar.Grammar - instance), and the second being the concrete syntax tree node - to be converted. The syntax tree is converted from the bottom - up. - - A concrete syntax tree node is a (type, value, context, nodes) - tuple, where type is the node type (a token or symbol number), - value is None for symbols and a string for tokens, context is - None or an opaque value used for error reporting (typically a - (lineno, offset) pair), and nodes is a list of children for - symbols, and None for tokens. - - An abstract syntax tree node may be anything; this is entirely - up to the converter function. - """ - ... - def setup(self, start: int | None = None) -> None: - """ - Prepare for parsing. - - This *must* be called before starting to parse. - - The optional argument is an alternative start symbol; it - defaults to the grammar's start symbol. - - You can use a Parser instance to parse any number of programs; - each time you call setup() the parser is reset to an initial - state determined by the (implicit or explicit) start symbol. - """ - ... - def addtoken(self, type: int, value: str | None, context: _Context) -> bool: - """Add a token; return True iff this is the end of the program.""" - ... - def classify(self, type: int, value: str | None, context: _Context) -> int: - """Turn a token into a label. (Internal)""" - ... - def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: - """Shift a token. (Internal)""" - ... - def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: - """Push a nonterminal. (Internal)""" - ... - def pop(self) -> None: - """Pop a nonterminal. (Internal)""" - ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... 
+ def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... + def classify(self, type: int, value: str | None, context: _Context) -> int: ... + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... + def pop(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi index d65adc1..6898517 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi @@ -1,67 +1,67 @@ -"""Token constants (from "token.h").""" +from typing import Final -ENDMARKER: int -NAME: int -NUMBER: int -STRING: int -NEWLINE: int -INDENT: int -DEDENT: int -LPAR: int -RPAR: int -LSQB: int -RSQB: int -COLON: int -COMMA: int -SEMI: int -PLUS: int -MINUS: int -STAR: int -SLASH: int -VBAR: int -AMPER: int -LESS: int -GREATER: int -EQUAL: int -DOT: int -PERCENT: int -BACKQUOTE: int -LBRACE: int -RBRACE: int -EQEQUAL: int -NOTEQUAL: int -LESSEQUAL: int -GREATEREQUAL: int -TILDE: int -CIRCUMFLEX: int -LEFTSHIFT: int -RIGHTSHIFT: int -DOUBLESTAR: int -PLUSEQUAL: int -MINEQUAL: int -STAREQUAL: int -SLASHEQUAL: int -PERCENTEQUAL: int -AMPEREQUAL: int -VBAREQUAL: int -CIRCUMFLEXEQUAL: int -LEFTSHIFTEQUAL: int -RIGHTSHIFTEQUAL: int -DOUBLESTAREQUAL: int -DOUBLESLASH: int -DOUBLESLASHEQUAL: int -OP: int -COMMENT: int -NL: int -RARROW: int -AT: int -ATEQUAL: int -AWAIT: int -ASYNC: int -ERRORTOKEN: int -COLONEQUAL: int -N_TOKENS: int -NT_OFFSET: int +ENDMARKER: Final[int] +NAME: Final[int] +NUMBER: Final[int] +STRING: Final[int] +NEWLINE: Final[int] +INDENT: Final[int] +DEDENT: Final[int] +LPAR: Final[int] +RPAR: Final[int] +LSQB: Final[int] +RSQB: Final[int] +COLON: Final[int] +COMMA: 
Final[int] +SEMI: Final[int] +PLUS: Final[int] +MINUS: Final[int] +STAR: Final[int] +SLASH: Final[int] +VBAR: Final[int] +AMPER: Final[int] +LESS: Final[int] +GREATER: Final[int] +EQUAL: Final[int] +DOT: Final[int] +PERCENT: Final[int] +BACKQUOTE: Final[int] +LBRACE: Final[int] +RBRACE: Final[int] +EQEQUAL: Final[int] +NOTEQUAL: Final[int] +LESSEQUAL: Final[int] +GREATEREQUAL: Final[int] +TILDE: Final[int] +CIRCUMFLEX: Final[int] +LEFTSHIFT: Final[int] +RIGHTSHIFT: Final[int] +DOUBLESTAR: Final[int] +PLUSEQUAL: Final[int] +MINEQUAL: Final[int] +STAREQUAL: Final[int] +SLASHEQUAL: Final[int] +PERCENTEQUAL: Final[int] +AMPEREQUAL: Final[int] +VBAREQUAL: Final[int] +CIRCUMFLEXEQUAL: Final[int] +LEFTSHIFTEQUAL: Final[int] +RIGHTSHIFTEQUAL: Final[int] +DOUBLESTAREQUAL: Final[int] +DOUBLESLASH: Final[int] +DOUBLESLASHEQUAL: Final[int] +OP: Final[int] +COMMENT: Final[int] +NL: Final[int] +RARROW: Final[int] +AT: Final[int] +ATEQUAL: Final[int] +AWAIT: Final[int] +ASYNC: Final[int] +ERRORTOKEN: Final[int] +COLONEQUAL: Final[int] +N_TOKENS: Final[int] +NT_OFFSET: Final[int] tok_name: dict[int, str] def ISTERMINAL(x: int) -> bool: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi index a026dd4..af54de1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi @@ -1,29 +1,3 @@ -""" -Tokenization help for Python programs. - -generate_tokens(readline) is a generator that breaks a stream of -text into Python tokens. It accepts a readline-like method which is called -repeatedly to get the next line of input (or "" for EOF). 
It generates -5-tuples with these members: - - the token type (see token.py) - the token (a string) - the starting (row, column) indices of the token (a 2-tuple of ints) - the ending (row, column) indices of the token (a 2-tuple of ints) - the original line (string) - -It is designed to match the working of the Python tokenizer exactly, except -that it produces COMMENT tokens for comments and gives type OP for all -operators - -Older entry points - tokenize_loop(readline, tokeneater) - tokenize(readline, tokeneater=printtoken) -are the same, except instead of generating tokens, tokeneater is a callback -function to which the 5 fields described above are passed as 5 arguments, -each time a new token is found. -""" - from collections.abc import Callable, Iterable, Iterator from typing_extensions import TypeAlias @@ -108,20 +82,7 @@ _TokenInfo: TypeAlias = tuple[int, str, _Coord, _Coord, str] class TokenError(Exception): ... class StopTokenizing(Exception): ... -def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: - """ - The tokenize() function accepts two parameters: one representing the - input stream, and one providing an output mechanism for tokenize(). - - The first parameter, readline, must be a callable object which provides - the same interface as the readline() method of built-in file objects. - Each call to the function should return one line of input as a string. - - The second parameter, tokeneater, must also be a callable object. It is - called once for each token, with five arguments, corresponding to the - tuples generated by generate_tokens(). - """ - ... +def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ... class Untokenizer: tokens: list[str] @@ -131,40 +92,5 @@ class Untokenizer: def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... 
-def untokenize(iterable: Iterable[_TokenInfo]) -> str: - """ - Transform tokens back into Python source code. - - Each element returned by the iterable must be a token sequence - with at least two elements, a token number and token value. If - only two tokens are passed, the resulting output is poor. - - Round-trip invariant for full input: - Untokenized source will match input source exactly - - Round-trip invariant for limited input: - # Output text will tokenize the back to the input - t1 = [tok[:2] for tok in generate_tokens(f.readline)] - newcode = untokenize(t1) - readline = iter(newcode.splitlines(1)).next - t2 = [tok[:2] for tokin generate_tokens(readline)] - assert t1 == t2 - """ - ... -def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: - """ - The generate_tokens() generator requires one argument, readline, which - must be a callable object which provides the same interface as the - readline() method of built-in file objects. Each call to the function - should return one line of input as a string. Alternately, readline - can be a callable function terminating with StopIteration: - readline = open(myfile).next # Example of alternate readline - - The generator produces 5-tuples with these members: the token type; the - token string; a 2-tuple (srow, scol) of ints specifying the row and - column where the token begins in the source; a 2-tuple (erow, ecol) of - ints specifying the row and column where the token ends in the source; - and the line on which the token was found. The line passed is the - physical line. - """ - ... +def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... +def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pygram.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pygram.pyi index f10fc4e..86c74b5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pygram.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pygram.pyi @@ -1,16 +1,7 @@ -"""Export the Python grammar and symbols.""" - from .pgen2.grammar import Grammar class Symbols: - def __init__(self, grammar: Grammar) -> None: - """ - Initializer. - - Creates an attribute for each grammar symbol (nonterminal), - whose value is the symbol's type (an int >= 256). - """ - ... + def __init__(self, grammar: Grammar) -> None: ... class python_symbols(Symbols): and_expr: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pytree.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pytree.pyi index 9ad709d..138333b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pytree.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/pytree.pyi @@ -1,12 +1,3 @@ -""" -Python parse tree definitions. - -This is a very concrete parse tree; we need to keep every token and -even the comments and whitespace between tokens. - -There's also a pattern matching implementation here. -""" - from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused from abc import abstractmethod from collections.abc import Iterable, Iterator, MutableSequence @@ -26,100 +17,34 @@ HUGE: Final = 0x7FFFFFFF def type_repr(type_num: int) -> str | int: ... class Base: - """ - Abstract base class for Node and Leaf. - - This provides some default functionality and boilerplate using the - template pattern. - - A node may be a subnode of at most one parent. 
- """ type: int parent: Node | None prefix: str children: list[_NL] was_changed: bool was_checked: bool - def __eq__(self, other: object) -> bool: - """ - Compare two nodes for equality. - - This calls the method _eq(). - """ - ... + def __eq__(self, other: object) -> bool: ... @abstractmethod - def _eq(self, other: Base) -> bool: - """ - Compare two nodes for equality. - - This is called by __eq__ and __ne__. It is only called if the two nodes - have the same type. This must be implemented by the concrete subclass. - Nodes should be considered equal if they have the same structure, - ignoring the prefix string and other context information. - """ - ... + def _eq(self, other: Base) -> bool: ... @abstractmethod - def clone(self) -> Self: - """ - Return a cloned (deep) copy of self. - - This must be implemented by the concrete subclass. - """ - ... + def clone(self) -> Self: ... @abstractmethod - def post_order(self) -> Iterator[Self]: - """ - Return a post-order iterator for the tree. - - This must be implemented by the concrete subclass. - """ - ... + def post_order(self) -> Iterator[Self]: ... @abstractmethod - def pre_order(self) -> Iterator[Self]: - """ - Return a pre-order iterator for the tree. - - This must be implemented by the concrete subclass. - """ - ... - def replace(self, new: _NL | list[_NL]) -> None: - """Replace this node with a new one in the parent.""" - ... - def get_lineno(self) -> int: - """Return the line number which generated the invocant node.""" - ... + def pre_order(self) -> Iterator[Self]: ... + def replace(self, new: _NL | list[_NL]) -> None: ... + def get_lineno(self) -> int: ... def changed(self) -> None: ... - def remove(self) -> int | None: - """ - Remove the node from the tree. Returns the position of the node in its - parent's children before it was removed. - """ - ... + def remove(self) -> int | None: ... 
@property - def next_sibling(self) -> _NL | None: - """ - The node immediately following the invocant in their parent's children - list. If the invocant does not have a next sibling, it is None - """ - ... + def next_sibling(self) -> _NL | None: ... @property - def prev_sibling(self) -> _NL | None: - """ - The node immediately preceding the invocant in their parent's children - list. If the invocant does not have a previous sibling, it is None. - """ - ... + def prev_sibling(self) -> _NL | None: ... def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... - def get_suffix(self) -> str: - """ - Return the string immediately following the invocant node. This is - effectively equivalent to node.next_sibling.prefix - """ - ... + def get_suffix(self) -> str: ... class Node(Base): - """Concrete implementation for interior nodes.""" fixers_applied: MutableSequence[BaseFix] | None # Is Unbound until set in refactor.RefactoringTool future_features: frozenset[Incomplete] @@ -132,56 +57,17 @@ class Node(Base): context: Unused = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] | None = None, - ) -> None: - """ - Initializer. - - Takes a type constant (a symbol number >= 256), a sequence of - child nodes, and an optional context keyword argument. - - As a side effect, the parent pointers of the children are updated. - """ - ... - def _eq(self, other: Base) -> bool: - """Compare two nodes for equality.""" - ... - def clone(self) -> Node: - """Return a cloned (deep) copy of self.""" - ... - def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree.""" - ... - def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree.""" - ... - def set_child(self, i: int, child: _NL) -> None: - """ - Equivalent to 'node.children[i] = child'. This method also sets the - child's parent attribute appropriately. - """ - ... 
- def insert_child(self, i: int, child: _NL) -> None: - """ - Equivalent to 'node.children.insert(i, child)'. This method also sets - the child's parent attribute appropriately. - """ - ... - def append_child(self, child: _NL) -> None: - """ - Equivalent to 'node.children.append(child)'. This method also sets the - child's parent attribute appropriately. - """ - ... - def __unicode__(self) -> str: - """ - Return a pretty string representation. - - This reproduces the input source exactly. - """ - ... + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Node: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... + def set_child(self, i: int, child: _NL) -> None: ... + def insert_child(self, i: int, child: _NL) -> None: ... + def append_child(self, child: _NL) -> None: ... + def __unicode__(self) -> str: ... class Leaf(Base): - """Concrete implementation for leaf nodes.""" lineno: int column: int value: str @@ -193,195 +79,39 @@ class Leaf(Base): context: _Context | None = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] = [], - ) -> None: - """ - Initializer. - - Takes a type constant (a token number < 256), a string value, and an - optional context keyword argument. - """ - ... - def _eq(self, other: Base) -> bool: - """Compare two nodes for equality.""" - ... - def clone(self) -> Leaf: - """Return a cloned (deep) copy of self.""" - ... - def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree.""" - ... - def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree.""" - ... - def __unicode__(self) -> str: - """ - Return a pretty string representation. - - This reproduces the input source exactly. - """ - ... + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Leaf: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... 
+ def __unicode__(self) -> str: ... -def convert(gr: Grammar, raw_node: _RawNode) -> _NL: - """ - Convert raw node information to a Node or Leaf instance. - - This is passed to the parser driver which calls it whenever a reduction of a - grammar rule produces a new complete node, so that the tree is build - strictly bottom-up. - """ - ... +def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... class BasePattern: - """ - A pattern is a tree matching pattern. - - It looks for a specific node type (token or symbol), and - optionally for a specific content. - - This is an abstract base class. There are three concrete - subclasses: - - - LeafPattern matches a single leaf node; - - NodePattern matches a single node (usually non-leaf); - - WildcardPattern matches a sequence of nodes of variable length. - """ type: int content: str | None name: str | None - def optimize(self) -> BasePattern: - """ - A subclass can define this as a hook for optimizations. - - Returns either self or another node with the same effect. - """ - ... - def match(self, node: _NL, results: _Results | None = None) -> bool: - """ - Does this pattern exactly match a node? - - Returns True if it matches, False if not. - - If results is not None, it must be a dict which will be - updated with the nodes matching named subpatterns. - - Default implementation for non-wildcard patterns. - """ - ... - def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: - """ - Does this pattern exactly match a sequence of nodes? - - Default implementation for non-wildcard patterns. - """ - ... - def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: - """ - Generator yielding all matches for this pattern. - - Default implementation for non-wildcard patterns. - """ - ... + def optimize(self) -> BasePattern: ... 
# sic, subclasses are free to optimize themselves into different patterns + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ... + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ... class LeafPattern(BasePattern): - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: - """ - Initializer. Takes optional type, content, and name. - - The type, if given must be a token type (< 256). If not given, - this matches any *leaf* node; the content may still be required. - - The content, if given, must be a string. - - If a name is given, the matching node is stored in the results - dict under that key. - """ - ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class NodePattern(BasePattern): wildcards: bool - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: - """ - Initializer. Takes optional type, content, and name. - - The type, if given, must be a symbol type (>= 256). If the - type is None this matches *any* single node (leaf or not), - except if content is not None, in which it only matches - non-leaf nodes that also match the content pattern. - - The content, if not None, must be a sequence of Patterns that - must match the node's children exactly. If the content is - given, the type must not be None. - - If a name is given, the matching node is stored in the results - dict under that key. - """ - ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class WildcardPattern(BasePattern): - """ - A wildcard pattern can match zero or more nodes. - - This has all the flexibility needed to implement patterns like: - - .* .+ .? 
.{m,n} - (a b c | d e | f) - (...)* (...)+ (...)? (...){m,n} - - except it always uses non-greedy matching. - """ min: int max: int - def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: - """ - Initializer. - - Args: - content: optional sequence of subsequences of patterns; - if absent, matches one node; - if present, each subsequence is an alternative [*] - min: optional minimum number of times to match, default 0 - max: optional maximum number of times to match, default HUGE - name: optional name assigned to this match - - [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is - equivalent to (a b c | d e | f g h); if content is None, - this is equivalent to '.' in regular expression terms. - The min and max parameters work as follows: - min=0, max=maxint: .* - min=1, max=maxint: .+ - min=0, max=1: .? - min=1, max=1: . - If content is not None, replace the dot with the parenthesized - list of alternatives, e.g. (a b c | d e | f g h)* - """ - ... + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... class NegatedPattern(BasePattern): - def __init__(self, content: str | None = None) -> None: - """ - Initializer. - - The argument is either a pattern or None. If it is None, this - only matches an empty sequence (effectively '$' in regex - lingo). If it is not None, this matches whenever the argument - pattern doesn't have any matches. - """ - ... + def __init__(self, content: str | None = None) -> None: ... def generate_matches( patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL] -) -> Iterator[tuple[int, _Results]]: - """ - Generator yielding matches for a sequence of patterns and nodes. 
- - Args: - patterns: a sequence of patterns - nodes: a sequence of nodes - - Yields: - (count, results) tuples where: - count: the entire sequence of patterns matches nodes[:count]; - results: dict containing named submatches. - - """ - ... +) -> Iterator[tuple[int, _Results]]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/refactor.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/refactor.pyi index 10706f3..a7f3825 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/refactor.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lib2to3/refactor.pyi @@ -1,11 +1,3 @@ -""" -Refactoring framework. - -Used as a main program, this can refactor any number of files and/or -recursively descend down directories. Imported as a module, this -provides infrastructure to write your own refactoring tool. -""" - from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem from collections.abc import Container, Generator, Iterable, Mapping from logging import Logger, _ExcInfoType @@ -19,16 +11,10 @@ from .pgen2.driver import Driver from .pgen2.grammar import Grammar from .pytree import Node -def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: - """Return a sorted list of all available fix names in the given package.""" - ... -def get_fixers_from_package(pkg_name: str) -> list[str]: - """Return the fully qualified names for fixers in the package pkg_name.""" - ... +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... +def get_fixers_from_package(pkg_name: str) -> list[str]: ... -class FixerError(Exception): - """A fixer could not be loaded.""" - ... +class FixerError(Exception): ... 
class RefactoringTool: CLASS_PREFIX: ClassVar[str] @@ -51,170 +37,40 @@ class RefactoringTool: bmi_post_order: list[BaseFix] def __init__( self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None - ) -> None: - """ - Initializer. - - Args: - fixer_names: a list of fixers to import - options: a dict with configuration. - explicit: a list of fixers to run even if they are explicit. - """ - ... - def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: - """ - Inspects the options to load the requested patterns and handlers. - - Returns: - (pre_order, post_order), where pre_order is the list of fixers that - want a pre-order AST traversal, and post_order is the list that want - post-order traversal. - """ - ... - def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: - """Called when an error occurs.""" - ... + ) -> None: ... + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ... + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ... @overload - def log_message(self, msg: object) -> None: - """Hook to log a message.""" - ... + def log_message(self, msg: object) -> None: ... @overload - def log_message(self, msg: str, *args: object) -> None: - """Hook to log a message.""" - ... + def log_message(self, msg: str, *args: object) -> None: ... @overload def log_debug(self, msg: object) -> None: ... @overload def log_debug(self, msg: str, *args: object) -> None: ... - def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: - """ - Called with the old version, new version, and filename of a - refactored file. - """ - ... - def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: - """Refactor a list of files and directories.""" - ... 
- def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: - """ - Descends down a directory and refactor every Python file found. - - Python files are assumed to have a .py extension. - - Files and subdirectories starting with '.' are skipped. - """ - ... - def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: - """Do our best to decode a Python source file correctly.""" - ... - def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: - """Refactors a file.""" - ... - def refactor_string(self, data: str, name: str) -> Node | None: - """ - Refactor a given input string. - - Args: - data: a string holding the code to be refactored. - name: a human-readable name for use in error/log messages. - - Returns: - An AST corresponding to the refactored input stream; None if - there were errors during the parse. - """ - ... + def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ... + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_string(self, data: str, name: str) -> Node | None: ... def refactor_stdin(self, doctests_only: bool = False) -> None: ... - def refactor_tree(self, tree: Node, name: str) -> bool: - """ - Refactors a parse tree (modifying the tree in place). - - For compatible patterns the bottom matcher module is - used. Otherwise the tree is traversed node-to-node for - matches. - - Args: - tree: a pytree.Node instance representing the root of the tree - to be refactored. - name: a human-readable name for this tree. 
- - Returns: - True if the tree was modified, False otherwise. - """ - ... - def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: - """ - Traverse an AST, applying a set of fixers to each node. - - This is a helper method for refactor_tree(). - - Args: - fixers: a list of fixer instances. - traversal: a generator that yields AST nodes. - - Returns: - None - """ - ... + def refactor_tree(self, tree: Node, name: str) -> bool: ... + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ... def processed_file( self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None - ) -> None: - """Called when a file has been refactored and there may be changes.""" - ... - def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: - """ - Writes a string to a file. - - It first shows a unified diff between the old text and the new text, and - then rewrites the file; the latter is only done if the write option is - set. - """ - ... + ) -> None: ... + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ... PS1: Final = ">>> " PS2: Final = "... " - def refactor_docstring(self, input: str, filename: StrPath) -> str: - """ - Refactors a docstring, looking for doctests. - - This returns a modified version of the input string. It looks - for doctests, which start with a ">>>" prompt, and may be - continued with "..." prompts, as long as the "..." is indented - the same as the ">>>". - - (Unfortunately we can't use the doctest module's parser, - since, like most parsers, it is not geared towards preserving - the original source.) - """ - ... - def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: - """ - Refactors one doctest. 
- - A doctest is given as a block of lines, the first of which starts - with ">>>" (possibly indented), while the remaining lines start - with "..." (identically indented). - """ - ... + def refactor_docstring(self, input: str, filename: StrPath) -> str: ... + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ... def summarize(self) -> None: ... - def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: - """ - Parses a block into a tree. - - This is necessary to get correct line number / offset information - in the parser diagnostics and embedded into the parse tree. - """ - ... + def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ... def wrap_toks( self, block: Iterable[str], lineno: int, indent: int - ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: - """Wraps a tokenize stream to systematically modify start/end.""" - ... - def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: - """ - Generates lines as expected by tokenize from a list of lines. - - This strips the first len(indent + self.PS1) characters off each line. - """ - ... + ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ... + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... class MultiprocessingUnsupported(Exception): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/locale.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/locale.pyi index 437ba52..bd9a461 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/locale.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/locale.pyi @@ -211,21 +211,12 @@ def normalize(localename: _str) -> _str: ... if sys.version_info < (3, 13): - def resetlocale(category: int = ...) 
-> None: - """ - Sets the locale for category to the default setting. - - The default setting is determined by calling - getdefaultlocale(). category defaults to LC_ALL. - """ - ... + def resetlocale(category: int = ...) -> None: ... if sys.version_info < (3, 12): def format( percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any - ) -> _str: - """Deprecated, use format_string instead.""" - ... + ) -> _str: ... def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/__init__.pyi index da34c05..723a27d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/__init__.pyi @@ -16,8 +16,8 @@ from re import Pattern from string import Template from time import struct_time from types import FrameType, TracebackType -from typing import Any, ClassVar, Generic, Literal, Protocol, TextIO, TypeVar, overload -from typing_extensions import Self, TypeAlias +from typing import Any, ClassVar, Final, Generic, Literal, Protocol, TextIO, TypeVar, overload +from typing_extensions import Self, TypeAlias, deprecated if sys.version_info >= (3, 11): from types import GenericAlias @@ -64,10 +64,9 @@ __all__ = [ "setLogRecordFactory", "lastResort", "raiseExceptions", + "warn", ] -if sys.version_info < (3, 13): - __all__ += ["warn"] if sys.version_info >= (3, 11): __all__ += ["getLevelNamesMapping"] if sys.version_info >= (3, 12): @@ -141,19 +140,7 @@ class Filterer: """ ... else: - def filter(self, record: LogRecord) -> bool: - """ - Determine if a record is loggable by consulting all the filters. - - The default is to allow the record to be logged; any filter can veto - this and the record is then dropped. 
Returns a zero value if a record - is to be dropped, else non-zero. - - .. versionchanged:: 3.2 - - Allow filters to be just callables. - """ - ... + def filter(self, record: LogRecord) -> bool: ... class Manager: # undocumented """ @@ -304,17 +291,16 @@ class Logger(Filterer): logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) """ ... - if sys.version_info < (3, 13): - def warn( - self, - msg: object, - *args: object, - exc_info: _ExcInfoType = None, - stack_info: bool = False, - stacklevel: int = 1, - extra: Mapping[str, object] | None = None, - ) -> None: ... - + @deprecated("Deprecated; use warning() instead.") + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... def error( self, msg: object, @@ -458,14 +444,14 @@ class Logger(Filterer): """ ... -CRITICAL: int -FATAL: int -ERROR: int -WARNING: int -WARN: int -INFO: int -DEBUG: int -NOTSET: int +CRITICAL: Final = 50 +FATAL: Final = CRITICAL +ERROR: Final = 40 +WARNING: Final = 30 +WARN: Final = WARNING +INFO: Final = 20 +DEBUG: Final = 10 +NOTSET: Final = 0 class Handler(Filterer): """ @@ -603,7 +589,7 @@ class Formatter: %(lineno)d Source line number where the logging call was issued (if available) %(funcName)s Function name - %(created)f Time when the LogRecord was created (time.time() + %(created)f Time when the LogRecord was created (time.time_ns() / 1e9 return value) %(asctime)s Textual time when the LogRecord was created %(msecs)d Millisecond portion of the creation time @@ -767,14 +753,7 @@ class Filter: """ ... else: - def filter(self, record: LogRecord) -> bool: - """ - Determine if the specified record is to be logged. - - Returns True if the record should be logged, or False otherwise. - If deemed appropriate, the record may be modified in-place. - """ - ... + def filter(self, record: LogRecord) -> bool: ... 
class LogRecord: """ @@ -854,9 +833,7 @@ class LoggerAdapter(Generic[_L]): manager: Manager # undocumented if sys.version_info >= (3, 13): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: ... - elif sys.version_info >= (3, 10): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: """ Initialize the adapter with a logger and a dict-like object which provides contextual information. This constructor signature allows @@ -866,8 +843,20 @@ class LoggerAdapter(Generic[_L]): following example: adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + + By default, LoggerAdapter objects will drop the "extra" argument + passed on the individual log calls to use its own instead. + + Initializing it with merge_extra=True will instead merge both + maps when logging, the individual call extra taking precedence + over the LoggerAdapter instance extra + + .. versionchanged:: 3.13 + The *merge_extra* argument was added. """ ... + elif sys.version_info >= (3, 10): + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... else: def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... @@ -923,18 +912,17 @@ class LoggerAdapter(Generic[_L]): ) -> None: """Delegate a warning call to the underlying logger.""" ... - if sys.version_info < (3, 13): - def warn( - self, - msg: object, - *args: object, - exc_info: _ExcInfoType = None, - stack_info: bool = False, - stacklevel: int = 1, - extra: Mapping[str, object] | None = None, - **kwargs: object, - ) -> None: ... - + @deprecated("Deprecated; use warning() instead.") + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... 
def error( self, msg: object, @@ -1021,9 +1009,7 @@ class LoggerAdapter(Generic[_L]): exc_info: _ExcInfoType | None = None, extra: Mapping[str, object] | None = None, stack_info: bool = False, - ) -> None: - """Low-level log implementation, proxied to allow nested logger adapters.""" - ... + ) -> None: ... # undocumented @property def name(self) -> str: ... # undocumented @@ -1091,17 +1077,15 @@ def warning( format. """ ... - -if sys.version_info < (3, 13): - def warn( - msg: object, - *args: object, - exc_info: _ExcInfoType = None, - stack_info: bool = False, - stacklevel: int = 1, - extra: Mapping[str, object] | None = None, - ) -> None: ... - +@deprecated("Deprecated; use warning() instead.") +def warn( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, +) -> None: ... def error( msg: object, *args: object, @@ -1192,11 +1176,8 @@ def getLevelName(level: int) -> str: 'Level %s' % level is returned. """ ... - -# The str -> int case is considered a mistake, but retained for backward -# compatibility. See -# https://docs.python.org/3/library/logging.html#logging.getLevelName. @overload +@deprecated("The str -> int case is considered a mistake.") def getLevelName(level: str) -> Any: """ Return the textual or numeric representation of logging level 'level'. 
@@ -1491,6 +1472,6 @@ class StrFormatStyle(PercentStyle): # undocumented class StringTemplateStyle(PercentStyle): # undocumented _tpl: Template -_STYLES: dict[str, tuple[PercentStyle, str]] +_STYLES: Final[dict[str, tuple[PercentStyle, str]]] -BASIC_FORMAT: str +BASIC_FORMAT: Final[str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/config.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/config.pyi index 2a0e784..96ab0ca 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/config.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/config.pyi @@ -14,14 +14,14 @@ from collections.abc import Callable, Hashable, Iterable, Sequence from configparser import RawConfigParser from re import Pattern from threading import Thread -from typing import IO, Any, Literal, SupportsIndex, TypedDict, overload +from typing import IO, Any, Final, Literal, SupportsIndex, TypedDict, overload from typing_extensions import Required, TypeAlias from . 
import Filter, Filterer, Formatter, Handler, Logger, _FilterType, _FormatStyle, _Level DEFAULT_LOGGING_CONFIG_PORT: int -RESET_ERROR: int # undocumented -IDENTIFIER: Pattern[str] # undocumented +RESET_ERROR: Final[int] # undocumented +IDENTIFIER: Final[Pattern[str]] # undocumented if sys.version_info >= (3, 11): class _RootLoggerConfiguration(TypedDict, total=False): diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/handlers.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/handlers.pyi index a54a21c..6ce5a17 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/handlers.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/logging/handlers.pyi @@ -17,16 +17,16 @@ from logging import FileHandler, Handler, LogRecord from re import Pattern from socket import SocketKind, socket from threading import Thread -from typing import Any, ClassVar, Protocol, TypeVar +from typing import Any, ClassVar, Final, Protocol, TypeVar _T = TypeVar("_T") -DEFAULT_TCP_LOGGING_PORT: int -DEFAULT_UDP_LOGGING_PORT: int -DEFAULT_HTTP_LOGGING_PORT: int -DEFAULT_SOAP_LOGGING_PORT: int -SYSLOG_UDP_PORT: int -SYSLOG_TCP_PORT: int +DEFAULT_TCP_LOGGING_PORT: Final[int] +DEFAULT_UDP_LOGGING_PORT: Final[int] +DEFAULT_HTTP_LOGGING_PORT: Final[int] +DEFAULT_SOAP_LOGGING_PORT: Final[int] +SYSLOG_UDP_PORT: Final[int] +SYSLOG_TCP_PORT: Final[int] class WatchedFileHandler(FileHandler): """ @@ -42,8 +42,7 @@ class WatchedFileHandler(FileHandler): This handler is not appropriate for use under Windows, because under Windows open files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need - for such a handler. Furthermore, ST_INO is not supported under - Windows; stat always returns zero for this value. + for such a handler. This handler is based on a suggestion and patch by Chad J. Schroeder. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lzma.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lzma.pyi index de0ce9d..cadbd40 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lzma.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/lzma.pyi @@ -12,7 +12,7 @@ container formats, as well as raw compressed data streams. from _compression import BaseStream from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Mapping, Sequence -from typing import IO, Any, Literal, TextIO, final, overload +from typing import IO, Any, Final, Literal, TextIO, final, overload from typing_extensions import Self, TypeAlias __all__ = [ @@ -61,33 +61,33 @@ _PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] _FilterChain: TypeAlias = Sequence[Mapping[str, Any]] -FORMAT_AUTO: Literal[0] -FORMAT_XZ: Literal[1] -FORMAT_ALONE: Literal[2] -FORMAT_RAW: Literal[3] -CHECK_NONE: Literal[0] -CHECK_CRC32: Literal[1] -CHECK_CRC64: Literal[4] -CHECK_SHA256: Literal[10] -CHECK_ID_MAX: Literal[15] -CHECK_UNKNOWN: Literal[16] +FORMAT_AUTO: Final = 0 +FORMAT_XZ: Final = 1 +FORMAT_ALONE: Final = 2 +FORMAT_RAW: Final = 3 +CHECK_NONE: Final = 0 +CHECK_CRC32: Final = 1 +CHECK_CRC64: Final = 4 +CHECK_SHA256: Final = 10 +CHECK_ID_MAX: Final = 15 +CHECK_UNKNOWN: Final = 16 FILTER_LZMA1: int # v big number -FILTER_LZMA2: Literal[33] -FILTER_DELTA: Literal[3] -FILTER_X86: Literal[4] -FILTER_IA64: Literal[6] -FILTER_ARM: Literal[7] -FILTER_ARMTHUMB: Literal[8] -FILTER_SPARC: Literal[9] -FILTER_POWERPC: Literal[5] -MF_HC3: Literal[3] -MF_HC4: Literal[4] -MF_BT2: Literal[18] -MF_BT3: Literal[19] -MF_BT4: Literal[20] -MODE_FAST: Literal[1] -MODE_NORMAL: Literal[2] -PRESET_DEFAULT: Literal[6] +FILTER_LZMA2: Final = 33 +FILTER_DELTA: Final = 3 +FILTER_X86: Final = 4 +FILTER_IA64: Final = 6 +FILTER_ARM: Final = 7 +FILTER_ARMTHUMB: Final = 8 +FILTER_SPARC: Final = 9 +FILTER_POWERPC: Final = 5 +MF_HC3: 
Final = 3 +MF_HC4: Final = 4 +MF_BT2: Final = 18 +MF_BT3: Final = 19 +MF_BT4: Final = 20 +MODE_FAST: Final = 1 +MODE_NORMAL: Final = 2 +PRESET_DEFAULT: Final = 6 PRESET_EXTREME: int # v big number # from _lzma.c diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailbox.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailbox.pyi index 205903c..ca9a163 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailbox.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailbox.pyi @@ -204,6 +204,26 @@ class Maildir(Mailbox[MaildirMessage]): def get_file(self, key: str) -> _ProxyFile[bytes]: """Return a file-like representation or raise a KeyError.""" ... + if sys.version_info >= (3, 13): + def get_info(self, key: str) -> str: + """Get the keyed message's "info" as a string.""" + ... + def set_info(self, key: str, info: str) -> None: + """Set the keyed message's "info" string.""" + ... + def get_flags(self, key: str) -> str: + """Return as a string the standard flags that are set on the keyed message.""" + ... + def set_flags(self, key: str, flags: str) -> None: + """Set the given flags and unset all others on the keyed message.""" + ... + def add_flag(self, key: str, flag: str) -> None: + """Set the given flag(s) without changing others on the keyed message.""" + ... + def remove_flag(self, key: str, flag: str) -> None: + """Unset the given string flag(s) without changing others on the keyed message.""" + ... + def iterkeys(self) -> Iterator[str]: """Return an iterator over keys.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailcap.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailcap.pyi index 4fee156..ce549e0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailcap.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mailcap.pyi @@ -1,5 +1,3 @@ -"""Mailcap file handling. 
See RFC 1524.""" - from collections.abc import Mapping, Sequence from typing_extensions import TypeAlias @@ -9,24 +7,5 @@ __all__ = ["getcaps", "findmatch"] def findmatch( caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = [] -) -> tuple[str | None, _Cap | None]: - """ - Find a match for a mailcap entry. - - Return a tuple containing the command line, and the mailcap entry - used; (None, None) if no match is found. This may invoke the - 'test' command of several matching entries before deciding which - entry to use. - """ - ... -def getcaps() -> dict[str, list[_Cap]]: - """ - Return a dictionary containing the mailcap database. - - The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain') - to a list of dictionaries corresponding to mailcap entries. The list - collects all the entries for that MIME type from all available mailcap - files. Each dictionary contains key-value pairs for that MIME type, - where the viewing command is stored with the key "view". - """ - ... +) -> tuple[str | None, _Cap | None]: ... +def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/marshal.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/marshal.pyi index de807fa..78ba750 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/marshal.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/marshal.pyi @@ -6,7 +6,7 @@ machine architecture issues. Not all Python object types are supported; in general, only objects whose value is independent from a particular invocation of Python can be written and read by this module. 
The following types are supported: -None, integers, floating point numbers, strings, bytes, bytearrays, +None, integers, floating-point numbers, strings, bytes, bytearrays, tuples, lists, sets, dictionaries, and code objects, where it should be understood that tuples, lists and dictionaries are only supported as long as the values contained therein are themselves @@ -17,7 +17,7 @@ Variables: version -- indicates the format that the module uses. Version 0 is the historical format, version 1 shares interned strings and version 2 - uses a binary format for floating point numbers. + uses a binary format for floating-point numbers. Version 3 shares common object references (New in version 3.4). Functions: @@ -59,13 +59,7 @@ _Marshallable: TypeAlias = ( ) if sys.version_info >= (3, 13): - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: ... - def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... - def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: ... - def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... - -else: - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: """ Write the value on the open file. @@ -75,18 +69,22 @@ else: Must be a writeable binary file. version Indicates the data format that dump should use. + allow_code + Allow to write code objects. If the value has (or contains an object that has) an unsupported type, a ValueError exception is raised - but garbage data will also be written to the file. The object will not be properly read back by load(). """ ... - def load(file: SupportsRead[bytes], /) -> Any: + def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: """ Read one value from the open file and return it. 
file Must be readable binary file. + allow_code + Allow to load code objects. If no valid value is read (e.g. because the data has a different Python version's incompatible marshal format), raise EOFError, ValueError or @@ -96,7 +94,7 @@ else: dump(), load() will substitute None for the unmarshallable type. """ ... - def dumps(value: _Marshallable, version: int = 4, /) -> bytes: + def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: """ Return the bytes object that would be written to a file by dump(value, file). @@ -104,16 +102,27 @@ else: Must be a supported type. version Indicates the data format that dumps should use. + allow_code + Allow to write code objects. Raise a ValueError exception if value has (or contains an object that has) an unsupported type. """ ... - def loads(bytes: ReadableBuffer, /) -> Any: + def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: """ Convert the bytes-like object to a value. + allow_code + Allow to load code objects. + If no valid value is found, raise EOFError, ValueError or TypeError. Extra bytes in the input are ignored. """ ... + +else: + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ... + def load(file: SupportsRead[bytes], /) -> Any: ... + def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ... + def loads(bytes: ReadableBuffer, /) -> Any: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/math.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/math.pyi index 9e762d0..2a3c079 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/math.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/math.pyi @@ -193,9 +193,9 @@ def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: ... def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: """ - Return an accurate floating point sum of values in the iterable seq. 
+ Return an accurate floating-point sum of values in the iterable seq. - Assumes IEEE-754 floating point arithmetic. + Assumes IEEE-754 floating-point arithmetic. """ ... def gamma(x: _SupportsFloatOrIndex, /) -> float: @@ -236,7 +236,7 @@ def isclose( abs_tol: _SupportsFloatOrIndex = 0.0, ) -> bool: """ - Determine whether two floating point numbers are close in value. + Determine whether two floating-point numbers are close in value. rel_tol maximum difference for being considered "close", relative to the @@ -325,9 +325,7 @@ if sys.version_info >= (3, 12): ... elif sys.version_info >= (3, 9): - def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return the next floating-point value after x towards y.""" - ... + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: """ @@ -431,4 +429,10 @@ if sys.version_info >= (3, 9): ... if sys.version_info >= (3, 13): - def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... + def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: + """ + Fused multiply-add operation. + + Compute (x * y) + z with a single round. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mimetypes.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mimetypes.pyi index b29b5f3..12d973a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mimetypes.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mimetypes.pyi @@ -115,7 +115,13 @@ def add_type(type: str, ext: str, strict: bool = True) -> None: ... if sys.version_info >= (3, 13): - def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... 
+ def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: + """ + Guess the type of a file based on its path. + + Similar to guess_type(), but takes file path istead of URL. + """ + ... inited: bool knownfiles: list[str] @@ -137,6 +143,20 @@ class MimeTypes: types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... + def add_type(self, type: str, ext: str, strict: bool = True) -> None: + """ + Add a mapping between a type and an extension. + + When the extension is already known, the new + type will replace the old one. When the type + is already known the extension will be added + to the list of known extensions. + + If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + """ + ... def guess_extension(self, type: str, strict: bool = True) -> str | None: """ Guess the extension for a file based on its MIME type. @@ -214,4 +234,10 @@ class MimeTypes: """ ... if sys.version_info >= (3, 13): - def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... + def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: + """ + Guess the type of a file based on its path. + + Similar to guess_type(), but takes file path istead of URL. + """ + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mmap.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mmap.pyi index 7240070..d824e7c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mmap.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/mmap.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import ReadableBuffer, Unused from collections.abc import Iterable, Iterator, Sized -from typing import NoReturn, overload +from typing import Final, Literal, NoReturn, overload from typing_extensions import Self ACCESS_DEFAULT: int @@ -41,7 +41,7 @@ class mmap(Iterable[int], Sized): except that if the file is empty Windows raises an exception (you cannot create an empty mapping on Windows). - Unix: mmap(fileno, length[, flags[, prot[, access[, offset]]]]) + Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]]) Maps length bytes from the file specified by the file descriptor fileno, and returns a mmap object. If length is 0, the maximum length of the map @@ -116,7 +116,7 @@ class mmap(Iterable[int], Sized): """Release the buffer object that exposes the underlying memory of the object.""" ... if sys.version_info >= (3, 13): - def seekable(self) -> bool: ... + def seekable(self) -> Literal[True]: ... 
if sys.platform != "win32": MADV_NORMAL: int @@ -152,3 +152,21 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win if sys.version_info >= (3, 10) and sys.platform == "darwin": MADV_FREE_REUSABLE: int MADV_FREE_REUSE: int + +if sys.version_info >= (3, 13) and sys.platform != "win32": + MAP_32BIT: Final = 32768 + +if sys.version_info >= (3, 13) and sys.platform == "darwin": + MAP_NORESERVE: Final = 64 + MAP_NOEXTEND: Final = 256 + MAP_HASSEMAPHORE: Final = 512 + MAP_NOCACHE: Final = 1024 + MAP_JIT: Final = 2048 + MAP_RESILIENT_CODESIGN: Final = 8192 + MAP_RESILIENT_MEDIA: Final = 16384 + MAP_TRANSLATED_ALLOW_EXECUTE: Final = 131072 + MAP_UNIX03: Final = 262144 + MAP_TPRO: Final = 524288 + +if sys.version_info >= (3, 13) and sys.platform == "linux": + MAP_NORESERVE: Final = 16384 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/modulefinder.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/modulefinder.pyi index 00ae4e6..c5cdfb9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/modulefinder.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/modulefinder.pyi @@ -3,15 +3,15 @@ import sys from collections.abc import Container, Iterable, Iterator, Sequence from types import CodeType -from typing import IO, Any +from typing import IO, Any, Final if sys.version_info < (3, 11): - LOAD_CONST: int # undocumented - IMPORT_NAME: int # undocumented - STORE_NAME: int # undocumented - STORE_GLOBAL: int # undocumented - STORE_OPS: tuple[int, int] # undocumented - EXTENDED_ARG: int # undocumented + LOAD_CONST: Final[int] # undocumented + IMPORT_NAME: Final[int] # undocumented + STORE_NAME: Final[int] # undocumented + STORE_GLOBAL: Final[int] # undocumented + STORE_OPS: Final[tuple[int, int]] # undocumented + EXTENDED_ARG: Final[int] # undocumented packagePathMap: dict[str, list[str]] # undocumented diff --git 
a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/msvcrt.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/msvcrt.pyi index 54b3674..403a5d9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/msvcrt.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/msvcrt.pyi @@ -1,14 +1,14 @@ import sys -from typing import Final, Literal +from typing import Final # This module is only available on Windows if sys.platform == "win32": CRT_ASSEMBLY_VERSION: Final[str] - LK_UNLCK: Literal[0] - LK_LOCK: Literal[1] - LK_NBLCK: Literal[2] - LK_RLCK: Literal[3] - LK_NBRLCK: Literal[4] + LK_UNLCK: Final = 0 + LK_LOCK: Final = 1 + LK_NBLCK: Final = 2 + LK_RLCK: Final = 3 + LK_NBRLCK: Final = 4 SEM_FAILCRITICALERRORS: int SEM_NOALIGNMENTFAULTEXCEPT: int SEM_NOGPFAULTERRORBOX: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/connection.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/connection.pyi index 857bf3a..b103e92 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/connection.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/connection.pyi @@ -1,9 +1,9 @@ import socket import sys -import types -from _typeshed import ReadableBuffer +from _typeshed import Incomplete, ReadableBuffer from collections.abc import Iterable -from typing import Any, SupportsIndex +from types import TracebackType +from typing import Any, Generic, SupportsIndex, TypeVar from typing_extensions import Self, TypeAlias __all__ = ["Client", "Listener", "Pipe", "wait"] @@ -11,7 +11,11 @@ __all__ = ["Client", "Listener", "Pipe", "wait"] # https://docs.python.org/3/library/multiprocessing.html#address-formats _Address: TypeAlias = str | tuple[str, int] -class _ConnectionBase: +# Defaulting to Any to avoid forcing generics on a lot of pre-existing code +_SendT = TypeVar("_SendT", 
contravariant=True, default=Any) +_RecvT = TypeVar("_RecvT", covariant=True, default=Any) + +class _ConnectionBase(Generic[_SendT, _RecvT]): def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property def closed(self) -> bool: @@ -34,7 +38,7 @@ class _ConnectionBase: def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: """Send the bytes data from a bytes-like object""" ... - def send(self, obj: Any) -> None: + def send(self, obj: _SendT) -> None: """Send a (picklable) object""" ... def recv_bytes(self, maxlength: int | None = None) -> bytes: @@ -46,7 +50,7 @@ class _ConnectionBase: Return the number of bytes read. """ ... - def recv(self) -> Any: + def recv(self) -> _RecvT: """Receive a (picklable) object""" ... def poll(self, timeout: float | None = 0.0) -> bool: @@ -54,11 +58,11 @@ class _ConnectionBase: ... def __enter__(self) -> Self: ... def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __del__(self) -> None: ... -class Connection(_ConnectionBase): +class Connection(_ConnectionBase[_SendT, _RecvT]): """ Connection class based on an arbitrary file descriptor (Unix only), or a socket handle (Windows). @@ -66,7 +70,7 @@ class Connection(_ConnectionBase): ... if sys.platform == "win32": - class PipeConnection(_ConnectionBase): ... + class PipeConnection(_ConnectionBase[_SendT, _RecvT]): ... class Listener: """ @@ -78,7 +82,7 @@ class Listener: def __init__( self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... - def accept(self) -> Connection: + def accept(self) -> Connection[Incomplete, Incomplete]: """ Accept a connection on the bound socket or named pipe of `self`. 
@@ -94,36 +98,40 @@ class Listener: def last_accepted(self) -> _Address | None: ... def __enter__(self) -> Self: ... def __exit__( - self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... +# Any: send and recv methods unused if sys.version_info >= (3, 12): - def deliver_challenge(connection: Connection, authkey: bytes, digest_name: str = "sha256") -> None: ... + def deliver_challenge(connection: Connection[Any, Any], authkey: bytes, digest_name: str = "sha256") -> None: ... else: - def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... + def deliver_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... -def answer_challenge(connection: Connection, authkey: bytes) -> None: ... +def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... def wait( - object_list: Iterable[Connection | socket.socket | int], timeout: float | None = None -) -> list[Connection | socket.socket | int]: + object_list: Iterable[Connection[_SendT, _RecvT] | socket.socket | int], timeout: float | None = None +) -> list[Connection[_SendT, _RecvT] | socket.socket | int]: """ Wait till an object in object_list is ready/readable. Returns list of those objects in object_list which are ready/readable. """ ... -def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection: +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: """Returns a connection to the address of a `Listener`""" ... # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. 
+# +# The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). +# However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: + def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: """Returns pair of connection objects at either end of a pipe""" ... else: - def Pipe(duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... + def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/context.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/context.pyi index 3f2bc9e..894252d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/context.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/context.pyi @@ -62,12 +62,15 @@ class BaseContext: # N.B. Keep this in sync with multiprocessing.connection.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. + # + # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). + # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(self, duplex: bool = True) -> tuple[Connection, Connection]: + def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: """Returns two connection object connected by a pipe""" ... else: - def Pipe(self, duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... 
def Barrier( self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None @@ -150,20 +153,24 @@ class BaseContext: ... @overload def Array( - self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True - ) -> SynchronizedString: + self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] + ) -> SynchronizedArray[_T]: """Returns a synchronized shared array""" ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] - ) -> SynchronizedArray[_CT]: + self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedString: """Returns a synchronized shared array""" ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True - ) -> SynchronizedArray[_CT]: + self, + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ) -> SynchronizedArray[_T]: """Returns a synchronized shared array""" ... 
@overload diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/forkserver.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/forkserver.pyi index be6dd75..250be46 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/forkserver.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/forkserver.pyi @@ -1,12 +1,12 @@ from _typeshed import FileDescriptorLike, Unused from collections.abc import Sequence from struct import Struct -from typing import Any +from typing import Any, Final __all__ = ["ensure_running", "get_inherited_fds", "connect_to_new_process", "set_forkserver_preload"] -MAXFDS_TO_SEND: int -SIGNED_STRUCT: Struct +MAXFDS_TO_SEND: Final = 256 +SIGNED_STRUCT: Final[Struct] class ForkServer: def set_forkserver_preload(self, modules_names: list[str]) -> None: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/managers.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/managers.pyi index 140d1a9..64f42ec 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/managers.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/managers.pyi @@ -1,7 +1,7 @@ import queue import sys import threading -from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from _typeshed import Incomplete, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence from types import TracebackType from typing import Any, AnyStr, ClassVar, Generic, SupportsIndex, TypeVar, overload @@ -98,7 +98,13 @@ class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] def values(self) -> list[_VT]: ... 
# type: ignore[override] if sys.version_info >= (3, 13): - def __class_getitem__(cls, args: Any, /) -> Any: ... + def __class_getitem__(cls, args: Any, /) -> Any: + """ + Represent a PEP 585 generic type + + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ + ... class BaseListProxy(BaseProxy, MutableSequence[_T]): __builtins__: ClassVar[dict[str, Any]] @@ -134,7 +140,13 @@ class ListProxy(BaseListProxy[_T]): def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[override] def __imul__(self, value: SupportsIndex, /) -> Self: ... # type: ignore[override] if sys.version_info >= (3, 13): - def __class_getitem__(cls, args: Any, /) -> Any: ... + def __class_getitem__(cls, args: Any, /) -> Any: + """ + Represent a PEP 585 generic type + + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ + ... # Returned by BaseManager.get_server() class Server: @@ -146,7 +158,9 @@ class Server: def serve_forever(self) -> None: """Run the server forever""" ... - def accept_connection(self, c: Connection, name: str) -> None: + def accept_connection( + self, c: Connection[tuple[str, str | None], tuple[str, str, Iterable[Incomplete], Mapping[str, Incomplete]]], name: str + ) -> None: """Spawn a new thread to serve this connection""" ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/pool.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/pool.pyi index 8cad4ef..8612fa5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/pool.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/pool.pyi @@ -1,7 +1,7 @@ import sys from collections.abc import Callable, Iterable, Iterator, Mapping from types import TracebackType -from typing import Any, Generic, Literal, TypeVar +from typing import Any, Final, Generic, TypeVar from typing_extensions import Self if sys.version_info >= (3, 9): @@ -130,7 +130,7 @@ class ThreadPool(Pool): ) -> None: ... # undocumented -INIT: Literal["INIT"] -RUN: Literal["RUN"] -CLOSE: Literal["CLOSE"] -TERMINATE: Literal["TERMINATE"] +INIT: Final = "INIT" +RUN: Final = "RUN" +CLOSE: Final = "CLOSE" +TERMINATE: Final = "TERMINATE" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/popen_spawn_win32.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/popen_spawn_win32.pyi index 3dc9d5b..481b9ee 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -1,16 +1,16 @@ import sys from multiprocessing.process import BaseProcess -from typing import ClassVar +from typing import ClassVar, Final from .util import Finalize if sys.platform == "win32": __all__ = ["Popen"] - TERMINATE: int - WINEXE: bool - WINSERVICE: bool - WINENV: bool + TERMINATE: Final[int] + WINEXE: Final[bool] + WINSERVICE: Final[bool] + WINENV: Final[bool] class Popen: finalizer: Finalize diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/reduction.pyi 
b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/reduction.pyi index c62bd64..aba64c4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/reduction.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/reduction.pyi @@ -8,14 +8,14 @@ from copyreg import _DispatchTableType from multiprocessing import connection from pickle import _ReducedType from socket import socket -from typing import Any, Literal +from typing import Any, Final if sys.platform == "win32": __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupHandle", "duplicate", "steal_handle"] else: __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] -HAVE_SEND_HANDLE: bool +HAVE_SEND_HANDLE: Final[bool] class ForkingPickler(pickle.Pickler): """Pickler subclass used by multiprocessing.""" @@ -40,18 +40,15 @@ if sys.platform == "win32": handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None ) -> int: ... def steal_handle(source_pid: int, handle: int) -> int: ... - def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... - def recv_handle(conn: connection.PipeConnection) -> int: ... + def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: ... class DupHandle: def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... def detach(self) -> int: ... 
else: - if sys.platform == "darwin": - ACKNOWLEDGE: Literal[True] - else: - ACKNOWLEDGE: Literal[False] + ACKNOWLEDGE: Final[bool] def recvfds(sock: socket, size: int) -> list[int]: """Receive an array of fds over an AF_UNIX socket.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi index e619686..15d8c32 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi @@ -61,9 +61,15 @@ class SharedMemory: """ Requests that the underlying shared memory block be destroyed. - In order to ensure proper cleanup of resources, unlink should be - called once (and only once) across all processes which have access - to the shared memory block. + Unlink should be called once (and only once) across all handles + which have access to the shared memory block, even if these + handles belong to different processes. Closing and unlinking may + happen in any order, but trying to access data inside a shared + memory block after unlinking may result in memory errors, + depending on platform. + + This method has no effect on Windows, where the only way to + delete a shared memory block is to close all handles. """ ... def __del__(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi index 88d97cd..54101f4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi @@ -57,12 +57,22 @@ def Array( ... 
@overload def Array( - typecode_or_type: type[_CT], + typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None, -) -> SynchronizedArray[_CT]: +) -> SynchronizedString: + """Return a synchronization wrapper for a RawArray""" + ... +@overload +def Array( + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[_T]: """Return a synchronization wrapper for a RawArray""" ... @overload @@ -91,7 +101,9 @@ def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any @overload def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... @overload -def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... +def synchronized( + obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None +) -> SynchronizedArray[_T]: ... @overload def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... @@ -113,19 +125,30 @@ class SynchronizedBase(Generic[_CT]): class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]): value: _T -class SynchronizedArray(SynchronizedBase[ctypes.Array[_CT]], Generic[_CT]): +class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generic[_T]): def __len__(self) -> int: ... @overload - def __getitem__(self, i: slice) -> list[_CT]: ... + def __getitem__(self, i: slice) -> list[_T]: ... + @overload + def __getitem__(self, i: int) -> _T: ... @overload - def __getitem__(self, i: int) -> _CT: ... + def __setitem__(self, i: slice, value: Iterable[_T]) -> None: ... + @overload + def __setitem__(self, i: int, value: _T) -> None: ... 
+ def __getslice__(self, start: int, stop: int) -> list[_T]: ... + def __setslice__(self, start: int, stop: int, values: Iterable[_T]) -> None: ... + +class SynchronizedString(SynchronizedArray[bytes]): + @overload # type: ignore[override] + def __getitem__(self, i: slice) -> bytes: ... @overload - def __setitem__(self, i: slice, value: Iterable[_CT]) -> None: ... + def __getitem__(self, i: int) -> bytes: ... + @overload # type: ignore[override] + def __setitem__(self, i: slice, value: bytes) -> None: ... @overload - def __setitem__(self, i: int, value: _CT) -> None: ... - def __getslice__(self, start: int, stop: int) -> list[_CT]: ... - def __setslice__(self, start: int, stop: int, values: Iterable[_CT]) -> None: ... + def __setitem__(self, i: int, value: bytes) -> None: ... + def __getslice__(self, start: int, stop: int) -> bytes: ... # type: ignore[override] + def __setslice__(self, start: int, stop: int, values: bytes) -> None: ... # type: ignore[override] -class SynchronizedString(SynchronizedArray[c_char]): value: bytes raw: bytes diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/spawn.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/spawn.pyi index 5c0322e..cadc1f2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/spawn.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/spawn.pyi @@ -1,6 +1,6 @@ from collections.abc import Mapping, Sequence from types import ModuleType -from typing import Any +from typing import Any, Final __all__ = [ "_main", @@ -12,8 +12,8 @@ __all__ = [ "import_main_path", ] -WINEXE: bool -WINSERVICE: bool +WINEXE: Final[bool] +WINSERVICE: Final[bool] def set_executable(exe: str) -> None: ... def get_executable() -> str: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/util.pyi index 5ad3e24..b43b465 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/multiprocessing/util.pyi @@ -2,7 +2,7 @@ import threading from _typeshed import ConvertibleToInt, Incomplete, Unused from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from logging import Logger, _Level as _LoggingLevel -from typing import Any, Generic, TypeVar, overload +from typing import Any, Final, Generic, TypeVar, overload __all__ = [ "sub_debug", @@ -25,14 +25,14 @@ __all__ = [ _T = TypeVar("_T") _R_co = TypeVar("_R_co", default=Any, covariant=True) -NOTSET: int -SUBDEBUG: int -DEBUG: int -INFO: int -SUBWARNING: int +NOTSET: Final[int] +SUBDEBUG: Final[int] +DEBUG: Final[int] +INFO: Final[int] +SUBWARNING: Final[int] -LOGGER_NAME: str -DEFAULT_LOGGING_FORMAT: str +LOGGER_NAME: Final[str] +DEFAULT_LOGGING_FORMAT: Final[str] def sub_debug(msg: object, *args: object) -> None: ... def debug(msg: object, *args: object) -> None: ... @@ -105,7 +105,7 @@ class ForkAwareThreadLock: class ForkAwareLocal(threading.local): ... -MAXFD: int +MAXFD: Final[int] def close_all_fds_except(fds: Iterable[int]) -> None: ... def spawnv_passfds(path: bytes, args: Sequence[ConvertibleToInt], passfds: Sequence[int]) -> int: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nis.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nis.pyi index 3b6b2c2..10eef23 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nis.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nis.pyi @@ -1,36 +1,9 @@ -"""This module contains functions for accessing NIS maps.""" - import sys if sys.platform != "win32": - def cat(map: str, domain: str = ...) -> dict[str, str]: - """ - cat(map, domain = defaultdomain) - Returns the entire map as a dictionary. Optionally domain can be - specified but it defaults to the system default domain. - """ - ... - def get_default_domain() -> str: - """ - get_default_domain() -> str - Corresponds to the C library yp_get_default_domain() call, returning - the default NIS domain. - """ - ... - def maps(domain: str = ...) -> list[str]: - """ - maps(domain = defaultdomain) - Returns an array of all available NIS maps within a domain. If domain - is not specified it defaults to the system default domain. - """ - ... - def match(key: str, map: str, domain: str = ...) -> str: - """ - match(key, map, domain = defaultdomain) - Corresponds to the C library yp_match() call, returning the value of - key in the given map. Optionally domain can be specified but it - defaults to the system default domain. - """ - ... + def cat(map: str, domain: str = ...) -> dict[str, str]: ... + def get_default_domain() -> str: ... + def maps(domain: str = ...) -> list[str]: ... + def match(key: str, map: str, domain: str = ...) -> str: ... class error(Exception): ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nntplib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nntplib.pyi index 3ae2c6a..85dfbff 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nntplib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nntplib.pyi @@ -1,33 +1,3 @@ -""" -An NNTP client class based on: -- RFC 977: Network News Transfer Protocol -- RFC 2980: Common NNTP Extensions -- RFC 3977: Network News Transfer Protocol (version 2) - -Example: - ->>> from nntplib import NNTP ->>> s = NNTP('news') ->>> resp, count, first, last, name = s.group('comp.lang.python') ->>> print('Group', name, 'has', count, 'articles, range', first, 'to', last) -Group comp.lang.python has 51 articles, range 5770 to 5821 ->>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last)) ->>> resp = s.quit() ->>> - -Here 'resp' is the server response line. -Error responses are turned into exceptions. - -To post an article from a file: ->>> f = open(filename, 'rb') # file containing article, including header ->>> resp = s.post(f) ->>> - -For descriptions of all methods, read the comments in the code below. -Note that all arguments and return values representing article numbers -are strings, not numbers, since they are rarely used for calculations. -""" - import datetime import socket import ssl @@ -35,7 +5,7 @@ import sys from _typeshed import Unused from builtins import list as _list # conflicts with a method named "list" from collections.abc import Iterable -from typing import IO, Any, Literal, NamedTuple +from typing import IO, Any, Final, NamedTuple from typing_extensions import Self, TypeAlias __all__ = [ @@ -53,47 +23,29 @@ __all__ = [ _File: TypeAlias = IO[bytes] | bytes | str | None class NNTPError(Exception): - """Base class for all nntplib exceptions""" response: str -class NNTPReplyError(NNTPError): - """Unexpected [123]xx reply""" - ... 
-class NNTPTemporaryError(NNTPError): - """4xx errors""" - ... -class NNTPPermanentError(NNTPError): - """5xx errors""" - ... -class NNTPProtocolError(NNTPError): - """Response does not begin with [1-5]""" - ... -class NNTPDataError(NNTPError): - """Error in response data""" - ... +class NNTPReplyError(NNTPError): ... +class NNTPTemporaryError(NNTPError): ... +class NNTPPermanentError(NNTPError): ... +class NNTPProtocolError(NNTPError): ... +class NNTPDataError(NNTPError): ... -NNTP_PORT: Literal[119] -NNTP_SSL_PORT: Literal[563] +NNTP_PORT: Final = 119 +NNTP_SSL_PORT: Final = 563 class GroupInfo(NamedTuple): - """GroupInfo(group, last, first, flag)""" group: str last: str first: str flag: str class ArticleInfo(NamedTuple): - """ArticleInfo(number, message_id, lines)""" number: int message_id: str lines: list[bytes] -def decode_header(header_str: str) -> str: - """ - Takes a unicode string representing a munged header value - and decodes it as a (possibly non-ASCII) readable value. - """ - ... +def decode_header(header_str: str) -> str: ... class NNTP: encoding: str @@ -119,275 +71,43 @@ class NNTP: readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: - """ - Initialize an instance. Arguments: - - host: hostname to connect to - - port: port to connect to (default the standard NNTP port) - - user: username to authenticate with - - password: password to use with username - - readermode: if true, send 'mode reader' command after - connecting. - - usenetrc: allow loading username and password from ~/.netrc file - if not specified explicitly - - timeout: timeout (in seconds) used for socket connections - - readermode is sometimes necessary if you are connecting to an - NNTP server on the local machine and intend to call - reader-specific commands, such as `group'. If you get - unexpected NNTPPermanentErrors, you might need to set - readermode. - """ - ... + ) -> None: ... def __enter__(self) -> Self: ... 
def __exit__(self, *args: Unused) -> None: ... - def getwelcome(self) -> str: - """ - Get the welcome message from the server - (this is read and squirreled away by __init__()). - If the response code is 200, posting is allowed; - if it 201, posting is not allowed. - """ - ... - def getcapabilities(self) -> dict[str, _list[str]]: - """ - Get the server capabilities, as read by __init__(). - If the CAPABILITIES command is not supported, an empty dict is - returned. - """ - ... - def set_debuglevel(self, level: int) -> None: - """ - Set the debugging level. Argument 'level' means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - ... - def debug(self, level: int) -> None: - """ - Set the debugging level. Argument 'level' means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - ... - def capabilities(self) -> tuple[str, dict[str, _list[str]]]: - """ - Process a CAPABILITIES command. Not supported by all servers. - Return: - - resp: server response if successful - - caps: a dictionary mapping capability names to lists of tokens - (for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] }) - """ - ... - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: - """ - Process a NEWGROUPS command. Arguments: - - date: a date or datetime object - Return: - - resp: server response if successful - - list: list of newsgroup names - """ - ... - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: - """ - Process a NEWNEWS command. Arguments: - - group: group name or '*' - - date: a date or datetime object - Return: - - resp: server response if successful - - list: list of message ids - """ - ... 
- def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: - """ - Process a LIST or LIST ACTIVE command. Arguments: - - group_pattern: a pattern indicating which groups to query - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of (group, last, first, flag) (strings) - """ - ... - def description(self, group: str) -> str: - """ - Get a description for a single group. If more than one - group matches ('group' is a pattern), return the first. If no - group matches, return an empty string. - - This elides the response code from the server, since it can - only be '215' or '285' (for xgtitle) anyway. If the response - code is needed, use the 'descriptions' method. - - NOTE: This neither checks for a wildcard in 'group' nor does - it check whether the group actually exists. - """ - ... - def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: - """Get descriptions for a range of groups.""" - ... - def group(self, name: str) -> tuple[str, int, int, int, str]: - """ - Process a GROUP command. Argument: - - group: the group name - Returns: - - resp: server response if successful - - count: number of articles - - first: first article number - - last: last article number - - name: the group name - """ - ... - def help(self, *, file: _File = None) -> tuple[str, _list[str]]: - """ - Process a HELP command. Argument: - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of strings returned by the server in response to the - HELP command - """ - ... - def stat(self, message_spec: Any = None) -> tuple[str, int, str]: - """ - Process a STAT command. 
Argument: - - message_spec: article number or message id (if not specified, - the current article is selected) - Returns: - - resp: server response if successful - - art_num: the article number - - message_id: the message id - """ - ... - def next(self) -> tuple[str, int, str]: - """Process a NEXT command. No arguments. Return as for STAT.""" - ... - def last(self) -> tuple[str, int, str]: - """Process a LAST command. No arguments. Return as for STAT.""" - ... - def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """ - Process a HEAD command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the headers in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of header lines) - """ - ... - def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """ - Process a BODY command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the body in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of body lines) - """ - ... - def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """ - Process an ARTICLE command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the article in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of article lines) - """ - ... - def slave(self) -> str: - """ - Process a SLAVE command. Returns: - - resp: server response if successful - """ - ... - def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: - """ - Process an XHDR command (optional server extension). Arguments: - - hdr: the header type (e.g. 
'subject') - - str: an article nr, a message id, or a range nr1-nr2 - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of (nr, value) strings - """ - ... - def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: - """ - Process an XOVER command (optional server extension) Arguments: - - start: start of range - - end: end of range - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of dicts containing the response fields - """ - ... + def getwelcome(self) -> str: ... + def getcapabilities(self) -> dict[str, _list[str]]: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... + def description(self, group: str) -> str: ... + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... + def group(self, name: str) -> tuple[str, int, int, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... + def next(self) -> tuple[str, int, str]: ... + def last(self) -> tuple[str, int, str]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... 
+ def slave(self) -> str: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... def over( self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None - ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: - """ - Process an OVER command. If the command isn't supported, fall - back to XOVER. Arguments: - - message_spec: - - either a message id, indicating the article to fetch - information about - - or a (start, end) tuple, indicating a range of article numbers; - if end is None, information up to the newest message will be - retrieved - - or None, indicating the current article number must be used - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of dicts containing the response fields - - NOTE: the "message id" form isn't supported by XOVER - """ - ... + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... if sys.version_info < (3, 9): def xgtitle(self, group: str, *, file: _File = None) -> tuple[str, _list[tuple[str, str]]]: ... def xpath(self, id: Any) -> tuple[str, str]: ... - def date(self) -> tuple[str, datetime.datetime]: - """ - Process the DATE command. - Returns: - - resp: server response if successful - - date: datetime object - """ - ... - def post(self, data: bytes | Iterable[bytes]) -> str: - """ - Process a POST command. Arguments: - - data: bytes object, iterable or file containing the article - Returns: - - resp: server response if successful - """ - ... - def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: - """ - Process an IHAVE command. Arguments: - - message_id: message-id of the article - - data: file containing the article - Returns: - - resp: server response if successful - Note that if the server refuses the article an exception is raised. - """ - ... 
- def quit(self) -> str: - """ - Process a QUIT command and close the socket. Returns: - - resp: server response if successful - """ - ... + def date(self) -> tuple[str, datetime.datetime]: ... + def post(self, data: bytes | Iterable[bytes]) -> str: ... + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... + def quit(self) -> str: ... def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... - def starttls(self, context: ssl.SSLContext | None = None) -> None: - """ - Process a STARTTLS command. Arguments: - - context: SSL context to use for the encrypted connection - """ - ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -402,9 +122,4 @@ class NNTP_SSL(NNTP): readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: - """ - This works identically to NNTP.__init__, except for the change - in default port and the `ssl_context` argument for SSL connections. - """ - ... + ) -> None: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nt.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nt.pyi index a59a423..f888466 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nt.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/nt.pyi @@ -114,5 +114,7 @@ if sys.platform == "win32": listvolumes as listvolumes, set_blocking as set_blocking, ) + if sys.version_info >= (3, 13): + from os import fchmod as fchmod, lchmod as lchmod environ: dict[str, str] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ntpath.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ntpath.pyi index e07e0fa..7587c84 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ntpath.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ntpath.pyi @@ -130,4 +130,6 @@ else: realpath = abspath if sys.version_info >= (3, 13): - def isreserved(path: StrOrBytesPath) -> bool: ... + def isreserved(path: StrOrBytesPath) -> bool: + """Return true if the pathname is reserved by the system.""" + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/operator.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/operator.pyi index 12895bb..8262d9e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/operator.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/operator.pyi @@ -71,6 +71,9 @@ __all__ = [ if sys.version_info >= (3, 11): __all__ += ["call"] +if sys.version_info >= (3, 14): + __all__ += ["is_none", "is_not_none"] + __lt__ = lt __le__ = le __eq__ = eq diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/optparse.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/optparse.pyi index 49333df..51f03f9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/optparse.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/optparse.pyi @@ -25,7 +25,7 @@ Simple usage example: from _typeshed import Incomplete from abc import abstractmethod from collections.abc import Callable, Iterable, Mapping, Sequence -from typing import IO, Any, AnyStr, Literal, overload +from typing import IO, Any, AnyStr, Literal, NoReturn, overload __all__ = [ "Option", @@ -502,7 +502,7 @@ class OptionParser(OptionContainer): allow_interspersed_args. """ ... - def error(self, msg: str) -> None: + def error(self, msg: str) -> NoReturn: """ error(msg : string) @@ -511,7 +511,7 @@ class OptionParser(OptionContainer): should either exit or raise an exception. """ ... - def exit(self, status: int = 0, msg: str | None = None) -> None: ... + def exit(self, status: int = 0, msg: str | None = None) -> NoReturn: ... def expand_prog_name(self, s: str) -> str: ... def format_epilog(self, formatter: HelpFormatter) -> str: ... def format_help(self, formatter: HelpFormatter | None = None) -> str: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/os/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/os/__init__.pyi index 8bf1d34..104a3b1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/os/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/os/__init__.pyi @@ -419,7 +419,9 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo if sys.version_info >= (3, 12) and sys.platform == "win32": @property @deprecated( - "Use st_birthtime instead to retrieve the file creation time. In the future, this property will contain the last metadata change time." + """\ +Use st_birthtime instead to retrieve the file creation time. \ +In the future, this property will contain the last metadata change time.""" ) def st_ctime(self) -> float: ... else: @@ -1016,7 +1018,15 @@ def isatty(fd: int, /) -> bool: ... if sys.platform != "win32" and sys.version_info >= (3, 11): - def login_tty(fd: int, /) -> None: ... + def login_tty(fd: int, /) -> None: + """ + Prepare the tty of which fd is a file descriptor for a new login session. + + Make the calling process a session leader; make the tty the + controlling tty, the stdin, the stdout, and the stderr of the + calling process; close fd. + """ + ... if sys.version_info >= (3, 11): def lseek(fd: int, position: int, whence: int, /) -> int: @@ -1038,14 +1048,7 @@ if sys.version_info >= (3, 11): ... else: - def lseek(fd: int, position: int, how: int, /) -> int: - """ - Set the position of a file descriptor. Return the new position. - - Return the new cursor position in number of bytes - relative to the beginning of the file. - """ - ... + def lseek(fd: int, position: int, how: int, /) -> int: ... def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: """ @@ -1087,13 +1090,6 @@ if sys.version_info >= (3, 12) or sys.platform != "win32": ... 
if sys.platform != "win32": - def fchmod(fd: int, mode: int) -> None: - """ - Change the access permissions of the file given by file descriptor fd. - - Equivalent to os.chmod(fd, mode). - """ - ... def fchown(fd: int, uid: int, gid: int) -> None: """ Change the owner and group id of the file specified by file descriptor. @@ -1185,7 +1181,7 @@ if sys.platform != "win32": Write bytes to a file descriptor starting at a particular offset. Write buffer to fd, starting at offset bytes from the beginning of - the file. Returns the number of bytes writte. Does not change the + the file. Returns the number of bytes written. Does not change the current file offset. """ ... @@ -1427,7 +1423,6 @@ def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, f if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix def lchflags(path: StrOrBytesPath, flags: int) -> None: ... - def lchmod(path: StrOrBytesPath, mode: int) -> None: ... if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: @@ -2125,8 +2120,8 @@ if sys.platform != "win32": def nice(increment: int, /) -> int: """Add increment to the priority of process and return the new priority.""" ... - if sys.platform != "darwin": - def plock(op: int, /) -> None: ... # ???op is int? + if sys.platform != "darwin" and sys.platform != "linux": + def plock(op: int, /) -> None: ... class _wrap_close(_TextIOWrapper): def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... @@ -2232,7 +2227,7 @@ def times() -> times_result: The object returned behaves like a named tuple with these fields: (utime, stime, cutime, cstime, elapsed_time) - All fields are floating point numbers. + All fields are floating-point numbers. """ ... def waitpid(pid: int, options: int, /) -> tuple[int, int]: @@ -2311,7 +2306,8 @@ else: (pid, status) """ ... 
- if sys.platform != "darwin": + # Added to MacOS in 3.13 + if sys.platform != "darwin" or sys.version_info >= (3, 13): @final class waitid_result(structseq[int], tuple[int, int, int, int, int]): """ @@ -2566,11 +2562,9 @@ if sys.platform != "win32": def cpu_count() -> int | None: """ - Return the number of CPUs in the system; return None if indeterminable. + Return the number of logical CPUs in the system. - This number is not equivalent to the number of CPUs the current process can - use. The number of usable CPUs can be obtained with - ``len(os.sched_getaffinity(0))`` + Return None if indeterminable. """ ... @@ -2701,17 +2695,104 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_FILES: int CLONE_FS: int - CLONE_NEWCGROUP: int - CLONE_NEWIPC: int - CLONE_NEWNET: int + CLONE_NEWCGROUP: int # Linux 4.6+ + CLONE_NEWIPC: int # Linux 2.6.19+ + CLONE_NEWNET: int # Linux 2.6.24+ CLONE_NEWNS: int - CLONE_NEWPID: int - CLONE_NEWTIME: int - CLONE_NEWUSER: int - CLONE_NEWUTS: int + CLONE_NEWPID: int # Linux 3.8+ + CLONE_NEWTIME: int # Linux 5.6+ + CLONE_NEWUSER: int # Linux 3.8+ + CLONE_NEWUTS: int # Linux 2.6.19+ CLONE_SIGHAND: int - CLONE_SYSVSEM: int + CLONE_SYSVSEM: int # Linux 2.6.26+ CLONE_THREAD: int CLONE_VM: int def unshare(flags: int) -> None: ... def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ... + +if sys.version_info >= (3, 13) and sys.platform != "win32": + def posix_openpt(oflag: int, /) -> int: + """ + Open and return a file descriptor for a master pseudo-terminal device. + + Performs a posix_openpt() C function call. The oflag argument is used to + set file status flags and file access modes as specified in the manual page + of posix_openpt() of your system. + """ + ... + def grantpt(fd: FileDescriptorLike, /) -> None: + """ + Grant access to the slave pseudo-terminal device. + + fd + File descriptor of a master pseudo-terminal device. 
+ + Performs a grantpt() C function call. + """ + ... + def unlockpt(fd: FileDescriptorLike, /) -> None: + """ + Unlock a pseudo-terminal master/slave pair. + + fd + File descriptor of a master pseudo-terminal device. + + Performs an unlockpt() C function call. + """ + ... + def ptsname(fd: FileDescriptorLike, /) -> str: + """ + Return the name of the slave pseudo-terminal device. + + fd + File descriptor of a master pseudo-terminal device. + + If the ptsname_r() C function is available, it is called; + otherwise, performs a ptsname() C function call. + """ + ... + +if sys.version_info >= (3, 13) and sys.platform == "linux": + TFD_TIMER_ABSTIME: Final = 1 + TFD_TIMER_CANCEL_ON_SET: Final = 2 + TFD_NONBLOCK: Final[int] + TFD_CLOEXEC: Final[int] + POSIX_SPAWN_CLOSEFROM: Final[int] + + def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: ... + def timerfd_settime( + fd: FileDescriptor, /, *, flags: int = 0, initial: float = 0.0, interval: float = 0.0 + ) -> tuple[float, float]: ... + def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: ... + def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: ... + def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: ... + +if sys.version_info >= (3, 13) or sys.platform != "win32": + # Added to Windows in 3.13. + def fchmod(fd: int, mode: int) -> None: + """ + Change the access permissions of the file given by file descriptor fd. + + fd + The file descriptor of the file to be modified. + mode + Operating-system mode bitfield. + Be careful when using number literals for *mode*. The conventional UNIX notation for + numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in + Python. + + Equivalent to os.chmod(fd, mode). + """ + ... + +if sys.platform != "linux": + if sys.version_info >= (3, 13) or sys.platform != "win32": + # Added to Windows in 3.13. 
+ def lchmod(path: StrOrBytesPath, mode: int) -> None: + """ + Change the access permissions of a file, without following symbolic links. + + If path is a symlink, this affects the link itself rather than the target. + Equivalent to chmod(path, mode, follow_symlinks=False)." + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pathlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pathlib.pyi index e96dd71..3eda15a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pathlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pathlib.pyi @@ -46,7 +46,12 @@ class PurePath(PathLike[str]): """ if sys.version_info >= (3, 13): parser: ClassVar[types.ModuleType] - def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: ... + def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: + """ + Return True if this path matches the given glob-style pattern. The + pattern is matched against the entire path. + """ + ... @property def parts(self) -> tuple[str, ...]: @@ -100,16 +105,9 @@ class PurePath(PathLike[str]): new PurePath object. """ ... - def __init__(self, *args: StrPath) -> None: ... + def __init__(self, *args: StrPath) -> None: ... # pyright: ignore[reportInconsistentConstructor] else: - def __new__(cls, *args: StrPath) -> Self: - """ - Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. - """ - ... + def __new__(cls, *args: StrPath) -> Self: ... def __hash__(self) -> int: ... def __fspath__(self) -> str: ... @@ -132,7 +130,7 @@ class PurePath(PathLike[str]): """ ... def as_uri(self) -> str: - """Return the path as a 'file' URI.""" + """Return the path as a URI.""" ... 
def is_absolute(self) -> bool: """ @@ -154,21 +152,19 @@ class PurePath(PathLike[str]): """ ... elif sys.version_info >= (3, 9): - def is_relative_to(self, *other: StrPath) -> bool: - """ - Return True if the path is relative to another path or False. - - """ - ... + def is_relative_to(self, *other: StrPath) -> bool: ... if sys.version_info >= (3, 12): def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: - """Return True if this path matches the given pattern.""" + """ + Return True if this path matches the given pattern. If the pattern is + relative, matching is done from the right; otherwise, the entire path + is matched. The recursive wildcard '**' is *not* supported by this + method. + """ ... else: - def match(self, path_pattern: str) -> bool: - """Return True if this path matches the given pattern.""" - ... + def match(self, path_pattern: str) -> bool: ... if sys.version_info >= (3, 12): def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: @@ -182,13 +178,7 @@ class PurePath(PathLike[str]): """ ... else: - def relative_to(self, *other: StrPath) -> Self: - """ - Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. - """ - ... + def relative_to(self, *other: StrPath) -> Self: ... def with_name(self, name: str) -> Self: """Return a new path with the file name changed.""" @@ -260,7 +250,11 @@ class Path(PurePath): object. You can also instantiate a PosixPath or WindowsPath directly, but cannot instantiate a WindowsPath on a POSIX system or vice versa. """ - def __new__(cls, *args: StrPath, **kwargs: Any) -> Self: ... + if sys.version_info >= (3, 12): + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... # pyright: ignore[reportInconsistentConstructor] + else: + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... 
+ @classmethod def cwd(cls) -> Self: """Return a new path pointing to the current working directory.""" @@ -281,61 +275,52 @@ class Path(PurePath): if sys.version_info >= (3, 13): @classmethod - def from_uri(cls, uri: str) -> Path: ... - def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... - def is_file(self, *, follow_symlinks: bool = True) -> bool: ... - def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ... - else: - def __enter__(self) -> Self: ... - def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def is_dir(self) -> bool: + def from_uri(cls, uri: str) -> Self: + """Return a new path from the given 'file' URI.""" + ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: """Whether this path is a directory.""" ... - def is_file(self) -> bool: + def is_file(self, *, follow_symlinks: bool = True) -> bool: """ Whether this path is a regular file (also True for symlinks pointing to regular files). """ ... - def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: """Open the file in text mode, read it, and close the file.""" ... + else: + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... if sys.version_info >= (3, 13): def glob( self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Generator[Self, None, None]: ... - def rglob( - self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Generator[Self, None, None]: ... 
- elif sys.version_info >= (3, 12): - def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: + ) -> Generator[Self, None, None]: """ Iterate over this subtree and yield all existing files (of any kind, including directories) matching the given relative pattern. """ ... - def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False + ) -> Generator[Self, None, None]: """ Recursively yield all existing files (of any kind, including directories) matching the given relative pattern, anywhere in this subtree. """ ... + elif sys.version_info >= (3, 12): + def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... else: - def glob(self, pattern: str) -> Generator[Self, None, None]: - """ - Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ - ... - def rglob(self, pattern: str) -> Generator[Self, None, None]: - """ - Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - ... + def glob(self, pattern: str) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... if sys.version_info >= (3, 12): def exists(self, *, follow_symlinks: bool = True) -> bool: @@ -347,9 +332,7 @@ class Path(PurePath): """ ... else: - def exists(self) -> bool: - """Whether this path exists.""" - ... + def exists(self) -> bool: ... 
def is_symlink(self) -> bool: """Whether this path is a symbolic link.""" @@ -394,6 +377,20 @@ class Path(PurePath): def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: """Create a new directory at this given path.""" ... + + if sys.version_info >= (3, 14): + def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> None: ... + def copytree( + self, + target: StrPath, + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + dirs_exist_ok: bool = False, + ignore: Callable[[Self], bool] | None = None, + on_error: Callable[[OSError], object] | None = None, + ) -> None: ... + # Adapted from builtins.open # Text mode: always returns a TextIOWrapper # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. @@ -488,15 +485,15 @@ class Path(PurePath): # These methods do "exist" on Windows, but they always raise NotImplementedError, # so it's safer to pretend they don't exist if sys.version_info >= (3, 13): - def owner(self, *, follow_symlinks: bool = True) -> str: ... - def group(self, *, follow_symlinks: bool = True) -> str: ... - else: - def owner(self) -> str: + def owner(self, *, follow_symlinks: bool = True) -> str: """Return the login name of the file owner.""" ... - def group(self) -> str: + def group(self, *, follow_symlinks: bool = True) -> str: """Return the group name of the file gid.""" ... + else: + def owner(self) -> str: ... + def group(self) -> str: ... # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms @@ -510,28 +507,33 @@ class Path(PurePath): """Return the path to which the symbolic link points.""" ... - def rename(self, target: str | PurePath) -> Self: - """ - Rename this path to the target path. + if sys.version_info >= (3, 10): + def rename(self, target: StrPath) -> Self: + """ + Rename this path to the target path. 
- The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. - Returns the new Path instance pointing to the target path. - """ - ... - def replace(self, target: str | PurePath) -> Self: - """ - Rename this path to the target path, overwriting if that path exists. + Returns the new Path instance pointing to the target path. + """ + ... + def replace(self, target: StrPath) -> Self: + """ + Rename this path to the target path, overwriting if that path exists. - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. + """ + ... + else: + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... - Returns the new Path instance pointing to the target path. - """ - ... def resolve(self, strict: bool = False) -> Self: """ Make the path absolute, resolving all symlinks on the way and also @@ -541,6 +543,9 @@ class Path(PurePath): def rmdir(self) -> None: """Remove this directory. The directory must be empty.""" ... + if sys.version_info >= (3, 14): + def delete(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ... + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: """ Make this path a symlink pointing to the target path. 
@@ -568,16 +573,16 @@ class Path(PurePath): @classmethod def home(cls) -> Self: """ - Return a new path pointing to the user's home directory (as - returned by os.path.expanduser('~')). + Return a new path pointing to expanduser('~'). + """ ... def absolute(self) -> Self: """ - Return an absolute version of this path by prepending the current - working directory. No normalization or symlink resolution is performed. + Return an absolute version of this path + No normalization or symlink resolution is performed. - Use resolve() to get the canonical path to a file. + Use resolve() to resolve symlinks and remove '..' segments. """ ... def expanduser(self) -> Self: @@ -609,19 +614,7 @@ class Path(PurePath): if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated as of Python 3.10 and removed in Python 3.12. Use hardlink_to() instead.") - def link_to(self, target: StrOrBytesPath) -> None: - """ - Make the target path a hard link pointing to this path. - - Note this function does not make this path a hard link to *target*, - despite the implication of the function and argument names. The order - of arguments (target, link) is the reverse of Path.symlink_to, but - matches that of os.link. - - Deprecated since Python 3.10 and scheduled for removal in Python 3.12. - Use `hardlink_to()` instead. - """ - ... + def link_to(self, target: StrOrBytesPath) -> None: ... else: def link_to(self, target: StrOrBytesPath) -> None: ... if sys.version_info >= (3, 12): @@ -631,6 +624,9 @@ class Path(PurePath): """Walk the directory tree from this directory, similar to os.walk().""" ... + if sys.version_info >= (3, 14): + def rmtree(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ... + class PosixPath(Path, PurePosixPath): """ Path subclass for non-Windows systems. @@ -647,4 +643,9 @@ class WindowsPath(Path, PureWindowsPath): ... 
if sys.version_info >= (3, 13): - class UnsupportedOperation(NotImplementedError): ... + class UnsupportedOperation(NotImplementedError): + """ + An exception that is raised when an unsupported operation is called on + a path object. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pdb.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pdb.pyi index 8f6f7aa..6c90a2a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pdb.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pdb.pyi @@ -63,275 +63,274 @@ Debugger commands h(elp) - Without argument, print the list of available commands. - With a command name as argument, print help about that command. - "help pdb" shows the full pdb documentation. - "help exec" gives help on the ! command. +Without argument, print the list of available commands. +With a command name as argument, print help about that command. +"help pdb" shows the full pdb documentation. +"help exec" gives help on the ! command. w(here) - Print a stack trace, with the most recent frame at the bottom. - An arrow indicates the "current frame", which determines the - context of most commands. 'bt' is an alias for this command. +Print a stack trace, with the most recent frame at the bottom. +An arrow indicates the "current frame", which determines the +context of most commands. 'bt' is an alias for this command. d(own) [count] - Move the current frame count (default one) levels down in the - stack trace (to a newer frame). +Move the current frame count (default one) levels down in the +stack trace (to a newer frame). u(p) [count] - Move the current frame count (default one) levels up in the - stack trace (to an older frame). +Move the current frame count (default one) levels up in the +stack trace (to an older frame). b(reak) [ ([filename:]lineno | function) [, condition] ] - Without argument, list all breaks. +Without argument, list all breaks. 
- With a line number argument, set a break at this line in the - current file. With a function name, set a break at the first - executable line of that function. If a second argument is - present, it is a string specifying an expression which must - evaluate to true before the breakpoint is honored. +With a line number argument, set a break at this line in the +current file. With a function name, set a break at the first +executable line of that function. If a second argument is +present, it is a string specifying an expression which must +evaluate to true before the breakpoint is honored. - The line number may be prefixed with a filename and a colon, - to specify a breakpoint in another file (probably one that - hasn't been loaded yet). The file is searched for on - sys.path; the .py suffix may be omitted. +The line number may be prefixed with a filename and a colon, +to specify a breakpoint in another file (probably one that +hasn't been loaded yet). The file is searched for on +sys.path; the .py suffix may be omitted. tbreak [ ([filename:]lineno | function) [, condition] ] - Same arguments as break, but sets a temporary breakpoint: it - is automatically deleted when first hit. +Same arguments as break, but sets a temporary breakpoint: it +is automatically deleted when first hit. cl(ear) [filename:lineno | bpnumber ...] - With a space separated list of breakpoint numbers, clear - those breakpoints. Without argument, clear all breaks (but - first ask confirmation). With a filename:lineno argument, - clear all breaks at that line in that file. +With a space separated list of breakpoint numbers, clear +those breakpoints. Without argument, clear all breaks (but +first ask confirmation). With a filename:lineno argument, +clear all breaks at that line in that file. disable bpnumber [bpnumber ...] - Disables the breakpoints given as a space separated list of - breakpoint numbers. 
Disabling a breakpoint means it cannot - cause the program to stop execution, but unlike clearing a - breakpoint, it remains in the list of breakpoints and can be - (re-)enabled. +Disables the breakpoints given as a space separated list of +breakpoint numbers. Disabling a breakpoint means it cannot +cause the program to stop execution, but unlike clearing a +breakpoint, it remains in the list of breakpoints and can be +(re-)enabled. enable bpnumber [bpnumber ...] - Enables the breakpoints given as a space separated list of - breakpoint numbers. +Enables the breakpoints given as a space separated list of +breakpoint numbers. ignore bpnumber [count] - Set the ignore count for the given breakpoint number. If - count is omitted, the ignore count is set to 0. A breakpoint - becomes active when the ignore count is zero. When non-zero, - the count is decremented each time the breakpoint is reached - and the breakpoint is not disabled and any associated - condition evaluates to true. +Set the ignore count for the given breakpoint number. If +count is omitted, the ignore count is set to 0. A breakpoint +becomes active when the ignore count is zero. When non-zero, +the count is decremented each time the breakpoint is reached +and the breakpoint is not disabled and any associated +condition evaluates to true. condition bpnumber [condition] - Set a new condition for the breakpoint, an expression which - must evaluate to true before the breakpoint is honored. If - condition is absent, any existing condition is removed; i.e., - the breakpoint is made unconditional. +Set a new condition for the breakpoint, an expression which +must evaluate to true before the breakpoint is honored. If +condition is absent, any existing condition is removed; i.e., +the breakpoint is made unconditional. (Pdb) commands [bpnumber] - (com) ... - (com) end - (Pdb) - - Specify a list of commands for breakpoint number bpnumber. - The commands themselves are entered on the following lines. 
- Type a line containing just 'end' to terminate the commands. - The commands are executed when the breakpoint is hit. - - To remove all commands from a breakpoint, type commands and - follow it immediately with end; that is, give no commands. - - With no bpnumber argument, commands refers to the last - breakpoint set. - - You can use breakpoint commands to start your program up - again. Simply use the continue command, or step, or any other - command that resumes execution. - - Specifying any command resuming execution (currently continue, - step, next, return, jump, quit and their abbreviations) - terminates the command list (as if that command was - immediately followed by end). This is because any time you - resume execution (even with a simple next or step), you may - encounter another breakpoint -- which could have its own - command list, leading to ambiguities about which list to - execute. - - If you use the 'silent' command in the command list, the usual - message about stopping at a breakpoint is not printed. This - may be desirable for breakpoints that are to print a specific - message and then continue. If none of the other commands - print anything, you will see no sign that the breakpoint was - reached. +(com) ... +(com) end +(Pdb) + +Specify a list of commands for breakpoint number bpnumber. +The commands themselves are entered on the following lines. +Type a line containing just 'end' to terminate the commands. +The commands are executed when the breakpoint is hit. + +To remove all commands from a breakpoint, type commands and +follow it immediately with end; that is, give no commands. + +With no bpnumber argument, commands refers to the last +breakpoint set. + +You can use breakpoint commands to start your program up +again. Simply use the continue command, or step, or any other +command that resumes execution. 
+ +Specifying any command resuming execution (currently continue, +step, next, return, jump, quit and their abbreviations) +terminates the command list (as if that command was +immediately followed by end). This is because any time you +resume execution (even with a simple next or step), you may +encounter another breakpoint -- which could have its own +command list, leading to ambiguities about which list to +execute. + +If you use the 'silent' command in the command list, the usual +message about stopping at a breakpoint is not printed. This +may be desirable for breakpoints that are to print a specific +message and then continue. If none of the other commands +print anything, you will see no sign that the breakpoint was +reached. s(tep) - Execute the current line, stop at the first possible occasion - (either in a function that is called or in the current - function). +Execute the current line, stop at the first possible occasion +(either in a function that is called or in the current +function). n(ext) - Continue execution until the next line in the current function - is reached or it returns. +Continue execution until the next line in the current function +is reached or it returns. unt(il) [lineno] - Without argument, continue execution until the line with a - number greater than the current one is reached. With a line - number, continue execution until a line with a number greater - or equal to that is reached. In both cases, also stop when - the current frame returns. +Without argument, continue execution until the line with a +number greater than the current one is reached. With a line +number, continue execution until a line with a number greater +or equal to that is reached. In both cases, also stop when +the current frame returns. j(ump) lineno - Set the next line that will be executed. Only available in - the bottom-most frame. This lets you jump back and execute - code again, or jump forward to skip code that you don't want - to run. 
+Set the next line that will be executed. Only available in +the bottom-most frame. This lets you jump back and execute +code again, or jump forward to skip code that you don't want +to run. - It should be noted that not all jumps are allowed -- for - instance it is not possible to jump into the middle of a - for loop or out of a finally clause. +It should be noted that not all jumps are allowed -- for +instance it is not possible to jump into the middle of a +for loop or out of a finally clause. r(eturn) - Continue execution until the current function returns. +Continue execution until the current function returns. retval - Print the return value for the last return of a function. +Print the return value for the last return of a function. run [args...] - Restart the debugged python program. If a string is supplied - it is split with "shlex", and the result is used as the new - sys.argv. History, breakpoints, actions and debugger options - are preserved. "restart" is an alias for "run". +Restart the debugged python program. If a string is supplied +it is split with "shlex", and the result is used as the new +sys.argv. History, breakpoints, actions and debugger options +are preserved. "restart" is an alias for "run". c(ont(inue)) - Continue execution, only stop when a breakpoint is encountered. +Continue execution, only stop when a breakpoint is encountered. l(ist) [first[, last] | .] - List source code for the current file. Without arguments, - list 11 lines around the current line or continue the previous - listing. With . as argument, list 11 lines around the current - line. With one argument, list 11 lines starting at that line. - With two arguments, list the given range; if the second - argument is less than the first, it is a count. +List source code for the current file. Without arguments, +list 11 lines around the current line or continue the previous +listing. With . as argument, list 11 lines around the current +line. 
With one argument, list 11 lines starting at that line. +With two arguments, list the given range; if the second +argument is less than the first, it is a count. - The current line in the current frame is indicated by "->". - If an exception is being debugged, the line where the - exception was originally raised or propagated is indicated by - ">>", if it differs from the current line. +The current line in the current frame is indicated by "->". +If an exception is being debugged, the line where the +exception was originally raised or propagated is indicated by +">>", if it differs from the current line. ll | longlist - List the whole source code for the current function or frame. +List the whole source code for the current function or frame. a(rgs) - Print the argument list of the current function. +Print the argument list of the current function. p expression - Print the value of the expression. +Print the value of the expression. pp expression - Pretty-print the value of the expression. +Pretty-print the value of the expression. whatis expression - Print the type of the argument. +Print the type of the argument. source expression - Try to get source code for the given object and display it. +Try to get source code for the given object and display it. display [expression] - Display the value of the expression if it changed, each time execution - stops in the current frame. +Display the value of the expression if it changed, each time execution +stops in the current frame. - Without expression, list all display expressions for the current frame. +Without expression, list all display expressions for the current frame. undisplay [expression] - Do not display the expression any more in the current frame. +Do not display the expression any more in the current frame. - Without expression, clear all display expressions for the current frame. +Without expression, clear all display expressions for the current frame. 
interact - Start an interactive interpreter whose global namespace - contains all the (global and local) names found in the current scope. +Start an interactive interpreter whose global namespace +contains all the (global and local) names found in the current scope. alias [name [command]] - Create an alias called 'name' that executes 'command'. The - command must *not* be enclosed in quotes. Replaceable - parameters can be indicated by %1, %2, and so on, while %* is - replaced by all the parameters. If no command is given, the - current alias for name is shown. If no name is given, all - aliases are listed. +Create an alias called 'name' that executes 'command'. The +command must *not* be enclosed in quotes. Replaceable +parameters can be indicated by %1, %2, and so on, while %* is +replaced by all the parameters. If no command is given, the +current alias for name is shown. If no name is given, all +aliases are listed. - Aliases may be nested and can contain anything that can be - legally typed at the pdb prompt. Note! You *can* override - internal pdb commands with aliases! Those internal commands - are then hidden until the alias is removed. Aliasing is - recursively applied to the first word of the command line; all - other words in the line are left alone. +Aliases may be nested and can contain anything that can be +legally typed at the pdb prompt. Note! You *can* override +internal pdb commands with aliases! Those internal commands +are then hidden until the alias is removed. Aliasing is +recursively applied to the first word of the command line; all +other words in the line are left alone. 
- As an example, here are two useful aliases (especially when - placed in the .pdbrc file): +As an example, here are two useful aliases (especially when +placed in the .pdbrc file): - # Print instance variables (usage "pi classInst") - alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) - # Print instance variables in self - alias ps pi self +# Print instance variables (usage "pi classInst") +alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) +# Print instance variables in self +alias ps pi self unalias name - Delete the specified alias. +Delete the specified alias. debug code - Enter a recursive debugger that steps through the code - argument (which is an arbitrary expression or statement to be - executed in the current environment). +Enter a recursive debugger that steps through the code +argument (which is an arbitrary expression or statement to be +executed in the current environment). q(uit) | exit - Quit from the debugger. The program being executed is aborted. +Quit from the debugger. The program being executed is aborted. (!) statement - Execute the (one-line) statement in the context of the current - stack frame. The exclamation point can be omitted unless the - first word of the statement resembles a debugger command, e.g.: - (Pdb) ! n=42 - (Pdb) +Execute the (one-line) statement in the context of the current +stack frame. The exclamation point can be omitted unless the +first word of the statement resembles a debugger command, e.g.: +(Pdb) ! 
n=42 +(Pdb) - To assign to a global variable you must always prefix the command with - a 'global' command, e.g.: - (Pdb) global list_options; list_options = ['-l'] - (Pdb) - +To assign to a global variable you must always prefix the command with +a 'global' command, e.g.: +(Pdb) global list_options; list_options = ['-l'] +(Pdb) """ import signal @@ -341,7 +340,7 @@ from cmd import Cmd from collections.abc import Callable, Iterable, Mapping, Sequence from inspect import _SourceObjectType from types import CodeType, FrameType, TracebackType -from typing import IO, Any, ClassVar, TypeVar +from typing import IO, Any, ClassVar, Final, TypeVar from typing_extensions import ParamSpec, Self __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] @@ -402,15 +401,19 @@ def set_trace(*, header: str | None = None) -> None: ... def post_mortem(t: TracebackType | None = None) -> None: """ - Enter post-mortem debugging of the given *traceback* object. + Enter post-mortem debugging of the given *traceback*, or *exception* + object. If no traceback is given, it uses the one of the exception that is currently being handled (an exception must be being handled if the default is to be used). + + If `t` is an exception object, the `exceptions` command makes it possible to + list and inspect its chained exceptions (if any). """ ... def pm() -> None: - """Enter post-mortem debugging of the traceback found in sys.last_traceback.""" + """Enter post-mortem debugging of the traceback found in sys.last_exc.""" ... class Pdb(Bdb, Cmd): @@ -418,6 +421,9 @@ class Pdb(Bdb, Cmd): commands_resuming: ClassVar[list[str]] + if sys.version_info >= (3, 13): + MAX_CHAINED_EXCEPTION_DEPTH: Final = 999 + aliases: dict[str, str] mainpyfile: str _wait_for_mainpyfile: bool @@ -446,6 +452,9 @@ class Pdb(Bdb, Cmd): if sys.version_info < (3, 11): def execRcLines(self) -> None: ... 
+ if sys.version_info >= (3, 13): + user_opcode = Bdb.user_line + def bp_commands(self, frame: FrameType) -> bool: """ Call every command that was set for the current active breakpoint @@ -455,7 +464,12 @@ class Pdb(Bdb, Cmd): False otherwise. """ ... - def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... + + if sys.version_info >= (3, 13): + def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ... + else: + def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... + def displayhook(self, obj: object) -> None: """ Custom displayhook for the exec in default(), which prevents @@ -486,11 +500,21 @@ class Pdb(Bdb, Cmd): lookupmodule() translates (possibly incomplete) file or module name into an absolute file name. + + filename could be in format of: + * an absolute path like '/path/to/file.py' + * a relative path like 'file.py' or 'dir/file.py' + * a module name like 'module' or 'package.module' + + files and modules will be searched in sys.path. """ ... if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... + if sys.version_info >= (3, 13): + def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def do_commands(self, arg: str) -> bool | None: """ (Pdb) commands [bpnumber] @@ -616,6 +640,21 @@ class Pdb(Bdb, Cmd): context of most commands. 'bt' is an alias for this command. """ ... + if sys.version_info >= (3, 13): + def do_exceptions(self, arg: str) -> bool | None: + """ + exceptions [number] + + List or change current exception in an exception chain. + + Without arguments, list all the current exception in the exception + chain. Exceptions will be numbered, with the current exception indicated + with an arrow. + + If given an integer as argument, switch to the exception at that index. + """ + ... 
+ def do_up(self, arg: str) -> bool | None: """ u(p) [count] @@ -855,8 +894,14 @@ class Pdb(Bdb, Cmd): ... def help_pdb(self) -> None: ... def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... - def message(self, msg: str) -> None: ... + if sys.version_info >= (3, 13): + def message(self, msg: str, end: str = "\n") -> None: ... + else: + def message(self, msg: str) -> None: ... + def error(self, msg: str) -> None: ... + if sys.version_info >= (3, 13): + def completenames(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... # type: ignore[override] if sys.version_info >= (3, 12): def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pipes.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pipes.pyi index 33254a7..fe680bf 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pipes.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pipes.pyi @@ -1,96 +1,16 @@ -""" -Conversion pipeline templates. - -The problem: ------------- - -Suppose you have some data that you want to convert to another format, -such as from GIF image format to PPM image format. Maybe the -conversion involves several steps (e.g. piping it through compress or -uuencode). Some of the conversion steps may require that their input -is a disk file, others may be able to read standard input; similar for -their output. The input to the entire conversion may also be read -from a disk file or from an open file, and similar for its output. - -The module lets you construct a pipeline template by sticking one or -more conversion steps together. It will take care of creating and -removing temporary files if they are necessary to hold intermediate -data. You can then use the template to do conversions from many -different sources to many different destinations. 
The temporary -file names used are different each time the template is used. - -The templates are objects so you can create templates for many -different conversion steps and store them in a dictionary, for -instance. - - -Directions: ------------ - -To create a template: - t = Template() - -To add a conversion step to a template: - t.append(command, kind) -where kind is a string of two characters: the first is '-' if the -command reads its standard input or 'f' if it requires a file; the -second likewise for the output. The command must be valid /bin/sh -syntax. If input or output files are required, they are passed as -$IN and $OUT; otherwise, it must be possible to use the command in -a pipeline. - -To add a conversion step at the beginning: - t.prepend(command, kind) - -To convert a file to another file using a template: - sts = t.copy(infile, outfile) -If infile or outfile are the empty string, standard input is read or -standard output is written, respectively. The return value is the -exit status of the conversion pipeline. - -To open a file for reading or writing through a conversion pipeline: - fp = t.open(file, mode) -where mode is 'r' to read the file, or 'w' to write it -- just like -for the built-in function open() or for os.popen(). - -To create a new template object initialized to a given one: - t2 = t.clone() -""" - import os __all__ = ["Template"] class Template: - """Class representing a pipeline template.""" - def reset(self) -> None: - """t.reset() restores a pipeline template to its initial state.""" - ... - def clone(self) -> Template: - """ - t.clone() returns a new pipeline template with identical - initial state as the current one. - """ - ... - def debug(self, flag: bool) -> None: - """t.debug(flag) turns debugging on or off.""" - ... - def append(self, cmd: str, kind: str) -> None: - """t.append(cmd, kind) adds a new step at the end.""" - ... 
- def prepend(self, cmd: str, kind: str) -> None: - """t.prepend(cmd, kind) adds a new step at the front.""" - ... - def open(self, file: str, rw: str) -> os._wrap_close: - """ - t.open(file, rw) returns a pipe or file object open for - reading or writing; the file is the other end of the pipeline. - """ - ... + def reset(self) -> None: ... + def clone(self) -> Template: ... + def debug(self, flag: bool) -> None: ... + def append(self, cmd: str, kind: str) -> None: ... + def prepend(self, cmd: str, kind: str) -> None: ... + def open(self, file: str, rw: str) -> os._wrap_close: ... def copy(self, infile: str, outfile: str) -> int: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. -def quote(s: str) -> str: - """Return a shell-escaped version of the string *s*.""" - ... +def quote(s: str) -> str: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pkgutil.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pkgutil.pyi index 8ab2faa..d44b2ce 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pkgutil.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pkgutil.pyi @@ -67,23 +67,9 @@ def extend_path(path: _PathT, name: str) -> _PathT: if sys.version_info < (3, 12): class ImpImporter: - """ - PEP 302 Finder that wraps Python's "classic" import algorithm - - ImpImporter(dirname) produces a PEP 302 finder that searches that - directory. ImpImporter(None) produces a PEP 302 finder that searches - the current sys.path, plus any modules that are frozen or built-in. - - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. - """ def __init__(self, path: str | None = None) -> None: ... class ImpLoader: - """ - PEP 302 Loader that wraps Python's "classic" import algorithm - - """ def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... 
@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.") diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/platform.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/platform.pyi index ca6fe12..4c8f186 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/platform.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/platform.pyi @@ -248,6 +248,7 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 13): class AndroidVer(NamedTuple): + """AndroidVer(release, api_level, manufacturer, model, device, is_emulator)""" release: str api_level: int manufacturer: str @@ -256,6 +257,7 @@ if sys.version_info >= (3, 13): is_emulator: bool class IOSVersionInfo(NamedTuple): + """IOSVersionInfo(system, release, model, is_simulator)""" system: str release: str model: str @@ -269,4 +271,12 @@ if sys.version_info >= (3, 13): device: str = "", is_emulator: bool = False, ) -> AndroidVer: ... - def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... + def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: + """ + Get iOS version information, and return it as a namedtuple: + (system, release, model, is_simulator). + + If values can't be determined, they are set to values provided as + parameters. + """ + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/plistlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/plistlib.pyi index 26ae86f..20e5e27 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/plistlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/plistlib.pyi @@ -69,7 +69,6 @@ if sys.version_info < (3, 9): __all__ += ["readPlist", "writePlist", "readPlistFromBytes", "writePlistToBytes", "Data"] class PlistFormat(Enum): - """An enumeration.""" FMT_XML = 1 FMT_BINARY = 2 @@ -82,24 +81,18 @@ if sys.version_info >= (3, 13): fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ..., aware_datetime: bool = False, - ) -> Any: ... - def loads( - value: ReadableBuffer | str, - *, - fmt: PlistFormat | None = None, - dict_type: type[MutableMapping[str, Any]] = ..., - aware_datetime: bool = False, - ) -> Any: ... - -elif sys.version_info >= (3, 9): - def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: + ) -> Any: """ Read a .plist file. 'fp' should be a readable and binary file object. Return the unpacked root object (which usually is a dictionary). """ ... def loads( - value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... + value: ReadableBuffer | str, + *, + fmt: PlistFormat | None = None, + dict_type: type[MutableMapping[str, Any]] = ..., + aware_datetime: bool = False, ) -> Any: """ Read a .plist file from a bytes object. @@ -107,6 +100,12 @@ elif sys.version_info >= (3, 9): """ ... +elif sys.version_info >= (3, 9): + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... 
+ else: def load( fp: IO[bytes], @@ -132,7 +131,12 @@ if sys.version_info >= (3, 13): sort_keys: bool = True, skipkeys: bool = False, aware_datetime: bool = False, - ) -> None: ... + ) -> None: + """ + Write 'value' to a .plist file. 'fp' should be a writable, + binary file object. + """ + ... def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, @@ -140,7 +144,12 @@ if sys.version_info >= (3, 13): skipkeys: bool = False, sort_keys: bool = True, aware_datetime: bool = False, - ) -> bytes: ... + ) -> bytes: + """ + Return a bytes object with the contents for a .plist file. + + """ + ... else: def dump( @@ -150,24 +159,14 @@ else: fmt: PlistFormat = ..., sort_keys: bool = True, skipkeys: bool = False, - ) -> None: - """ - Write 'value' to a .plist file. 'fp' should be a writable, - binary file object. - """ - ... + ) -> None: ... def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., skipkeys: bool = False, sort_keys: bool = True, - ) -> bytes: - """ - Return a bytes object with the contents for a .plist file. - - """ - ... + ) -> bytes: ... if sys.version_info < (3, 9): def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/poplib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/poplib.pyi index dca4fae..cc83a83 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/poplib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/poplib.pyi @@ -9,7 +9,7 @@ import ssl import sys from builtins import list as _list # conflicts with a method named "list" from re import Pattern -from typing import Any, BinaryIO, Literal, NoReturn, overload +from typing import Any, BinaryIO, Final, NoReturn, overload from typing_extensions import TypeAlias __all__ = ["POP3", "error_proto", "POP3_SSL"] @@ -18,11 +18,11 @@ _LongResp: TypeAlias = tuple[bytes, list[bytes], int] class error_proto(Exception): ... -POP3_PORT: Literal[110] -POP3_SSL_PORT: Literal[995] -CR: Literal[b"\r"] -LF: Literal[b"\n"] -CRLF: Literal[b"\r\n"] +POP3_PORT: Final = 110 +POP3_SSL_PORT: Final = 995 +CR: Final = b"\r" +LF: Final = b"\n" +CRLF: Final = b"\r\n" HAVE_SSL: bool class POP3: @@ -249,11 +249,6 @@ class POP3_SSL(POP3): timeout: float = ..., context: ssl.SSLContext | None = None, ) -> None: ... - # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: - """ - The method unconditionally raises an exception since the - STLS command doesn't make any sense on an already established - SSL/TLS session. - """ - ... + # "context" is actually the last argument, + # but that breaks LSP and it doesn't really matter because all the arguments are ignored + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posix.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posix.pyi index 7621043..dfff040 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posix.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posix.pyi @@ -243,6 +243,23 @@ if sys.platform != "win32": if sys.version_info >= (3, 11): from os import login_tty as login_tty + if sys.version_info >= (3, 13): + from os import grantpt as grantpt, posix_openpt as posix_openpt, ptsname as ptsname, unlockpt as unlockpt + + if sys.version_info >= (3, 13) and sys.platform == "linux": + from os import ( + POSIX_SPAWN_CLOSEFROM as POSIX_SPAWN_CLOSEFROM, + TFD_CLOEXEC as TFD_CLOEXEC, + TFD_NONBLOCK as TFD_NONBLOCK, + TFD_TIMER_ABSTIME as TFD_TIMER_ABSTIME, + TFD_TIMER_CANCEL_ON_SET as TFD_TIMER_CANCEL_ON_SET, + timerfd_create as timerfd_create, + timerfd_gettime as timerfd_gettime, + timerfd_gettime_ns as timerfd_gettime_ns, + timerfd_settime as timerfd_settime, + timerfd_settime_ns as timerfd_settime_ns, + ) + if sys.platform != "linux": from os import chflags as chflags, lchflags as lchflags, lchmod as lchmod @@ -276,13 +293,14 @@ if sys.platform != "win32": sched_setscheduler as sched_setscheduler, setresgid as setresgid, setresuid as setresuid, - waitid as waitid, - waitid_result as waitid_result, ) if sys.version_info >= (3, 10): from os import RWF_APPEND as RWF_APPEND + if sys.platform != "darwin" or sys.version_info >= (3, 13): + from os import waitid as waitid, waitid_result as waitid_result + if sys.platform == "linux": from os import ( GRND_NONBLOCK as GRND_NONBLOCK, diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posixpath.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posixpath.pyi index 621127e..0f18df0 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posixpath.pyi +++ 
b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/posixpath.pyi @@ -90,13 +90,7 @@ pathsep: LiteralString defpath: LiteralString devnull: LiteralString -# Overloads are necessary to work around python/mypy#3644. -@overload -def abspath(path: PathLike[AnyStr]) -> AnyStr: - """Return an absolute path.""" - ... -@overload -def abspath(path: AnyStr) -> AnyStr: +def abspath(path: PathLike[AnyStr] | AnyStr) -> AnyStr: """Return an absolute path.""" ... @overload @@ -115,29 +109,13 @@ def dirname(p: PathLike[AnyStr]) -> AnyStr: def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: """Returns the directory component of a pathname""" ... -@overload -def expanduser(path: PathLike[AnyStr]) -> AnyStr: - """ - Expand ~ and ~user constructions. If user or $HOME is unknown, - do nothing. - """ - ... -@overload -def expanduser(path: AnyStr) -> AnyStr: +def expanduser(path: PathLike[AnyStr] | AnyStr) -> AnyStr: """ Expand ~ and ~user constructions. If user or $HOME is unknown, do nothing. """ ... -@overload -def expandvars(path: PathLike[AnyStr]) -> AnyStr: - """ - Expand shell variables of form $var and ${var}. Unknown variables - are left unchanged. - """ - ... -@overload -def expandvars(path: AnyStr) -> AnyStr: +def expandvars(path: PathLike[AnyStr] | AnyStr) -> AnyStr: """ Expand shell variables of form $var and ${var}. Unknown variables are left unchanged. @@ -300,32 +278,22 @@ if sys.version_info >= (3, 12): def isjunction(path: StrOrBytesPath) -> bool: """ Test whether a path is a junction - Junctions are not a part of posix semantics + Junctions are not supported on the current platform """ ... @overload def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: """ - Split a pathname into drive, root and tail. On Posix, drive is always - empty; the root may be empty, a single slash, or two slashes. The tail - contains anything after the root. For example: + Split a pathname into drive, root and tail. 
- splitroot('foo/bar') == ('', '', 'foo/bar') - splitroot('/foo/bar') == ('', '/', 'foo/bar') - splitroot('//foo/bar') == ('', '//', 'foo/bar') - splitroot('///foo/bar') == ('', '/', '//foo/bar') + The tail contains anything after the root. """ ... @overload def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: """ - Split a pathname into drive, root and tail. On Posix, drive is always - empty; the root may be empty, a single slash, or two slashes. The tail - contains anything after the root. For example: + Split a pathname into drive, root and tail. - splitroot('foo/bar') == ('', '', 'foo/bar') - splitroot('/foo/bar') == ('', '/', 'foo/bar') - splitroot('//foo/bar') == ('', '//', 'foo/bar') - splitroot('///foo/bar') == ('', '/', '//foo/bar') + The tail contains anything after the root. """ ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/profile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/profile.pyi index 4fe42a2..038b83b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/profile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/profile.pyi @@ -1,7 +1,7 @@ """Class for profiling Python code.""" from _typeshed import StrOrBytesPath -from collections.abc import Callable +from collections.abc import Callable, Mapping from typing import Any, TypeVar from typing_extensions import ParamSpec, Self, TypeAlias @@ -21,7 +21,7 @@ def run(statement: str, filename: str | None = None, sort: str | int = -1) -> No """ ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 + statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: """ Run statement under profiler, supplying your own globals and locals, @@ -85,6 +85,6 @@ class Profile: def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... 
def run(self, cmd: str) -> Self: ... - def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> Self: ... def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... def calibrate(self, m: int, verbose: int = 0) -> float: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pstats.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pstats.pyi index d97ddc9..34a6beb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pstats.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pstats.pyi @@ -62,7 +62,7 @@ class Stats: method now take arbitrarily many file names as arguments. All the print methods now take an argument that indicates how many lines - to print. If the arg is a floating point number between 0 and 1.0, then + to print. If the arg is a floating-point number between 0 and 1.0, then it is taken as a decimal percentage of the available lines to be printed (e.g., .1 means print 10% of all available lines). 
If it is an integer, it is taken to mean the number of lines of data that you wish to have diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pty.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pty.pyi index 9843e3c..c428bcc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pty.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pty.pyi @@ -2,39 +2,44 @@ import sys from collections.abc import Callable, Iterable -from typing import Literal -from typing_extensions import TypeAlias +from typing import Final +from typing_extensions import TypeAlias, deprecated if sys.platform != "win32": __all__ = ["openpty", "fork", "spawn"] _Reader: TypeAlias = Callable[[int], bytes] - STDIN_FILENO: Literal[0] - STDOUT_FILENO: Literal[1] - STDERR_FILENO: Literal[2] + STDIN_FILENO: Final = 0 + STDOUT_FILENO: Final = 1 + STDERR_FILENO: Final = 2 - CHILD: Literal[0] + CHILD: Final = 0 def openpty() -> tuple[int, int]: """ openpty() -> (master_fd, slave_fd) Open a pty master/slave pair, using os.openpty() if possible. """ ... - def master_open() -> tuple[int, str]: - """ - master_open() -> (master_fd, slave_name) - Open a pty master and return the fd, and the filename of the slave end. - Deprecated, use openpty() instead. - """ - ... - def slave_open(tty_name: str) -> int: - """ - slave_open(tty_name) -> slave_fd - Open the pty slave and acquire the controlling terminal, returning - opened filedescriptor. - Deprecated, use openpty() instead. - """ - ... + + if sys.version_info < (3, 14): + @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead") + def master_open() -> tuple[int, str]: + """ + master_open() -> (master_fd, slave_name) + Open a pty master and return the fd, and the filename of the slave end. + Deprecated, use openpty() instead. + """ + ... 
+ @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead") + def slave_open(tty_name: str) -> int: + """ + slave_open(tty_name) -> slave_fd + Open the pty slave and acquire the controlling terminal, returning + opened filedescriptor. + Deprecated, use openpty() instead. + """ + ... + def fork() -> tuple[int, int]: """ fork() -> (pid, master_fd) diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/py_compile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/py_compile.pyi index c4175fa..1935920 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/py_compile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/py_compile.pyi @@ -44,7 +44,6 @@ class PyCompileError(Exception): def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): - """An enumeration.""" TIMESTAMP = 1 CHECKED_HASH = 2 UNCHECKED_HASH = 3 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pydoc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pydoc.pyi index 660acac..b9dde90 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pydoc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pydoc.pyi @@ -45,7 +45,7 @@ from builtins import list as _list # "list" conflicts with method name from collections.abc import Callable, Container, Mapping, MutableMapping from reprlib import Repr from types import MethodType, ModuleType, TracebackType -from typing import IO, Any, AnyStr, Final, NoReturn, TypeVar +from typing import IO, Any, AnyStr, Final, NoReturn, Protocol, TypeVar from typing_extensions import TypeGuard __all__ = ["help"] @@ -57,6 +57,9 @@ __date__: Final[str] __version__: Final[str] __credits__: Final[str] +class _Pager(Protocol): + def __call__(self, text: str, title: str = "") -> None: ... 
+ def pathdirs() -> list[str]: """Convert sys.path into a list of absolute, existing, unique paths.""" ... @@ -194,9 +197,7 @@ class HTMLDoc(Doc): """Format a list of items into a multi-column list.""" ... else: - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: - """Format a page heading.""" - ... + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... def section( self, title: str, @@ -207,12 +208,8 @@ class HTMLDoc(Doc): prelude: str = "", marginalia: str | None = None, gap: str = " ", - ) -> str: - """Format a section with a heading.""" - ... - def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: - """Format a list of items into a multi-column list.""" - ... + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... def bigsection(self, title: str, *args: Any) -> str: """Format a section with a big heading.""" @@ -305,15 +302,9 @@ class HTMLDoc(Doc): classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, cl: type | None = None, - ) -> str: - """Produce HTML documentation for a function or method object.""" - ... - def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: - """Produce html documentation for a data descriptor.""" - ... - def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: - """Produce html documentation for a data descriptor.""" - ... + ) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... 
# type: ignore[override] if sys.version_info >= (3, 11): def parentlink(self, object: type | ModuleType, modname: str) -> str: """Make a link for the enclosing class or module.""" @@ -395,18 +386,10 @@ class TextDoc(Doc): """Produce text documentation for a data object.""" ... else: - def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: - """Produce text documentation for a function or method object.""" - ... - def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: - """Produce text documentation for a given module object.""" - ... - def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: - """Produce text documentation for a data descriptor.""" - ... - def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: - """Produce text documentation for a data descriptor.""" - ... + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] def docother( # type: ignore[override] self, object: object, @@ -415,31 +398,19 @@ class TextDoc(Doc): parent: str | None = None, maxlen: int | None = None, doc: Any | None = None, - ) -> str: - """Produce text documentation for a data object.""" - ... + ) -> str: ... 
+ +if sys.version_info >= (3, 13): + def pager(text: str, title: str = "") -> None: + """The first time this is called, determine what kind of pager to use.""" + ... + +else: + def pager(text: str) -> None: ... -def pager(text: str) -> None: - """The first time this is called, determine what kind of pager to use.""" - ... -def getpager() -> Callable[[str], None]: - """Decide what method to use for paging through text.""" - ... def plain(text: str) -> str: """Remove boldface formatting from text.""" ... -def pipepager(text: str, cmd: str) -> None: - """Page through text by feeding it to another program.""" - ... -def tempfilepager(text: str, cmd: str) -> None: - """Page through text by invoking a program on a temporary file.""" - ... -def ttypager(text: str) -> None: - """Page through text on a text terminal.""" - ... -def plainpager(text: str) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" - ... def describe(thing: Any) -> str: """Produce a short description of the given thing.""" ... @@ -447,6 +418,36 @@ def locate(path: str, forceload: bool = ...) -> object: """Locate an object by name or dotted path, importing as necessary.""" ... +if sys.version_info >= (3, 13): + def get_pager() -> _Pager: + """Decide what method to use for paging through text.""" + ... + def pipe_pager(text: str, cmd: str, title: str = "") -> None: + """Page through text by feeding it to another program.""" + ... + def tempfile_pager(text: str, cmd: str, title: str = "") -> None: + """Page through text by invoking a program on a temporary file.""" + ... + def tty_pager(text: str, title: str = "") -> None: + """Page through text on a text terminal.""" + ... + def plain_pager(text: str, title: str = "") -> None: + """Simply print unformatted text. This is the ultimate fallback.""" + ... + + # For backwards compatibility. 
+ getpager = get_pager + pipepager = pipe_pager + tempfilepager = tempfile_pager + ttypager = tty_pager + plainpager = plain_pager +else: + def getpager() -> Callable[[str], None]: ... + def pipepager(text: str, cmd: str) -> None: ... + def tempfilepager(text: str, cmd: str) -> None: ... + def ttypager(text: str) -> None: ... + def plainpager(text: str) -> None: ... + text: TextDoc html: HTMLDoc @@ -476,9 +477,7 @@ else: title: str = "Python Library Documentation: %s", forceload: bool = ..., output: SupportsWrite[str] | None = None, - ) -> None: - """Display text documentation, given an object or a path to an object.""" - ... + ) -> None: ... def writedoc(thing: str | object, forceload: bool = ...) -> None: """Write HTML documentation to a file in the current directory.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/__init__.pyi index df2e360..249e088 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/__init__.pyi @@ -3,24 +3,19 @@ from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable from pyexpat import errors as errors, model as model -from typing import Any, final +from typing import Any, Final, final from typing_extensions import TypeAlias +from xml.parsers.expat import ExpatError as ExpatError -EXPAT_VERSION: str # undocumented +EXPAT_VERSION: Final[str] # undocumented version_info: tuple[int, int, int] # undocumented native_encoding: str # undocumented features: list[tuple[str, int]] # undocumented -class ExpatError(Exception): - code: int - lineno: int - offset: int - error = ExpatError - -XML_PARAM_ENTITY_PARSING_NEVER: int -XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: int -XML_PARAM_ENTITY_PARSING_ALWAYS: int +XML_PARAM_ENTITY_PARSING_NEVER: Final = 0 
+XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: Final = 1 +XML_PARAM_ENTITY_PARSING_ALWAYS: Final = 2 _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/errors.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/errors.pyi index c0cb7ca..01cd4d2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/errors.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/errors.pyi @@ -1,51 +1,53 @@ """Constants used to describe error conditions.""" import sys +from typing import Final +from typing_extensions import LiteralString codes: dict[str, int] messages: dict[int, str] -XML_ERROR_ABORTED: str -XML_ERROR_ASYNC_ENTITY: str -XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str -XML_ERROR_BAD_CHAR_REF: str -XML_ERROR_BINARY_ENTITY_REF: str -XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str -XML_ERROR_DUPLICATE_ATTRIBUTE: str -XML_ERROR_ENTITY_DECLARED_IN_PE: str -XML_ERROR_EXTERNAL_ENTITY_HANDLING: str -XML_ERROR_FEATURE_REQUIRES_XML_DTD: str -XML_ERROR_FINISHED: str -XML_ERROR_INCOMPLETE_PE: str -XML_ERROR_INCORRECT_ENCODING: str -XML_ERROR_INVALID_TOKEN: str -XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str -XML_ERROR_MISPLACED_XML_PI: str -XML_ERROR_NOT_STANDALONE: str -XML_ERROR_NOT_SUSPENDED: str -XML_ERROR_NO_ELEMENTS: str -XML_ERROR_NO_MEMORY: str -XML_ERROR_PARAM_ENTITY_REF: str -XML_ERROR_PARTIAL_CHAR: str -XML_ERROR_PUBLICID: str -XML_ERROR_RECURSIVE_ENTITY_REF: str -XML_ERROR_SUSPENDED: str -XML_ERROR_SUSPEND_PE: str -XML_ERROR_SYNTAX: str -XML_ERROR_TAG_MISMATCH: str -XML_ERROR_TEXT_DECL: str -XML_ERROR_UNBOUND_PREFIX: str -XML_ERROR_UNCLOSED_CDATA_SECTION: str -XML_ERROR_UNCLOSED_TOKEN: str -XML_ERROR_UNDECLARING_PREFIX: str -XML_ERROR_UNDEFINED_ENTITY: str -XML_ERROR_UNEXPECTED_STATE: str -XML_ERROR_UNKNOWN_ENCODING: str -XML_ERROR_XML_DECL: str +XML_ERROR_ABORTED: Final[LiteralString] +XML_ERROR_ASYNC_ENTITY: 
Final[LiteralString] +XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: Final[LiteralString] +XML_ERROR_BAD_CHAR_REF: Final[LiteralString] +XML_ERROR_BINARY_ENTITY_REF: Final[LiteralString] +XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: Final[LiteralString] +XML_ERROR_DUPLICATE_ATTRIBUTE: Final[LiteralString] +XML_ERROR_ENTITY_DECLARED_IN_PE: Final[LiteralString] +XML_ERROR_EXTERNAL_ENTITY_HANDLING: Final[LiteralString] +XML_ERROR_FEATURE_REQUIRES_XML_DTD: Final[LiteralString] +XML_ERROR_FINISHED: Final[LiteralString] +XML_ERROR_INCOMPLETE_PE: Final[LiteralString] +XML_ERROR_INCORRECT_ENCODING: Final[LiteralString] +XML_ERROR_INVALID_TOKEN: Final[LiteralString] +XML_ERROR_JUNK_AFTER_DOC_ELEMENT: Final[LiteralString] +XML_ERROR_MISPLACED_XML_PI: Final[LiteralString] +XML_ERROR_NOT_STANDALONE: Final[LiteralString] +XML_ERROR_NOT_SUSPENDED: Final[LiteralString] +XML_ERROR_NO_ELEMENTS: Final[LiteralString] +XML_ERROR_NO_MEMORY: Final[LiteralString] +XML_ERROR_PARAM_ENTITY_REF: Final[LiteralString] +XML_ERROR_PARTIAL_CHAR: Final[LiteralString] +XML_ERROR_PUBLICID: Final[LiteralString] +XML_ERROR_RECURSIVE_ENTITY_REF: Final[LiteralString] +XML_ERROR_SUSPENDED: Final[LiteralString] +XML_ERROR_SUSPEND_PE: Final[LiteralString] +XML_ERROR_SYNTAX: Final[LiteralString] +XML_ERROR_TAG_MISMATCH: Final[LiteralString] +XML_ERROR_TEXT_DECL: Final[LiteralString] +XML_ERROR_UNBOUND_PREFIX: Final[LiteralString] +XML_ERROR_UNCLOSED_CDATA_SECTION: Final[LiteralString] +XML_ERROR_UNCLOSED_TOKEN: Final[LiteralString] +XML_ERROR_UNDECLARING_PREFIX: Final[LiteralString] +XML_ERROR_UNDEFINED_ENTITY: Final[LiteralString] +XML_ERROR_UNEXPECTED_STATE: Final[LiteralString] +XML_ERROR_UNKNOWN_ENCODING: Final[LiteralString] +XML_ERROR_XML_DECL: Final[LiteralString] if sys.version_info >= (3, 11): - XML_ERROR_RESERVED_PREFIX_XML: str - XML_ERROR_RESERVED_PREFIX_XMLNS: str - XML_ERROR_RESERVED_NAMESPACE_URI: str - XML_ERROR_INVALID_ARGUMENT: str - XML_ERROR_NO_BUFFER: str - 
XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str + XML_ERROR_RESERVED_PREFIX_XML: Final[LiteralString] + XML_ERROR_RESERVED_PREFIX_XMLNS: Final[LiteralString] + XML_ERROR_RESERVED_NAMESPACE_URI: Final[LiteralString] + XML_ERROR_INVALID_ARGUMENT: Final[LiteralString] + XML_ERROR_NO_BUFFER: Final[LiteralString] + XML_ERROR_AMPLIFICATION_LIMIT_BREACH: Final[LiteralString] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/model.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/model.pyi index 62b63f4..3de4eec 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/model.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/pyexpat/model.pyi @@ -1,13 +1,15 @@ """Constants used to interpret content model information.""" -XML_CTYPE_ANY: int -XML_CTYPE_CHOICE: int -XML_CTYPE_EMPTY: int -XML_CTYPE_MIXED: int -XML_CTYPE_NAME: int -XML_CTYPE_SEQ: int +from typing import Final -XML_CQUANT_NONE: int -XML_CQUANT_OPT: int -XML_CQUANT_PLUS: int -XML_CQUANT_REP: int +XML_CTYPE_ANY: Final = 2 +XML_CTYPE_EMPTY: Final = 1 +XML_CTYPE_MIXED: Final = 3 +XML_CTYPE_NAME: Final = 4 +XML_CTYPE_CHOICE: Final = 5 +XML_CTYPE_SEQ: Final = 6 + +XML_CQUANT_NONE: Final = 0 +XML_CQUANT_OPT: Final = 1 +XML_CQUANT_REP: Final = 2 +XML_CQUANT_PLUS: Final = 3 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/queue.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/queue.pyi index 9f1f80e..137d6ec 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/queue.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/queue.pyi @@ -21,7 +21,9 @@ class Full(Exception): ... if sys.version_info >= (3, 13): - class ShutDown(Exception): ... + class ShutDown(Exception): + """Raised when put/get with shut-down queue.""" + ... 
class Queue(Generic[_T]): """ @@ -77,6 +79,9 @@ class Queue(Generic[_T]): Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored in that case). + + Raises ShutDown if the queue has been shut down and is empty, + or if the queue has been shut down immediately. """ ... def get_nowait(self) -> _T: @@ -88,7 +93,18 @@ class Queue(Generic[_T]): """ ... if sys.version_info >= (3, 13): - def shutdown(self, immediate: bool = False) -> None: ... + def shutdown(self, immediate: bool = False) -> None: + """ + Shut-down the queue, making queue gets and puts raise ShutDown. + + By default, gets will only raise once the queue is empty. Set + 'immediate' to True to make gets raise immediately instead. + + All blocked callers of put() and get() will be unblocked. If + 'immediate', a task is marked as done for each item remaining in + the queue, which may unblock callers of join(). + """ + ... def _get(self) -> _T: ... def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: @@ -102,6 +118,8 @@ class Queue(Generic[_T]): Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' is ignored in that case). + + Raises ShutDown if the queue has been shut down. """ ... def put_nowait(self, item: _T) -> None: @@ -140,6 +158,9 @@ class Queue(Generic[_T]): have been processed (meaning that a task_done() call was received for every item that had been put() into the queue). + shutdown(immediate=True) calls task_done() for each remaining item in + the queue. + Raises a ValueError if called more times than there were items placed in the queue. 
""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/random.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/random.pyi index 859b1bb..e03c43b 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/random.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/random.pyi @@ -184,15 +184,7 @@ class Random(_random.Random): """Shuffle list x in place, and return None.""" ... else: - def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: - """ - Shuffle list x in place, and return None. - - Optional argument random is a 0-argument function returning a - random float in [0.0, 1.0); if it is the default None, the - standard random.random will be used. - """ - ... + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... if sys.version_info >= (3, 11): def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: """ @@ -227,36 +219,7 @@ class Random(_random.Random): elif sys.version_info >= (3, 9): def sample( self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None - ) -> list[_T]: - """ - Chooses k unique random elements from a population sequence or set. - - Returns a new list containing elements from the population while - leaving the original population unchanged. The resulting list is - in selection order so that all sub-slices will also be valid random - samples. This allows raffle winners (the sample) to be partitioned - into grand prize and second place winners (the subslices). - - Members of the population need not be hashable or unique. If the - population contains repeats, then each occurrence is a possible - selection in the sample. - - Repeated elements can be specified one at a time or with the optional - counts parameter. 
For example: - - sample(['red', 'blue'], counts=[4, 2], k=5) - - is equivalent to: - - sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) - - To choose a sample from a range of integers, use range() for the - population argument. This is especially fast and space efficient - for sampling from a large population: - - sample(range(10000000), 60) - """ - ... + ) -> list[_T]: ... else: def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... @@ -335,17 +298,7 @@ class Random(_random.Random): """ ... else: - def expovariate(self, lambd: float) -> float: - """ - Exponential distribution. - - lambd is 1.0 divided by the desired mean. It should be - nonzero. (The parameter would be called "lambda", but that is - a reserved word in Python.) Returned values range from 0 to - positive infinity if lambd is positive, and from negative - infinity to 0 if lambd is negative. - """ - ... + def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: """ @@ -384,23 +337,8 @@ class Random(_random.Random): """ ... else: - def gauss(self, mu: float, sigma: float) -> float: - """ - Gaussian distribution. - - mu is the mean, and sigma is the standard deviation. This is - slightly faster than the normalvariate() function. - - Not thread-safe without a lock around calls. - """ - ... - def normalvariate(self, mu: float, sigma: float) -> float: - """ - Normal distribution. - - mu is the mean, and sigma is the standard deviation. - """ - ... + def gauss(self, mu: float, sigma: float) -> float: ... + def normalvariate(self, mu: float, sigma: float) -> float: ... 
def lognormvariate(self, mu: float, sigma: float) -> float: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/re.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/re.pyi index c70c501..dee73c4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/re.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/re.pyi @@ -102,7 +102,8 @@ A, L, and U are mutually exclusive. U UNICODE For compatibility only. Ignored for string patterns (it is the default), and forbidden for bytes patterns. -This module also defines an exception 'error'. +This module also defines exception 'PatternError', aliased to 'error' for +backward compatibility. """ import enum @@ -111,7 +112,6 @@ import sre_constants import sys from _typeshed import ReadableBuffer from collections.abc import Callable, Iterator, Mapping -from sre_constants import error as error from typing import Any, AnyStr, Generic, Literal, TypeVar, final, overload from typing_extensions import TypeAlias @@ -161,6 +161,27 @@ if sys.version_info >= (3, 13): _T = TypeVar("_T") +# The implementation defines this in re._constants (version_info >= 3, 11) or +# sre_constants. Typeshed has it here because its __module__ attribute is set to "re". +class error(Exception): + """ + Exception raised for invalid regular expressions. + + Attributes: + + msg: The unformatted error message + pattern: The regular expression pattern + pos: The index in the pattern where compilation failed (may be None) + lineno: The line corresponding to pos (may be None) + colno: The column corresponding to pos (may be None) + """ + msg: str + pattern: str | bytes | None + pos: int | None + lineno: int + colno: int + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... + @final class Match(Generic[AnyStr]): """ @@ -358,7 +379,9 @@ class Pattern(Generic[AnyStr]): """Matches against all of the string.""" ... 
@overload - def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: + def fullmatch( + self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize + ) -> Match[bytes] | None: """Matches against all of the string.""" ... @overload @@ -399,7 +422,9 @@ class Pattern(Generic[AnyStr]): """ ... @overload - def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: + def finditer( + self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize + ) -> Iterator[Match[bytes]]: """ Return an iterator over all non-overlapping matches for the RE pattern in string. @@ -419,7 +444,7 @@ class Pattern(Generic[AnyStr]): """Return the string obtained by replacing the leftmost non-overlapping occurrences of pattern in string by the replacement repl.""" ... @overload - def sub( # type: ignore[overload-overlap] + def sub( self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, @@ -436,7 +461,7 @@ class Pattern(Generic[AnyStr]): """Return the tuple (new_string, number_of_subs_made) found by replacing the leftmost non-overlapping occurrences of pattern with the replacement repl.""" ... @overload - def subn( # type: ignore[overload-overlap] + def subn( self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, @@ -705,6 +730,4 @@ def purge() -> None: ... if sys.version_info < (3, 13): - def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: - """Compile a template pattern, returning a Pattern object, deprecated""" - ... + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/readline.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/readline.pyi index 12e7039..1821c6a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/readline.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/readline.pyi @@ -1,8 +1,9 @@ -"""Importing this module enables command line editing using GNU readline.""" +"""Importing this module enables command line editing using libedit readline.""" import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence +from typing import Literal from typing_extensions import TypeAlias if sys.platform != "win32": @@ -42,13 +43,7 @@ if sys.platform != "win32": The default filename is ~/.history. """ ... - def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: - """ - Append the last nelements items of the history list to file. - - The default filename is ~/.history. - """ - ... + def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: ... def get_history_length() -> int: """Return the maximum number of lines that will be written to the history file.""" ... @@ -59,9 +54,7 @@ if sys.platform != "win32": A negative length is used to inhibit history truncation. """ ... - def clear_history() -> None: - """Clear the current readline history.""" - ... + def clear_history() -> None: ... def get_current_history_length() -> int: """Return the current (not the maximum) length of history.""" ... @@ -92,15 +85,7 @@ if sys.platform != "win32": before readline prints the first prompt. """ ... - def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: - """ - Set or remove the function invoked by the rl_pre_input_hook callback. - - The function is called with no arguments after the first prompt - has been printed and just before readline starts reading input - characters. - """ - ... 
+ def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: ... def set_completer(function: _Completer | None = None, /) -> None: """ Set or remove the completer function. @@ -137,3 +122,6 @@ if sys.platform != "win32": once each time matches need to be displayed. """ ... + + if sys.version_info >= (3, 13): + backend: Literal["readline", "editline"] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sched.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sched.pyi index da69393..d4d6614 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sched.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sched.pyi @@ -12,7 +12,7 @@ substituting time and sleep from built-in module time, or you can implement simulated time by writing your own functions. This can also be used to integrate scheduling with STDWIN events; the delay function is allowed to modify the queue. Time can be expressed as -integers or floating point numbers, as long as it is consistent. +integers or floating-point numbers, as long as it is consistent. Events are specified by tuples (time, priority, action, argument, kwargs). As in UNIX, lower priority numbers mean higher priority; in this diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/select.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/select.pyi index 079a806..86c1973 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/select.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/select.pyi @@ -56,7 +56,7 @@ def select( gotten from a fileno() method call on one of those. The optional 4th argument specifies a timeout in seconds; it may be - a floating point number to specify fractions of seconds. If it is absent + a floating-point number to specify fractions of seconds. If it is absent or None, the call will never time out. 
The return value is a tuple of three lists corresponding to the first three diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shelve.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shelve.pyi index 4d88059..368dc37 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shelve.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shelve.pyi @@ -152,17 +152,4 @@ if sys.version_info >= (3, 11): ... else: - def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: - """ - Open a persistent dictionary for reading and writing. - - The filename parameter is the base filename for the underlying - database. As a side-effect, an extension may be added to the - filename and more than one file may be created. The optional flag - parameter has the same interpretation as the flag parameter of - dbm.open(). The optional protocol parameter specifies the - version of the pickle protocol. - - See the module's __doc__ string for an overview of the interface. - """ - ... + def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shlex.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shlex.pyi index d59b2c6..4f0e8fc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shlex.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shlex.pyi @@ -22,14 +22,10 @@ if sys.version_info >= (3, 12): else: @overload - def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax.""" - ... + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... 
@overload @deprecated("Passing None for 's' to shlex.split() is deprecated and will raise an error in Python 3.12.") - def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax.""" - ... + def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: ... def join(split_command: Iterable[str]) -> str: """Return a shell-escaped string from *split_command*.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shutil.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shutil.pyi index 346c9ba..16fe72c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shutil.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/shutil.pyi @@ -337,7 +337,21 @@ if sys.version_info >= (3, 13): *, dir_fd: int | None = None, follow_symlinks: bool = True, - ) -> None: ... + ) -> None: + """ + Change owner user and group of the given path. + + user and group can be the uid/gid or the user/group names, and in that case, + they are converted to their respective uid/gid. + + If dir_fd is set, it should be an open file descriptor to the directory to + be used as the root of *path* if it is relative. + + If follow_symlinks is set to False and the last element of the path is a + symbolic link, chown will modify the link itself and not the file being + referenced by the link. + """ + ... @overload def chown( path: FileDescriptorOrPath, @@ -346,54 +360,68 @@ if sys.version_info >= (3, 13): group: str | int, dir_fd: int | None = None, follow_symlinks: bool = True, - ) -> None: ... - @overload - def chown( - path: FileDescriptorOrPath, user: None, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True - ) -> None: ... - @overload - def chown( - path: FileDescriptorOrPath, user: str | int, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True - ) -> None: ... 
- -else: - @overload - def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: + ) -> None: """ Change owner user and group of the given path. user and group can be the uid/gid or the user/group names, and in that case, they are converted to their respective uid/gid. - """ - ... - @overload - def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: - """ - Change owner user and group of the given path. - user and group can be the uid/gid or the user/group names, and in that case, - they are converted to their respective uid/gid. + If dir_fd is set, it should be an open file descriptor to the directory to + be used as the root of *path* if it is relative. + + If follow_symlinks is set to False and the last element of the path is a + symbolic link, chown will modify the link itself and not the file being + referenced by the link. """ ... @overload - def chown(path: FileDescriptorOrPath, user: None, group: str | int) -> None: + def chown( + path: FileDescriptorOrPath, user: None, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: """ Change owner user and group of the given path. user and group can be the uid/gid or the user/group names, and in that case, they are converted to their respective uid/gid. + + If dir_fd is set, it should be an open file descriptor to the directory to + be used as the root of *path* if it is relative. + + If follow_symlinks is set to False and the last element of the path is a + symbolic link, chown will modify the link itself and not the file being + referenced by the link. """ ... @overload - def chown(path: FileDescriptorOrPath, user: str | int, group: str | int) -> None: + def chown( + path: FileDescriptorOrPath, user: str | int, group: str | int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: """ Change owner user and group of the given path. 
user and group can be the uid/gid or the user/group names, and in that case, they are converted to their respective uid/gid. + + If dir_fd is set, it should be an open file descriptor to the directory to + be used as the root of *path* if it is relative. + + If follow_symlinks is set to False and the last element of the path is a + symbolic link, chown will modify the link itself and not the file being + referenced by the link. """ ... +else: + @overload + def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: None, group: str | int) -> None: ... + @overload + def chown(path: FileDescriptorOrPath, user: str | int, group: str | int) -> None: ... + @overload def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/signal.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/signal.pyi index 16c23ac..971ec86 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/signal.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/signal.pyi @@ -271,7 +271,7 @@ else: """ Like sigwaitinfo(), but with a timeout. - The timeout is specified in seconds, with floating point numbers allowed. + The timeout is specified in seconds, with floating-point numbers allowed. """ ... def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: @@ -304,10 +304,9 @@ def raise_signal(signalnum: _SIGNUM, /) -> None: ... def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = ...) -> int: """ - set_wakeup_fd(fd, *, warn_on_full_buffer=True) -> fd + Sets the fd to be written to (with the signal number) when a signal comes in. - Sets the fd to be written to (with the signal number) when a signal - comes in. 
A library can use this to wakeup select or poll. + A library can use this to wakeup select or poll. The previous fd or -1 is returned. The fd must be non-blocking. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/site.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/site.pyi index 5d1a09c..0644779 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/site.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/site.pyi @@ -70,6 +70,7 @@ site-specific customizations. If this import fails with an ImportError exception, it is silently ignored. """ +import sys from _typeshed import StrPath from collections.abc import Iterable @@ -125,17 +126,35 @@ def check_enableusersite() -> bool | None: True: Safe and enabled """ ... + +if sys.version_info >= (3, 13): + def gethistoryfile() -> str: + """ + Check if the PYTHON_HISTORY environment variable is set and define + it as the .python_history file. If PYTHON_HISTORY is not set, use the + default .python_history file. + """ + ... + def enablerlcompleter() -> None: """ Enable default readline configuration on interactive prompts, by registering a sys.__interactivehook__. - - If the readline module can be imported, the hook will set the Tab key - as completion key and register ~/.python_history as history file. - This can be overridden in the sitecustomize or usercustomize module, - or in a PYTHONSTARTUP file. """ ... + +if sys.version_info >= (3, 13): + def register_readline() -> None: + """ + Configure readline completion on interactive prompts. + + If the readline module can be imported, the hook will set the Tab key + as completion key and register ~/.python_history as history file. + This can be overridden in the sitecustomize or usercustomize module, + or in a PYTHONSTARTUP file. + """ + ... + def execsitecustomize() -> None: """Run custom site specific code, if available.""" ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtpd.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtpd.pyi index 13f8ce1..7392bd5 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtpd.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtpd.pyi @@ -1,49 +1,3 @@ -""" -An RFC 5321 smtp proxy with optional RFC 1870 and RFC 6531 extensions. - -Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]] - -Options: - - --nosetuid - -n - This program generally tries to setuid `nobody', unless this flag is - set. The setuid call will fail if this program is not run as root (in - which case, use this flag). - - --version - -V - Print the version number and exit. - - --class classname - -c classname - Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by - default. - - --size limit - -s limit - Restrict the total size of the incoming message to "limit" number of - bytes via the RFC 1870 SIZE extension. Defaults to 33554432 bytes. - - --smtputf8 - -u - Enable the SMTPUTF8 extension and behave as an RFC 6531 smtp proxy. - - --debug - -d - Turn on debugging prints. - - --help - -h - Print this message and exit. - -Version: %(__version__)s - -If localhost is not given then `localhost' is used, and if localport is not -given then 8025 is used. If remotehost is not given then `localhost' is used, -and if remoteport is not given, then 25 is used. -""" - import asynchat import asyncore import socket @@ -125,38 +79,7 @@ class SMTPServer(asyncore.dispatcher): def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... def process_message( self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any - ) -> str | None: - """ - Override this abstract method to handle messages from the client. - - peer is a tuple containing (ipaddr, port) of the client that made the - socket connection to our smtp port. 
- - mailfrom is the raw address the client claims the message is coming - from. - - rcpttos is a list of raw addresses the client wishes to deliver the - message to. - - data is a string containing the entire full text of the message, - headers (if supplied) and all. It has been `de-transparencied' - according to RFC 821, Section 4.5.2. In other words, a line - containing a `.' followed by other text has had the leading dot - removed. - - kwargs is a dictionary containing additional information. It is - empty if decode_data=True was given as init parameter, otherwise - it will contain the following keys: - 'mail_options': list of parameters to the mail command. All - elements are uppercase strings. Example: - ['BODY=8BITMIME', 'SMTPUTF8']. - 'rcpt_options': same, for the rcpt command. - - This function should return None for a normal `250 Ok' response; - otherwise, it should return the desired response string in RFC 821 - format. - """ - ... + ) -> str | None: ... class DebuggingServer(SMTPServer): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtplib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtplib.pyi index 4b4a00f..1bc7186 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtplib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/smtplib.pyi @@ -462,25 +462,7 @@ class SMTP: else: def starttls( self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None - ) -> _Reply: - """ - Puts the connection to the SMTP server into TLS mode. - - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. - - If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the keyfile and certfile parameters, - the identity of the SMTP server and client can be checked. This, - however, depends on whether the socket module really checks the - certificates. 
- - This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - """ - ... + ) -> _Reply: ... def sendmail( self, diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sndhdr.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sndhdr.pyi index e58a761..f4d4876 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sndhdr.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sndhdr.pyi @@ -1,49 +1,14 @@ -""" -Routines to help recognizing sound files. - -Function whathdr() recognizes various types of sound file headers. -It understands almost all headers that SOX can decode. - -The return tuple contains the following items, in this order: -- file type (as SOX understands it) -- sampling rate (0 if unknown or hard to decode) -- number of channels (0 if unknown or hard to decode) -- number of frames in the file (-1 if unknown or hard to decode) -- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW - -If the file doesn't have a recognizable type, it returns None. -If the file can't be opened, OSError is raised. - -To compute the total time, divide the number of frames by the -sampling rate (a frame contains a sample for each channel). - -Function what() calls whathdr(). (It used to also use some -heuristics for raw data, but this doesn't work very well.) - -Finally, the function test() is a simple main program that calls -what() for all files mentioned on the argument list. For directory -arguments it calls what() for all files in that directory. Default -argument is "." (testing all files in the current directory). The -option -r tells it to recurse down directories found inside -explicitly given directories. 
-""" - from _typeshed import StrOrBytesPath from typing import NamedTuple __all__ = ["what", "whathdr"] class SndHeaders(NamedTuple): - """SndHeaders(filetype, framerate, nchannels, nframes, sampwidth)""" filetype: str framerate: int nchannels: int nframes: int sampwidth: int | str -def what(filename: StrOrBytesPath) -> SndHeaders | None: - """Guess the type of a sound file.""" - ... -def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: - """Recognize sound headers.""" - ... +def what(filename: StrOrBytesPath) -> SndHeaders | None: ... +def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socket.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socket.pyi index a2842be..d3397d8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socket.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socket.pyi @@ -158,8 +158,6 @@ from _socket import ( _RetAddress as _RetAddress, close as close, dup as dup, - error as error, - gaierror as gaierror, getdefaulttimeout as getdefaulttimeout, gethostbyaddr as gethostbyaddr, gethostbyname as gethostbyname, @@ -170,7 +168,6 @@ from _socket import ( getservbyname as getservbyname, getservbyport as getservbyport, has_ipv6 as has_ipv6, - herror as herror, htonl as htonl, htons as htons, if_indextoname as if_indextoname, @@ -183,7 +180,6 @@ from _socket import ( ntohl as ntohl, ntohs as ntohs, setdefaulttimeout as setdefaulttimeout, - timeout as timeout, ) from _typeshed import ReadableBuffer, Unused, WriteableBuffer from collections.abc import Iterable @@ -535,6 +531,18 @@ EBADF: int EAGAIN: int EWOULDBLOCK: int +# These errors are implemented in _socket at runtime +# but they consider themselves to live in socket so we'll put them here. +error = OSError + +class herror(error): ... +class gaierror(error): ... 
+ +if sys.version_info >= (3, 10): + timeout = TimeoutError +else: + class timeout(error): ... + class AddressFamily(IntEnum): """An enumeration.""" AF_INET = 2 @@ -1052,20 +1060,7 @@ if sys.version_info >= (3, 11): else: def create_connection( address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None - ) -> socket: - """ - Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - A host of '' or port 0 tells the OS to use the default. - """ - ... + ) -> socket: ... def has_dualstack_ipv6() -> bool: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socketserver.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socketserver.pyi index 0ba08d2..52ee512 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socketserver.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/socketserver.pyi @@ -123,8 +123,9 @@ import types from _socket import _Address, _RetAddress from _typeshed import ReadableBuffer from collections.abc import Callable +from io import BufferedIOBase from socket import socket as _socket -from typing import Any, BinaryIO, ClassVar +from typing import Any, ClassVar from typing_extensions import Self, TypeAlias __all__ = [ @@ -494,12 +495,12 @@ class StreamRequestHandler(BaseRequestHandler): timeout: ClassVar[float | None] # undocumented disable_nagle_algorithm: ClassVar[bool] # undocumented connection: Any # undocumented - rfile: BinaryIO - wfile: BinaryIO + rfile: 
BufferedIOBase + wfile: BufferedIOBase class DatagramRequestHandler(BaseRequestHandler): """Define self.rfile and self.wfile for datagram sockets.""" - packet: _socket # undocumented + packet: bytes # undocumented socket: _socket # undocumented - rfile: BinaryIO - wfile: BinaryIO + rfile: BufferedIOBase + wfile: BufferedIOBase diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/spwd.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/spwd.pyi index 6a0f07c..3a5d399 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/spwd.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/spwd.pyi @@ -1,15 +1,3 @@ -""" -This module provides access to the Unix shadow password database. -It is available on various Unix versions. - -Shadow password database entries are reported as 9-tuples of type struct_spwd, -containing the following items from the password database (see `'): -sp_namp, sp_pwdp, sp_lstchg, sp_min, sp_max, sp_warn, sp_inact, sp_expire, sp_flag. -The sp_namp and sp_pwdp are strings, the rest are integers. -An exception is raised if the entry asked for cannot be found. -You have to be root to be able to use this module. -""" - import sys from _typeshed import structseq from typing import Any, Final, final @@ -17,13 +5,6 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): - """ - spwd.struct_spwd: Results from getsp*() routines. - - This object may be accessed either as a 9-tuple of - (sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag) - or via the object attributes as named in the above tuple. - """ if sys.version_info >= (3, 10): __match_args__: Final = ( "sp_namp", @@ -38,53 +19,28 @@ if sys.platform != "win32": ) @property - def sp_namp(self) -> str: - """login name""" - ... + def sp_namp(self) -> str: ... 
@property - def sp_pwdp(self) -> str: - """encrypted password""" - ... + def sp_pwdp(self) -> str: ... @property - def sp_lstchg(self) -> int: - """date of last change""" - ... + def sp_lstchg(self) -> int: ... @property - def sp_min(self) -> int: - """min #days between changes""" - ... + def sp_min(self) -> int: ... @property - def sp_max(self) -> int: - """max #days between changes""" - ... + def sp_max(self) -> int: ... @property - def sp_warn(self) -> int: - """#days before pw expires to warn user about it""" - ... + def sp_warn(self) -> int: ... @property - def sp_inact(self) -> int: - """#days after pw expires until account is disabled""" - ... + def sp_inact(self) -> int: ... @property - def sp_expire(self) -> int: - """#days since 1970-01-01 when account expires""" - ... + def sp_expire(self) -> int: ... @property - def sp_flag(self) -> int: - """reserved""" - ... - - def getspall() -> list[struct_spwd]: - """ - Return a list of all available shadow password database entries, in arbitrary order. - - See `help(spwd)` for more on shadow password database entries. - """ - ... - def getspnam(arg: str, /) -> struct_spwd: - """ - Return the shadow password database entry for the given user name. + def sp_flag(self) -> int: ... + # Deprecated aliases below. + @property + def sp_nam(self) -> str: ... + @property + def sp_pwd(self) -> str: ... - See `help(spwd)` for more on shadow password database entries. - """ - ... + def getspall() -> list[struct_spwd]: ... + def getspnam(arg: str, /) -> struct_spwd: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/__init__.pyi index 41ed20a..520a514 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/__init__.pyi @@ -32,4 +32,789 @@ its execute() method to perform SQL queries: The sqlite3 module is written by Gerhard Häring . """ -from sqlite3.dbapi2 import * +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence +from sqlite3.dbapi2 import ( + PARSE_COLNAMES as PARSE_COLNAMES, + PARSE_DECLTYPES as PARSE_DECLTYPES, + SQLITE_ALTER_TABLE as SQLITE_ALTER_TABLE, + SQLITE_ANALYZE as SQLITE_ANALYZE, + SQLITE_ATTACH as SQLITE_ATTACH, + SQLITE_CREATE_INDEX as SQLITE_CREATE_INDEX, + SQLITE_CREATE_TABLE as SQLITE_CREATE_TABLE, + SQLITE_CREATE_TEMP_INDEX as SQLITE_CREATE_TEMP_INDEX, + SQLITE_CREATE_TEMP_TABLE as SQLITE_CREATE_TEMP_TABLE, + SQLITE_CREATE_TEMP_TRIGGER as SQLITE_CREATE_TEMP_TRIGGER, + SQLITE_CREATE_TEMP_VIEW as SQLITE_CREATE_TEMP_VIEW, + SQLITE_CREATE_TRIGGER as SQLITE_CREATE_TRIGGER, + SQLITE_CREATE_VIEW as SQLITE_CREATE_VIEW, + SQLITE_CREATE_VTABLE as SQLITE_CREATE_VTABLE, + SQLITE_DELETE as SQLITE_DELETE, + SQLITE_DENY as SQLITE_DENY, + SQLITE_DETACH as SQLITE_DETACH, + SQLITE_DONE as SQLITE_DONE, + SQLITE_DROP_INDEX as SQLITE_DROP_INDEX, + SQLITE_DROP_TABLE as SQLITE_DROP_TABLE, + SQLITE_DROP_TEMP_INDEX as SQLITE_DROP_TEMP_INDEX, + SQLITE_DROP_TEMP_TABLE as SQLITE_DROP_TEMP_TABLE, + SQLITE_DROP_TEMP_TRIGGER as SQLITE_DROP_TEMP_TRIGGER, + SQLITE_DROP_TEMP_VIEW as SQLITE_DROP_TEMP_VIEW, + SQLITE_DROP_TRIGGER as SQLITE_DROP_TRIGGER, + SQLITE_DROP_VIEW as SQLITE_DROP_VIEW, + SQLITE_DROP_VTABLE as SQLITE_DROP_VTABLE, + SQLITE_FUNCTION as SQLITE_FUNCTION, + 
SQLITE_IGNORE as SQLITE_IGNORE, + SQLITE_INSERT as SQLITE_INSERT, + SQLITE_OK as SQLITE_OK, + SQLITE_PRAGMA as SQLITE_PRAGMA, + SQLITE_READ as SQLITE_READ, + SQLITE_RECURSIVE as SQLITE_RECURSIVE, + SQLITE_REINDEX as SQLITE_REINDEX, + SQLITE_SAVEPOINT as SQLITE_SAVEPOINT, + SQLITE_SELECT as SQLITE_SELECT, + SQLITE_TRANSACTION as SQLITE_TRANSACTION, + SQLITE_UPDATE as SQLITE_UPDATE, + Binary as Binary, + Date as Date, + DateFromTicks as DateFromTicks, + Time as Time, + TimeFromTicks as TimeFromTicks, + TimestampFromTicks as TimestampFromTicks, + adapt as adapt, + adapters as adapters, + apilevel as apilevel, + complete_statement as complete_statement, + connect as connect, + converters as converters, + enable_callback_tracebacks as enable_callback_tracebacks, + paramstyle as paramstyle, + register_adapter as register_adapter, + register_converter as register_converter, + sqlite_version as sqlite_version, + sqlite_version_info as sqlite_version_info, + threadsafety as threadsafety, + version_info as version_info, +) +from types import TracebackType +from typing import Any, Literal, Protocol, SupportsIndex, TypeVar, final, overload +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 12): + from sqlite3.dbapi2 import ( + LEGACY_TRANSACTION_CONTROL as LEGACY_TRANSACTION_CONTROL, + SQLITE_DBCONFIG_DEFENSIVE as SQLITE_DBCONFIG_DEFENSIVE, + SQLITE_DBCONFIG_DQS_DDL as SQLITE_DBCONFIG_DQS_DDL, + SQLITE_DBCONFIG_DQS_DML as SQLITE_DBCONFIG_DQS_DML, + SQLITE_DBCONFIG_ENABLE_FKEY as SQLITE_DBCONFIG_ENABLE_FKEY, + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER as SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER, + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION as SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION, + SQLITE_DBCONFIG_ENABLE_QPSG as SQLITE_DBCONFIG_ENABLE_QPSG, + SQLITE_DBCONFIG_ENABLE_TRIGGER as SQLITE_DBCONFIG_ENABLE_TRIGGER, + SQLITE_DBCONFIG_ENABLE_VIEW as SQLITE_DBCONFIG_ENABLE_VIEW, + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE as SQLITE_DBCONFIG_LEGACY_ALTER_TABLE, + 
SQLITE_DBCONFIG_LEGACY_FILE_FORMAT as SQLITE_DBCONFIG_LEGACY_FILE_FORMAT, + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE as SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE, + SQLITE_DBCONFIG_RESET_DATABASE as SQLITE_DBCONFIG_RESET_DATABASE, + SQLITE_DBCONFIG_TRIGGER_EQP as SQLITE_DBCONFIG_TRIGGER_EQP, + SQLITE_DBCONFIG_TRUSTED_SCHEMA as SQLITE_DBCONFIG_TRUSTED_SCHEMA, + SQLITE_DBCONFIG_WRITABLE_SCHEMA as SQLITE_DBCONFIG_WRITABLE_SCHEMA, + ) + +if sys.version_info >= (3, 11): + from sqlite3.dbapi2 import ( + SQLITE_ABORT as SQLITE_ABORT, + SQLITE_ABORT_ROLLBACK as SQLITE_ABORT_ROLLBACK, + SQLITE_AUTH as SQLITE_AUTH, + SQLITE_AUTH_USER as SQLITE_AUTH_USER, + SQLITE_BUSY as SQLITE_BUSY, + SQLITE_BUSY_RECOVERY as SQLITE_BUSY_RECOVERY, + SQLITE_BUSY_SNAPSHOT as SQLITE_BUSY_SNAPSHOT, + SQLITE_BUSY_TIMEOUT as SQLITE_BUSY_TIMEOUT, + SQLITE_CANTOPEN as SQLITE_CANTOPEN, + SQLITE_CANTOPEN_CONVPATH as SQLITE_CANTOPEN_CONVPATH, + SQLITE_CANTOPEN_DIRTYWAL as SQLITE_CANTOPEN_DIRTYWAL, + SQLITE_CANTOPEN_FULLPATH as SQLITE_CANTOPEN_FULLPATH, + SQLITE_CANTOPEN_ISDIR as SQLITE_CANTOPEN_ISDIR, + SQLITE_CANTOPEN_NOTEMPDIR as SQLITE_CANTOPEN_NOTEMPDIR, + SQLITE_CANTOPEN_SYMLINK as SQLITE_CANTOPEN_SYMLINK, + SQLITE_CONSTRAINT as SQLITE_CONSTRAINT, + SQLITE_CONSTRAINT_CHECK as SQLITE_CONSTRAINT_CHECK, + SQLITE_CONSTRAINT_COMMITHOOK as SQLITE_CONSTRAINT_COMMITHOOK, + SQLITE_CONSTRAINT_FOREIGNKEY as SQLITE_CONSTRAINT_FOREIGNKEY, + SQLITE_CONSTRAINT_FUNCTION as SQLITE_CONSTRAINT_FUNCTION, + SQLITE_CONSTRAINT_NOTNULL as SQLITE_CONSTRAINT_NOTNULL, + SQLITE_CONSTRAINT_PINNED as SQLITE_CONSTRAINT_PINNED, + SQLITE_CONSTRAINT_PRIMARYKEY as SQLITE_CONSTRAINT_PRIMARYKEY, + SQLITE_CONSTRAINT_ROWID as SQLITE_CONSTRAINT_ROWID, + SQLITE_CONSTRAINT_TRIGGER as SQLITE_CONSTRAINT_TRIGGER, + SQLITE_CONSTRAINT_UNIQUE as SQLITE_CONSTRAINT_UNIQUE, + SQLITE_CONSTRAINT_VTAB as SQLITE_CONSTRAINT_VTAB, + SQLITE_CORRUPT as SQLITE_CORRUPT, + SQLITE_CORRUPT_INDEX as SQLITE_CORRUPT_INDEX, + SQLITE_CORRUPT_SEQUENCE as SQLITE_CORRUPT_SEQUENCE, + 
SQLITE_CORRUPT_VTAB as SQLITE_CORRUPT_VTAB, + SQLITE_EMPTY as SQLITE_EMPTY, + SQLITE_ERROR as SQLITE_ERROR, + SQLITE_ERROR_MISSING_COLLSEQ as SQLITE_ERROR_MISSING_COLLSEQ, + SQLITE_ERROR_RETRY as SQLITE_ERROR_RETRY, + SQLITE_ERROR_SNAPSHOT as SQLITE_ERROR_SNAPSHOT, + SQLITE_FORMAT as SQLITE_FORMAT, + SQLITE_FULL as SQLITE_FULL, + SQLITE_INTERNAL as SQLITE_INTERNAL, + SQLITE_INTERRUPT as SQLITE_INTERRUPT, + SQLITE_IOERR as SQLITE_IOERR, + SQLITE_IOERR_ACCESS as SQLITE_IOERR_ACCESS, + SQLITE_IOERR_AUTH as SQLITE_IOERR_AUTH, + SQLITE_IOERR_BEGIN_ATOMIC as SQLITE_IOERR_BEGIN_ATOMIC, + SQLITE_IOERR_BLOCKED as SQLITE_IOERR_BLOCKED, + SQLITE_IOERR_CHECKRESERVEDLOCK as SQLITE_IOERR_CHECKRESERVEDLOCK, + SQLITE_IOERR_CLOSE as SQLITE_IOERR_CLOSE, + SQLITE_IOERR_COMMIT_ATOMIC as SQLITE_IOERR_COMMIT_ATOMIC, + SQLITE_IOERR_CONVPATH as SQLITE_IOERR_CONVPATH, + SQLITE_IOERR_CORRUPTFS as SQLITE_IOERR_CORRUPTFS, + SQLITE_IOERR_DATA as SQLITE_IOERR_DATA, + SQLITE_IOERR_DELETE as SQLITE_IOERR_DELETE, + SQLITE_IOERR_DELETE_NOENT as SQLITE_IOERR_DELETE_NOENT, + SQLITE_IOERR_DIR_CLOSE as SQLITE_IOERR_DIR_CLOSE, + SQLITE_IOERR_DIR_FSYNC as SQLITE_IOERR_DIR_FSYNC, + SQLITE_IOERR_FSTAT as SQLITE_IOERR_FSTAT, + SQLITE_IOERR_FSYNC as SQLITE_IOERR_FSYNC, + SQLITE_IOERR_GETTEMPPATH as SQLITE_IOERR_GETTEMPPATH, + SQLITE_IOERR_LOCK as SQLITE_IOERR_LOCK, + SQLITE_IOERR_MMAP as SQLITE_IOERR_MMAP, + SQLITE_IOERR_NOMEM as SQLITE_IOERR_NOMEM, + SQLITE_IOERR_RDLOCK as SQLITE_IOERR_RDLOCK, + SQLITE_IOERR_READ as SQLITE_IOERR_READ, + SQLITE_IOERR_ROLLBACK_ATOMIC as SQLITE_IOERR_ROLLBACK_ATOMIC, + SQLITE_IOERR_SEEK as SQLITE_IOERR_SEEK, + SQLITE_IOERR_SHMLOCK as SQLITE_IOERR_SHMLOCK, + SQLITE_IOERR_SHMMAP as SQLITE_IOERR_SHMMAP, + SQLITE_IOERR_SHMOPEN as SQLITE_IOERR_SHMOPEN, + SQLITE_IOERR_SHMSIZE as SQLITE_IOERR_SHMSIZE, + SQLITE_IOERR_SHORT_READ as SQLITE_IOERR_SHORT_READ, + SQLITE_IOERR_TRUNCATE as SQLITE_IOERR_TRUNCATE, + SQLITE_IOERR_UNLOCK as SQLITE_IOERR_UNLOCK, + SQLITE_IOERR_VNODE as 
SQLITE_IOERR_VNODE, + SQLITE_IOERR_WRITE as SQLITE_IOERR_WRITE, + SQLITE_LIMIT_ATTACHED as SQLITE_LIMIT_ATTACHED, + SQLITE_LIMIT_COLUMN as SQLITE_LIMIT_COLUMN, + SQLITE_LIMIT_COMPOUND_SELECT as SQLITE_LIMIT_COMPOUND_SELECT, + SQLITE_LIMIT_EXPR_DEPTH as SQLITE_LIMIT_EXPR_DEPTH, + SQLITE_LIMIT_FUNCTION_ARG as SQLITE_LIMIT_FUNCTION_ARG, + SQLITE_LIMIT_LENGTH as SQLITE_LIMIT_LENGTH, + SQLITE_LIMIT_LIKE_PATTERN_LENGTH as SQLITE_LIMIT_LIKE_PATTERN_LENGTH, + SQLITE_LIMIT_SQL_LENGTH as SQLITE_LIMIT_SQL_LENGTH, + SQLITE_LIMIT_TRIGGER_DEPTH as SQLITE_LIMIT_TRIGGER_DEPTH, + SQLITE_LIMIT_VARIABLE_NUMBER as SQLITE_LIMIT_VARIABLE_NUMBER, + SQLITE_LIMIT_VDBE_OP as SQLITE_LIMIT_VDBE_OP, + SQLITE_LIMIT_WORKER_THREADS as SQLITE_LIMIT_WORKER_THREADS, + SQLITE_LOCKED as SQLITE_LOCKED, + SQLITE_LOCKED_SHAREDCACHE as SQLITE_LOCKED_SHAREDCACHE, + SQLITE_LOCKED_VTAB as SQLITE_LOCKED_VTAB, + SQLITE_MISMATCH as SQLITE_MISMATCH, + SQLITE_MISUSE as SQLITE_MISUSE, + SQLITE_NOLFS as SQLITE_NOLFS, + SQLITE_NOMEM as SQLITE_NOMEM, + SQLITE_NOTADB as SQLITE_NOTADB, + SQLITE_NOTFOUND as SQLITE_NOTFOUND, + SQLITE_NOTICE as SQLITE_NOTICE, + SQLITE_NOTICE_RECOVER_ROLLBACK as SQLITE_NOTICE_RECOVER_ROLLBACK, + SQLITE_NOTICE_RECOVER_WAL as SQLITE_NOTICE_RECOVER_WAL, + SQLITE_OK_LOAD_PERMANENTLY as SQLITE_OK_LOAD_PERMANENTLY, + SQLITE_OK_SYMLINK as SQLITE_OK_SYMLINK, + SQLITE_PERM as SQLITE_PERM, + SQLITE_PROTOCOL as SQLITE_PROTOCOL, + SQLITE_RANGE as SQLITE_RANGE, + SQLITE_READONLY as SQLITE_READONLY, + SQLITE_READONLY_CANTINIT as SQLITE_READONLY_CANTINIT, + SQLITE_READONLY_CANTLOCK as SQLITE_READONLY_CANTLOCK, + SQLITE_READONLY_DBMOVED as SQLITE_READONLY_DBMOVED, + SQLITE_READONLY_DIRECTORY as SQLITE_READONLY_DIRECTORY, + SQLITE_READONLY_RECOVERY as SQLITE_READONLY_RECOVERY, + SQLITE_READONLY_ROLLBACK as SQLITE_READONLY_ROLLBACK, + SQLITE_ROW as SQLITE_ROW, + SQLITE_SCHEMA as SQLITE_SCHEMA, + SQLITE_TOOBIG as SQLITE_TOOBIG, + SQLITE_WARNING as SQLITE_WARNING, + SQLITE_WARNING_AUTOINDEX as 
SQLITE_WARNING_AUTOINDEX, + ) + +if sys.version_info < (3, 12): + from sqlite3.dbapi2 import enable_shared_cache as enable_shared_cache, version as version + +if sys.version_info < (3, 10): + from sqlite3.dbapi2 import OptimizedUnicode as OptimizedUnicode + +_CursorT = TypeVar("_CursorT", bound=Cursor) +_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None +# Data that is passed through adapters can be of any type accepted by an adapter. +_AdaptedInputData: TypeAlias = _SqliteData | Any +# The Mapping must really be a dict, but making it invariant is too annoying. +_Parameters: TypeAlias = SupportsLenAndGetItem[_AdaptedInputData] | Mapping[str, _AdaptedInputData] + +class _AnyParamWindowAggregateClass(Protocol): + def step(self, *args: Any) -> object: ... + def inverse(self, *args: Any) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +class _WindowAggregateClass(Protocol): + step: Callable[..., object] + inverse: Callable[..., object] + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +class _AggregateProtocol(Protocol): + def step(self, value: int, /) -> object: ... + def finalize(self) -> int: ... + +class _SingleParamWindowAggregateClass(Protocol): + def step(self, param: Any, /) -> object: ... + def inverse(self, param: Any, /) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +# These classes are implemented in the C module _sqlite3. At runtime, they're imported +# from there into sqlite3.dbapi2 and from that module to here. However, they +# consider themselves to live in the sqlite3.* namespace, so we'll define them here. + +class Error(Exception): + if sys.version_info >= (3, 11): + sqlite_errorcode: int + sqlite_errorname: str + +class DatabaseError(Error): ... +class DataError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InterfaceError(Error): ... +class InternalError(DatabaseError): ... 
+class NotSupportedError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class Warning(Exception): ... + +class Connection: + """SQLite database connection object.""" + @property + def DataError(self) -> type[DataError]: ... + @property + def DatabaseError(self) -> type[DatabaseError]: ... + @property + def Error(self) -> type[Error]: ... + @property + def IntegrityError(self) -> type[IntegrityError]: ... + @property + def InterfaceError(self) -> type[InterfaceError]: ... + @property + def InternalError(self) -> type[InternalError]: ... + @property + def NotSupportedError(self) -> type[NotSupportedError]: ... + @property + def OperationalError(self) -> type[OperationalError]: ... + @property + def ProgrammingError(self) -> type[ProgrammingError]: ... + @property + def Warning(self) -> type[Warning]: ... + @property + def in_transaction(self) -> bool: ... + isolation_level: str | None # one of '', 'DEFERRED', 'IMMEDIATE' or 'EXCLUSIVE' + @property + def total_changes(self) -> int: ... + if sys.version_info >= (3, 12): + @property + def autocommit(self) -> int: ... + @autocommit.setter + def autocommit(self, val: int) -> None: ... + row_factory: Any + text_factory: Any + if sys.version_info >= (3, 12): + def __init__( + self, + database: StrOrBytesPath, + timeout: float = ..., + detect_types: int = ..., + isolation_level: str | None = ..., + check_same_thread: bool = ..., + factory: type[Connection] | None = ..., + cached_statements: int = ..., + uri: bool = ..., + autocommit: bool = ..., + ) -> None: ... + else: + def __init__( + self, + database: StrOrBytesPath, + timeout: float = ..., + detect_types: int = ..., + isolation_level: str | None = ..., + check_same_thread: bool = ..., + factory: type[Connection] | None = ..., + cached_statements: int = ..., + uri: bool = ..., + ) -> None: ... + + def close(self) -> None: + """ + Close the database connection. 
+ + Any pending transaction is not committed implicitly. + """ + ... + if sys.version_info >= (3, 11): + def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: + """ + Open and return a BLOB object. + + table + Table name. + column + Column name. + row + Row index. + readonly + Open the BLOB without write permissions. + name + Database name. + """ + ... + + def commit(self) -> None: + """ + Commit any pending transaction to the database. + + If there is no open transaction, this method is a no-op. + """ + ... + def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: + """ + Creates a new aggregate. + + Note: Passing keyword arguments 'name', 'n_arg' and 'aggregate_class' + to _sqlite3.Connection.create_aggregate() is deprecated. Parameters + 'name', 'n_arg' and 'aggregate_class' will become positional-only in + Python 3.15. + """ + ... + if sys.version_info >= (3, 11): + # num_params determines how many params will be passed to the aggregate class. We provide an overload + # for the case where num_params = 1, which is expected to be the common case. + @overload + def create_window_function( + self, name: str, num_params: Literal[1], aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None, / + ) -> None: + """ + Creates or redefines an aggregate window function. Non-standard. + + name + The name of the SQL aggregate window function to be created or + redefined. + num_params + The number of arguments the step and inverse methods takes. + aggregate_class + A class with step(), finalize(), value(), and inverse() methods. + Set to None to clear the window function. + """ + ... + # And for num_params = -1, which means the aggregate must accept any number of parameters. 
+ @overload + def create_window_function( + self, name: str, num_params: Literal[-1], aggregate_class: Callable[[], _AnyParamWindowAggregateClass] | None, / + ) -> None: + """ + Creates or redefines an aggregate window function. Non-standard. + + name + The name of the SQL aggregate window function to be created or + redefined. + num_params + The number of arguments the step and inverse methods takes. + aggregate_class + A class with step(), finalize(), value(), and inverse() methods. + Set to None to clear the window function. + """ + ... + @overload + def create_window_function( + self, name: str, num_params: int, aggregate_class: Callable[[], _WindowAggregateClass] | None, / + ) -> None: + """ + Creates or redefines an aggregate window function. Non-standard. + + name + The name of the SQL aggregate window function to be created or + redefined. + num_params + The number of arguments the step and inverse methods takes. + aggregate_class + A class with step(), finalize(), value(), and inverse() methods. + Set to None to clear the window function. + """ + ... + + def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: + """Creates a collation function.""" + ... + def create_function( + self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False + ) -> None: + """ + Creates a new function. + + Note: Passing keyword arguments 'name', 'narg' and 'func' to + _sqlite3.Connection.create_function() is deprecated. Parameters + 'name', 'narg' and 'func' will become positional-only in Python 3.15. + """ + ... + @overload + def cursor(self, factory: None = None) -> Cursor: + """Return a cursor for the connection.""" + ... + @overload + def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: + """Return a cursor for the connection.""" + ... + def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: + """Executes an SQL statement.""" + ... 
+ def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: + """Repeatedly executes an SQL statement.""" + ... + def executescript(self, sql_script: str, /) -> Cursor: + """Executes multiple SQL statements at once.""" + ... + def interrupt(self) -> None: + """Abort any pending database operation.""" + ... + if sys.version_info >= (3, 13): + def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: + """ + Returns iterator to the dump of the database in an SQL text format. + + filter + An optional LIKE pattern for database objects to dump + """ + ... + else: + def iterdump(self) -> Generator[str, None, None]: ... + + def rollback(self) -> None: + """ + Roll back to the start of any pending transaction. + + If there is no open transaction, this method is a no-op. + """ + ... + def set_authorizer( + self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None + ) -> None: + """ + Set authorizer callback. + + Note: Passing keyword argument 'authorizer_callback' to + _sqlite3.Connection.set_authorizer() is deprecated. Parameter + 'authorizer_callback' will become positional-only in Python 3.15. + """ + ... + def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: + """ + Set progress handler callback. + + progress_handler + A callable that takes no arguments. + If the callable returns non-zero, the current query is terminated, + and an exception is raised. + n + The number of SQLite virtual machine instructions that are + executed between invocations of 'progress_handler'. + + If 'progress_handler' is None or 'n' is 0, the progress handler is disabled. + + Note: Passing keyword argument 'progress_handler' to + _sqlite3.Connection.set_progress_handler() is deprecated. Parameter + 'progress_handler' will become positional-only in Python 3.15. + """ + ... 
+ def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: + """ + Set a trace callback called for each SQL statement (passed as unicode). + + Note: Passing keyword argument 'trace_callback' to + _sqlite3.Connection.set_trace_callback() is deprecated. Parameter + 'trace_callback' will become positional-only in Python 3.15. + """ + ... + # enable_load_extension and load_extension is not available on python distributions compiled + # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 + def enable_load_extension(self, enable: bool, /) -> None: + """Enable dynamic loading of SQLite extension modules.""" + ... + if sys.version_info >= (3, 12): + def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: + """Load SQLite extension module.""" + ... + else: + def load_extension(self, name: str, /) -> None: ... + + def backup( + self, + target: Connection, + *, + pages: int = -1, + progress: Callable[[int, int, int], object] | None = None, + name: str = "main", + sleep: float = 0.25, + ) -> None: + """Makes a backup of the database.""" + ... + if sys.version_info >= (3, 11): + def setlimit(self, category: int, limit: int, /) -> int: + """ + Set connection run-time limits. + + category + The limit category to be set. + limit + The new limit. If the new limit is a negative number, the limit is + unchanged. + + Attempts to increase a limit above its hard upper bound are silently truncated + to the hard upper bound. Regardless of whether or not the limit was changed, + the prior value of the limit is returned. + """ + ... + def getlimit(self, category: int, /) -> int: + """ + Get connection run-time limits. + + category + The limit category to be queried. + """ + ... + def serialize(self, *, name: str = "main") -> bytes: + """ + Serialize a database into a byte string. + + name + Which database to serialize. 
+ + For an ordinary on-disk database file, the serialization is just a copy of the + disk file. For an in-memory database or a "temp" database, the serialization is + the same sequence of bytes which would be written to disk if that database + were backed up to disk. + """ + ... + def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: + """ + Load a serialized database. + + data + The serialized database content. + name + Which database to reopen with the deserialization. + + The deserialize interface causes the database connection to disconnect from the + target database, and then reopen it as an in-memory database based on the given + serialized data. + + The deserialize interface will fail with SQLITE_BUSY if the database is + currently in a read transaction or is involved in a backup operation. + """ + ... + if sys.version_info >= (3, 12): + def getconfig(self, op: int, /) -> bool: + """ + Query a boolean connection configuration option. + + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + """ + ... + def setconfig(self, op: int, enable: bool = True, /) -> bool: + """ + Set a boolean connection configuration option. + + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + """ + ... + + def __call__(self, sql: str, /) -> _Statement: + """Call self as a function.""" + ... + def __enter__(self) -> Self: + """ + Called when the connection is used as a context manager. + + Returns itself as a convenience to the caller. + """ + ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / + ) -> Literal[False]: + """ + Called when the connection is used as a context manager. + + If there was any exception, a rollback takes place; otherwise we commit. + """ + ... + +class Cursor(Iterator[Any]): + """SQLite database cursor class.""" + arraysize: int + @property + def connection(self) -> Connection: ... 
+ # May be None, but using | Any instead to avoid slightly annoying false positives. + @property + def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | Any: ... + @property + def lastrowid(self) -> int | None: ... + row_factory: Callable[[Cursor, Row], object] | None + @property + def rowcount(self) -> int: ... + def __init__(self, cursor: Connection, /) -> None: ... + def close(self) -> None: + """Closes the cursor.""" + ... + def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: + """Executes an SQL statement.""" + ... + def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: + """Repeatedly executes an SQL statement.""" + ... + def executescript(self, sql_script: str, /) -> Cursor: + """Executes multiple SQL statements at once.""" + ... + def fetchall(self) -> list[Any]: + """Fetches all rows from the resultset.""" + ... + def fetchmany(self, size: int | None = 1) -> list[Any]: + """ + Fetches several rows from the resultset. + + size + The default value is set by the Cursor.arraysize attribute. + """ + ... + # Returns either a row (as created by the row_factory) or None, but + # putting None in the return annotation causes annoying false positives. + def fetchone(self) -> Any: + """Fetches one row from the resultset.""" + ... + def setinputsizes(self, sizes: Unused, /) -> None: + """Required by DB-API. Does nothing in sqlite3.""" + ... + def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: + """Required by DB-API. Does nothing in sqlite3.""" + ... + def __iter__(self) -> Self: + """Implement iter(self).""" + ... + def __next__(self) -> Any: + """Implement next(self).""" + ... + +@final +class PrepareProtocol: + """PEP 246 style object adaption protocol type.""" + def __init__(self, *args: object, **kwargs: object) -> None: ... + +class Row(Sequence[Any]): + def __init__(self, cursor: Cursor, data: tuple[Any, ...], /) -> None: ... 
+ def keys(self) -> list[str]: + """Returns the keys of the row.""" + ... + @overload + def __getitem__(self, key: int | str, /) -> Any: + """Return self[key].""" + ... + @overload + def __getitem__(self, key: slice, /) -> tuple[Any, ...]: + """Return self[key].""" + ... + def __hash__(self) -> int: + """Return hash(self).""" + ... + def __iter__(self) -> Iterator[Any]: + """Implement iter(self).""" + ... + def __len__(self) -> int: + """Return len(self).""" + ... + # These return NotImplemented for anything that is not a Row. + def __eq__(self, value: object, /) -> bool: + """Return self==value.""" + ... + def __ge__(self, value: object, /) -> bool: + """Return self>=value.""" + ... + def __gt__(self, value: object, /) -> bool: + """Return self>value.""" + ... + def __le__(self, value: object, /) -> bool: + """Return self<=value.""" + ... + def __lt__(self, value: object, /) -> bool: + """Return self bool: + """Return self!=value.""" + ... + +@final +class _Statement: ... + +if sys.version_info >= (3, 11): + @final + class Blob: + def close(self) -> None: + """Close the blob.""" + ... + def read(self, length: int = -1, /) -> bytes: + """ + Read data at the current offset position. + + length + Read length in bytes. + + If the end of the blob is reached, the data up to end of file will be returned. + When length is not specified, or is negative, Blob.read() will read until the + end of the blob. + """ + ... + def write(self, data: ReadableBuffer, /) -> None: + """ + Write data at the current offset. + + This function cannot change the blob length. Writing beyond the end of the + blob will result in an exception being raised. + """ + ... + def tell(self) -> int: + """Return the current access position for the blob.""" + ... + # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END + def seek(self, offset: int, origin: int = 0, /) -> None: + """ + Set the current access position to offset. 
+ + The origin argument defaults to os.SEEK_SET (absolute blob positioning). + Other values for origin are os.SEEK_CUR (seek relative to the current position) + and os.SEEK_END (seek relative to the blob's end). + """ + ... + def __len__(self) -> int: + """Return len(self).""" + ... + def __enter__(self) -> Self: + """Blob context manager enter.""" + ... + def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: + """Blob context manager exit.""" + ... + def __getitem__(self, key: SupportsIndex | slice, /) -> int: + """Return self[key].""" + ... + def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: + """Set self[key] to value.""" + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi index 0e3e891..d3ea3ef 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi @@ -1,22 +1,226 @@ -import sqlite3 import sys -from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused -from collections.abc import Callable, Generator, Iterable, Iterator, Mapping +from _sqlite3 import ( + PARSE_COLNAMES as PARSE_COLNAMES, + PARSE_DECLTYPES as PARSE_DECLTYPES, + SQLITE_ALTER_TABLE as SQLITE_ALTER_TABLE, + SQLITE_ANALYZE as SQLITE_ANALYZE, + SQLITE_ATTACH as SQLITE_ATTACH, + SQLITE_CREATE_INDEX as SQLITE_CREATE_INDEX, + SQLITE_CREATE_TABLE as SQLITE_CREATE_TABLE, + SQLITE_CREATE_TEMP_INDEX as SQLITE_CREATE_TEMP_INDEX, + SQLITE_CREATE_TEMP_TABLE as SQLITE_CREATE_TEMP_TABLE, + SQLITE_CREATE_TEMP_TRIGGER as SQLITE_CREATE_TEMP_TRIGGER, + SQLITE_CREATE_TEMP_VIEW as SQLITE_CREATE_TEMP_VIEW, + SQLITE_CREATE_TRIGGER as SQLITE_CREATE_TRIGGER, + SQLITE_CREATE_VIEW as SQLITE_CREATE_VIEW, + SQLITE_CREATE_VTABLE as SQLITE_CREATE_VTABLE, + SQLITE_DELETE as SQLITE_DELETE, + SQLITE_DENY 
as SQLITE_DENY, + SQLITE_DETACH as SQLITE_DETACH, + SQLITE_DONE as SQLITE_DONE, + SQLITE_DROP_INDEX as SQLITE_DROP_INDEX, + SQLITE_DROP_TABLE as SQLITE_DROP_TABLE, + SQLITE_DROP_TEMP_INDEX as SQLITE_DROP_TEMP_INDEX, + SQLITE_DROP_TEMP_TABLE as SQLITE_DROP_TEMP_TABLE, + SQLITE_DROP_TEMP_TRIGGER as SQLITE_DROP_TEMP_TRIGGER, + SQLITE_DROP_TEMP_VIEW as SQLITE_DROP_TEMP_VIEW, + SQLITE_DROP_TRIGGER as SQLITE_DROP_TRIGGER, + SQLITE_DROP_VIEW as SQLITE_DROP_VIEW, + SQLITE_DROP_VTABLE as SQLITE_DROP_VTABLE, + SQLITE_FUNCTION as SQLITE_FUNCTION, + SQLITE_IGNORE as SQLITE_IGNORE, + SQLITE_INSERT as SQLITE_INSERT, + SQLITE_OK as SQLITE_OK, + SQLITE_PRAGMA as SQLITE_PRAGMA, + SQLITE_READ as SQLITE_READ, + SQLITE_RECURSIVE as SQLITE_RECURSIVE, + SQLITE_REINDEX as SQLITE_REINDEX, + SQLITE_SAVEPOINT as SQLITE_SAVEPOINT, + SQLITE_SELECT as SQLITE_SELECT, + SQLITE_TRANSACTION as SQLITE_TRANSACTION, + SQLITE_UPDATE as SQLITE_UPDATE, + adapt as adapt, + adapters as adapters, + complete_statement as complete_statement, + connect as connect, + converters as converters, + enable_callback_tracebacks as enable_callback_tracebacks, + register_adapter as register_adapter, + register_converter as register_converter, + sqlite_version as sqlite_version, +) from datetime import date, datetime, time -from types import TracebackType -from typing import Any, Literal, Protocol, SupportsIndex, TypeVar, final, overload -from typing_extensions import Self, TypeAlias +from sqlite3 import ( + Connection as Connection, + Cursor as Cursor, + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + PrepareProtocol as PrepareProtocol, + ProgrammingError as ProgrammingError, + Row as Row, + Warning as Warning, +) -_T = TypeVar("_T") -_ConnectionT = TypeVar("_ConnectionT", bound=Connection) -_CursorT = 
TypeVar("_CursorT", bound=Cursor) -_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None -# Data that is passed through adapters can be of any type accepted by an adapter. -_AdaptedInputData: TypeAlias = _SqliteData | Any -# The Mapping must really be a dict, but making it invariant is too annoying. -_Parameters: TypeAlias = SupportsLenAndGetItem[_AdaptedInputData] | Mapping[str, _AdaptedInputData] -_Adapter: TypeAlias = Callable[[_T], _SqliteData] -_Converter: TypeAlias = Callable[[bytes], Any] +if sys.version_info >= (3, 12): + from _sqlite3 import ( + LEGACY_TRANSACTION_CONTROL as LEGACY_TRANSACTION_CONTROL, + SQLITE_DBCONFIG_DEFENSIVE as SQLITE_DBCONFIG_DEFENSIVE, + SQLITE_DBCONFIG_DQS_DDL as SQLITE_DBCONFIG_DQS_DDL, + SQLITE_DBCONFIG_DQS_DML as SQLITE_DBCONFIG_DQS_DML, + SQLITE_DBCONFIG_ENABLE_FKEY as SQLITE_DBCONFIG_ENABLE_FKEY, + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER as SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER, + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION as SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION, + SQLITE_DBCONFIG_ENABLE_QPSG as SQLITE_DBCONFIG_ENABLE_QPSG, + SQLITE_DBCONFIG_ENABLE_TRIGGER as SQLITE_DBCONFIG_ENABLE_TRIGGER, + SQLITE_DBCONFIG_ENABLE_VIEW as SQLITE_DBCONFIG_ENABLE_VIEW, + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE as SQLITE_DBCONFIG_LEGACY_ALTER_TABLE, + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT as SQLITE_DBCONFIG_LEGACY_FILE_FORMAT, + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE as SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE, + SQLITE_DBCONFIG_RESET_DATABASE as SQLITE_DBCONFIG_RESET_DATABASE, + SQLITE_DBCONFIG_TRIGGER_EQP as SQLITE_DBCONFIG_TRIGGER_EQP, + SQLITE_DBCONFIG_TRUSTED_SCHEMA as SQLITE_DBCONFIG_TRUSTED_SCHEMA, + SQLITE_DBCONFIG_WRITABLE_SCHEMA as SQLITE_DBCONFIG_WRITABLE_SCHEMA, + ) + +if sys.version_info >= (3, 11): + from _sqlite3 import ( + SQLITE_ABORT as SQLITE_ABORT, + SQLITE_ABORT_ROLLBACK as SQLITE_ABORT_ROLLBACK, + SQLITE_AUTH as SQLITE_AUTH, + SQLITE_AUTH_USER as SQLITE_AUTH_USER, + SQLITE_BUSY as SQLITE_BUSY, + SQLITE_BUSY_RECOVERY as 
SQLITE_BUSY_RECOVERY, + SQLITE_BUSY_SNAPSHOT as SQLITE_BUSY_SNAPSHOT, + SQLITE_BUSY_TIMEOUT as SQLITE_BUSY_TIMEOUT, + SQLITE_CANTOPEN as SQLITE_CANTOPEN, + SQLITE_CANTOPEN_CONVPATH as SQLITE_CANTOPEN_CONVPATH, + SQLITE_CANTOPEN_DIRTYWAL as SQLITE_CANTOPEN_DIRTYWAL, + SQLITE_CANTOPEN_FULLPATH as SQLITE_CANTOPEN_FULLPATH, + SQLITE_CANTOPEN_ISDIR as SQLITE_CANTOPEN_ISDIR, + SQLITE_CANTOPEN_NOTEMPDIR as SQLITE_CANTOPEN_NOTEMPDIR, + SQLITE_CANTOPEN_SYMLINK as SQLITE_CANTOPEN_SYMLINK, + SQLITE_CONSTRAINT as SQLITE_CONSTRAINT, + SQLITE_CONSTRAINT_CHECK as SQLITE_CONSTRAINT_CHECK, + SQLITE_CONSTRAINT_COMMITHOOK as SQLITE_CONSTRAINT_COMMITHOOK, + SQLITE_CONSTRAINT_FOREIGNKEY as SQLITE_CONSTRAINT_FOREIGNKEY, + SQLITE_CONSTRAINT_FUNCTION as SQLITE_CONSTRAINT_FUNCTION, + SQLITE_CONSTRAINT_NOTNULL as SQLITE_CONSTRAINT_NOTNULL, + SQLITE_CONSTRAINT_PINNED as SQLITE_CONSTRAINT_PINNED, + SQLITE_CONSTRAINT_PRIMARYKEY as SQLITE_CONSTRAINT_PRIMARYKEY, + SQLITE_CONSTRAINT_ROWID as SQLITE_CONSTRAINT_ROWID, + SQLITE_CONSTRAINT_TRIGGER as SQLITE_CONSTRAINT_TRIGGER, + SQLITE_CONSTRAINT_UNIQUE as SQLITE_CONSTRAINT_UNIQUE, + SQLITE_CONSTRAINT_VTAB as SQLITE_CONSTRAINT_VTAB, + SQLITE_CORRUPT as SQLITE_CORRUPT, + SQLITE_CORRUPT_INDEX as SQLITE_CORRUPT_INDEX, + SQLITE_CORRUPT_SEQUENCE as SQLITE_CORRUPT_SEQUENCE, + SQLITE_CORRUPT_VTAB as SQLITE_CORRUPT_VTAB, + SQLITE_EMPTY as SQLITE_EMPTY, + SQLITE_ERROR as SQLITE_ERROR, + SQLITE_ERROR_MISSING_COLLSEQ as SQLITE_ERROR_MISSING_COLLSEQ, + SQLITE_ERROR_RETRY as SQLITE_ERROR_RETRY, + SQLITE_ERROR_SNAPSHOT as SQLITE_ERROR_SNAPSHOT, + SQLITE_FORMAT as SQLITE_FORMAT, + SQLITE_FULL as SQLITE_FULL, + SQLITE_INTERNAL as SQLITE_INTERNAL, + SQLITE_INTERRUPT as SQLITE_INTERRUPT, + SQLITE_IOERR as SQLITE_IOERR, + SQLITE_IOERR_ACCESS as SQLITE_IOERR_ACCESS, + SQLITE_IOERR_AUTH as SQLITE_IOERR_AUTH, + SQLITE_IOERR_BEGIN_ATOMIC as SQLITE_IOERR_BEGIN_ATOMIC, + SQLITE_IOERR_BLOCKED as SQLITE_IOERR_BLOCKED, + SQLITE_IOERR_CHECKRESERVEDLOCK as 
SQLITE_IOERR_CHECKRESERVEDLOCK, + SQLITE_IOERR_CLOSE as SQLITE_IOERR_CLOSE, + SQLITE_IOERR_COMMIT_ATOMIC as SQLITE_IOERR_COMMIT_ATOMIC, + SQLITE_IOERR_CONVPATH as SQLITE_IOERR_CONVPATH, + SQLITE_IOERR_CORRUPTFS as SQLITE_IOERR_CORRUPTFS, + SQLITE_IOERR_DATA as SQLITE_IOERR_DATA, + SQLITE_IOERR_DELETE as SQLITE_IOERR_DELETE, + SQLITE_IOERR_DELETE_NOENT as SQLITE_IOERR_DELETE_NOENT, + SQLITE_IOERR_DIR_CLOSE as SQLITE_IOERR_DIR_CLOSE, + SQLITE_IOERR_DIR_FSYNC as SQLITE_IOERR_DIR_FSYNC, + SQLITE_IOERR_FSTAT as SQLITE_IOERR_FSTAT, + SQLITE_IOERR_FSYNC as SQLITE_IOERR_FSYNC, + SQLITE_IOERR_GETTEMPPATH as SQLITE_IOERR_GETTEMPPATH, + SQLITE_IOERR_LOCK as SQLITE_IOERR_LOCK, + SQLITE_IOERR_MMAP as SQLITE_IOERR_MMAP, + SQLITE_IOERR_NOMEM as SQLITE_IOERR_NOMEM, + SQLITE_IOERR_RDLOCK as SQLITE_IOERR_RDLOCK, + SQLITE_IOERR_READ as SQLITE_IOERR_READ, + SQLITE_IOERR_ROLLBACK_ATOMIC as SQLITE_IOERR_ROLLBACK_ATOMIC, + SQLITE_IOERR_SEEK as SQLITE_IOERR_SEEK, + SQLITE_IOERR_SHMLOCK as SQLITE_IOERR_SHMLOCK, + SQLITE_IOERR_SHMMAP as SQLITE_IOERR_SHMMAP, + SQLITE_IOERR_SHMOPEN as SQLITE_IOERR_SHMOPEN, + SQLITE_IOERR_SHMSIZE as SQLITE_IOERR_SHMSIZE, + SQLITE_IOERR_SHORT_READ as SQLITE_IOERR_SHORT_READ, + SQLITE_IOERR_TRUNCATE as SQLITE_IOERR_TRUNCATE, + SQLITE_IOERR_UNLOCK as SQLITE_IOERR_UNLOCK, + SQLITE_IOERR_VNODE as SQLITE_IOERR_VNODE, + SQLITE_IOERR_WRITE as SQLITE_IOERR_WRITE, + SQLITE_LIMIT_ATTACHED as SQLITE_LIMIT_ATTACHED, + SQLITE_LIMIT_COLUMN as SQLITE_LIMIT_COLUMN, + SQLITE_LIMIT_COMPOUND_SELECT as SQLITE_LIMIT_COMPOUND_SELECT, + SQLITE_LIMIT_EXPR_DEPTH as SQLITE_LIMIT_EXPR_DEPTH, + SQLITE_LIMIT_FUNCTION_ARG as SQLITE_LIMIT_FUNCTION_ARG, + SQLITE_LIMIT_LENGTH as SQLITE_LIMIT_LENGTH, + SQLITE_LIMIT_LIKE_PATTERN_LENGTH as SQLITE_LIMIT_LIKE_PATTERN_LENGTH, + SQLITE_LIMIT_SQL_LENGTH as SQLITE_LIMIT_SQL_LENGTH, + SQLITE_LIMIT_TRIGGER_DEPTH as SQLITE_LIMIT_TRIGGER_DEPTH, + SQLITE_LIMIT_VARIABLE_NUMBER as SQLITE_LIMIT_VARIABLE_NUMBER, + SQLITE_LIMIT_VDBE_OP as SQLITE_LIMIT_VDBE_OP, + 
SQLITE_LIMIT_WORKER_THREADS as SQLITE_LIMIT_WORKER_THREADS, + SQLITE_LOCKED as SQLITE_LOCKED, + SQLITE_LOCKED_SHAREDCACHE as SQLITE_LOCKED_SHAREDCACHE, + SQLITE_LOCKED_VTAB as SQLITE_LOCKED_VTAB, + SQLITE_MISMATCH as SQLITE_MISMATCH, + SQLITE_MISUSE as SQLITE_MISUSE, + SQLITE_NOLFS as SQLITE_NOLFS, + SQLITE_NOMEM as SQLITE_NOMEM, + SQLITE_NOTADB as SQLITE_NOTADB, + SQLITE_NOTFOUND as SQLITE_NOTFOUND, + SQLITE_NOTICE as SQLITE_NOTICE, + SQLITE_NOTICE_RECOVER_ROLLBACK as SQLITE_NOTICE_RECOVER_ROLLBACK, + SQLITE_NOTICE_RECOVER_WAL as SQLITE_NOTICE_RECOVER_WAL, + SQLITE_OK_LOAD_PERMANENTLY as SQLITE_OK_LOAD_PERMANENTLY, + SQLITE_OK_SYMLINK as SQLITE_OK_SYMLINK, + SQLITE_PERM as SQLITE_PERM, + SQLITE_PROTOCOL as SQLITE_PROTOCOL, + SQLITE_RANGE as SQLITE_RANGE, + SQLITE_READONLY as SQLITE_READONLY, + SQLITE_READONLY_CANTINIT as SQLITE_READONLY_CANTINIT, + SQLITE_READONLY_CANTLOCK as SQLITE_READONLY_CANTLOCK, + SQLITE_READONLY_DBMOVED as SQLITE_READONLY_DBMOVED, + SQLITE_READONLY_DIRECTORY as SQLITE_READONLY_DIRECTORY, + SQLITE_READONLY_RECOVERY as SQLITE_READONLY_RECOVERY, + SQLITE_READONLY_ROLLBACK as SQLITE_READONLY_ROLLBACK, + SQLITE_ROW as SQLITE_ROW, + SQLITE_SCHEMA as SQLITE_SCHEMA, + SQLITE_TOOBIG as SQLITE_TOOBIG, + SQLITE_WARNING as SQLITE_WARNING, + SQLITE_WARNING_AUTOINDEX as SQLITE_WARNING_AUTOINDEX, + ) + from sqlite3 import Blob as Blob + +if sys.version_info < (3, 14): + # Deprecated and removed from _sqlite3 in 3.12, but removed from here in 3.14. + version: str + +if sys.version_info < (3, 12): + if sys.version_info >= (3, 10): + # deprecation wrapper that has a different name for the argument... + def enable_shared_cache(enable: int) -> None: ... + else: + from _sqlite3 import enable_shared_cache as enable_shared_cache + +if sys.version_info < (3, 10): + from _sqlite3 import OptimizedUnicode as OptimizedUnicode paramstyle: str threadsafety: int @@ -29,862 +233,9 @@ def DateFromTicks(ticks: float) -> Date: ... def TimeFromTicks(ticks: float) -> Time: ... 
def TimestampFromTicks(ticks: float) -> Timestamp: ... -version_info: tuple[int, int, int] +if sys.version_info < (3, 14): + # Deprecated in 3.12, removed in 3.14. + version_info: tuple[int, int, int] + sqlite_version_info: tuple[int, int, int] Binary = memoryview - -# The remaining definitions are imported from _sqlite3. - -PARSE_COLNAMES: int -PARSE_DECLTYPES: int -SQLITE_ALTER_TABLE: int -SQLITE_ANALYZE: int -SQLITE_ATTACH: int -SQLITE_CREATE_INDEX: int -SQLITE_CREATE_TABLE: int -SQLITE_CREATE_TEMP_INDEX: int -SQLITE_CREATE_TEMP_TABLE: int -SQLITE_CREATE_TEMP_TRIGGER: int -SQLITE_CREATE_TEMP_VIEW: int -SQLITE_CREATE_TRIGGER: int -SQLITE_CREATE_VIEW: int -SQLITE_CREATE_VTABLE: int -SQLITE_DELETE: int -SQLITE_DENY: int -SQLITE_DETACH: int -SQLITE_DONE: int -SQLITE_DROP_INDEX: int -SQLITE_DROP_TABLE: int -SQLITE_DROP_TEMP_INDEX: int -SQLITE_DROP_TEMP_TABLE: int -SQLITE_DROP_TEMP_TRIGGER: int -SQLITE_DROP_TEMP_VIEW: int -SQLITE_DROP_TRIGGER: int -SQLITE_DROP_VIEW: int -SQLITE_DROP_VTABLE: int -SQLITE_FUNCTION: int -SQLITE_IGNORE: int -SQLITE_INSERT: int -SQLITE_OK: int -if sys.version_info >= (3, 11): - SQLITE_LIMIT_LENGTH: int - SQLITE_LIMIT_SQL_LENGTH: int - SQLITE_LIMIT_COLUMN: int - SQLITE_LIMIT_EXPR_DEPTH: int - SQLITE_LIMIT_COMPOUND_SELECT: int - SQLITE_LIMIT_VDBE_OP: int - SQLITE_LIMIT_FUNCTION_ARG: int - SQLITE_LIMIT_ATTACHED: int - SQLITE_LIMIT_LIKE_PATTERN_LENGTH: int - SQLITE_LIMIT_VARIABLE_NUMBER: int - SQLITE_LIMIT_TRIGGER_DEPTH: int - SQLITE_LIMIT_WORKER_THREADS: int -SQLITE_PRAGMA: int -SQLITE_READ: int -SQLITE_REINDEX: int -SQLITE_RECURSIVE: int -SQLITE_SAVEPOINT: int -SQLITE_SELECT: int -SQLITE_TRANSACTION: int -SQLITE_UPDATE: int -adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]] -converters: dict[str, _Converter] -sqlite_version: str -version: str - -if sys.version_info >= (3, 11): - SQLITE_ABORT: int - SQLITE_ABORT_ROLLBACK: int - SQLITE_AUTH: int - SQLITE_AUTH_USER: int - SQLITE_BUSY: int - SQLITE_BUSY_RECOVERY: int - 
SQLITE_BUSY_SNAPSHOT: int - SQLITE_BUSY_TIMEOUT: int - SQLITE_CANTOPEN: int - SQLITE_CANTOPEN_CONVPATH: int - SQLITE_CANTOPEN_DIRTYWAL: int - SQLITE_CANTOPEN_FULLPATH: int - SQLITE_CANTOPEN_ISDIR: int - SQLITE_CANTOPEN_NOTEMPDIR: int - SQLITE_CANTOPEN_SYMLINK: int - SQLITE_CONSTRAINT: int - SQLITE_CONSTRAINT_CHECK: int - SQLITE_CONSTRAINT_COMMITHOOK: int - SQLITE_CONSTRAINT_FOREIGNKEY: int - SQLITE_CONSTRAINT_FUNCTION: int - SQLITE_CONSTRAINT_NOTNULL: int - SQLITE_CONSTRAINT_PINNED: int - SQLITE_CONSTRAINT_PRIMARYKEY: int - SQLITE_CONSTRAINT_ROWID: int - SQLITE_CONSTRAINT_TRIGGER: int - SQLITE_CONSTRAINT_UNIQUE: int - SQLITE_CONSTRAINT_VTAB: int - SQLITE_CORRUPT: int - SQLITE_CORRUPT_INDEX: int - SQLITE_CORRUPT_SEQUENCE: int - SQLITE_CORRUPT_VTAB: int - SQLITE_EMPTY: int - SQLITE_ERROR: int - SQLITE_ERROR_MISSING_COLLSEQ: int - SQLITE_ERROR_RETRY: int - SQLITE_ERROR_SNAPSHOT: int - SQLITE_FORMAT: int - SQLITE_FULL: int - SQLITE_INTERNAL: int - SQLITE_INTERRUPT: int - SQLITE_IOERR: int - SQLITE_IOERR_ACCESS: int - SQLITE_IOERR_AUTH: int - SQLITE_IOERR_BEGIN_ATOMIC: int - SQLITE_IOERR_BLOCKED: int - SQLITE_IOERR_CHECKRESERVEDLOCK: int - SQLITE_IOERR_CLOSE: int - SQLITE_IOERR_COMMIT_ATOMIC: int - SQLITE_IOERR_CONVPATH: int - SQLITE_IOERR_CORRUPTFS: int - SQLITE_IOERR_DATA: int - SQLITE_IOERR_DELETE: int - SQLITE_IOERR_DELETE_NOENT: int - SQLITE_IOERR_DIR_CLOSE: int - SQLITE_IOERR_DIR_FSYNC: int - SQLITE_IOERR_FSTAT: int - SQLITE_IOERR_FSYNC: int - SQLITE_IOERR_GETTEMPPATH: int - SQLITE_IOERR_LOCK: int - SQLITE_IOERR_MMAP: int - SQLITE_IOERR_NOMEM: int - SQLITE_IOERR_RDLOCK: int - SQLITE_IOERR_READ: int - SQLITE_IOERR_ROLLBACK_ATOMIC: int - SQLITE_IOERR_SEEK: int - SQLITE_IOERR_SHMLOCK: int - SQLITE_IOERR_SHMMAP: int - SQLITE_IOERR_SHMOPEN: int - SQLITE_IOERR_SHMSIZE: int - SQLITE_IOERR_SHORT_READ: int - SQLITE_IOERR_TRUNCATE: int - SQLITE_IOERR_UNLOCK: int - SQLITE_IOERR_VNODE: int - SQLITE_IOERR_WRITE: int - SQLITE_LOCKED: int - SQLITE_LOCKED_SHAREDCACHE: int - 
SQLITE_LOCKED_VTAB: int - SQLITE_MISMATCH: int - SQLITE_MISUSE: int - SQLITE_NOLFS: int - SQLITE_NOMEM: int - SQLITE_NOTADB: int - SQLITE_NOTFOUND: int - SQLITE_NOTICE: int - SQLITE_NOTICE_RECOVER_ROLLBACK: int - SQLITE_NOTICE_RECOVER_WAL: int - SQLITE_OK_LOAD_PERMANENTLY: int - SQLITE_OK_SYMLINK: int - SQLITE_PERM: int - SQLITE_PROTOCOL: int - SQLITE_RANGE: int - SQLITE_READONLY: int - SQLITE_READONLY_CANTINIT: int - SQLITE_READONLY_CANTLOCK: int - SQLITE_READONLY_DBMOVED: int - SQLITE_READONLY_DIRECTORY: int - SQLITE_READONLY_RECOVERY: int - SQLITE_READONLY_ROLLBACK: int - SQLITE_ROW: int - SQLITE_SCHEMA: int - SQLITE_TOOBIG: int - SQLITE_WARNING: int - SQLITE_WARNING_AUTOINDEX: int - -if sys.version_info >= (3, 12): - LEGACY_TRANSACTION_CONTROL: int - SQLITE_DBCONFIG_DEFENSIVE: int - SQLITE_DBCONFIG_DQS_DDL: int - SQLITE_DBCONFIG_DQS_DML: int - SQLITE_DBCONFIG_ENABLE_FKEY: int - SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: int - SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: int - SQLITE_DBCONFIG_ENABLE_QPSG: int - SQLITE_DBCONFIG_ENABLE_TRIGGER: int - SQLITE_DBCONFIG_ENABLE_VIEW: int - SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: int - SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: int - SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: int - SQLITE_DBCONFIG_RESET_DATABASE: int - SQLITE_DBCONFIG_TRIGGER_EQP: int - SQLITE_DBCONFIG_TRUSTED_SCHEMA: int - SQLITE_DBCONFIG_WRITABLE_SCHEMA: int - -# Can take or return anything depending on what's in the registry. -@overload -def adapt(obj: Any, proto: Any, /) -> Any: - """Adapt given object to given protocol.""" - ... -@overload -def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: - """Adapt given object to given protocol.""" - ... -def complete_statement(statement: str) -> bool: - """Checks if a string contains a complete SQL statement.""" - ... 
- -if sys.version_info >= (3, 12): - @overload - def connect( - database: StrOrBytesPath, - timeout: float = 5.0, - detect_types: int = 0, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", - check_same_thread: bool = True, - cached_statements: int = 128, - uri: bool = False, - *, - autocommit: bool = ..., - ) -> Connection: - """ - Opens a connection to the SQLite database file database. - - You can use ":memory:" to open a database connection to a database that resides - in RAM instead of on disk. - """ - ... - @overload - def connect( - database: StrOrBytesPath, - timeout: float, - detect_types: int, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None, - check_same_thread: bool, - factory: type[_ConnectionT], - cached_statements: int = 128, - uri: bool = False, - *, - autocommit: bool = ..., - ) -> _ConnectionT: - """ - Opens a connection to the SQLite database file database. - - You can use ":memory:" to open a database connection to a database that resides - in RAM instead of on disk. - """ - ... - @overload - def connect( - database: StrOrBytesPath, - timeout: float = 5.0, - detect_types: int = 0, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", - check_same_thread: bool = True, - *, - factory: type[_ConnectionT], - cached_statements: int = 128, - uri: bool = False, - autocommit: bool = ..., - ) -> _ConnectionT: - """ - Opens a connection to the SQLite database file database. - - You can use ":memory:" to open a database connection to a database that resides - in RAM instead of on disk. - """ - ... 
- -else: - @overload - def connect( - database: StrOrBytesPath, - timeout: float = 5.0, - detect_types: int = 0, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", - check_same_thread: bool = True, - cached_statements: int = 128, - uri: bool = False, - ) -> Connection: - """ - connect(database[, timeout, detect_types, isolation_level, - check_same_thread, factory, cached_statements, uri]) - - Opens a connection to the SQLite database file *database*. You can use - ":memory:" to open a database connection to a database that resides in - RAM instead of on disk. - """ - ... - @overload - def connect( - database: StrOrBytesPath, - timeout: float, - detect_types: int, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None, - check_same_thread: bool, - factory: type[_ConnectionT], - cached_statements: int = 128, - uri: bool = False, - ) -> _ConnectionT: - """ - connect(database[, timeout, detect_types, isolation_level, - check_same_thread, factory, cached_statements, uri]) - - Opens a connection to the SQLite database file *database*. You can use - ":memory:" to open a database connection to a database that resides in - RAM instead of on disk. - """ - ... - @overload - def connect( - database: StrOrBytesPath, - timeout: float = 5.0, - detect_types: int = 0, - isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED", - check_same_thread: bool = True, - *, - factory: type[_ConnectionT], - cached_statements: int = 128, - uri: bool = False, - ) -> _ConnectionT: - """ - connect(database[, timeout, detect_types, isolation_level, - check_same_thread, factory, cached_statements, uri]) - - Opens a connection to the SQLite database file *database*. You can use - ":memory:" to open a database connection to a database that resides in - RAM instead of on disk. - """ - ... - -def enable_callback_tracebacks(enable: bool, /) -> None: - """Enable or disable callback functions throwing errors to stderr.""" - ... 
- -if sys.version_info < (3, 12): - # takes a pos-or-keyword argument because there is a C wrapper - def enable_shared_cache(enable: int) -> None: ... - -if sys.version_info >= (3, 10): - def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: - """Register a function to adapt Python objects to SQLite values.""" - ... - def register_converter(typename: str, converter: _Converter, /) -> None: - """Register a function to convert SQLite values to Python objects.""" - ... - -else: - def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: ... - def register_converter(name: str, converter: _Converter, /) -> None: ... - -class _AggregateProtocol(Protocol): - def step(self, value: int, /) -> object: ... - def finalize(self) -> int: ... - -class _SingleParamWindowAggregateClass(Protocol): - def step(self, param: Any, /) -> object: ... - def inverse(self, param: Any, /) -> object: ... - def value(self) -> _SqliteData: ... - def finalize(self) -> _SqliteData: ... - -class _AnyParamWindowAggregateClass(Protocol): - def step(self, *args: Any) -> object: ... - def inverse(self, *args: Any) -> object: ... - def value(self) -> _SqliteData: ... - def finalize(self) -> _SqliteData: ... - -class _WindowAggregateClass(Protocol): - step: Callable[..., object] - inverse: Callable[..., object] - def value(self) -> _SqliteData: ... - def finalize(self) -> _SqliteData: ... - -class Connection: - """SQLite database connection object.""" - @property - def DataError(self) -> type[sqlite3.DataError]: ... - @property - def DatabaseError(self) -> type[sqlite3.DatabaseError]: ... - @property - def Error(self) -> type[sqlite3.Error]: ... - @property - def IntegrityError(self) -> type[sqlite3.IntegrityError]: ... - @property - def InterfaceError(self) -> type[sqlite3.InterfaceError]: ... - @property - def InternalError(self) -> type[sqlite3.InternalError]: ... - @property - def NotSupportedError(self) -> type[sqlite3.NotSupportedError]: ... 
- @property - def OperationalError(self) -> type[sqlite3.OperationalError]: ... - @property - def ProgrammingError(self) -> type[sqlite3.ProgrammingError]: ... - @property - def Warning(self) -> type[sqlite3.Warning]: ... - @property - def in_transaction(self) -> bool: ... - isolation_level: str | None # one of '', 'DEFERRED', 'IMMEDIATE' or 'EXCLUSIVE' - @property - def total_changes(self) -> int: ... - if sys.version_info >= (3, 12): - @property - def autocommit(self) -> int: ... - @autocommit.setter - def autocommit(self, val: int) -> None: ... - row_factory: Any - text_factory: Any - if sys.version_info >= (3, 12): - def __init__( - self, - database: StrOrBytesPath, - timeout: float = ..., - detect_types: int = ..., - isolation_level: str | None = ..., - check_same_thread: bool = ..., - factory: type[Connection] | None = ..., - cached_statements: int = ..., - uri: bool = ..., - autocommit: bool = ..., - ) -> None: ... - else: - def __init__( - self, - database: StrOrBytesPath, - timeout: float = ..., - detect_types: int = ..., - isolation_level: str | None = ..., - check_same_thread: bool = ..., - factory: type[Connection] | None = ..., - cached_statements: int = ..., - uri: bool = ..., - ) -> None: ... - - def close(self) -> None: - """ - Close the database connection. - - Any pending transaction is not committed implicitly. - """ - ... - if sys.version_info >= (3, 11): - def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: - """ - Open and return a BLOB object. - - table - Table name. - column - Column name. - row - Row index. - readonly - Open the BLOB without write permissions. - name - Database name. - """ - ... - - def commit(self) -> None: - """ - Commit any pending transaction to the database. - - If there is no open transaction, this method is a no-op. - """ - ... 
- def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: - """Creates a new aggregate.""" - ... - if sys.version_info >= (3, 11): - # num_params determines how many params will be passed to the aggregate class. We provide an overload - # for the case where num_params = 1, which is expected to be the common case. - @overload - def create_window_function( - self, name: str, num_params: Literal[1], aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None, / - ) -> None: - """ - Creates or redefines an aggregate window function. Non-standard. - - name - The name of the SQL aggregate window function to be created or - redefined. - num_params - The number of arguments the step and inverse methods takes. - aggregate_class - A class with step(), finalize(), value(), and inverse() methods. - Set to None to clear the window function. - """ - ... - # And for num_params = -1, which means the aggregate must accept any number of parameters. - @overload - def create_window_function( - self, name: str, num_params: Literal[-1], aggregate_class: Callable[[], _AnyParamWindowAggregateClass] | None, / - ) -> None: - """ - Creates or redefines an aggregate window function. Non-standard. - - name - The name of the SQL aggregate window function to be created or - redefined. - num_params - The number of arguments the step and inverse methods takes. - aggregate_class - A class with step(), finalize(), value(), and inverse() methods. - Set to None to clear the window function. - """ - ... - @overload - def create_window_function( - self, name: str, num_params: int, aggregate_class: Callable[[], _WindowAggregateClass] | None, / - ) -> None: - """ - Creates or redefines an aggregate window function. Non-standard. - - name - The name of the SQL aggregate window function to be created or - redefined. - num_params - The number of arguments the step and inverse methods takes. 
- aggregate_class - A class with step(), finalize(), value(), and inverse() methods. - Set to None to clear the window function. - """ - ... - - def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: - """Creates a collation function.""" - ... - def create_function( - self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False - ) -> None: - """Creates a new function.""" - ... - @overload - def cursor(self, factory: None = None) -> Cursor: - """Return a cursor for the connection.""" - ... - @overload - def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: - """Return a cursor for the connection.""" - ... - def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: - """Executes an SQL statement.""" - ... - def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: - """Repeatedly executes an SQL statement.""" - ... - def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once.""" - ... - def interrupt(self) -> None: - """Abort any pending database operation.""" - ... - if sys.version_info >= (3, 13): - def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: ... - else: - def iterdump(self) -> Generator[str, None, None]: - """Returns iterator to the dump of the database in an SQL text format.""" - ... - - def rollback(self) -> None: - """ - Roll back to the start of any pending transaction. - - If there is no open transaction, this method is a no-op. - """ - ... - def set_authorizer( - self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None - ) -> None: - """Sets authorizer callback.""" - ... - def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: - """Sets progress handler callback.""" - ... 
- def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: - """Sets a trace callback called for each SQL statement (passed as unicode).""" - ... - # enable_load_extension and load_extension is not available on python distributions compiled - # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 - def enable_load_extension(self, enable: bool, /) -> None: - """Enable dynamic loading of SQLite extension modules.""" - ... - def load_extension(self, name: str, /) -> None: - """Load SQLite extension module.""" - ... - def backup( - self, - target: Connection, - *, - pages: int = -1, - progress: Callable[[int, int, int], object] | None = None, - name: str = "main", - sleep: float = 0.25, - ) -> None: - """Makes a backup of the database.""" - ... - if sys.version_info >= (3, 11): - def setlimit(self, category: int, limit: int, /) -> int: - """ - Set connection run-time limits. - - category - The limit category to be set. - limit - The new limit. If the new limit is a negative number, the limit is - unchanged. - - Attempts to increase a limit above its hard upper bound are silently truncated - to the hard upper bound. Regardless of whether or not the limit was changed, - the prior value of the limit is returned. - """ - ... - def getlimit(self, category: int, /) -> int: - """ - Get connection run-time limits. - - category - The limit category to be queried. - """ - ... - def serialize(self, *, name: str = "main") -> bytes: - """ - Serialize a database into a byte string. - - name - Which database to serialize. - - For an ordinary on-disk database file, the serialization is just a copy of the - disk file. For an in-memory database or a "temp" database, the serialization is - the same sequence of bytes which would be written to disk if that database - were backed up to disk. - """ - ... 
- def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: - """ - Load a serialized database. - - data - The serialized database content. - name - Which database to reopen with the deserialization. - - The deserialize interface causes the database connection to disconnect from the - target database, and then reopen it as an in-memory database based on the given - serialized data. - - The deserialize interface will fail with SQLITE_BUSY if the database is - currently in a read transaction or is involved in a backup operation. - """ - ... - if sys.version_info >= (3, 12): - def getconfig(self, op: int, /) -> bool: - """ - Query a boolean connection configuration option. - - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. - """ - ... - def setconfig(self, op: int, enable: bool = True, /) -> bool: - """ - Set a boolean connection configuration option. - - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. - """ - ... - - def __call__(self, sql: str, /) -> _Statement: - """Call self as a function.""" - ... - def __enter__(self) -> Self: - """ - Called when the connection is used as a context manager. - - Returns itself as a convenience to the caller. - """ - ... - def __exit__( - self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / - ) -> Literal[False]: - """ - Called when the connection is used as a context manager. - - If there was any exception, a rollback takes place; otherwise we commit. - """ - ... - -class Cursor(Iterator[Any]): - """SQLite database cursor class.""" - arraysize: int - @property - def connection(self) -> Connection: ... - # May be None, but using | Any instead to avoid slightly annoying false positives. - @property - def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | Any: ... - @property - def lastrowid(self) -> int | None: ... 
- row_factory: Callable[[Cursor, Row], object] | None - @property - def rowcount(self) -> int: ... - def __init__(self, cursor: Connection, /) -> None: ... - def close(self) -> None: - """Closes the cursor.""" - ... - def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: - """Executes an SQL statement.""" - ... - def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: - """Repeatedly executes an SQL statement.""" - ... - def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once.""" - ... - def fetchall(self) -> list[Any]: - """Fetches all rows from the resultset.""" - ... - def fetchmany(self, size: int | None = 1) -> list[Any]: - """ - Fetches several rows from the resultset. - - size - The default value is set by the Cursor.arraysize attribute. - """ - ... - # Returns either a row (as created by the row_factory) or None, but - # putting None in the return annotation causes annoying false positives. - def fetchone(self) -> Any: - """Fetches one row from the resultset.""" - ... - def setinputsizes(self, sizes: Unused, /) -> None: - """Required by DB-API. Does nothing in sqlite3.""" - ... - def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: - """Required by DB-API. Does nothing in sqlite3.""" - ... - def __iter__(self) -> Self: - """Implement iter(self).""" - ... - def __next__(self) -> Any: - """Implement next(self).""" - ... - -class Error(Exception): - if sys.version_info >= (3, 11): - sqlite_errorcode: int - sqlite_errorname: str - -class DatabaseError(Error): ... -class DataError(DatabaseError): ... -class IntegrityError(DatabaseError): ... -class InterfaceError(Error): ... -class InternalError(DatabaseError): ... -class NotSupportedError(DatabaseError): ... -class OperationalError(DatabaseError): ... 
- -if sys.version_info < (3, 10): - OptimizedUnicode = str - -@final -class PrepareProtocol: - """PEP 246 style object adaption protocol type.""" - def __init__(self, *args: object, **kwargs: object) -> None: ... - -class ProgrammingError(DatabaseError): ... - -class Row: - def __init__(self, cursor: Cursor, data: tuple[Any, ...], /) -> None: ... - def keys(self) -> list[str]: - """Returns the keys of the row.""" - ... - @overload - def __getitem__(self, key: int | str, /) -> Any: - """Return self[key].""" - ... - @overload - def __getitem__(self, key: slice, /) -> tuple[Any, ...]: - """Return self[key].""" - ... - def __hash__(self) -> int: - """Return hash(self).""" - ... - def __iter__(self) -> Iterator[Any]: - """Implement iter(self).""" - ... - def __len__(self) -> int: - """Return len(self).""" - ... - # These return NotImplemented for anything that is not a Row. - def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... - def __ge__(self, value: object, /) -> bool: - """Return self>=value.""" - ... - def __gt__(self, value: object, /) -> bool: - """Return self>value.""" - ... - def __le__(self, value: object, /) -> bool: - """Return self<=value.""" - ... - def __lt__(self, value: object, /) -> bool: - """Return self bool: - """Return self!=value.""" - ... - -@final -class _Statement: ... - -class Warning(Exception): ... - -if sys.version_info >= (3, 11): - @final - class Blob: - def close(self) -> None: - """Close the blob.""" - ... - def read(self, length: int = -1, /) -> bytes: - """ - Read data at the current offset position. - - length - Read length in bytes. - - If the end of the blob is reached, the data up to end of file will be returned. - When length is not specified, or is negative, Blob.read() will read until the - end of the blob. - """ - ... - def write(self, data: ReadableBuffer, /) -> None: - """ - Write data at the current offset. - - This function cannot change the blob length. 
Writing beyond the end of the - blob will result in an exception being raised. - """ - ... - def tell(self) -> int: - """Return the current access position for the blob.""" - ... - # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, offset: int, origin: int = 0, /) -> None: - """ - Set the current access position to offset. - - The origin argument defaults to os.SEEK_SET (absolute blob positioning). - Other values for origin are os.SEEK_CUR (seek relative to the current position) - and os.SEEK_END (seek relative to the blob's end). - """ - ... - def __len__(self) -> int: - """Return len(self).""" - ... - def __enter__(self) -> Self: - """Blob context manager enter.""" - ... - def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: - """Blob context manager exit.""" - ... - def __getitem__(self, key: SupportsIndex | slice, /) -> int: - """Return self[key].""" - ... - def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: - """Set self[key] to value.""" - ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_compile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_compile.pyi index 684f8d7..2d04a88 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_compile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_compile.pyi @@ -1,5 +1,3 @@ -"""Internal support module for sre""" - from re import Pattern from sre_constants import * from sre_constants import _NamedIntConstant diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_constants.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_constants.pyi index 064ce97..383f0f7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_constants.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_constants.pyi @@ -1,6 +1,5 @@ -"""Internal support module for sre""" - import sys +from re import error as error from typing import Any from typing_extensions import Self @@ -8,25 +7,6 @@ MAXGROUPS: int MAGIC: int -class error(Exception): - """ - Exception raised for invalid regular expressions. - - Attributes: - - msg: The unformatted error message - pattern: The regular expression pattern - pos: The index in the pattern where compilation failed (may be None) - lineno: The line corresponding to pos (may be None) - colno: The column corresponding to pos (may be None) - """ - msg: str - pattern: str | bytes | None - pos: int | None - lineno: int - colno: int - def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... - class _NamedIntConstant(int): name: Any def __new__(cls, value: int, name: str) -> Self: ... 
@@ -43,7 +23,8 @@ AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] -SRE_FLAG_TEMPLATE: int +if sys.version_info < (3, 13): + SRE_FLAG_TEMPLATE: int SRE_FLAG_IGNORECASE: int SRE_FLAG_LOCALE: int SRE_FLAG_MULTILINE: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_parse.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_parse.pyi index ad12138..c242bd2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_parse.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sre_parse.pyi @@ -1,5 +1,3 @@ -"""Internal support module for sre""" - import sys from collections.abc import Iterable from re import Match, Pattern as _Pattern diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ssl.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ssl.pyi index 2d7cb8b..5b62a05 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ssl.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/ssl.pyi @@ -92,18 +92,51 @@ ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY import enum import socket import sys +from _ssl import ( + _DEFAULT_CIPHERS as _DEFAULT_CIPHERS, + _OPENSSL_API_VERSION as _OPENSSL_API_VERSION, + HAS_ALPN as HAS_ALPN, + HAS_ECDH as HAS_ECDH, + HAS_NPN as HAS_NPN, + HAS_SNI as HAS_SNI, + OPENSSL_VERSION as OPENSSL_VERSION, + OPENSSL_VERSION_INFO as OPENSSL_VERSION_INFO, + OPENSSL_VERSION_NUMBER as OPENSSL_VERSION_NUMBER, + HAS_SSLv2 as HAS_SSLv2, + HAS_SSLv3 as HAS_SSLv3, + HAS_TLSv1 as HAS_TLSv1, + HAS_TLSv1_1 as HAS_TLSv1_1, + HAS_TLSv1_2 as HAS_TLSv1_2, + HAS_TLSv1_3 as HAS_TLSv1_3, + MemoryBIO as MemoryBIO, + RAND_add as RAND_add, + RAND_bytes as RAND_bytes, + RAND_status as RAND_status, + SSLSession as SSLSession, + _PasswordType as 
_PasswordType, # typeshed only, but re-export for other type stubs to use + _SSLContext, +) from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Callable, Iterable -from typing import Any, Literal, NamedTuple, TypedDict, final, overload +from typing import Any, Literal, NamedTuple, TypedDict, overload from typing_extensions import Never, Self, TypeAlias +if sys.version_info >= (3, 13): + from _ssl import HAS_PSK as HAS_PSK + +if sys.version_info < (3, 12): + from _ssl import RAND_pseudo_bytes as RAND_pseudo_bytes + +if sys.version_info < (3, 10): + from _ssl import RAND_egd as RAND_egd + +if sys.platform == "win32": + from _ssl import enum_certificates as enum_certificates, enum_crls as enum_crls + _PCTRTT: TypeAlias = tuple[tuple[str, str], ...] _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] _PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] _PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None -_EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] -_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray - _SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] socket_error = OSError @@ -224,54 +257,8 @@ else: _create_default_https_context: Callable[..., SSLContext] -def RAND_bytes(n: int, /) -> bytes: - """Generate n cryptographically strong pseudo-random bytes.""" - ... - if sys.version_info < (3, 12): - def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: - """ - Generate n pseudo-random bytes. - - Return a pair (bytes, is_cryptographic). is_cryptographic is True - if the bytes generated are cryptographically strong. - """ - ... - -def RAND_status() -> bool: - """ - Returns True if the OpenSSL PRNG has been seeded with enough data and False if not. - - It is necessary to seed the PRNG with RAND_add() on some platforms before - using the ssl() function. - """ - ... -def RAND_egd(path: str) -> None: ... 
-def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: - """ - Mix string into the OpenSSL PRNG state. - - entropy (a float) is a lower bound on the entropy contained in - string. See RFC 4086. - """ - ... - -if sys.version_info < (3, 12): - def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: - """ - Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed. - - The function matches IP addresses rather than dNSNames if hostname is a - valid ipaddress string. IPv4 addresses are supported on all platforms. - IPv6 addresses are supported on platforms with IPv6 support (AF_INET6 - and inet_pton). - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - ... + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... def cert_time_to_seconds(cert_time: str) -> int: """ @@ -331,10 +318,6 @@ def get_default_verify_paths() -> DefaultVerifyPaths: """ ... -if sys.platform == "win32": - def enum_certificates(store_name: str) -> _EnumRetType: ... - def enum_crls(store_name: str) -> _EnumRetType: ... - class VerifyMode(enum.IntEnum): """An enumeration.""" CERT_NONE = 0 @@ -431,21 +414,8 @@ if sys.version_info >= (3, 11) or sys.platform == "linux": OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool -HAS_SSLv2: bool -HAS_SSLv3: bool -HAS_TLSv1: bool -HAS_TLSv1_1: bool -HAS_TLSv1_2: bool -HAS_TLSv1_3: bool -HAS_ALPN: bool -HAS_ECDH: bool -HAS_SNI: bool -HAS_NPN: bool -CHANNEL_BINDING_TYPES: list[str] -OPENSSL_VERSION: str -OPENSSL_VERSION_INFO: tuple[int, int, int, int, int] -OPENSSL_VERSION_NUMBER: int +CHANNEL_BINDING_TYPES: list[str] class AlertDescription(enum.IntEnum): """An enumeration.""" @@ -689,8 +659,21 @@ class SSLSocket(socket.socket): def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... 
# type: ignore[override] def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: ... - def get_unverified_chain(self) -> list[bytes]: ... + def get_verified_chain(self) -> list[bytes]: + """ + Returns verified certificate chain provided by the other + end of the SSL channel as a list of DER-encoded bytes. + + If certificate verification was disabled method acts the same as + ``SSLSocket.get_unverified_chain``. + """ + ... + def get_unverified_chain(self) -> list[bytes]: + """ + Returns raw certificate chain provided by the other + end of the SSL channel as a list of DER-encoded bytes. + """ + ... class TLSVersion(enum.IntEnum): """An enumeration.""" @@ -702,21 +685,19 @@ class TLSVersion(enum.IntEnum): TLSv1_2 = 771 TLSv1_3 = 772 -class SSLContext: +class SSLContext(_SSLContext): """ An SSLContext holds various SSL-related configuration options and data, such as certificates and possibly a private key. """ - check_hostname: bool options: Options verify_flags: VerifyFlags verify_mode: VerifyMode @property - def protocol(self) -> _SSLMethod: ... + def protocol(self) -> _SSLMethod: ... # type: ignore[override] hostname_checks_common_name: bool maximum_version: TLSVersion minimum_version: TLSVersion - sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None # The following two attributes have class-level defaults. # However, the docs explicitly state that it's OK to override these attributes on instances, # so making these ClassVars wouldn't be appropriate @@ -733,20 +714,6 @@ class SSLContext: else: def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... - def cert_store_stats(self) -> dict[str, int]: - """ - Returns quantities of loaded X.509 certificates. - - X.509 certificates with a CA extension and certificate revocation lists - inside the context's cert store. 
- - NOTE: Certificates in a capath directory aren't loaded unless they have - been used at least once. - """ - ... - def load_cert_chain( - self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None - ) -> None: ... def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( self, @@ -815,7 +782,6 @@ class SSLContext: server_hostname: str | bytes | None = None, session: SSLSession | None = None, ) -> SSLObject: ... - def session_stats(self) -> dict[str, int]: ... class SSLObject: """ @@ -953,63 +919,21 @@ class SSLObject: ... def verify_client_post_handshake(self) -> None: ... if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: ... - def get_unverified_chain(self) -> list[bytes]: ... - -@final -class MemoryBIO: - pending: int - eof: bool - def read(self, size: int = -1, /) -> bytes: - """ - Read up to size bytes from the memory BIO. - - If size is not specified, read the entire buffer. - If the return value is an empty bytes instance, this means either - EOF or that no data is available. Use the "eof" property to - distinguish between the two. - """ - ... - def write(self, b: ReadableBuffer, /) -> int: - """ - Writes the bytes b into the memory BIO. - - Returns the number of bytes written. - """ - ... - def write_eof(self) -> None: - """ - Write an EOF marker to the memory BIO. - - When all data has been read, the "eof" property will be True. - """ - ... - -@final -class SSLSession: - @property - def has_ticket(self) -> bool: - """Does the session contain a ticket?""" - ... - @property - def id(self) -> bytes: - """Session id""" - ... - @property - def ticket_lifetime_hint(self) -> int: - """Ticket life time hint.""" - ... - @property - def time(self) -> int: - """Session creation time (seconds since epoch).""" - ... - @property - def timeout(self) -> int: - """Session timeout (delta in seconds).""" - ... 
- def __eq__(self, value: object, /) -> bool: - """Return self==value.""" - ... + def get_verified_chain(self) -> list[bytes]: + """ + Returns verified certificate chain provided by the other + end of the SSL channel as a list of DER-encoded bytes. + + If certificate verification was disabled method acts the same as + ``SSLSocket.get_unverified_chain``. + """ + ... + def get_unverified_chain(self) -> list[bytes]: + """ + Returns raw certificate chain provided by the other + end of the SSL channel as a list of DER-encoded bytes. + """ + ... class SSLErrorNumber(enum.IntEnum): """An enumeration.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/stat.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/stat.pyi index c6b8fcc..c70c1c7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/stat.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/stat.pyi @@ -6,8 +6,8 @@ Suggested usage: from stat import * import sys from _stat import * -from typing import Literal +from typing import Final if sys.version_info >= (3, 13): # https://github.com/python/cpython/issues/114081#issuecomment-2119017790 - SF_RESTRICTED: Literal[0x00080000] + SF_RESTRICTED: Final = 0x00080000 diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/statistics.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/statistics.pyi index 9747368..4aeb45a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/statistics.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/statistics.pyi @@ -11,7 +11,7 @@ Calculating averages Function Description ================== ================================================== mean Arithmetic mean (average) of data. -fmean Fast, floating point arithmetic mean. +fmean Fast, floating-point arithmetic mean. geometric_mean Geometric mean of data. harmonic_mean Harmonic mean of data. median Median (middle value) of data. 
@@ -159,24 +159,16 @@ if sys.version_info >= (3, 11): ... else: - def fmean(data: Iterable[SupportsFloat]) -> float: - """ - Convert data to floats and compute the arithmetic mean. - - This runs faster than the mean() function and it always returns a float. - If the input dataset is empty, it raises a StatisticsError. - - >>> fmean([3.5, 4.0, 5.25]) - 4.25 - """ - ... + def fmean(data: Iterable[SupportsFloat]) -> float: ... def geometric_mean(data: Iterable[SupportsFloat]) -> float: """ Convert data to floats and compute the geometric mean. - Raises a StatisticsError if the input dataset is empty, - if it contains a zero, or if it contains a negative value. + Raises a StatisticsError if the input dataset is empty + or if it contains a negative value. + + Returns zero if the product of inputs is zero. No special efforts are made to achieve exact results. (However, this may change in the future.) @@ -322,34 +314,7 @@ if sys.version_info >= (3, 11): ... else: - def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: - """ - Return the 50th percentile (median) of grouped continuous data. - - >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) - 3.7 - >>> median_grouped([52, 52, 53, 54]) - 52.5 - - This calculates the median as the 50th percentile, and should be - used when your data is continuous and grouped. In the above example, - the values 1, 2, 3, etc. actually represent the midpoint of classes - 0.5-1.5, 1.5-2.5, 2.5-3.5, etc. The middle value falls somewhere in - class 3.5-4.5, and interpolation is used to estimate it. - - Optional argument ``interval`` represents the class interval, and - defaults to 1. Changing the class interval naturally will change the - interpolated 50th percentile value: - - >>> median_grouped([1, 3, 3, 5, 7], interval=1) - 3.25 - >>> median_grouped([1, 3, 3, 5, 7], interval=2) - 3.5 - - This function does not check whether the data points are at least - ``interval`` apart. - """ - ... 
+ def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... def mode(data: Iterable[_HashableT]) -> _HashableT: """ @@ -684,24 +649,7 @@ if sys.version_info >= (3, 12): ... elif sys.version_info >= (3, 10): - def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: - """ - Pearson's correlation coefficient - - Return the Pearson's correlation coefficient for two inputs. Pearson's - correlation coefficient *r* takes values between -1 and +1. It measures the - strength and direction of the linear relationship, where +1 means very - strong, positive linear relationship, -1 very strong, negative linear - relationship, and 0 no linear relationship. - - >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] - >>> correlation(x, x) - 1.0 - >>> correlation(x, y) - -1.0 - """ - ... + def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... if sys.version_info >= (3, 10): def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: @@ -754,7 +702,7 @@ if sys.version_info >= (3, 11): >>> noise = NormalDist().samples(5, seed=42) >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] >>> linear_regression(x, y) #doctest: +ELLIPSIS - LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) + LinearRegression(slope=3.17495..., intercept=1.00925...) If *proportional* is true, the independent variable *x* and the dependent variable *y* are assumed to be directly proportional. @@ -767,37 +715,12 @@ if sys.version_info >= (3, 11): >>> y = [3 * x[i] + noise[i] for i in range(5)] >>> linear_regression(x, y, proportional=True) #doctest: +ELLIPSIS - LinearRegression(slope=3.02447542484..., intercept=0.0) + LinearRegression(slope=2.90475..., intercept=0.0) """ ... elif sys.version_info >= (3, 10): - def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: - """ - Slope and intercept for simple linear regression. 
- - Return the slope and intercept of simple linear regression - parameters estimated using ordinary least squares. Simple linear - regression describes relationship between an independent variable - *x* and a dependent variable *y* in terms of linear function: - - y = slope * x + intercept + noise - - where *slope* and *intercept* are the regression parameters that are - estimated, and noise represents the variability of the data that was - not explained by the linear regression (it is equal to the - difference between predicted and actual values of the dependent - variable). - - The parameters are returned as a named tuple. - - >>> x = [1, 2, 3, 4, 5] - >>> noise = NormalDist().samples(5, seed=42) - >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] - >>> linear_regression(x, y) #doctest: +ELLIPSIS - LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) - """ - ... + def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: ... if sys.version_info >= (3, 13): _Kernel: TypeAlias = Literal[ @@ -817,11 +740,128 @@ if sys.version_info >= (3, 13): ] def kde( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False - ) -> Callable[[float], float]: ... + ) -> Callable[[float], float]: + """ + Kernel Density Estimation: Create a continuous probability density + function or cumulative distribution function from discrete samples. + + The basic idea is to smooth the data using a kernel function + to help draw inferences about a population from a sample. + + The degree of smoothing is controlled by the scaling parameter h + which is called the bandwidth. Smaller values emphasize local + features while larger values give smoother results. + + The kernel determines the relative weights of the sample data + points. Generally, the choice of kernel shape does not matter + as much as the more influential bandwidth smoothing parameter. 
+ + Kernels that give some weight to every sample point: + + normal (gauss) + logistic + sigmoid + + Kernels that only give weight to sample points within + the bandwidth: + + rectangular (uniform) + triangular + parabolic (epanechnikov) + quartic (biweight) + triweight + cosine + + If *cumulative* is true, will return a cumulative distribution function. + + A StatisticsError will be raised if the data sequence is empty. + + Example + ------- + + Given a sample of six data points, construct a continuous + function that estimates the underlying probability density: + + >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> f_hat = kde(sample, h=1.5) + + Compute the area under the curve: + + >>> area = sum(f_hat(x) for x in range(-20, 20)) + >>> round(area, 4) + 1.0 + + Plot the estimated probability density function at + evenly spaced points from -6 to 10: + + >>> for x in range(-6, 11): + ... density = f_hat(x) + ... plot = ' ' * int(density * 400) + 'x' + ... print(f'{x:2}: {density:.3f} {plot}') + ... 
+ -6: 0.002 x + -5: 0.009 x + -4: 0.031 x + -3: 0.070 x + -2: 0.111 x + -1: 0.125 x + 0: 0.110 x + 1: 0.086 x + 2: 0.068 x + 3: 0.059 x + 4: 0.066 x + 5: 0.082 x + 6: 0.082 x + 7: 0.058 x + 8: 0.028 x + 9: 0.009 x + 10: 0.002 x + + Estimate P(4.5 < X <= 7.5), the probability that a new sample value + will be between 4.5 and 7.5: + + >>> cdf = kde(sample, h=1.5, cumulative=True) + >>> round(cdf(7.5) - cdf(4.5), 2) + 0.22 + + References + ---------- + + Kernel density estimation and its application: + https://www.itm-conferences.org/articles/itmconf/pdf/2018/08/itmconf_sam2018_00037.pdf + + Kernel functions in common use: + https://en.wikipedia.org/wiki/Kernel_(statistics)#kernel_functions_in_common_use + + Interactive graphical demonstration and exploration: + https://demonstrations.wolfram.com/KernelDensityEstimation/ + + Kernel estimation of cumulative distribution function of a random variable with bounded support + https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf + """ + ... def kde_random( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: int | float | str | bytes | bytearray | None = None, # noqa: Y041 - ) -> Callable[[], float]: ... + ) -> Callable[[], float]: + """ + Return a function that makes a random selection from the estimated + probability density function created by kde(data, h, kernel). + + Providing a *seed* allows reproducible selections within a single + thread. The seed may be an integer, float, str, or bytes. + + A StatisticsError will be raised if the *data* sequence is empty. + + Example: + + >>> data = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> rand = kde_random(data, h=1.5, seed=8675309) + >>> new_selections = [rand() for i in range(10)] + >>> [round(x, 1) for x in new_selections] + [0.7, 6.2, 1.2, 6.9, 7.0, 1.8, 2.5, -0.5, -1.8, 5.6] + """ + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/subprocess.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/subprocess.pyi index aa7025c..ced3a90 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/subprocess.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/subprocess.pyi @@ -37,7 +37,7 @@ import sys from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType -from typing import IO, Any, AnyStr, Generic, Literal, TypeVar, overload +from typing import IO, Any, AnyStr, Final, Generic, Literal, TypeVar, overload from typing_extensions import Self, TypeAlias if sys.version_info >= (3, 9): @@ -109,8 +109,8 @@ _T = TypeVar("_T") # These two are private but documented if sys.version_info >= (3, 11): - _USE_VFORK: bool -_USE_POSIX_SPAWN: bool + _USE_VFORK: Final[bool] +_USE_POSIX_SPAWN: Final[bool] class CompletedProcess(Generic[_T]): """ @@ -566,37 +566,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. 
- - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, @@ -629,37 +599,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. 
If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, @@ -692,37 +632,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[str]: ... 
@overload def run( args: _CMD, @@ -756,37 +666,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, @@ -819,37 +699,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[bytes]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. 
By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[bytes]: ... @overload def run( args: _CMD, @@ -882,37 +732,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[Any]: - """ - Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. 
The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - ... + ) -> CompletedProcess[Any]: ... elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1303,6 +1123,7 @@ if sys.version_info >= (3, 11): start_new_session: bool = False, pass_fds: Collection[int] = ..., *, + encoding: str | None = None, timeout: float | None = None, text: bool | None = None, user: str | int | None = None, @@ -1343,6 +1164,7 @@ elif sys.version_info >= (3, 10): start_new_session: bool = False, pass_fds: Collection[int] = ..., *, + encoding: str | None = None, timeout: float | None = None, text: bool | None = None, user: str | int | None = None, @@ -1350,16 +1172,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: - """ - Run command with arguments. Wait for command to complete or - timeout, then return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ - ... + ) -> int: ... 
elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1382,6 +1195,7 @@ elif sys.version_info >= (3, 9): start_new_session: bool = False, pass_fds: Collection[int] = ..., *, + encoding: str | None = None, timeout: float | None = None, text: bool | None = None, user: str | int | None = None, @@ -1410,6 +1224,7 @@ else: start_new_session: bool = False, pass_fds: Collection[int] = ..., *, + encoding: str | None = None, timeout: float | None = None, text: bool | None = None, ) -> int: ... @@ -1437,6 +1252,7 @@ if sys.version_info >= (3, 11): pass_fds: Collection[int] = ..., timeout: float | None = ..., *, + encoding: str | None = None, text: bool | None = None, user: str | int | None = None, group: str | int | None = None, @@ -1479,24 +1295,14 @@ elif sys.version_info >= (3, 10): pass_fds: Collection[int] = ..., timeout: float | None = ..., *, + encoding: str | None = None, text: bool | None = None, user: str | int | None = None, group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: - """ - Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the call function. Example: - - check_call(["ls", "-l"]) - """ - ... + ) -> int: ... elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1520,6 +1326,7 @@ elif sys.version_info >= (3, 9): pass_fds: Collection[int] = ..., timeout: float | None = ..., *, + encoding: str | None = None, text: bool | None = None, user: str | int | None = None, group: str | int | None = None, @@ -1548,6 +1355,7 @@ else: pass_fds: Collection[int] = ..., timeout: float | None = ..., *, + encoding: str | None = None, text: bool | None = None, ) -> int: ... 
@@ -1988,43 +1796,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> str: ... @overload def check_output( args: _CMD, @@ -2054,43 +1826,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. 
The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> str: ... @overload def check_output( args: _CMD, @@ -2120,43 +1856,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. 
Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> str: ... @overload def check_output( args: _CMD, @@ -2187,43 +1887,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... 
"ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> str: ... @overload def check_output( args: _CMD, @@ -2253,43 +1917,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> bytes: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. 
If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> bytes: ... @overload def check_output( args: _CMD, @@ -2319,43 +1947,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> Any: - r""" - Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... 
input=b"when in the course of fooman events\n") - b'when in the course of barman events\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - ... + ) -> Any: ... # morally: -> str | bytes elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -2688,9 +2280,9 @@ else: text: bool | None = None, ) -> Any: ... # morally: -> str | bytes -PIPE: int -STDOUT: int -DEVNULL: int +PIPE: Final[int] +STDOUT: Final[int] +DEVNULL: Final[int] class SubprocessError(Exception): ... @@ -3031,9 +2623,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... @overload def __init__( self: Popen[str], @@ -3063,9 +2653,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... @overload def __init__( self: Popen[str], @@ -3096,9 +2684,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... @overload def __init__( self: Popen[str], @@ -3128,9 +2714,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... 
@overload def __init__( self: Popen[bytes], @@ -3160,9 +2744,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... @overload def __init__( self: Popen[Any], @@ -3192,9 +2774,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - ... + ) -> None: ... elif sys.version_info >= (3, 9): # user, group, extra_groups, umask were added in 3.9 @overload @@ -3618,41 +3198,8 @@ if sys.version_info >= (3, 11): ... else: - def getstatusoutput(cmd: _CMD) -> tuple[int, str]: - """ - Return (exitcode, output) of executing cmd in a shell. - - Execute the string 'cmd' in a shell with 'check_output' and - return a 2-tuple (status, output). The locale encoding is used - to decode the output and process newlines. - - A trailing newline is stripped from the output. - The exit status for the command can be interpreted - according to the rules for the function 'wait'. Example: - - >>> import subprocess - >>> subprocess.getstatusoutput('ls /bin/ls') - (0, '/bin/ls') - >>> subprocess.getstatusoutput('cat /bin/junk') - (1, 'cat: /bin/junk: No such file or directory') - >>> subprocess.getstatusoutput('/bin/junk') - (127, 'sh: /bin/junk: not found') - >>> subprocess.getstatusoutput('/bin/kill $$') - (-15, '') - """ - ... - def getoutput(cmd: _CMD) -> str: - """ - Return output (stdout or stderr) of executing cmd in a shell. - - Like getstatusoutput(), except the exit status is ignored and the return - value is a string containing the command's output. Example: - - >>> import subprocess - >>> subprocess.getoutput('ls /bin/ls') - '/bin/ls' - """ - ... + def getstatusoutput(cmd: _CMD) -> tuple[int, str]: ... + def getoutput(cmd: _CMD) -> str: ... 
def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: """ @@ -3682,6 +3229,11 @@ def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... if sys.platform == "win32": + if sys.version_info >= (3, 13): + from _winapi import STARTF_FORCEOFFFEEDBACK, STARTF_FORCEONFEEDBACK + + __all__ += ["STARTF_FORCEOFFFEEDBACK", "STARTF_FORCEONFEEDBACK"] + class STARTUPINFO: def __init__( self, diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sunau.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sunau.pyi index 5dfaaa2..9b051e8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sunau.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sunau.pyi @@ -1,109 +1,3 @@ -""" -Stuff to parse Sun and NeXT audio files. - -An audio file consists of a header followed by the data. The structure -of the header is as follows. - - +---------------+ - | magic word | - +---------------+ - | header size | - +---------------+ - | data size | - +---------------+ - | encoding | - +---------------+ - | sample rate | - +---------------+ - | # of channels | - +---------------+ - | info | - | | - +---------------+ - -The magic word consists of the 4 characters '.snd'. Apart from the -info field, all header fields are 4 bytes in size. They are all -32-bit unsigned integers encoded in big-endian byte order. - -The header size really gives the start of the data. -The data size is the physical size of the data. From the other -parameters the number of frames can be calculated. -The encoding gives the way in which audio samples are encoded. -Possible values are listed below. -The info field currently consists of an ASCII string giving a -human-readable description of the audio file. The info field is -padded with NUL bytes to the header size. - -Usage. - -Reading audio files: - f = sunau.open(file, 'r') -where file is either the name of a file or an open file pointer. 
-The open file pointer must have methods read(), seek(), and close(). -When the setpos() and rewind() methods are not used, the seek() -method is not necessary. - -This returns an instance of a class with the following public methods: - getnchannels() -- returns number of audio channels (1 for - mono, 2 for stereo) - getsampwidth() -- returns sample width in bytes - getframerate() -- returns sampling frequency - getnframes() -- returns number of audio frames - getcomptype() -- returns compression type ('NONE' or 'ULAW') - getcompname() -- returns human-readable version of - compression type ('not compressed' matches 'NONE') - getparams() -- returns a namedtuple consisting of all of the - above in the above order - getmarkers() -- returns None (for compatibility with the - aifc module) - getmark(id) -- raises an error since the mark does not - exist (for compatibility with the aifc module) - readframes(n) -- returns at most n frames of audio - rewind() -- rewind to the beginning of the audio stream - setpos(pos) -- seek to the specified position - tell() -- return the current position - close() -- close the instance (make it unusable) -The position returned by tell() and the position given to setpos() -are compatible and have nothing to do with the actual position in the -file. -The close() method is called automatically when the class instance -is destroyed. - -Writing audio files: - f = sunau.open(file, 'w') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods write(), tell(), seek(), and -close(). 
- -This returns an instance of a class with the following public methods: - setnchannels(n) -- set the number of channels - setsampwidth(n) -- set the sample width - setframerate(n) -- set the frame rate - setnframes(n) -- set the number of frames - setcomptype(type, name) - -- set the compression type and the - human-readable compression type - setparams(tuple)-- set all parameters at once - tell() -- return current position in output file - writeframesraw(data) - -- write audio frames without pathing up the - file header - writeframes(data) - -- write audio frames and patch up the file header - close() -- patch up the file header and close the - output file -You should set the parameters before the first writeframesraw or -writeframes. The total number of frames does not need to be set, -but when it is set to the correct value, the header does not have to -be patched up. -It is best to first set all parameters, perhaps possibly the -compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes(b'') or -close() to patch up the sizes in the header. -The close() method is called automatically when the class instance -is destroyed. 
-""" - import sys from _typeshed import Unused from typing import IO, Any, Literal, NamedTuple, NoReturn, overload @@ -129,7 +23,6 @@ AUDIO_FILE_ENCODING_ALAW_8: int AUDIO_UNKNOWN_SIZE: int class _sunau_params(NamedTuple): - """_sunau_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" nchannels: int sampwidth: int framerate: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/symtable.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/symtable.pyi index 7a1700e..eff3c20 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/symtable.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/symtable.pyi @@ -4,9 +4,13 @@ import sys from _collections_abc import dict_keys from collections.abc import Sequence from typing import Any +from typing_extensions import deprecated __all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] +if sys.version_info >= (3, 13): + __all__ += ["SymbolTableType"] + def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: """ Return the toplevel *SymbolTable* for the source code. @@ -16,16 +20,32 @@ def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: """ ... +if sys.version_info >= (3, 13): + from enum import StrEnum + + class SymbolTableType(StrEnum): + MODULE = "module" + FUNCTION = "function" + CLASS = "class" + ANNOTATION = "annotation" + TYPE_ALIAS = "type alias" + TYPE_PARAMETERS = "type parameters" + TYPE_VARIABLE = "type variable" + class SymbolTable: def __init__(self, raw_table: Any, filename: str) -> None: ... - def get_type(self) -> str: - """ - Return the type of the symbol table. + if sys.version_info >= (3, 13): + def get_type(self) -> SymbolTableType: + """ + Return the type of the symbol table. + + The value returned is one of the values in + the ``SymbolTableType`` enumeration. + """ + ... + else: + def get_type(self) -> str: ... 
- The values returned are 'class', 'module', 'function', - 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. - """ - ... def get_id(self) -> int: """ Return an identifier for the table. @@ -127,12 +147,14 @@ class Function(SymbolTable): ... class Class(SymbolTable): - def get_methods(self) -> tuple[str, ...]: - """ - Return a tuple of methods declared in the class. + if sys.version_info < (3, 16): + @deprecated("deprecated in Python 3.14, will be removed in Python 3.16") + def get_methods(self) -> tuple[str, ...]: + """ + Return a tuple of methods declared in the class. - """ - ... + """ + ... class Symbol: def __init__( @@ -159,6 +181,9 @@ class Symbol: """ ... + if sys.version_info >= (3, 14): + def is_type_parameter(self) -> bool: ... + def is_global(self) -> bool: """ Return *True* if the symbol is global. @@ -189,6 +214,9 @@ class Symbol: not assigned to. """ ... + if sys.version_info >= (3, 14): + def is_free_class(self) -> bool: ... + def is_imported(self) -> bool: """ Return *True* if the symbol is created from @@ -198,6 +226,10 @@ class Symbol: def is_assigned(self) -> bool: """Return *True* if a symbol is assigned to.""" ... + if sys.version_info >= (3, 14): + def is_comp_iter(self) -> bool: ... + def is_comp_cell(self) -> bool: ... + def is_namespace(self) -> bool: """ Returns *True* if name binding introduces new namespace. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sys/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sys/__init__.pyi index 7ec3477..cdf4fe8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sys/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/sys/__init__.pyi @@ -450,10 +450,10 @@ def getprofile() -> ProfileFunction | None: ... def setprofile(function: ProfileFunction | None, /) -> None: """ - setprofile(function) + Set the profiling function. - Set the profiling function. 
It will be called on each function call - and return. See the profiler chapter in the library manual. + It will be called on each function call and return. See the profiler + chapter in the library manual. """ ... def gettrace() -> TraceFunction | None: @@ -465,10 +465,10 @@ def gettrace() -> TraceFunction | None: ... def settrace(function: TraceFunction | None, /) -> None: """ - settrace(function) + Set the global debug tracing function. - Set the global debug tracing function. It will be called on each - function call. See the debugger chapter in the library manual. + It will be called on each function call. See the debugger chapter + in the library manual. """ ... @@ -512,11 +512,7 @@ def is_finalizing() -> bool: """Return True if Python is exiting.""" ... def breakpointhook(*args: Any, **kwargs: Any) -> Any: - """ - breakpointhook(*args, **kws) - - This hook function is called by built-in breakpoint(). - """ + """This hook function is called by built-in breakpoint().""" ... __breakpointhook__ = breakpointhook # Contains the original value of breakpointhook @@ -594,11 +590,7 @@ def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: """Adds a new audit hook callback.""" ... def audit(event: str, /, *args: Any) -> None: - """ - audit(event, *args) - - Passes the event to any audit hooks that are attached. - """ + """Passes the event to any audit hooks that are attached.""" ... _AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None @@ -652,9 +644,13 @@ def get_int_max_str_digits() -> int: ... if sys.version_info >= (3, 12): - def getunicodeinternedsize() -> int: - """Return the number of elements of the unicode interned dictionary""" - ... + if sys.version_info >= (3, 13): + def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: + """Return the number of elements of the unicode interned dictionary""" + ... + else: + def getunicodeinternedsize() -> int: ... 
+ def deactivate_stack_trampoline() -> None: """ Deactivate the current stack profiler trampoline backend. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/syslog.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/syslog.pyi index d539dd5..4db3c95 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/syslog.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/syslog.pyi @@ -1,55 +1,77 @@ import sys -from typing import Literal, overload +from typing import Final, overload if sys.platform != "win32": - LOG_ALERT: Literal[1] - LOG_AUTH: Literal[32] - LOG_AUTHPRIV: Literal[80] - LOG_CONS: Literal[2] - LOG_CRIT: Literal[2] - LOG_CRON: Literal[72] - LOG_DAEMON: Literal[24] - LOG_DEBUG: Literal[7] - LOG_EMERG: Literal[0] - LOG_ERR: Literal[3] - LOG_INFO: Literal[6] - LOG_KERN: Literal[0] - LOG_LOCAL0: Literal[128] - LOG_LOCAL1: Literal[136] - LOG_LOCAL2: Literal[144] - LOG_LOCAL3: Literal[152] - LOG_LOCAL4: Literal[160] - LOG_LOCAL5: Literal[168] - LOG_LOCAL6: Literal[176] - LOG_LOCAL7: Literal[184] - LOG_LPR: Literal[48] - LOG_MAIL: Literal[16] - LOG_NDELAY: Literal[8] - LOG_NEWS: Literal[56] - LOG_NOTICE: Literal[5] - LOG_NOWAIT: Literal[16] - LOG_ODELAY: Literal[4] - LOG_PERROR: Literal[32] - LOG_PID: Literal[1] - LOG_SYSLOG: Literal[40] - LOG_USER: Literal[8] - LOG_UUCP: Literal[64] - LOG_WARNING: Literal[4] + LOG_ALERT: Final = 1 + LOG_AUTH: Final = 32 + LOG_AUTHPRIV: Final = 80 + LOG_CONS: Final = 2 + LOG_CRIT: Final = 2 + LOG_CRON: Final = 72 + LOG_DAEMON: Final = 24 + LOG_DEBUG: Final = 7 + LOG_EMERG: Final = 0 + LOG_ERR: Final = 3 + LOG_INFO: Final = 6 + LOG_KERN: Final = 0 + LOG_LOCAL0: Final = 128 + LOG_LOCAL1: Final = 136 + LOG_LOCAL2: Final = 144 + LOG_LOCAL3: Final = 152 + LOG_LOCAL4: Final = 160 + LOG_LOCAL5: Final = 168 + LOG_LOCAL6: Final = 176 + LOG_LOCAL7: Final = 184 + LOG_LPR: Final = 48 + LOG_MAIL: Final = 16 + LOG_NDELAY: Final = 8 + LOG_NEWS: Final = 56 + LOG_NOTICE: 
Final = 5 + LOG_NOWAIT: Final = 16 + LOG_ODELAY: Final = 4 + LOG_PERROR: Final = 32 + LOG_PID: Final = 1 + LOG_SYSLOG: Final = 40 + LOG_USER: Final = 8 + LOG_UUCP: Final = 64 + LOG_WARNING: Final = 4 if sys.version_info >= (3, 13): - LOG_FTP: Literal[88] - LOG_INSTALL: Literal[112] - LOG_LAUNCHD: Literal[192] - LOG_NETINFO: Literal[96] - LOG_RAS: Literal[120] - LOG_REMOTEAUTH: Literal[104] + LOG_FTP: Final = 88 - def LOG_MASK(pri: int, /) -> int: ... - def LOG_UPTO(pri: int, /) -> int: ... - def closelog() -> None: ... - def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... - def setlogmask(maskpri: int, /) -> int: ... + if sys.platform == "darwin": + LOG_INSTALL: Final = 112 + LOG_LAUNCHD: Final = 192 + LOG_NETINFO: Final = 96 + LOG_RAS: Final = 120 + LOG_REMOTEAUTH: Final = 104 + + def LOG_MASK(pri: int, /) -> int: + """Calculates the mask for the individual priority pri.""" + ... + def LOG_UPTO(pri: int, /) -> int: + """Calculates the mask for all priorities up to and including pri.""" + ... + def closelog() -> None: + """Reset the syslog module values and call the system library closelog().""" + ... + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: + """Set logging options of subsequent syslog() calls.""" + ... + def setlogmask(maskpri: int, /) -> int: + """Set the priority mask to maskpri and return the previous mask value.""" + ... @overload - def syslog(priority: int, message: str) -> None: ... + def syslog(priority: int, message: str) -> None: + """ + syslog([priority=LOG_INFO,] message) + Send the string message to the system logger. + """ + ... @overload - def syslog(message: str) -> None: ... + def syslog(message: str) -> None: + """ + syslog([priority=LOG_INFO,] message) + Send the string message to the system logger. + """ + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tarfile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tarfile.pyi index f0bf367..c0b5571 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tarfile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tarfile.pyi @@ -105,10 +105,11 @@ PAX_NAME_FIELDS: set[str] ENCODING: str +@overload def open( name: StrOrBytesPath | None = None, - mode: str = "r", - fileobj: IO[bytes] | None = None, # depends on mode + mode: Literal["r", "r:*", "r:", "r:gz", "r:bz2", "r:xz"] = "r", + fileobj: IO[bytes] | None = None, bufsize: int = 10240, *, format: int | None = ..., @@ -120,7 +121,327 @@ def open( pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - compresslevel: int | None = ..., +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an 
uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... +@overload +def open( + name: StrOrBytesPath | None, + mode: Literal["x", "x:", "a", "a:", "w", "w:"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression 
+ 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... +@overload +def open( + name: StrOrBytesPath | None = None, + *, + mode: Literal["x", "x:", "a", "a:", "w", "w:"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... 
+@overload +def open( + name: StrOrBytesPath | None, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma 
compressed stream for writing + """ + ... +@overload +def open( + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:gz", "x:bz2", "w:gz", "w:bz2"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int = 9, +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 
compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... +@overload +def open( + name: StrOrBytesPath | None, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open 
a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... +@overload +def open( + name: StrOrBytesPath | None = None, + *, + mode: Literal["x:xz", "w:xz"], + fileobj: _Fileobj | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., ) -> TarFile: """ @@ -160,6 +481,64 @@ def open( """ ... +# TODO: Temporary fallback for modes containing pipe characters. These don't +# work with mypy 1.10, but this should be fixed with mypy 1.11. +# https://github.com/python/typeshed/issues/12182 +@overload +def open( + name: StrOrBytesPath | None = None, + *, + mode: str, + fileobj: IO[bytes] | None = None, + bufsize: int = 10240, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + preset: int | None = ..., +) -> TarFile: + """ + Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + ... + class ExFileObject(io.BufferedReader): def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... @@ -202,7 +581,17 @@ class TarFile: errorlevel: int | None = None, copybufsize: int | None = None, # undocumented stream: bool = False, - ) -> None: ... + ) -> None: + """ + Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. 
+ If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + ... else: def __init__( self, @@ -219,17 +608,7 @@ class TarFile: debug: int | None = None, errorlevel: int | None = None, copybufsize: int | None = None, # undocumented - ) -> None: - """ - Open an (uncompressed) tar archive `name'. `mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. `mode' - defaults to 'r'. - If `fileobj' is given, it is used for reading or writing data. If it - can be determined, `mode' is overridden by `fileobj's mode. - `fileobj' is not closed, when TarFile is closed. - """ - ... + ) -> None: ... def __enter__(self) -> Self: ... def __exit__( @@ -609,10 +988,10 @@ class TarFile: ... def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = None) -> None: """ - Add the TarInfo object `tarinfo' to the archive. If `fileobj' is - given, it should be a binary file, and tarinfo.size bytes are read - from it and added to the archive. You can create TarInfo objects - directly, or by using gettarinfo(). + Add the TarInfo object `tarinfo' to the archive. If `tarinfo' represents + a non zero-size regular file, the `fileobj' argument should be a binary file, + and tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects directly, or by using gettarinfo(). """ ... 
def gettarinfo( @@ -702,7 +1081,7 @@ class TarInfo: name: str path: str size: int - mtime: int + mtime: int | float chksum: int devmajor: int devminor: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/telnetlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/telnetlib.pyi index 8fd2937..294a1cb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/telnetlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/telnetlib.pyi @@ -1,36 +1,3 @@ -r""" -TELNET client class. - -Based on RFC 854: TELNET Protocol Specification, by J. Postel and -J. Reynolds - -Example: - ->>> from telnetlib import Telnet ->>> tn = Telnet('www.python.org', 79) # connect to finger port ->>> tn.write(b'guido\r\n') ->>> print(tn.read_all()) -Login Name TTY Idle When Where -guido Guido van Rossum pts/2 snag.cnri.reston.. - ->>> - -Note that read_all() won't read until eof -- it just reads some data --- but it guarantees to read at least one byte unless EOF is hit. - -It is possible to pass a Telnet object to a selector in order to wait until -more data is available. Note that in this case, read_eager() may return b'' -even if there was data on the socket, because the protocol negotiation may have -eaten the data. This is why EOFError is needed in some cases to distinguish -between "no data" and "connection closed" (since the socket also appears ready -for reading when it is closed). - -To do: -- option negotiation -- timeout should be intrinsic to the connection object instead of an - option on one of the read calls only -""" - import socket from collections.abc import Callable, Sequence from re import Match, Pattern @@ -120,242 +87,37 @@ EXOPL: bytes NOOPT: bytes class Telnet: - """ - Telnet interface class. - - An instance of this class represents a connection to a telnet - server. The instance is initially not connected; the open() - method must be used to establish a connection. 
Alternatively, the - host name and optional port number can be passed to the - constructor, too. - - Don't try to reopen an already connected instance. - - This class has many read_*() methods. Note that some of them - raise EOFError when the end of the connection is read, because - they can return an empty string for other reasons. See the - individual doc strings. - - read_until(expected, [timeout]) - Read until the expected string has been seen, or a timeout is - hit (default is no timeout); may block. - - read_all() - Read all data until EOF; may block. - - read_some() - Read at least one byte or EOF; may block. - - read_very_eager() - Read all data available already queued or on the socket, - without blocking. - - read_eager() - Read either data already queued or some data available on the - socket, without blocking. - - read_lazy() - Read all data in the raw queue (processing it first), without - doing any socket I/O. - - read_very_lazy() - Reads all data in the cooked queue, without doing any socket - I/O. - - read_sb_data() - Reads available data between SB ... SE sequence. Don't block. - - set_option_negotiation_callback(callback) - Each time a telnet option is read on the input flow, this callback - (if set) is called with the following parameters : - callback(telnet socket, command, option) - option will be chr(0) when there is no option. - No other action is done afterwards by telnetlib. - """ host: str | None # undocumented - def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: - """ - Constructor. - - When called without arguments, create an unconnected instance. - With a hostname argument, it connects the instance; port number - and timeout are optional. - """ - ... - def open(self, host: str, port: int = 0, timeout: float = ...) -> None: - """ - Connect to a host. - - The optional second argument is the port number, which - defaults to the standard telnet port (23). 
- - Don't try to reopen an already connected instance. - """ - ... - def msg(self, msg: str, *args: Any) -> None: - """ - Print a debug message, when the debug level is > 0. - - If extra arguments are present, they are substituted in the - message using the standard string formatting operator. - """ - ... - def set_debuglevel(self, debuglevel: int) -> None: - """ - Set the debug level. - - The higher it is, the more debug output you get (on sys.stdout). - """ - ... - def close(self) -> None: - """Close the connection.""" - ... - def get_socket(self) -> socket.socket: - """Return the socket object used internally.""" - ... - def fileno(self) -> int: - """Return the fileno() of the socket object used internally.""" - ... - def write(self, buffer: bytes) -> None: - """ - Write a string to the socket, doubling any IAC characters. - - Can block if the connection is blocked. May raise - OSError if the connection is closed. - """ - ... - def read_until(self, match: bytes, timeout: float | None = None) -> bytes: - """ - Read until a given string is encountered or until timeout. - - When no match is found, return whatever is available instead, - possibly the empty string. Raise EOFError if the connection - is closed and no cooked data is available. - """ - ... - def read_all(self) -> bytes: - """Read all data until EOF; block until connection closed.""" - ... - def read_some(self) -> bytes: - """ - Read at least one byte of cooked data unless EOF is hit. - - Return b'' if EOF is hit. Block if no data is immediately - available. - """ - ... - def read_very_eager(self) -> bytes: - """ - Read everything that's possible without blocking in I/O (eager). - - Raise EOFError if connection closed and no cooked data - available. Return b'' if no cooked data available otherwise. - Don't block unless in the midst of an IAC sequence. - """ - ... - def read_eager(self) -> bytes: - """ - Read readily available data. - - Raise EOFError if connection closed and no cooked data - available. 
Return b'' if no cooked data available otherwise. - Don't block unless in the midst of an IAC sequence. - """ - ... - def read_lazy(self) -> bytes: - """ - Process and return data that's already in the queues (lazy). - - Raise EOFError if connection closed and no data available. - Return b'' if no cooked data available otherwise. Don't block - unless in the midst of an IAC sequence. - """ - ... - def read_very_lazy(self) -> bytes: - """ - Return any data available in the cooked queue (very lazy). - - Raise EOFError if connection closed and no data available. - Return b'' if no cooked data available otherwise. Don't block. - """ - ... - def read_sb_data(self) -> bytes: - """ - Return any data available in the SB ... SE queue. - - Return b'' if no SB ... SE available. Should only be called - after seeing a SB or SE command. When a new SB command is - found, old unread SB data will be discarded. Don't block. - """ - ... - def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: - """Provide a callback function called after each receipt of a telnet option.""" - ... - def process_rawq(self) -> None: - """ - Transfer from raw queue to cooked queue. - - Set self.eof when connection is closed. Don't block unless in - the midst of an IAC sequence. - """ - ... - def rawq_getchar(self) -> bytes: - """ - Get next char from raw queue. - - Block if no data is immediately available. Raise EOFError - when connection is closed. - """ - ... - def fill_rawq(self) -> None: - """ - Fill raw queue from exactly one recv() system call. - - Block if no data is immediately available. Set self.eof when - connection is closed. - """ - ... - def sock_avail(self) -> bool: - """Test whether data is available on the socket.""" - ... - def interact(self) -> None: - """Interaction function, emulates a very dumb telnet client.""" - ... - def mt_interact(self) -> None: - """Multithreaded version of interact().""" - ... 
- def listener(self) -> None: - """Helper for mt_interact() -- this executes in the other thread.""" - ... + sock: socket.socket | None # undocumented + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... + def msg(self, msg: str, *args: Any) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def close(self) -> None: ... + def get_socket(self) -> socket.socket: ... + def fileno(self) -> int: ... + def write(self, buffer: bytes) -> None: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... + def read_all(self) -> bytes: ... + def read_some(self) -> bytes: ... + def read_very_eager(self) -> bytes: ... + def read_eager(self) -> bytes: ... + def read_lazy(self) -> bytes: ... + def read_very_lazy(self) -> bytes: ... + def read_sb_data(self) -> bytes: ... + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: ... + def process_rawq(self) -> None: ... + def rawq_getchar(self) -> bytes: ... + def fill_rawq(self) -> None: ... + def sock_avail(self) -> bool: ... + def interact(self) -> None: ... + def mt_interact(self) -> None: ... + def listener(self) -> None: ... def expect( self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = None - ) -> tuple[int, Match[bytes] | None, bytes]: - """ - Read until one from a list of a regular expressions matches. - - The first argument is a list of regular expressions, either - compiled (re.Pattern instances) or uncompiled (strings). - The optional second argument is a timeout, in seconds; default - is no timeout. - - Return a tuple of three items: the index in the list of the - first regular expression that matches; the re.Match object - returned; and the text read up till and including the match. - - If EOF is read and no text was read, raise EOFError. 
- Otherwise, when nothing matches, return (-1, None, text) where - text is the text received so far (may be the empty string if a - timeout happened). - - If a regular expression ends with a greedy match (e.g. '.*') - or if more than one expression can match the same input, the - results are undeterministic, and may depend on the I/O timing. - """ - ... + ) -> tuple[int, Match[bytes] | None, bytes]: ... def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __del__(self) -> None: - """Destructor -- close the connection.""" - ... + def __del__(self) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tempfile.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tempfile.pyi index 64b5487..7db5970 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tempfile.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tempfile.pyi @@ -196,24 +196,7 @@ else: delete: bool = True, *, errors: str | None = None, - ) -> _TemporaryFileWrapper[str]: - """ - Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is deleted on close (default True). - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - """ - ... + ) -> _TemporaryFileWrapper[str]: ... 
@overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -226,24 +209,7 @@ else: delete: bool = True, *, errors: str | None = None, - ) -> _TemporaryFileWrapper[bytes]: - """ - Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is deleted on close (default True). - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - """ - ... + ) -> _TemporaryFileWrapper[bytes]: ... @overload def NamedTemporaryFile( mode: str = "w+b", @@ -256,24 +222,7 @@ else: delete: bool = True, *, errors: str | None = None, - ) -> _TemporaryFileWrapper[Any]: - """ - Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is deleted on close (default True). - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - """ - ... + ) -> _TemporaryFileWrapper[Any]: ... 
if sys.platform == "win32": TemporaryFile = NamedTemporaryFile @@ -579,17 +528,19 @@ class _TemporaryFileWrapper(IO[AnyStr]): def truncate(self, size: int | None = ...) -> int: ... def writable(self) -> bool: ... @overload - def write(self: _TemporaryFileWrapper[str], s: str) -> int: ... + def write(self: _TemporaryFileWrapper[str], s: str, /) -> int: ... @overload - def write(self: _TemporaryFileWrapper[bytes], s: ReadableBuffer) -> int: ... + def write(self: _TemporaryFileWrapper[bytes], s: ReadableBuffer, /) -> int: ... @overload - def write(self, s: AnyStr) -> int: ... + def write(self, s: AnyStr, /) -> int: ... @overload def writelines(self: _TemporaryFileWrapper[str], lines: Iterable[str]) -> None: ... @overload def writelines(self: _TemporaryFileWrapper[bytes], lines: Iterable[ReadableBuffer]) -> None: ... @overload def writelines(self, lines: Iterable[AnyStr]) -> None: ... + @property + def closed(self) -> bool: ... if sys.version_info >= (3, 11): _SpooledTemporaryFileBase = io.IOBase @@ -825,7 +776,7 @@ class TemporaryDirectory(Generic[AnyStr]): # The overloads overlap, but they should still work fine. @overload -def mkstemp( # type: ignore[overload-overlap] +def mkstemp( suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False ) -> tuple[int, str]: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/termios.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/termios.pyi index d14e158..fa3a624 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/termios.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/termios.pyi @@ -319,7 +319,20 @@ if sys.platform != "win32": """ ... if sys.version_info >= (3, 11): - def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: ... - def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: ... 
+ def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: + """ + Get the tty winsize for file descriptor fd. + + Returns a tuple (ws_row, ws_col). + """ + ... + def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: + """ + Set the tty winsize for file descriptor fd. + + The winsize to be set is taken from the winsize argument, which + is a two-item tuple (ws_row, ws_col) like the one returned by tcgetwinsize(). + """ + ... class error(Exception): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/threading.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/threading.pyi index 8659e29..1cef437 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/threading.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/threading.pyi @@ -77,8 +77,6 @@ def currentThread() -> Thread: ... def get_ident() -> int: """ - get_ident() -> integer - Return a non-zero integer that uniquely identifies the current thread amongst other threads that exist simultaneously. This may be used to identify per-thread resources. @@ -150,10 +148,8 @@ if sys.version_info >= (3, 10): """Get the profiler function as set by threading.setprofile().""" ... -def stack_size(size: int = ...) -> int: +def stack_size(size: int = 0, /) -> int: """ - stack_size([size]) -> size - Return the thread stack size used when creating new threads. The optional size argument specifies the stack size (in bytes) to be used for subsequently created threads, and must be 0 (use platform or @@ -260,7 +256,7 @@ class Thread: or until the optional timeout occurs. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). 
As join() always returns None, you must call is_alive() after join() to decide whether a timeout happened -- if the thread is still alive, the join() call timed out. @@ -332,22 +328,54 @@ class _DummyThread(Thread): @final class Lock: """ - allocate_lock() -> lock object - (allocate() is an obsolete synonym) + A lock object is a synchronization primitive. To create a lock, + call threading.Lock(). Methods are: + + acquire() -- lock the lock, possibly blocking until it can be obtained + release() -- unlock of the lock + locked() -- test whether the lock is currently locked - Create a new lock object. See help(type(threading.Lock())) for - information about locks. + A lock is not owned by the thread that locked it; another thread may + unlock it. A thread attempting to lock a lock that it has already locked + will block until another thread unlocks it. Deadlocks may ensue. """ - def __enter__(self) -> bool: ... + def __enter__(self) -> bool: + """Lock the lock.""" + ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> None: ... - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... - def release(self) -> None: ... - def locked(self) -> bool: ... - def acquire_lock(self, blocking: bool = ..., timeout: float = ...) -> bool: ... # undocumented - def release_lock(self) -> None: ... # undocumented - def locked_lock(self) -> bool: ... # undocumented + ) -> None: + """Release the lock.""" + ... + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: + """ + Lock the lock. Without argument, this blocks if the lock is already + locked (even by the same thread), waiting for another thread to release + the lock, and return True once the lock is acquired. + With an argument, this will only block if the argument is true, + and the return value reflects whether the lock is acquired. + The blocking operation is interruptible. + """ + ... 
+ def release(self) -> None: + """ + Release the lock, allowing another thread that is blocked waiting for + the lock to acquire the lock. The lock must be in the locked state, + but it needn't be locked by the same thread that unlocks it. + """ + ... + def locked(self) -> bool: + """Return whether the lock is in the locked state.""" + ... + def acquire_lock(self, blocking: bool = ..., timeout: float = ...) -> bool: + """An obsolete synonym of acquire().""" + ... + def release_lock(self) -> None: + """An obsolete synonym of release().""" + ... + def locked_lock(self) -> bool: + """An obsolete synonym of locked().""" + ... @final class _RLock: @@ -439,7 +467,7 @@ class Condition: awakened or timed out, it re-acquires the lock and returns. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). When the underlying lock is an RLock, it is not released using its @@ -621,7 +649,7 @@ class Event: the optional timeout occurs. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). This method returns the internal flag on exit, so it will always return diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/time.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/time.pyi index 9c0c1e6..395c97a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/time.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/time.pyi @@ -3,7 +3,7 @@ This module provides various functions to manipulate time values. There are two standard representations of time. One is the number of seconds since the Epoch, in UTC (a.k.a. GMT). 
It may be an integer -or a floating point number (to represent fractions of seconds). +or a floating-point number (to represent fractions of seconds). The epoch is the point where the time starts, the return value of time.gmtime(0). It is January 1, 1970, 00:00:00 (UTC) on all platforms. @@ -165,7 +165,7 @@ def localtime(seconds: float | None = None, /) -> struct_time: ... def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: """ - mktime(tuple) -> floating point number + mktime(tuple) -> floating-point number Convert a time tuple in local time to seconds since the Epoch. Note that mktime(gmtime(0)) will not generally return zero for most @@ -178,7 +178,7 @@ def sleep(seconds: float, /) -> None: sleep(seconds) Delay execution for a given number of seconds. The argument may be - a floating point number for subsecond precision. + a floating-point number for subsecond precision. """ ... def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: @@ -241,7 +241,7 @@ def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struc ... def time() -> float: """ - time() -> floating point number + time() -> floating-point number Return the current time in seconds since the Epoch. Fractions of a second may be present if the system clock provides them. @@ -303,17 +303,13 @@ def process_time() -> float: if sys.platform != "win32": def clock_getres(clk_id: int, /) -> float: """ - clock_getres(clk_id) -> floating point number + clock_getres(clk_id) -> floating-point number Return the resolution (precision) of the specified clock clk_id. """ ... def clock_gettime(clk_id: int, /) -> float: - """ - clock_gettime(clk_id) -> float - - Return the time of the specified clock clk_id. - """ + """Return the time of the specified clock clk_id as a float.""" ... 
def clock_settime(clk_id: int, time: float, /) -> None: """ @@ -325,11 +321,7 @@ if sys.platform != "win32": if sys.platform != "win32": def clock_gettime_ns(clk_id: int, /) -> int: - """ - clock_gettime_ns(clk_id) -> int - - Return the time of the specified clock clk_id as nanoseconds. - """ + """Return the time of the specified clock clk_id as nanoseconds (int).""" ... def clock_settime_ns(clock_id: int, time: int, /) -> int: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/__init__.pyi index b9f7867..e50816a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/__init__.pyi @@ -34,7 +34,7 @@ tk.mainloop() import _tkinter import sys from _typeshed import Incomplete, StrEnum, StrOrBytesPath -from collections.abc import Callable, Mapping, Sequence +from collections.abc import Callable, Iterable, Mapping, Sequence from tkinter.constants import * from tkinter.font import _FontDescription from types import TracebackType @@ -1752,64 +1752,49 @@ class Wm: @overload def wm_attributes(self) -> tuple[Any, ...]: """ - This subcommand returns or sets platform specific attributes + Return or sets platform specific attributes. - The first form returns a list of the platform specific flags and - their values. The second form returns the value for the specific - option. The third form sets one or more of the values. The values - are as follows: + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. - On Windows, -disabled gets or sets whether the window is in a - disabled state. 
-toolwindow gets or sets the style of the window - to toolwindow (as defined in the MSDN). -topmost gets or sets - whether this is a topmost window (displays above all other - windows). - - On Macintosh, XXXXX - - On Unix, there are currently no special attribute values. + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. """ ... @overload def wm_attributes(self, option: str, /): """ - This subcommand returns or sets platform specific attributes - - The first form returns a list of the platform specific flags and - their values. The second form returns the value for the specific - option. The third form sets one or more of the values. The values - are as follows: + Return or sets platform specific attributes. - On Windows, -disabled gets or sets whether the window is in a - disabled state. -toolwindow gets or sets the style of the window - to toolwindow (as defined in the MSDN). -topmost gets or sets - whether this is a topmost window (displays above all other - windows). + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. - On Macintosh, XXXXX - - On Unix, there are currently no special attribute values. + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. """ ... @overload def wm_attributes(self, option: str, value, /, *__other_option_value_pairs: Any) -> None: """ - This subcommand returns or sets platform specific attributes - - The first form returns a list of the platform specific flags and - their values. The second form returns the value for the specific - option. 
The third form sets one or more of the values. The values - are as follows: - - On Windows, -disabled gets or sets whether the window is in a - disabled state. -toolwindow gets or sets the style of the window - to toolwindow (as defined in the MSDN). -topmost gets or sets - whether this is a topmost window (displays above all other - windows). + Return or sets platform specific attributes. - On Macintosh, XXXXX + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. - On Unix, there are currently no special attribute values. + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. """ ... attributes = wm_attributes @@ -4369,11 +4354,12 @@ class Listbox(Widget, XView, YView): selectborderwidth: _ScreenUnits = 0, selectforeground: str = ..., # from listbox man page: "The value of the [selectmode] option may be - # arbitrary, but the default bindings expect it to be ..." + # arbitrary, but the default bindings expect it to be either single, + # browse, multiple, or extended" # # I have never seen anyone setting this to something else than what # "the default bindings expect", but let's support it anyway. 
- selectmode: str = "browse", + selectmode: str | Literal["single", "browse", "multiple", "extended"] = "browse", # noqa: Y051 setgrid: bool = False, state: Literal["normal", "disabled"] = "normal", takefocus: _TakeFocusValue = "", @@ -4418,7 +4404,7 @@ class Listbox(Widget, XView, YView): selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., selectforeground: str = ..., - selectmode: str = ..., + selectmode: str | Literal["single", "browse", "multiple", "extended"] = ..., # noqa: Y051 setgrid: bool = ..., state: Literal["normal", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -5464,6 +5450,9 @@ class Scrollbar(Widget): ... _TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc +_WhatToCount: TypeAlias = Literal[ + "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels" +] class Text(Widget, XView, YView): """Text widget which can display text in various forms.""" @@ -5628,21 +5617,349 @@ class Text(Widget, XView, YView): relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. """ ... - def count(self, index1, index2, *args): - """ - Counts the number of relevant things between the two indices. - If index1 is after index2, the result will be a negative number - (and this holds for each of the possible options). + if sys.version_info >= (3, 13): + @overload + def count(self, index1: _TextIndex, index2: _TextIndex, *, return_ints: Literal[True]) -> int: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. 
+ Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], /, *, return_ints: Literal[True] + ) -> int: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + *, + return_ints: Literal[True], + ) -> int: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. 
+ Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + *, + return_ints: Literal[True], + ) -> int: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /, *, return_ints: Literal[True] + ) -> tuple[int, int]: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. 
+ Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + return_ints: Literal[True], + ) -> tuple[int, ...]: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count(self, index1: _TextIndex, index2: _TextIndex, *, return_ints: Literal[False] = False) -> tuple[int] | None: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. 
+ The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg: _WhatToCount | Literal["update"], + /, + *, + return_ints: Literal[False] = False, + ) -> tuple[int] | None: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + *, + return_ints: Literal[False] = False, + ) -> int | None: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). 
+ + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + *, + return_ints: Literal[False] = False, + ) -> int | None: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + *, + return_ints: Literal[False] = False, + ) -> tuple[int, int]: + """ + Counts the number of relevant things between the two indices. 
+ + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + return_ints: Literal[False] = False, + ) -> tuple[int, ...]: + """ + Counts the number of relevant things between the two indices. + + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + ... + else: + @overload + def count(self, index1: _TextIndex, index2: _TextIndex) -> tuple[int] | None: ... 
+ @overload + def count( + self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], / + ) -> tuple[int] | None: ... + @overload + def count(self, index1: _TextIndex, index2: _TextIndex, arg1: Literal["update"], arg2: _WhatToCount, /) -> int | None: ... + @overload + def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: Literal["update"], /) -> int | None: ... + @overload + def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /) -> tuple[int, int]: ... + @overload + def count( + self, + index1: _TextIndex, + index2: _TextIndex, + arg1: _WhatToCount | Literal["update"], + arg2: _WhatToCount | Literal["update"], + arg3: _WhatToCount | Literal["update"], + /, + *args: _WhatToCount | Literal["update"], + ) -> tuple[int, ...]: ... - The actual items which are counted depends on the options given by - args. The result is a list of integers, one for the result of each - counting option given. Valid counting options are "chars", - "displaychars", "displayindices", "displaylines", "indices", - "lines", "xpixels" and "ypixels". There is an additional possible - option "update", which if given then all subsequent options ensure - that any possible out of date information is recalculated. - """ - ... @overload def debug(self, boolean: None = None) -> bool: """ @@ -6268,23 +6585,112 @@ class PhotoImage(Image, _PhotoImageLike): """Return the value of OPTION.""" ... def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' - def copy(self) -> PhotoImage: - """Return a new PhotoImage with the same image as this widget.""" - ... - def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: - """ - Return a new PhotoImage with the same image as this widget - but zoom it with a factor of x in the X direction and y in the Y - direction. If y is not given, the default value is the same as x. - """ - ... 
- def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: - """ - Return a new PhotoImage based on the same image as this widget - but use only every Xth or Yth pixel. If y is not given, the - default value is the same as x. - """ - ... + if sys.version_info >= (3, 13): + def copy( + self, + *, + from_coords: Iterable[int] | None = None, + zoom: int | tuple[int, int] | list[int] | None = None, + subsample: int | tuple[int, int] | list[int] | None = None, + ) -> PhotoImage: + """ + Return a new PhotoImage with the same image as this widget. + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is the bottom-right corner of the source image. + The pixels copied will include the left and top edges of the + specified rectangle but not the bottom or right edges. If the + FROM_COORDS option is not given, the default is the whole source + image. + + If SUBSAMPLE or ZOOM are specified, the image is transformed as in + the subsample() or zoom() methods. The value must be a single + integer or a pair of integers. + """ + ... + def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: + """ + Return a new PhotoImage based on the same image as this widget + but use only every Xth or Yth pixel. If Y is not given, the + default value is the same as X. + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied, as in the copy() method. + """ + ... + def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: + """ + Return a new PhotoImage with the same image as this widget + but zoom it with a factor of X in the X direction and Y in the Y + direction. 
If Y is not given, the default value is the same as X. + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied, as in the copy() method. + """ + ... + def copy_replace( + self, + sourceImage: PhotoImage | str, + *, + from_coords: Iterable[int] | None = None, + to: Iterable[int] | None = None, + shrink: bool = False, + zoom: int | tuple[int, int] | list[int] | None = None, + subsample: int | tuple[int, int] | list[int] | None = None, + # `None` defaults to overlay. + compositingrule: Literal["overlay", "set"] | None = None, + ) -> None: + """ + Copy a region from the source image (which must be a PhotoImage) to + this image, possibly with pixel zooming and/or subsampling. If no + options are specified, this command copies the whole of the source + image into this image, starting at coordinates (0, 0). + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is the bottom-right corner of the source image. + The pixels copied will include the left and top edges of the + specified rectangle but not the bottom or right edges. If the + FROM_COORDS option is not given, the default is the whole source + image. + + The TO option specifies a rectangular sub-region of the destination + image to be affected. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is (x1,y1) plus the size of the source region + (after subsampling and zooming, if specified). If x2 and y2 are + specified, the source region will be replicated if necessary to fill + the destination region in a tiled fashion. 
+ + If SHRINK is true, the size of the destination image should be + reduced, if necessary, so that the region being copied into is at + the bottom-right corner of the image. + + If SUBSAMPLE or ZOOM are specified, the image is transformed as in + the subsample() or zoom() methods. The value must be a single + integer or a pair of integers. + + The COMPOSITINGRULE option specifies how transparent pixels in the + source image are combined with the destination image. When a + compositing rule of 'overlay' is set, the old contents of the + destination image are visible, as if the source image were printed + on a piece of transparent film and placed over the top of the + destination. When a compositing rule of 'set' is set, the old + contents of the destination image are discarded and the source image + is used as-is. The default compositing rule is 'overlay'. + """ + ... + else: + def copy(self) -> PhotoImage: ... + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def get(self, x: int, y: int) -> tuple[int, int, int]: """Return the color (red, green, blue) of the pixel at X,Y.""" ... @@ -6306,12 +6712,144 @@ class PhotoImage(Image, _PhotoImageLike): position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) """ ... - def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: - """ - Write image to file FILENAME in FORMAT starting from - position FROM_COORDS. - """ - ... + if sys.version_info >= (3, 13): + def read( + self, + filename: StrOrBytesPath, + format: str | None = None, + *, + from_coords: Iterable[int] | None = None, + to: Iterable[int] | None = None, + shrink: bool = False, + ) -> None: + """ + Reads image data from the file named FILENAME into the image. + + The FORMAT option specifies the format of the image data in the + file. 
+ + The FROM_COORDS option specifies a rectangular sub-region of the image + file data to be copied to the destination image. It must be a tuple + or a list of 1 to 4 integers (x1, y1, x2, y2). (x1, y1) and + (x2, y2) specify diagonally opposite corners of the rectangle. If + x2 and y2 are not specified, the default value is the bottom-right + corner of the source image. The default, if this option is not + specified, is the whole of the image in the image file. + + The TO option specifies the coordinates of the top-left corner of + the region of the image into which data from filename are to be + read. The default is (0, 0). + + If SHRINK is true, the size of the destination image will be + reduced, if necessary, so that the region into which the image file + data are read is at the bottom-right corner of the image. + """ + ... + def write( + self, + filename: StrOrBytesPath, + format: str | None = None, + from_coords: Iterable[int] | None = None, + *, + background: str | None = None, + grayscale: bool = False, + ) -> None: + """ + Writes image data from the image to a file named FILENAME. + + The FORMAT option specifies the name of the image file format + handler to be used to write the data to the file. If this option + is not given, the format is guessed from the file extension. + + The FROM_COORDS option specifies a rectangular region of the image + to be written to the image file. It must be a tuple or a list of 1 + to 4 integers (x1, y1, x2, y2). If only x1 and y1 are specified, + the region extends from (x1,y1) to the bottom-right corner of the + image. If all four coordinates are given, they specify diagonally + opposite corners of the rectangular region. The default, if this + option is not given, is the whole image. + + If BACKGROUND is specified, the data will not contain any + transparency information. In all transparent pixels the color will + be replaced by the specified color. + + If GRAYSCALE is true, the data will not contain color information. 
+ All pixel data will be transformed into grayscale. + """ + ... + @overload + def data( + self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False + ) -> bytes: + """ + Returns image data. + + The FORMAT option specifies the name of the image file format + handler to be used. If this option is not given, this method uses + a format that consists of a tuple (one element per row) of strings + containings space separated (one element per pixel/column) colors + in “#RRGGBB” format (where RR is a pair of hexadecimal digits for + the red channel, GG for green, and BB for blue). + + The FROM_COORDS option specifies a rectangular region of the image + to be returned. It must be a tuple or a list of 1 to 4 integers + (x1, y1, x2, y2). If only x1 and y1 are specified, the region + extends from (x1,y1) to the bottom-right corner of the image. If + all four coordinates are given, they specify diagonally opposite + corners of the rectangular region, including (x1, y1) and excluding + (x2, y2). The default, if this option is not given, is the whole + image. + + If BACKGROUND is specified, the data will not contain any + transparency information. In all transparent pixels the color will + be replaced by the specified color. + + If GRAYSCALE is true, the data will not contain color information. + All pixel data will be transformed into grayscale. + """ + ... + @overload + def data( + self, + format: None = None, + *, + from_coords: Iterable[int] | None = None, + background: str | None = None, + grayscale: bool = False, + ) -> tuple[str, ...]: + """ + Returns image data. + + The FORMAT option specifies the name of the image file format + handler to be used. 
If this option is not given, this method uses + a format that consists of a tuple (one element per row) of strings + containings space separated (one element per pixel/column) colors + in “#RRGGBB” format (where RR is a pair of hexadecimal digits for + the red channel, GG for green, and BB for blue). + + The FROM_COORDS option specifies a rectangular region of the image + to be returned. It must be a tuple or a list of 1 to 4 integers + (x1, y1, x2, y2). If only x1 and y1 are specified, the region + extends from (x1,y1) to the bottom-right corner of the image. If + all four coordinates are given, they specify diagonally opposite + corners of the rectangular region, including (x1, y1) and excluding + (x2, y2). The default, if this option is not given, is the whole + image. + + If BACKGROUND is specified, the data will not contain any + transparency information. In all transparent pixels the color will + be replaced by the specified color. + + If GRAYSCALE is true, the data will not contain color information. + All pixel data will be transformed into grayscale. + """ + ... + + else: + def write( + self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None + ) -> None: ... + def transparency_get(self, x: int, y: int) -> bool: """Return True if the pixel at x,y is transparent.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/constants.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/constants.pyi index 74fa72a..fbfe8b4 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/constants.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/constants.pyi @@ -1,80 +1,80 @@ -from typing import Literal +from typing import Final # These are not actually bools. 
See #4669 -NO: bool -YES: bool -TRUE: bool -FALSE: bool -ON: bool -OFF: bool -N: Literal["n"] -S: Literal["s"] -W: Literal["w"] -E: Literal["e"] -NW: Literal["nw"] -SW: Literal["sw"] -NE: Literal["ne"] -SE: Literal["se"] -NS: Literal["ns"] -EW: Literal["ew"] -NSEW: Literal["nsew"] -CENTER: Literal["center"] -NONE: Literal["none"] -X: Literal["x"] -Y: Literal["y"] -BOTH: Literal["both"] -LEFT: Literal["left"] -TOP: Literal["top"] -RIGHT: Literal["right"] -BOTTOM: Literal["bottom"] -RAISED: Literal["raised"] -SUNKEN: Literal["sunken"] -FLAT: Literal["flat"] -RIDGE: Literal["ridge"] -GROOVE: Literal["groove"] -SOLID: Literal["solid"] -HORIZONTAL: Literal["horizontal"] -VERTICAL: Literal["vertical"] -NUMERIC: Literal["numeric"] -CHAR: Literal["char"] -WORD: Literal["word"] -BASELINE: Literal["baseline"] -INSIDE: Literal["inside"] -OUTSIDE: Literal["outside"] -SEL: Literal["sel"] -SEL_FIRST: Literal["sel.first"] -SEL_LAST: Literal["sel.last"] -END: Literal["end"] -INSERT: Literal["insert"] -CURRENT: Literal["current"] -ANCHOR: Literal["anchor"] -ALL: Literal["all"] -NORMAL: Literal["normal"] -DISABLED: Literal["disabled"] -ACTIVE: Literal["active"] -HIDDEN: Literal["hidden"] -CASCADE: Literal["cascade"] -CHECKBUTTON: Literal["checkbutton"] -COMMAND: Literal["command"] -RADIOBUTTON: Literal["radiobutton"] -SEPARATOR: Literal["separator"] -SINGLE: Literal["single"] -BROWSE: Literal["browse"] -MULTIPLE: Literal["multiple"] -EXTENDED: Literal["extended"] -DOTBOX: Literal["dotbox"] -UNDERLINE: Literal["underline"] -PIESLICE: Literal["pieslice"] -CHORD: Literal["chord"] -ARC: Literal["arc"] -FIRST: Literal["first"] -LAST: Literal["last"] -BUTT: Literal["butt"] -PROJECTING: Literal["projecting"] -ROUND: Literal["round"] -BEVEL: Literal["bevel"] -MITER: Literal["miter"] -MOVETO: Literal["moveto"] -SCROLL: Literal["scroll"] -UNITS: Literal["units"] -PAGES: Literal["pages"] +NO: Final[bool] +YES: Final[bool] +TRUE: Final[bool] +FALSE: Final[bool] +ON: Final[bool] +OFF: 
Final[bool] +N: Final = "n" +S: Final = "s" +W: Final = "w" +E: Final = "e" +NW: Final = "nw" +SW: Final = "sw" +NE: Final = "ne" +SE: Final = "se" +NS: Final = "ns" +EW: Final = "ew" +NSEW: Final = "nsew" +CENTER: Final = "center" +NONE: Final = "none" +X: Final = "x" +Y: Final = "y" +BOTH: Final = "both" +LEFT: Final = "left" +TOP: Final = "top" +RIGHT: Final = "right" +BOTTOM: Final = "bottom" +RAISED: Final = "raised" +SUNKEN: Final = "sunken" +FLAT: Final = "flat" +RIDGE: Final = "ridge" +GROOVE: Final = "groove" +SOLID: Final = "solid" +HORIZONTAL: Final = "horizontal" +VERTICAL: Final = "vertical" +NUMERIC: Final = "numeric" +CHAR: Final = "char" +WORD: Final = "word" +BASELINE: Final = "baseline" +INSIDE: Final = "inside" +OUTSIDE: Final = "outside" +SEL: Final = "sel" +SEL_FIRST: Final = "sel.first" +SEL_LAST: Final = "sel.last" +END: Final = "end" +INSERT: Final = "insert" +CURRENT: Final = "current" +ANCHOR: Final = "anchor" +ALL: Final = "all" +NORMAL: Final = "normal" +DISABLED: Final = "disabled" +ACTIVE: Final = "active" +HIDDEN: Final = "hidden" +CASCADE: Final = "cascade" +CHECKBUTTON: Final = "checkbutton" +COMMAND: Final = "command" +RADIOBUTTON: Final = "radiobutton" +SEPARATOR: Final = "separator" +SINGLE: Final = "single" +BROWSE: Final = "browse" +MULTIPLE: Final = "multiple" +EXTENDED: Final = "extended" +DOTBOX: Final = "dotbox" +UNDERLINE: Final = "underline" +PIESLICE: Final = "pieslice" +CHORD: Final = "chord" +ARC: Final = "arc" +FIRST: Final = "first" +LAST: Final = "last" +BUTT: Final = "butt" +PROJECTING: Final = "projecting" +ROUND: Final = "round" +BEVEL: Final = "bevel" +MITER: Final = "miter" +MOVETO: Final = "moveto" +SCROLL: Final = "scroll" +UNITS: Final = "units" +PAGES: Final = "pages" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/dialog.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/dialog.pyi index f76732a..b7d74c0 100644 --- 
a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/dialog.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/dialog.pyi @@ -2,12 +2,12 @@ import sys from _typeshed import Incomplete from collections.abc import Mapping from tkinter import Widget -from typing import Any +from typing import Any, Final if sys.version_info >= (3, 9): __all__ = ["Dialog"] -DIALOG_ICON: str +DIALOG_ICON: Final = "questhead" class Dialog(Widget): widgetName: str diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/font.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/font.pyi index ef17a3c..3dddd29 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/font.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/font.pyi @@ -1,16 +1,16 @@ import _tkinter import sys import tkinter -from typing import Any, Literal, TypedDict, overload +from typing import Any, Final, Literal, TypedDict, overload from typing_extensions import TypeAlias if sys.version_info >= (3, 9): __all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] -NORMAL: Literal["normal"] -ROMAN: Literal["roman"] -BOLD: Literal["bold"] -ITALIC: Literal["italic"] +NORMAL: Final = "normal" +ROMAN: Final = "roman" +BOLD: Final = "bold" +ITALIC: Final = "italic" _FontDescription: TypeAlias = ( str # "Helvetica 12" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/messagebox.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/messagebox.pyi index 03ef3f6..e4c8a4d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/messagebox.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/messagebox.pyi @@ -1,6 +1,6 @@ import sys from tkinter.commondialog import Dialog -from typing import ClassVar +from typing import ClassVar, Final if 
sys.version_info >= (3, 9): __all__ = [ @@ -14,22 +14,22 @@ if sys.version_info >= (3, 9): "askretrycancel", ] -ERROR: str -INFO: str -QUESTION: str -WARNING: str -ABORTRETRYIGNORE: str -OK: str -OKCANCEL: str -RETRYCANCEL: str -YESNO: str -YESNOCANCEL: str -ABORT: str -RETRY: str -IGNORE: str -CANCEL: str -YES: str -NO: str +ERROR: Final = "error" +INFO: Final = "info" +QUESTION: Final = "question" +WARNING: Final = "warning" +ABORTRETRYIGNORE: Final = "abortretryignore" +OK: Final = "ok" +OKCANCEL: Final = "okcancel" +RETRYCANCEL: Final = "retrycancel" +YESNO: Final = "yesno" +YESNOCANCEL: Final = "yesnocancel" +ABORT: Final = "abort" +RETRY: Final = "retry" +IGNORE: Final = "ignore" +CANCEL: Final = "cancel" +YES: Final = "yes" +NO: Final = "no" class Message(Dialog): """A message box""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/tix.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/tix.pyi index fb939a7..7891364 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/tix.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/tix.pyi @@ -1,174 +1,53 @@ import tkinter from _typeshed import Incomplete -from typing import Any, Literal - -WINDOW: Literal["window"] -TEXT: Literal["text"] -STATUS: Literal["status"] -IMMEDIATE: Literal["immediate"] -IMAGE: Literal["image"] -IMAGETEXT: Literal["imagetext"] -BALLOON: Literal["balloon"] -AUTO: Literal["auto"] -ACROSSTOP: Literal["acrosstop"] - -ASCII: Literal["ascii"] -CELL: Literal["cell"] -COLUMN: Literal["column"] -DECREASING: Literal["decreasing"] -INCREASING: Literal["increasing"] -INTEGER: Literal["integer"] -MAIN: Literal["main"] -MAX: Literal["max"] -REAL: Literal["real"] -ROW: Literal["row"] -S_REGION: Literal["s-region"] -X_REGION: Literal["x-region"] -Y_REGION: Literal["y-region"] +from typing import Any, Final + +WINDOW: Final = "window" +TEXT: Final = "text" +STATUS: Final = "status" 
+IMMEDIATE: Final = "immediate" +IMAGE: Final = "image" +IMAGETEXT: Final = "imagetext" +BALLOON: Final = "balloon" +AUTO: Final = "auto" +ACROSSTOP: Final = "acrosstop" + +ASCII: Final = "ascii" +CELL: Final = "cell" +COLUMN: Final = "column" +DECREASING: Final = "decreasing" +INCREASING: Final = "increasing" +INTEGER: Final = "integer" +MAIN: Final = "main" +MAX: Final = "max" +REAL: Final = "real" +ROW: Final = "row" +S_REGION: Final = "s-region" +X_REGION: Final = "x-region" +Y_REGION: Final = "y-region" # These should be kept in sync with _tkinter constants, except TCL_ALL_EVENTS which doesn't match ALL_EVENTS -TCL_DONT_WAIT: Literal[2] -TCL_WINDOW_EVENTS: Literal[4] -TCL_FILE_EVENTS: Literal[8] -TCL_TIMER_EVENTS: Literal[16] -TCL_IDLE_EVENTS: Literal[32] -TCL_ALL_EVENTS: Literal[0] +TCL_DONT_WAIT: Final = 2 +TCL_WINDOW_EVENTS: Final = 4 +TCL_FILE_EVENTS: Final = 8 +TCL_TIMER_EVENTS: Final = 16 +TCL_IDLE_EVENTS: Final = 32 +TCL_ALL_EVENTS: Final = 0 class tixCommand: - """ - The tix commands provide access to miscellaneous elements - of Tix's internal state and the Tix application context. - Most of the information manipulated by these commands pertains - to the application as a whole, or to a screen or - display, rather than to a particular window. - - This is a mixin class, assumed to be mixed to Tkinter.Tk - that supports the self.tk.call method. - """ - def tix_addbitmapdir(self, directory: str) -> None: - """ - Tix maintains a list of directories under which - the tix_getimage and tix_getbitmap commands will - search for image files. The standard bitmap directory - is $TIX_LIBRARY/bitmaps. The addbitmapdir command - adds directory into this list. By using this - command, the image files of an applications can - also be located using the tix_getimage or tix_getbitmap - command. - """ - ... - def tix_cget(self, option: str) -> Any: - """ - Returns the current value of the configuration - option given by option. 
Option may be any of the - options described in the CONFIGURATION OPTIONS section. - """ - ... - def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: - """ - Query or modify the configuration options of the Tix application - context. If no option is specified, returns a dictionary all of the - available options. If option is specified with no value, then the - command returns a list describing the one named option (this list - will be identical to the corresponding sublist of the value - returned if no option is specified). If one or more option-value - pairs are specified, then the command modifies the given option(s) - to have the given value(s); in this case the command returns an - empty string. Option may be any of the configuration options. - """ - ... - def tix_filedialog(self, dlgclass: str | None = None) -> str: - """ - Returns the file selection dialog that may be shared among - different calls from this application. This command will create a - file selection dialog widget when it is called the first time. This - dialog will be returned by all subsequent calls to tix_filedialog. - An optional dlgclass parameter can be passed to specified what type - of file selection dialog widget is desired. Possible options are - tix FileSelectDialog or tixExFileSelectDialog. - """ - ... - def tix_getbitmap(self, name: str) -> str: - """ - Locates a bitmap file of the name name.xpm or name in one of the - bitmap directories (see the tix_addbitmapdir command above). By - using tix_getbitmap, you can avoid hard coding the pathnames of the - bitmap files in your application. When successful, it returns the - complete pathname of the bitmap file, prefixed with the character - '@'. The returned value can be used to configure the -bitmap - option of the TK and Tix widgets. - """ - ... 
- def tix_getimage(self, name: str) -> str: - """ - Locates an image file of the name name.xpm, name.xbm or name.ppm - in one of the bitmap directories (see the addbitmapdir command - above). If more than one file with the same name (but different - extensions) exist, then the image type is chosen according to the - depth of the X display: xbm images are chosen on monochrome - displays and color images are chosen on color displays. By using - tix_ getimage, you can avoid hard coding the pathnames of the - image files in your application. When successful, this command - returns the name of the newly created image, which can be used to - configure the -image option of the Tk and Tix widgets. - """ - ... - def tix_option_get(self, name: str) -> Any: - """ - Gets the options maintained by the Tix - scheme mechanism. Available options include: - - active_bg active_fg bg - bold_font dark1_bg dark1_fg - dark2_bg dark2_fg disabled_fg - fg fixed_font font - inactive_bg inactive_fg input1_bg - input2_bg italic_font light1_bg - light1_fg light2_bg light2_fg - menu_font output1_bg output2_bg - select_bg select_fg selector - - """ - ... - def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: - """ - Resets the scheme and fontset of the Tix application to - newScheme and newFontSet, respectively. This affects only those - widgets created after this call. Therefore, it is best to call the - resetoptions command before the creation of any widgets in a Tix - application. - - The optional parameter newScmPrio can be given to reset the - priority level of the Tk options set by the Tix schemes. - - Because of the way Tk handles the X option database, after Tix has - been has imported and inited, it is not possible to reset the color - schemes and font sets using the tix config command. Instead, the - tix_resetoptions command must be used. - """ - ... + def tix_addbitmapdir(self, directory: str) -> None: ... 
+ def tix_cget(self, option: str) -> Any: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... + def tix_getbitmap(self, name: str) -> str: ... + def tix_getimage(self, name: str) -> str: ... + def tix_option_get(self, name: str) -> Any: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... class Tk(tkinter.Tk, tixCommand): - """ - Toplevel widget of Tix which represents mostly the main window - of an application. It has an associated Tcl interpreter. - """ def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): - """ - A TixWidget class is used to package all (or most) Tix widgets. - - Widget initialization is extended in two ways: - 1) It is possible to give a list of options which must be part of - the creation command (so called Tix 'static' options). These cannot be - given as a 'config' command later. - 2) It is possible to give the name of an existing TK widget. These are - child widgets created automatically by a Tix mega-widget. The Tk call - to create these widgets is therefore bypassed in TixWidget.__init__ - - Both options are for use by subclasses only. - """ def __init__( self, master: tkinter.Misc | None = None, @@ -178,39 +57,17 @@ class TixWidget(tkinter.Widget): kw: dict[str, Any] = {}, ) -> None: ... def __getattr__(self, name: str): ... - def set_silent(self, value: str) -> None: - """Set a variable without calling its action routine""" - ... - def subwidget(self, name: str) -> tkinter.Widget: - """ - Return the named subwidget (which must have been created by - the sub-class). - """ - ... - def subwidgets_all(self) -> list[tkinter.Widget]: - """Return all subwidgets.""" - ... 
- def config_all(self, option: Any, value: Any) -> None: - """Set configuration options for all subwidgets (and self).""" - ... + def set_silent(self, value: str) -> None: ... + def subwidget(self, name: str) -> tkinter.Widget: ... + def subwidgets_all(self) -> list[tkinter.Widget]: ... + def config_all(self, option: Any, value: Any) -> None: ... def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): - """ - Subwidget class. - - This is used to mirror child widgets automatically created - by Tix/Tk as part of a mega-widget in Python (which is not informed - of this) - """ def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: - """ - DisplayStyle - handle configuration options shared by - (multiple) Display Items - """ def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... @@ -218,48 +75,16 @@ class DisplayStyle: def config(self, cnf: dict[str, Any] = {}, **kw): ... class Balloon(TixWidget): - """ - Balloon help widget. - - Subwidget Class - --------- ----- - label Label - message Message - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: - """ - Bind balloon widget to another. - One balloon widget may be bound to several widgets at the same time - """ - ... + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): - """ - ButtonBox - A container for pushbuttons. 
- Subwidgets are the buttons added with the add method. - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: - """Add a button with given name to box.""" - ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): - """ - ComboBox - an Entry field with a dropdown menu. The user can select a - choice by either typing in the entry subwidget or selecting from the - listbox subwidget. - - Subwidget Class - --------- ----- - entry Entry - arrow Button - slistbox ScrolledListBox - tick Button - cross Button : present if created with the fancy option - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... @@ -267,67 +92,21 @@ class ComboBox(TixWidget): def pick(self, index: int) -> None: ... class Control(TixWidget): - """ - Control - An entry field with value change arrows. The user can - adjust the value by pressing the two arrow buttons or by entering - the value directly into the entry. The new value will be checked - against the user-defined upper and lower limits. - - Subwidget Class - --------- ----- - incr Button - decr Button - entry Entry - label Label - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): - """ - LabelEntry - Entry field with label. Packages an entry widget - and a label into one mega widget. It can be used to simplify the creation - of ``entry-form'' type of interface. 
- - Subwidgets Class - ---------- ----- - label Label - entry Entry - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class LabelFrame(TixWidget): - """ - LabelFrame - Labelled Frame container. Packages a frame widget - and a label into one mega widget. To create widgets inside a - LabelFrame widget, one creates the new widgets relative to the - frame subwidget and manage them inside the frame subwidget. - - Subwidgets Class - ---------- ----- - label Label - frame Frame - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Meter(TixWidget): - """ - The Meter widget can be used to show the progress of a background - job which may take a long time to execute. - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class OptionMenu(TixWidget): - """ - OptionMenu - creates a menu button of options. - - Subwidget Class - --------- ----- - menubutton Menubutton - menu Menu - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... @@ -336,164 +115,52 @@ class OptionMenu(TixWidget): def enable(self, name: str) -> None: ... class PopupMenu(TixWidget): - """ - PopupMenu widget can be used as a replacement of the tk_popup command. - The advantage of the Tix PopupMenu widget is it requires less application - code to manipulate. - - - Subwidgets Class - ---------- ----- - menubutton Menubutton - menu Menu - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... 
class Select(TixWidget): - """ - Select - Container of button subwidgets. It can be used to provide - radio-box or check-box style of selection options for the user. - - Subwidgets are buttons added dynamically using the add method. - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): - """ - DirList - displays a list view of a directory, its previous - directories and its sub-directories. The user can choose one of - the directories displayed in the list or change to another directory. - - Subwidget Class - --------- ----- - hlist HList - hsb Scrollbar - vsb Scrollbar - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirTree(TixWidget): - """ - DirTree - Directory Listing in a hierarchical view. - Displays a tree view of a directory, its previous directories and its - sub-directories. The user can choose one of the directories displayed - in the list or change to another directory. - - Subwidget Class - --------- ----- - hlist HList - hsb Scrollbar - vsb Scrollbar - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirSelectDialog(TixWidget): - """ - The DirSelectDialog widget presents the directories in the file - system in a dialog window. The user can use this dialog window to - navigate through the file system to select the desired directory. 
- - Subwidgets Class - ---------- ----- - dirbox DirSelectDialog - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def popup(self) -> None: ... def popdown(self) -> None: ... class DirSelectBox(TixWidget): - """ - DirSelectBox - Motif style file select box. - It is generally used for - the user to choose a file. FileSelectBox stores the files mostly - recently selected into a ComboBox widget so that they can be quickly - selected again. - - Subwidget Class - --------- ----- - selection ComboBox - filter ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... class ExFileSelectBox(TixWidget): - """ - ExFileSelectBox - MS Windows style file select box. - It provides a convenient method for the user to select files. - - Subwidget Class - --------- ----- - cancel Button - ok Button - hidden Checkbutton - types ComboBox - dir ComboBox - file ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def filter(self) -> None: ... def invoke(self) -> None: ... class FileSelectBox(TixWidget): - """ - ExFileSelectBox - Motif style file select box. - It is generally used for - the user to choose a file. FileSelectBox stores the files mostly - recently selected into a ComboBox widget so that they can be quickly - selected again. - - Subwidget Class - --------- ----- - selection ComboBox - filter ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def apply_filter(self) -> None: ... def invoke(self) -> None: ... class FileEntry(TixWidget): - """ - FileEntry - Entry field with button that invokes a FileSelectDialog. - The user can type in the filename manually. 
Alternatively, the user can - press the button widget that sits next to the entry, which will bring - up a file selection dialog. - - Subwidgets Class - ---------- ----- - button Button - entry Entry - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self) -> None: ... def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): - """ - HList - Hierarchy display widget can be used to display any data - that have a hierarchical structure, for example, file system directory - trees. The list entries are indented and connected by branch lines - according to their places in the hierarchy. - - Subwidgets - None - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... @@ -550,96 +217,24 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): - """ - The CheckList widget - displays a list of items to be selected by the user. CheckList acts - similarly to the Tk checkbutton or radiobutton widgets, except it is - capable of handling many more items than checkbuttons or radiobuttons. - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def autosetmode(self) -> None: - """ - This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. - """ - ... - def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close.""" - ... 
- def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath.""" - ... - def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open.""" - ... - def getselection(self, mode: str = "on") -> tuple[str, ...]: - """ - Returns a list of items whose status matches status. If status is - not specified, the list of items in the "on" status will be returned. - Mode can be on, off, default - """ - ... - def getstatus(self, entrypath: str) -> str: - """Returns the current status of entryPath.""" - ... - def setstatus(self, entrypath: str, mode: str = "on") -> None: - """ - Sets the status of entryPath to be status. A bitmap will be - displayed next to the entry its status is on, off or default. - """ - ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... + def getstatus(self, entrypath: str) -> str: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... class Tree(TixWidget): - """ - Tree - The tixTree widget can be used to display hierarchical - data in a tree form. The user can adjust - the view of the tree by opening or closing parts of the tree. - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def autosetmode(self) -> None: - """ - This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. - """ - ... - def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close.""" - ... 
- def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath.""" - ... - def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open.""" - ... - def setmode(self, entrypath: str, mode: str = "none") -> None: - """ - This command is used to indicate whether the entry given by - entryPath has children entries and whether the children are visible. mode - must be one of open, close or none. If mode is set to open, a (+) - indicator is drawn next the entry. If mode is set to close, a (-) - indicator is drawn next the entry. If mode is set to none, no - indicators will be drawn for this entry. The default mode is none. The - open mode indicates the entry has hidden children and this entry can be - opened by the user. The close mode indicates that all the children of the - entry are now visible and the entry can be closed by the user. - """ - ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... class TList(TixWidget, tkinter.XView, tkinter.YView): - """ - TList - Hierarchy display widget which can be - used to display data in a tabular format. The list entries of a TList - widget are similar to the entries in the Tk listbox widget. The main - differences are (1) the TList widget can display the list entries in a - two dimensional format and (2) you can use graphical images as well as - multiple colors and fonts for the list entries. - - Subwidgets - None - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... @@ -666,17 +261,6 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def selection_set(self, first: int, last: int | None = None) -> None: ... 
class PanedWindow(TixWidget): - """ - PanedWindow - Multi-pane container widget - allows the user to interactively manipulate the sizes of several - panes. The panes can be arranged either vertically or horizontally.The - user changes the sizes of the panes by dragging the resize handle - between two panes. - - Subwidgets Class - ---------- ----- - g/p widgets added dynamically with the add method. - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -686,14 +270,6 @@ class PanedWindow(TixWidget): def panes(self) -> list[tkinter.Widget]: ... class ListNoteBook(TixWidget): - """ - A ListNoteBook widget is very similar to the TixNoteBook widget: - it can be used to display many windows in a limited space using a - notebook metaphor. The notebook is divided into a stack of pages - (windows). At one time only one of these pages can be shown. - The user can navigate through these pages by - choosing the name of the desired page in the hlist subwidget. - """ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def page(self, name: str) -> tkinter.Widget: ... @@ -701,14 +277,6 @@ class ListNoteBook(TixWidget): def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): - """ - NoteBook - Multi-page container widget (tabbed notebook metaphor). - - Subwidgets Class - ---------- ----- - nbframe NoteBookFrame - page widgets added dynamically with the add method - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -718,20 +286,9 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... 
class InputOnly(TixWidget): - """ - InputOnly - Invisible widget. Unix only. - - Subwidgets - None - """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Form: - """ - The Tix Form geometry manager - - Widgets can be arranged by specifying attachments to other widgets. - See Tix documentation for complete details - """ def __setitem__(self, key: str, value: Any) -> None: ... def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/ttk.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/ttk.pyi index c869ed8..533fba7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/ttk.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tkinter/ttk.pyi @@ -1114,7 +1114,9 @@ class Notebook(Widget): sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty padding: _Padding = ..., text: str = ..., - image=..., # Sequence of an image name, followed by zero or more (sequences of one or more state names followed by an image name) + # `image` is a sequence of an image name, followed by zero or more + # (sequences of one or more state names followed by an image name) + image=..., compound: tkinter._Compound = ..., underline: int = ..., ) -> None: @@ -2371,6 +2373,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): To configure the tree column heading, call this with column = "#0" """ ... + # Internal Method. 
Leave untyped: def identify(self, component, x, y): """ Returns a description of the specified component under the diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/trace.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/trace.pyi index d590755..e987bf1 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/trace.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/trace.pyi @@ -51,17 +51,31 @@ class CoverageResults: def update(self, other: CoverageResults) -> None: """Merge in the data from another CoverageResults""" ... - def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: - """ - Write the coverage results. + if sys.version_info >= (3, 13): + def write_results( + self, + show_missing: bool = True, + summary: bool = False, + coverdir: StrPath | None = None, + *, + ignore_missing_files: bool = False, + ) -> None: + """ + Write the coverage results. + + :param show_missing: Show lines that had no hits. + :param summary: Include coverage summary per module. + :param coverdir: If None, the results of each module are placed in its + directory, otherwise it is included in the directory + specified. + :param ignore_missing_files: If True, counts for files that no longer + exist are silently ignored. Otherwise, a missing file + will raise a FileNotFoundError. + """ + ... + else: + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... - :param show_missing: Show lines that had no hits. - :param summary: Include coverage summary per module. - :param coverdir: If None, the results of each module are placed in its - directory, otherwise it is included in the directory - specified. - """ - ... 
def write_results_file( self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None ) -> tuple[int, int]: diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/traceback.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/traceback.pyi index 434139a..87e6db8 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/traceback.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/traceback.pyi @@ -195,13 +195,7 @@ def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file if sys.version_info >= (3, 13): @overload - def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: ... - @overload - def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... - -elif sys.version_info >= (3, 10): - @overload - def format_exception_only(exc: BaseException | None, /) -> list[str]: + def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: """ Format the exception part of a traceback. @@ -212,10 +206,14 @@ elif sys.version_info >= (3, 10): contains several lines that (when printed) display detailed information about where the syntax error occurred. Following the message, the list contains the exception's ``__notes__``. + + When *show_group* is ``True``, and the exception is an instance of + :exc:`BaseExceptionGroup`, the nested exceptions are included as + well, recursively, with indentation relative to their nesting depth. """ ... @overload - def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: + def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: """ Format the exception part of a traceback. 
@@ -226,9 +224,19 @@ elif sys.version_info >= (3, 10): contains several lines that (when printed) display detailed information about where the syntax error occurred. Following the message, the list contains the exception's ``__notes__``. + + When *show_group* is ``True``, and the exception is an instance of + :exc:`BaseExceptionGroup`, the nested exceptions are included as + well, recursively, with indentation relative to their nesting depth. """ ... +elif sys.version_info >= (3, 10): + @overload + def format_exception_only(exc: BaseException | None, /) -> list[str]: ... + @overload + def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: ... + else: def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... @@ -290,7 +298,8 @@ class TracebackException: - :attr:`__suppress_context__` The *__suppress_context__* value from the original exception. - :attr:`stack` A `StackSummary` representing the traceback. - - :attr:`exc_type` The class of the original traceback. + - :attr:`exc_type` (deprecated) The class of the original traceback. + - :attr:`exc_type_str` String display of exc_type - :attr:`filename` For syntax errors - the filename where the error occurred. - :attr:`lineno` For syntax errors - the linenumber where the error @@ -404,9 +413,7 @@ class TracebackException: lookup_lines: bool = True, capture_locals: bool = False, compact: bool = False, - ) -> Self: - """Create a TracebackException from an exception.""" - ... + ) -> Self: ... else: @classmethod def from_exception( @@ -430,25 +437,10 @@ class TracebackException: """ ... else: - def format(self, *, chain: bool = True) -> Generator[str, None, None]: - """ - Format the exception. - - If chain is not *True*, *__cause__* and *__context__* will not be formatted. - - The return value is a generator of strings, each ending in a newline and - some containing internal newlines. 
`print_exception` is a wrapper around - this method which just prints the lines to a file. - - The message indicating which exception occurred is always the last - string in the output. - """ - ... + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... if sys.version_info >= (3, 13): - def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: ... - else: - def format_exception_only(self) -> Generator[str, None, None]: + def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: """ Format the exception part of the traceback. @@ -461,8 +453,14 @@ class TracebackException: display detailed information about where the syntax error occurred. Following the message, generator also yields all the exception's ``__notes__``. + + When *show_group* is ``True``, and the exception is an instance of + :exc:`BaseExceptionGroup`, the nested exceptions are included as + well, recursively, with indentation relative to their nesting depth. """ ... + else: + def format_exception_only(self) -> Generator[str, None, None]: ... if sys.version_info >= (3, 11): def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: @@ -521,18 +519,7 @@ class FrameSummary(Iterable[Any]): lookup_line: bool = True, locals: Mapping[str, str] | None = None, line: str | None = None, - ) -> None: - """ - Construct a FrameSummary. - - :param lookup_line: If True, `linecache` is consulted for the source - code line. Otherwise, the line will be looked up when first needed. - :param locals: If supplied the frame locals, which will be captured as - object representations. - :param line: If provided, use this instead of looking up the line in - the linecache. - """ - ... + ) -> None: ... 
filename: str lineno: int | None name: str diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tracemalloc.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tracemalloc.pyi index a213d7d..e1ffeb3 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tracemalloc.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tracemalloc.pyi @@ -87,15 +87,9 @@ class Frame: """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" ... else: - def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - ... - def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - ... - def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" - ... + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... if sys.version_info >= (3, 9): _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] @@ -147,15 +141,9 @@ class Traceback(Sequence[Frame]): """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" ... else: - def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - ... - def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - ... - def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" - ... 
+ def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... class Snapshot: """Snapshot of traces of memory blocks allocated by Python.""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tty.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tty.pyi index 2ecd12e..12c9f9f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tty.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/tty.pyi @@ -2,7 +2,7 @@ import sys import termios -from typing import IO +from typing import IO, Final from typing_extensions import TypeAlias if sys.platform != "win32": @@ -17,20 +17,24 @@ if sys.platform != "win32": _FD: TypeAlias = int | IO[str] # XXX: Undocumented integer constants - IFLAG: int - OFLAG: int - CFLAG: int - LFLAG: int - ISPEED: int - OSPEED: int - CC: int + IFLAG: Final[int] + OFLAG: Final[int] + CFLAG: Final[int] + LFLAG: Final[int] + ISPEED: Final[int] + OSPEED: Final[int] + CC: Final[int] def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into a raw mode.""" + """Put terminal into raw mode.""" ... def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into a cbreak mode.""" + """Put terminal into cbreak mode.""" ... if sys.version_info >= (3, 12): - def cfmakeraw(mode: termios._Attr) -> None: ... - def cfmakecbreak(mode: termios._Attr) -> None: ... + def cfmakeraw(mode: termios._Attr) -> None: + """Make termios mode raw.""" + ... + def cfmakecbreak(mode: termios._Attr) -> None: + """Make termios mode cbreak.""" + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/turtle.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/turtle.pyi index 221f59b..18a0e8f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/turtle.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/turtle.pyi @@ -178,7 +178,6 @@ __all__ = [ "setheading", "setpos", "setposition", - "settiltangle", "setundobuffer", "setx", "sety", @@ -209,6 +208,9 @@ __all__ = [ if sys.version_info >= (3, 12): __all__ += ["teleport"] +if sys.version_info < (3, 13): + __all__ += ["settiltangle"] + # Note: '_Color' is the alias we use for arguments and _AnyColor is the # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return @@ -1084,7 +1086,7 @@ class TNavigator: >>> reset() >>> turtle.left(60) >>> turtle.forward(100) - >>> print turtle.xcor() + >>> print(turtle.xcor()) 50.0 """ ... @@ -1098,7 +1100,7 @@ class TNavigator: >>> reset() >>> turtle.left(60) >>> turtle.forward(100) - >>> print turtle.ycor() + >>> print(turtle.ycor()) 86.6025403784 """ ... @@ -1967,7 +1969,7 @@ class TPen: Example (for a Turtle instance named turtle): >>> turtle.hideturtle() - >>> print turtle.isvisible(): + >>> print(turtle.isvisible()) False """ ... @@ -2388,29 +2390,10 @@ class RawTurtle(TPen, TNavigator): ((50, -20), (30, 20), (-50, 20), (-30, -20)) """ ... - def settiltangle(self, angle: float) -> None: - """ - Rotate the turtleshape to point in the specified direction - Argument: angle -- number + if sys.version_info < (3, 13): + def settiltangle(self, angle: float) -> None: ... - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). 
- - Deprecated since Python 3.1 - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("circle") - >>> turtle.shapesize(5,2) - >>> turtle.settiltangle(45) - >>> turtle.stamp() - >>> turtle.fd(50) - >>> turtle.settiltangle(-45) - >>> turtle.stamp() - >>> turtle.fd(50) - """ - ... @overload def tiltangle(self, angle: None = None) -> float: """ @@ -2425,9 +2408,6 @@ class RawTurtle(TPen, TNavigator): between the orientation of the turtleshape and the heading of the turtle (its direction of movement). - (Incorrectly marked as deprecated since Python 3.1, it is really - settiltangle that is deprecated.) - Examples (for a Turtle instance named turtle): >>> turtle.shape("circle") >>> turtle.shapesize(5, 2) @@ -2459,9 +2439,6 @@ class RawTurtle(TPen, TNavigator): between the orientation of the turtleshape and the heading of the turtle (its direction of movement). - (Incorrectly marked as deprecated since Python 3.1, it is really - settiltangle that is deprecated.) - Examples (for a Turtle instance named turtle): >>> turtle.shape("circle") >>> turtle.shapesize(5, 2) @@ -3723,7 +3700,7 @@ def xcor() -> float: >>> reset() >>> left(60) >>> forward(100) - >>> print xcor() + >>> print(xcor()) 50.0 """ ... @@ -3737,7 +3714,7 @@ def ycor() -> float: >>> reset() >>> left(60) >>> forward(100) - >>> print ycor() + >>> print(ycor()) 86.6025403784 """ ... @@ -4594,7 +4571,7 @@ def isvisible() -> bool: Example: >>> hideturtle() - >>> print isvisible(): + >>> print(isvisible()) False """ ... @@ -4993,29 +4970,10 @@ def get_shapepoly() -> _PolygonCoords | None: ((50, -20), (30, 20), (-50, 20), (-30, -20)) """ ... -def settiltangle(angle: float) -> None: - """ - Rotate the turtleshape to point in the specified direction - - Argument: angle -- number - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). 
+if sys.version_info < (3, 13): + def settiltangle(angle: float) -> None: ... - Deprecated since Python 3.1 - - Examples: - >>> shape("circle") - >>> shapesize(5,2) - >>> settiltangle(45) - >>> stamp() - >>> fd(50) - >>> settiltangle(-45) - >>> stamp() - >>> fd(50) - """ - ... @overload def tiltangle(angle: None = None) -> float: """ @@ -5030,9 +4988,6 @@ def tiltangle(angle: None = None) -> float: between the orientation of the turtleshape and the heading of the turtle (its direction of movement). - (Incorrectly marked as deprecated since Python 3.1, it is really - settiltangle that is deprecated.) - Examples: >>> shape("circle") >>> shapesize(5, 2) @@ -5064,9 +5019,6 @@ def tiltangle(angle: float) -> None: between the orientation of the turtleshape and the heading of the turtle (its direction of movement). - (Incorrectly marked as deprecated since Python 3.1, it is really - settiltangle that is deprecated.) - Examples: >>> shape("circle") >>> shapesize(5, 2) diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/types.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/types.pyi index e8c6de8..d0ca990 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/types.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/types.pyi @@ -86,6 +86,8 @@ class FunctionType: a tuple that specifies the default argument values closure a tuple that supplies the bindings for free variables + kwdefaults + a dictionary that specifies the default keyword argument values """ @property def __closure__(self) -> tuple[CellType, ...] | None: ... @@ -272,7 +274,7 @@ class CodeType: co_qualname: str = ..., co_linetable: bytes = ..., co_exceptiontable: bytes = ..., - ) -> CodeType: + ) -> Self: """Return a copy of the code object with new values for the specified fields.""" ... 
elif sys.version_info >= (3, 10): @@ -295,9 +297,7 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_linetable: bytes = ..., - ) -> CodeType: - """Return a copy of the code object with new values for the specified fields.""" - ... + ) -> Self: ... else: def replace( self, @@ -318,10 +318,14 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_lnotab: bytes = ..., - ) -> CodeType: ... + ) -> Self: ... + + if sys.version_info >= (3, 13): + __replace__ = replace @final class MappingProxyType(Mapping[_KT, _VT_co]): + """Read-only proxy of a mapping.""" __hash__: ClassVar[None] # type: ignore[assignment] def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ... def __getitem__(self, key: _KT, /) -> _VT_co: @@ -348,6 +352,14 @@ class MappingProxyType(Mapping[_KT, _VT_co]): def items(self) -> ItemsView[_KT, _VT_co]: """D.items() -> a set-like object providing a view on D's items""" ... + @overload + def get(self, key: _KT, /) -> _VT_co | None: + """Return the value for key if key is in the mapping, else default.""" + ... + @overload + def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: + """Return the value for key if key is in the mapping, else default.""" + ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """See PEP 585""" @@ -363,13 +375,13 @@ class MappingProxyType(Mapping[_KT, _VT_co]): ... class SimpleNamespace: - """ - A simple attribute-based namespace. - - SimpleNamespace(**kwargs) - """ + """A simple attribute-based namespace.""" __hash__: ClassVar[None] # type: ignore[assignment] - def __init__(self, **kwargs: Any) -> None: ... + if sys.version_info >= (3, 13): + def __init__(self, mapping_or_iterable: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), /, **kwargs: Any) -> None: ... + else: + def __init__(self, **kwargs: Any) -> None: ... + def __eq__(self, value: object, /) -> bool: """Return self==value.""" ... 
@@ -382,6 +394,10 @@ class SimpleNamespace: def __delattr__(self, name: str, /) -> None: """Implement delattr(self, name).""" ... + if sys.version_info >= (3, 13): + def __replace__(self, **kwargs: Any) -> Self: + """Return a copy of the namespace object with new values for the specified attributes.""" + ... class ModuleType: """ @@ -472,7 +488,9 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): """ ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: ... + def __class_getitem__(cls, item: Any, /) -> Any: + """See PEP 585""" + ... @final class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @@ -576,7 +594,9 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_contra, _ReturnT_co]): """ ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: ... + def __class_getitem__(cls, item: Any, /) -> Any: + """See PEP 585""" + ... @final class MethodType: @@ -698,12 +718,7 @@ class ClassMethodDescriptorType: @final class TracebackType: - """ - TracebackType(tb_next, tb_frame, tb_lasti, tb_lineno) - -- - - Create a new traceback object. - """ + """Create a new traceback object.""" def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... tb_next: TracebackType | None # the rest are read-only @@ -888,12 +903,15 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 10): @final class NoneType: + """The type of the None singleton.""" def __bool__(self) -> Literal[False]: """True if self else False""" ... @final - class EllipsisType: ... + class EllipsisType: + """The type of the Ellipsis singleton.""" + ... from builtins import _NotImplementedType @@ -922,4 +940,15 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 13): @final - class CapsuleType: ... + class CapsuleType: + """ + Capsule objects let you wrap a C "void *" pointer in a Python + object. 
They're a way of passing data through the Python interpreter + without creating your own custom type. + + Capsules are used for communication between extension modules. + They provide a way for an extension module to export a C interface + to other extension modules, so that extension modules can use the + Python import mechanism to link to one another. + """ + ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing.pyi index aed0069..4e80ee2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing.pyi @@ -17,14 +17,13 @@ Among other things, the module includes the following: * Several protocols to support duck-typing: SupportsFloat, SupportsIndex, SupportsAbs, and others. * Special types: NewType, NamedTuple, TypedDict. -* Deprecated wrapper submodules for re and io related types. * Deprecated aliases for builtin types and collections.abc ABCs. Any name not present in __all__ is an implementation detail that may be changed without notice. Use at your own risk! """ -import collections # noqa: F401 # pyright: ignore +import collections # noqa: F401 # pyright: ignore[reportUnusedImport] import sys import typing_extensions from _collections_abc import dict_items, dict_keys, dict_values @@ -43,7 +42,7 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing_extensions import Never as _Never, ParamSpec as _ParamSpec +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated if sys.version_info >= (3, 9): from types import GenericAlias @@ -205,12 +204,18 @@ class TypeVar: class StrOrBytesSequence[A: (str, bytes)]: ... + Type variables can also have defaults: + + class IntDefault[T = int]: + ... 
+ However, if desired, reusable type variables can also be constructed manually, like so:: T = TypeVar('T') # Can be anything S = TypeVar('S', bound=str) # Can be any subtype of str A = TypeVar('A', str, bytes) # Must be exactly str or bytes + D = TypeVar('D', default=int) # Defaults to int Type variables exist primarily for the benefit of static type checkers. They serve as the parameters for generic types as well @@ -327,10 +332,15 @@ if sys.version_info >= (3, 11): def move_first_element_to_last[T, *Ts](tup: tuple[T, *Ts]) -> tuple[*Ts, T]: return (*tup[1:], tup[0]) + Type variables tuples can have default values: + + type AliasWithDefault[*Ts = (str, int)] = tuple[*Ts] + For compatibility with Python 3.11 and earlier, TypeVarTuple objects can also be created as follows:: Ts = TypeVarTuple('Ts') # Can be given any name + DefaultTs = TypeVarTuple('Ts', default=(str, int)) Just as a TypeVar (type variable) is a placeholder for a single type, a TypeVarTuple is a placeholder for an *arbitrary* number of types. For @@ -354,7 +364,9 @@ if sys.version_info >= (3, 11): def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... + def __default__(self) -> Any: + """The default value for this TypeVarTuple.""" + ... def has_default(self) -> bool: ... if sys.version_info >= (3, 13): def __init__(self, name: str, *, default: Any = ...) -> None: ... @@ -425,10 +437,17 @@ if sys.version_info >= (3, 10): type IntFunc[**P] = Callable[P, int] + The following syntax creates a parameter specification that defaults + to a callable accepting two positional-only arguments of types int + and str: + + type IntFuncDefault[**P = (int, str)] = Callable[P, int] + For compatibility with Python 3.11 and earlier, ParamSpec objects can also be created as follows:: P = ParamSpec('P') + DefaultP = ParamSpec('DefaultP', default=(int, str)) Parameter specification variables exist primarily for the benefit of static type checkers. 
They are used to forward the parameter types of @@ -474,7 +493,9 @@ if sys.version_info >= (3, 10): def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... + def __default__(self) -> Any: + """The default value for this ParamSpec.""" + ... if sys.version_info >= (3, 13): def __init__( self, @@ -818,14 +839,10 @@ else: from contextlib import AbstractAsyncContextManager, AbstractContextManager @runtime_checkable - class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): - """A generic version of contextlib.AbstractContextManager.""" - ... + class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ... @runtime_checkable - class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): - """A generic version of contextlib.AbstractAsyncContextManager.""" - ... + class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ... @runtime_checkable class Awaitable(Protocol[_T_co]): @@ -907,9 +924,9 @@ class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): class AsyncGenerator(AsyncIterator[_YieldT_co], Generic[_YieldT_co, _SendT_contra]): """A generic version of collections.abc.AsyncGenerator.""" - def __anext__(self) -> Awaitable[_YieldT_co]: ... + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... @abstractmethod - def asend(self, value: _SendT_contra, /) -> Awaitable[_YieldT_co]: + def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: """ Send a value into the asynchronous generator. Return next yielded value or raise StopAsyncIteration. 
@@ -919,7 +936,7 @@ class AsyncGenerator(AsyncIterator[_YieldT_co], Generic[_YieldT_co, _SendT_contr @abstractmethod def athrow( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> Awaitable[_YieldT_co]: + ) -> Coroutine[Any, Any, _YieldT_co]: """ Raise an exception in the asynchronous generator. Return next yielded value or raise StopAsyncIteration. @@ -927,13 +944,15 @@ class AsyncGenerator(AsyncIterator[_YieldT_co], Generic[_YieldT_co, _SendT_contr ... @overload @abstractmethod - def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> Awaitable[_YieldT_co]: + def athrow( + self, typ: BaseException, val: None = None, tb: TracebackType | None = None, / + ) -> Coroutine[Any, Any, _YieldT_co]: """ Raise an exception in the asynchronous generator. Return next yielded value or raise StopAsyncIteration. """ ... - def aclose(self) -> Awaitable[None]: + def aclose(self) -> Coroutine[Any, Any, None]: """ Raise GeneratorExit inside coroutine. @@ -1336,18 +1355,12 @@ class IO(Iterator[AnyStr]): def writable(self) -> bool: ... @abstractmethod @overload - def write(self: IO[str], s: str, /) -> int: ... - @abstractmethod - @overload def write(self: IO[bytes], s: ReadableBuffer, /) -> int: ... @abstractmethod @overload def write(self, s: AnyStr, /) -> int: ... @abstractmethod @overload - def writelines(self: IO[str], lines: Iterable[str], /) -> None: ... - @abstractmethod - @overload def writelines(self: IO[bytes], lines: Iterable[ReadableBuffer], /) -> None: ... @abstractmethod @overload @@ -1384,7 +1397,8 @@ class TextIO(IO[str]): @abstractmethod def __enter__(self) -> TextIO: ... 
-ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview +if sys.version_info < (3, 14): + ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview # Functions @@ -1404,7 +1418,7 @@ if sys.version_info >= (3, 9): def get_type_hints( obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, - localns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, include_extras: bool = False, ) -> dict[str, Any]: """ @@ -1442,7 +1456,7 @@ if sys.version_info >= (3, 9): else: def get_type_hints( - obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None + obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None ) -> dict[str, Any]: ... def get_args(tp: Any) -> tuple[Any, ...]: @@ -1832,20 +1846,39 @@ class ForwardRef: def __init__(self, arg: str, is_argument: bool = True) -> None: ... if sys.version_info >= (3, 13): + @overload + @deprecated( + "Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, " + "as it leads to incorrect behaviour when evaluating a stringified annotation " + "that references a PEP 695 type parameter. It will be disallowed in Python 3.15." + ) + def _evaluate( + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, *, recursive_guard: frozenset[str] + ) -> Any | None: ... + @overload def _evaluate( self, globalns: dict[str, Any] | None, - localns: dict[str, Any] | None, - type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ..., + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], + *, + recursive_guard: frozenset[str], + ) -> Any | None: ... + elif sys.version_info >= (3, 12): + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: Mapping[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] 
| None = None, *, recursive_guard: frozenset[str], ) -> Any | None: ... elif sys.version_info >= (3, 9): def _evaluate( - self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, recursive_guard: frozenset[str] + self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None, recursive_guard: frozenset[str] ) -> Any | None: ... else: - def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... + def _evaluate(self, globalns: dict[str, Any] | None, localns: Mapping[str, Any] | None) -> Any | None: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -1951,7 +1984,7 @@ if sys.version_info >= (3, 12): # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] - def __getitem__(self, parameters: Any) -> Any: + def __getitem__(self, parameters: Any) -> GenericAlias: """Return self[key].""" ... def __or__(self, right: Any) -> _SpecialForm: @@ -1962,8 +1995,38 @@ if sys.version_info >= (3, 12): ... if sys.version_info >= (3, 13): - def is_protocol(tp: type, /) -> bool: ... - def get_protocol_members(tp: type, /) -> frozenset[str]: ... + def is_protocol(tp: type, /) -> bool: + """ + Return True if the given type is a Protocol. + + Example:: + + >>> from typing import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + ... + def get_protocol_members(tp: type, /) -> frozenset[str]: + """ + Return the set of members defined in a Protocol. + + Example:: + + >>> from typing import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) == frozenset({'a', 'b'}) + True + + Raise a TypeError for arguments that are not Protocols. + """ + ... @final class _NoDefaultType: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing_extensions.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing_extensions.pyi index 1952203..93c1dfb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing_extensions.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/typing_extensions.pyi @@ -204,60 +204,13 @@ class _SpecialForm: # on older versions of Python. Protocol: _SpecialForm -def runtime_checkable(cls: _TC) -> _TC: - """ - Mark a protocol class as a runtime protocol. - - Such protocol can be used with isinstance() and issubclass(). - Raise TypeError if applied to a non-protocol class. - This allows a simple-minded structural check very similar to - one trick ponies in collections.abc such as Iterable. - - For example:: - - @runtime_checkable - class Closable(Protocol): - def close(self): ... - - assert isinstance(open('/some/file'), Closable) - - Warning: this will check only the presence of the required methods, - not their type signatures! - """ - ... +def runtime_checkable(cls: _TC) -> _TC: ... # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable Final: _SpecialForm -def final(f: _F) -> _F: - """ - Decorator to indicate final methods and final classes. - - Use this decorator to indicate to type checkers that the decorated - method cannot be overridden, and decorated class cannot be subclassed. - - For example:: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. The decorator - attempts to set the ``__final__`` attribute to ``True`` on the decorated - object to allow runtime introspection. - """ - ... +def final(f: _F) -> _F: ... 
Literal: _SpecialForm @@ -290,21 +243,13 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def __delitem__(self, k: Never) -> None: ... if sys.version_info >= (3, 9): @overload - def __or__(self, value: Self, /) -> Self: - """Return self|value.""" - ... + def __or__(self, value: Self, /) -> Self: ... @overload - def __or__(self, value: dict[str, Any], /) -> dict[str, object]: - """Return self|value.""" - ... + def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload - def __ror__(self, value: Self, /) -> Self: - """Return value|self.""" - ... + def __ror__(self, value: Self, /) -> Self: ... @overload - def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: - """Return value|self.""" - ... + def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of `__ior__` and `__or__`: def __ior__(self, value: Self, /) -> Self: ... # type: ignore[misc] @@ -316,154 +261,29 @@ OrderedDict = _Alias() def get_type_hints( obj: Callable[..., Any], globalns: dict[str, Any] | None = None, - localns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, include_extras: bool = False, -) -> dict[str, Any]: - """ - Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' - (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. 
- - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - ... -def get_args(tp: Any) -> tuple[Any, ...]: - """ - Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. - - Examples:: - - >>> T = TypeVar('T') - >>> assert get_args(Dict[str, int]) == (str, int) - >>> assert get_args(int) == () - >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) - >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - >>> assert get_args(Callable[[], T][int]) == ([], int) - """ - ... +) -> dict[str, Any]: ... +def get_args(tp: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): @overload - def get_origin(tp: UnionType) -> type[UnionType]: - """ - Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - ... + def get_origin(tp: UnionType) -> type[UnionType]: ... 
if sys.version_info >= (3, 9): @overload - def get_origin(tp: GenericAlias) -> type: - """ - Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - ... + def get_origin(tp: GenericAlias) -> type: ... @overload -def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: - """ - Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - ... +def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload -def get_origin(tp: Any) -> Any | None: - """ - Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. 
- - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - ... +def get_origin(tp: Any) -> Any | None: ... Annotated: _SpecialForm _AnnotatedAlias: Any # undocumented @runtime_checkable class SupportsIndex(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __index__.""" @abc.abstractmethod def __index__(self) -> int: ... @@ -516,65 +336,11 @@ if sys.version_info >= (3, 11): else: Self: _SpecialForm Never: _SpecialForm - def reveal_type(obj: _T, /) -> _T: - """ - Reveal the inferred type of a variable. - - When a static type checker encounters a call to ``reveal_type()``, - it will emit the inferred type of the argument:: - - x: int = 1 - reveal_type(x) - - Running a static type checker (e.g., ``mypy``) on this example - will produce output similar to 'Revealed type is "builtins.int"'. - - At runtime, the function prints the runtime type of the - argument and returns it unchanged. - """ - ... - def assert_never(arg: Never, /) -> Never: - """ - Assert to the type checker that a line of code is unreachable. - - Example:: - - def int_or_str(arg: int | str) -> None: - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - assert_never(arg) - - If a type checker finds that a call to assert_never() is - reachable, it will emit an error. - - At runtime, this throws an exception when called. - """ - ... - def assert_type(val: _T, typ: Any, /) -> _T: - """ - Assert (to the type checker) that the value is of the given type. 
- - When the type checker encounters a call to assert_type(), it - emits an error if the value is not of the specified type:: - - def greet(name: str) -> None: - assert_type(name, str) # ok - assert_type(name, int) # type checker error - - At runtime this returns the first argument unchanged and otherwise - does nothing. - """ - ... - def clear_overloads() -> None: - """Clear all overloads in the registry.""" - ... - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: - """Return all defined overloads for *func* as a sequence.""" - ... + def reveal_type(obj: _T, /) -> _T: ... + def assert_never(arg: Never, /) -> Never: ... + def assert_type(val: _T, typ: Any, /) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... Required: _SpecialForm NotRequired: _SpecialForm @@ -589,123 +355,24 @@ else: frozen_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: object, - ) -> IdentityFunction: - """ - Decorator that marks a function, class, or metaclass as providing - dataclass-like behavior. - - Example: - - from typing_extensions import dataclass_transform - - _T = TypeVar("_T") - - # Used on a decorator function - @dataclass_transform() - def create_model(cls: type[_T]) -> type[_T]: - ... - return cls - - @create_model - class CustomerModel: - id: int - name: str - - # Used on a base class - @dataclass_transform() - class ModelBase: ... - - class CustomerModel(ModelBase): - id: int - name: str - - # Used on a metaclass - @dataclass_transform() - class ModelMeta(type): ... - - class ModelBase(metaclass=ModelMeta): ... - - class CustomerModel(ModelBase): - id: int - name: str - - Each of the ``CustomerModel`` classes defined in this example will now - behave similarly to a dataclass created with the ``@dataclasses.dataclass`` - decorator. For example, the type checker will synthesize an ``__init__`` - method. 
- - The arguments to this decorator can be used to customize this behavior: - - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be - True or False if it is omitted by the caller. - - ``order_default`` indicates whether the ``order`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``kw_only_default`` indicates whether the ``kw_only`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``field_specifiers`` specifies a static list of supported classes - or functions that describe fields, similar to ``dataclasses.field()``. - - At runtime, this decorator records its arguments in the - ``__dataclass_transform__`` attribute on the decorated object. - - See PEP 681 for details. - """ - ... + ) -> IdentityFunction: ... class NamedTuple(tuple[Any, ...]): - """ - Typed version of namedtuple. - - Usage:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has an extra __annotations__ attribute, giving a - dict that maps field names to types. (The field names are also in - the _fields attribute, which is part of the namedtuple API.) - An alternative equivalent functional syntax is also accepted:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ if sys.version_info < (3, 9): _field_types: ClassVar[dict[str, type]] _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] __orig_bases__: ClassVar[tuple[Any, ...]] @overload - def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: - """Initialize self. See help(type(self)) for accurate signature.""" - ... + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... 
@overload - def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: - """Initialize self. See help(type(self)) for accurate signature.""" - ... + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... @classmethod def _make(cls, iterable: Iterable[Any]) -> Self: ... def _asdict(self) -> dict[str, Any]: ... def _replace(self, **kwargs: Any) -> Self: ... class NewType: - """ - NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy callable that simply returns its argument. Usage:: - UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... - UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - num = UserId(5) + 1 # type: int - """ def __init__(self, name: str, tp: Any) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType @@ -718,84 +385,10 @@ if sys.version_info >= (3, 12): from types import get_original_bases as get_original_bases from typing import TypeAliasType as TypeAliasType, override as override else: - def override(arg: _F, /) -> _F: - """ - Indicate that a method is intended to override a method in a base class. - - Usage: - - class Base: - def method(self) -> None: - pass - - class Child(Base): - @override - def method(self) -> None: - super().method() - - When this decorator is applied to a method, the type checker will - validate that it overrides a method with the same name on a base class. - This helps prevent bugs that may occur when a base class is changed - without an equivalent change to a child class. - - There is no runtime checking of these properties. The decorator - sets the ``__override__`` attribute to ``True`` on the decorated object - to allow runtime introspection. - - See PEP 698 for details. - """ - ... 
- def get_original_bases(cls: type, /) -> tuple[Any, ...]: - """ - Return the class's "original" bases prior to modification by `__mro_entries__`. - - Examples:: - - from typing import TypeVar, Generic - from typing_extensions import NamedTuple, TypedDict - - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... - Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) - - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) - """ - ... + def override(arg: _F, /) -> _F: ... + def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... @final class TypeAliasType: - """ - Create named, parameterized type aliases. - - This provides a backport of the new `type` statement in Python 3.12: - - type ListOrSet[T] = list[T] | set[T] - - is equivalent to: - - T = TypeVar("T") - ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) - - The name ListOrSet can then be used as an alias for the type it refers to. - - The type_params argument should contain all the type parameters used - in the value of the type alias. If the alias is not generic, this - argument is omitted. - - Static type checkers should only support type aliases declared using - TypeAliasType that follow these rules: - - - The first argument (the name) must be a string literal. - - The TypeAliasType instance must be immediately assigned to a variable - of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, - as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). - """ def __init__( self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () ) -> None: ... 
@@ -810,6 +403,7 @@ else: # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] + # Returns typing._GenericAlias, which isn't stubbed. def __getitem__(self, parameters: Any) -> Any: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... @@ -817,24 +411,6 @@ else: @runtime_checkable class Buffer(Protocol): - """ - Base class for classes that implement the buffer protocol. - - The buffer protocol allows Python objects to expose a low-level - memory buffer interface. Before Python 3.12, it is not possible - to implement the buffer protocol in pure Python code, or even - to check whether a class implements the buffer protocol. In - Python 3.12 and higher, the ``__buffer__`` method allows access - to the buffer protocol from Python code, and the - ``collections.abc.Buffer`` ABC allows checking whether a class - implements the buffer protocol. - - To indicate support for the buffer protocol in earlier versions, - inherit from this ABC, either in a stub file or at runtime, - or use ABC registration. This ABC provides no methods, because - there is no Python-accessible methods shared by pre-3.12 buffer - classes. It is useful primarily for static checks. - """ # Not actually a Protocol at runtime; see # https://github.com/python/typeshed/issues/10224 for why we're defining it this way def __buffer__(self, flags: int, /) -> memoryview: ... @@ -853,98 +429,16 @@ if sys.version_info >= (3, 13): ) from warnings import deprecated as deprecated else: - def is_protocol(tp: type, /) -> bool: - """ - Return True if the given type is a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False - """ - ... - def get_protocol_members(tp: type, /) -> frozenset[str]: - """ - Return the set of members defined in a Protocol. 
- - Example:: - - >>> from typing_extensions import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) - frozenset({'a', 'b'}) - - Raise a TypeError for arguments that are not Protocols. - """ - ... + def is_protocol(tp: type, /) -> bool: ... + def get_protocol_members(tp: type, /) -> frozenset[str]: ... @final class _NoDefaultType: ... NoDefault: _NoDefaultType @final - class CapsuleType: - """ - Capsule objects let you wrap a C "void *" pointer in a Python - object. They're a way of passing data through the Python interpreter - without creating your own custom type. - - Capsules are used for communication between extension modules. - They provide a way for an extension module to export a C interface - to other extension modules, so that extension modules can use the - Python import mechanism to link to one another. - """ - ... + class CapsuleType: ... class deprecated: - """ - Indicate that a class, function or overload is deprecated. - - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. - - Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... - - The warning specified by *category* will be emitted at runtime - on use of deprecated objects. For functions, that happens on calls; - for classes, on instantiation and on creation of subclasses. - If the *category* is ``None``, no warning is emitted at runtime. - The *stacklevel* determines where the - warning is emitted. If it is ``1`` (the default), the warning - is emitted at the direct caller of the deprecated object; if it - is higher, it is emitted further up the stack. - Static type checker behavior is not affected by the *category* - and *stacklevel* arguments. 
- - The deprecation message passed to the decorator is saved in the - ``__deprecated__`` attribute on the decorated object. - If applied to an overload, the decorator - must be after the ``@overload`` decorator for the attribute to - exist on the overload as returned by ``get_overloads()``. - - See PEP 702 for details. - """ message: LiteralString category: type[Warning] | None stacklevel: int @@ -953,7 +447,6 @@ else: @final class TypeVar: - """Type variable.""" @property def __name__(self) -> str: ... @property @@ -981,18 +474,13 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: - """Return self|value.""" - ... - def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self.""" - ... + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... @final class ParamSpec: - """Parameter specification.""" @property def __name__(self) -> str: ... @property @@ -1021,16 +509,11 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: - """Return self|value.""" - ... - def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self.""" - ... + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... @final class TypeVarTuple: - """Type variable tuple.""" @property def __name__(self) -> str: ... @property @@ -1044,24 +527,15 @@ else: TypeIs: _SpecialForm class Doc: - """ - Define the documentation of a type annotation using ``Annotated``, to be - used in class attributes, function and method parameters, return values, - and variables. 
- - The value should be a positional-only string literal to allow static tools - like editors and documentation generators to use it. - - This complements docstrings. - - The string value passed is available in the attribute ``documentation``. - - Example:: - - >>> from typing_extensions import Annotated, Doc - >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... - """ documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... + + +# PEP 747 (Draft) + +if sys.version_info >= (3, 14): + from typing import TypeForm +else: + TypeForm: _SpecialForm diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unicodedata.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unicodedata.pyi index 89fab8e..0f6cbea 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unicodedata.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unicodedata.pyi @@ -2,10 +2,10 @@ This module provides access to the Unicode Character Database which defines character properties for all Unicode characters. The data in this database is based on the UnicodeData.txt file version -15.0.0 which is publicly available from ftp://ftp.unicode.org/. +15.1.0 which is publicly available from ftp://ftp.unicode.org/. The module uses the same names and symbols as defined by the -UnicodeData File Format 15.0.0. +UnicodeData File Format 15.1.0. 
""" import sys diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/__init__.pyi index afc0ef5..2806e1c 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/__init__.pyi @@ -57,13 +57,7 @@ from .case import ( skipIf as skipIf, skipUnless as skipUnless, ) -from .loader import ( - TestLoader as TestLoader, - defaultTestLoader as defaultTestLoader, - findTestCases as findTestCases, - getTestCaseNames as getTestCaseNames, - makeSuite as makeSuite, -) +from .loader import TestLoader as TestLoader, defaultTestLoader as defaultTestLoader from .main import TestProgram as TestProgram, main as main from .result import TestResult as TestResult from .runner import TextTestResult as TextTestResult, TextTestRunner as TextTestRunner @@ -98,12 +92,14 @@ __all__ = [ "registerResult", "removeResult", "removeHandler", - "getTestCaseNames", - "makeSuite", - "findTestCases", "addModuleCleanup", ] +if sys.version_info < (3, 13): + from .loader import findTestCases as findTestCases, getTestCaseNames as getTestCaseNames, makeSuite as makeSuite + + __all__ += ["getTestCaseNames", "makeSuite", "findTestCases"] + if sys.version_info >= (3, 11): __all__ += ["enterModuleContext", "doModuleCleanups"] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/async_case.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/async_case.pyi index 9dd5e70..463e02a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/async_case.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/async_case.pyi @@ -1,4 +1,5 @@ import sys +from asyncio.events import AbstractEventLoop from collections.abc import Awaitable, Callable from typing import TypeVar from typing_extensions 
import ParamSpec @@ -12,6 +13,9 @@ _T = TypeVar("_T") _P = ParamSpec("_P") class IsolatedAsyncioTestCase(TestCase): + if sys.version_info >= (3, 13): + loop_factory: Callable[[], AbstractEventLoop] | None = None + async def asyncSetUp(self) -> None: ... async def asyncTearDown(self) -> None: ... def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/case.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/case.pyi index 992fb96..0b8cd7a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/case.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/case.pyi @@ -8,7 +8,20 @@ from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Se from contextlib import AbstractContextManager from re import Pattern from types import TracebackType -from typing import Any, AnyStr, ClassVar, Generic, NamedTuple, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload +from typing import ( + Any, + AnyStr, + ClassVar, + Final, + Generic, + NamedTuple, + NoReturn, + Protocol, + SupportsAbs, + SupportsRound, + TypeVar, + overload, +) from typing_extensions import ParamSpec, Self, TypeAlias from warnings import WarningMessage @@ -24,7 +37,7 @@ _E = TypeVar("_E", bound=BaseException) _FT = TypeVar("_FT", bound=Callable[..., Any]) _P = ParamSpec("_P") -DIFF_OMITTED: str +DIFF_OMITTED: Final[str] class _BaseTestCaseContext: test_case: TestCase @@ -892,9 +905,7 @@ class TestCase: assertRaisesRegexp = assertRaisesRegex def assertDictContainsSubset( self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None - ) -> None: - """Checks whether dictionary is a superset of subset.""" - ... + ) -> None: ... 
class FunctionTestCase(TestCase): """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/loader.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/loader.pyi index 6b5f10d..ca5715a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/loader.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/loader.pyi @@ -6,13 +6,13 @@ import unittest.suite from collections.abc import Callable, Sequence from re import Pattern from types import ModuleType -from typing import Any -from typing_extensions import TypeAlias +from typing import Any, Final +from typing_extensions import TypeAlias, deprecated _SortComparisonMethod: TypeAlias = Callable[[str, str], int] _SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite.TestSuite] -VALID_MODULE_NAME: Pattern[str] +VALID_MODULE_NAME: Final[Pattern[str]] class TestLoader: """ @@ -32,9 +32,7 @@ class TestLoader: """Return a suite of all test cases contained in the given module""" ... else: - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in the given module""" - ... + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ... def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: """ @@ -95,18 +93,22 @@ class TestLoader: defaultTestLoader: TestLoader -def getTestCaseNames( - testCaseClass: type[unittest.case.TestCase], - prefix: str, - sortUsing: _SortComparisonMethod = ..., - testNamePatterns: list[str] | None = None, -) -> Sequence[str]: ... -def makeSuite( - testCaseClass: type[unittest.case.TestCase], - prefix: str = "test", - sortUsing: _SortComparisonMethod = ..., - suiteClass: _SuiteClass = ..., -) -> unittest.suite.TestSuite: ... 
-def findTestCases( - module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... -) -> unittest.suite.TestSuite: ... +if sys.version_info < (3, 13): + @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13") + def getTestCaseNames( + testCaseClass: type[unittest.case.TestCase], + prefix: str, + sortUsing: _SortComparisonMethod = ..., + testNamePatterns: list[str] | None = None, + ) -> Sequence[str]: ... + @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13") + def makeSuite( + testCaseClass: type[unittest.case.TestCase], + prefix: str = "test", + sortUsing: _SortComparisonMethod = ..., + suiteClass: _SuiteClass = ..., + ) -> unittest.suite.TestSuite: ... + @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13") + def findTestCases( + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... + ) -> unittest.suite.TestSuite: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/main.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/main.pyi index 67ceac3..e1d3bb9 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/main.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/main.pyi @@ -7,10 +7,11 @@ import unittest.result import unittest.suite from collections.abc import Iterable from types import ModuleType -from typing import Any, Protocol +from typing import Any, Final, Protocol +from typing_extensions import deprecated -MAIN_EXAMPLES: str -MODULE_EXAMPLES: str +MAIN_EXAMPLES: Final[str] +MODULE_EXAMPLES: Final[str] class _TestRunner(Protocol): def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase, /) -> unittest.result.TestResult: ... @@ -67,7 +68,10 @@ class TestProgram: tb_locals: bool = False, ) -> None: ... - def usageExit(self, msg: Any = None) -> None: ... 
+ if sys.version_info < (3, 13): + @deprecated("Deprecated in Python 3.11; removal scheduled for Python 3.13") + def usageExit(self, msg: Any = None) -> None: ... + def parseArgs(self, argv: list[str]) -> None: ... def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... def runTests(self) -> None: ... # undocumented diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/mock.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/mock.pyi index 6e3f801..766b722 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/mock.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/mock.pyi @@ -12,23 +12,44 @@ _F = TypeVar("_F", bound=Callable[..., Any]) _AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) _P = ParamSpec("_P") -__all__ = ( - "Mock", - "MagicMock", - "patch", - "sentinel", - "DEFAULT", - "ANY", - "call", - "create_autospec", - "AsyncMock", - "FILTER_DIR", - "NonCallableMock", - "NonCallableMagicMock", - "mock_open", - "PropertyMock", - "seal", -) +if sys.version_info >= (3, 13): + # ThreadingMock added in 3.13 + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "ThreadingMock", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) +else: + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) if sys.version_info < (3, 9): __version__: Final[str] @@ -155,18 +176,6 @@ class NonCallableMock(Base, Any): def __dir__(self) -> list[str]: """Filter the output of `dir(mock)` to only useful members.""" ... 
- def _calls_repr(self, prefix: str = "Calls") -> str: - """ - Renders self.mock_calls as a string. - - Example: " - Calls: [call(1), call(2)]." - - If self.mock_calls is empty, an empty string is returned. The - output will be truncated if very long. - - """ - ... def assert_called_with(self, *args: Any, **kwargs: Any) -> None: """ assert that the last call was made with the specified arguments. @@ -294,6 +303,21 @@ class NonCallableMock(Base, Any): any custom subclass). """ ... + if sys.version_info >= (3, 13): + def _calls_repr(self) -> str: + """ + Renders self.mock_calls as a string. + + Example: " + Calls: [call(1), call(2)]." + + If self.mock_calls is empty, an empty string is returned. The + output will be truncated if very long. + + """ + ... + else: + def _calls_repr(self, prefix: str = "Calls") -> str: ... class CallableMixin(Base): side_effect: Any @@ -515,7 +539,7 @@ class _patcher: # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], # but that's impossible with the current type system. @overload - def __call__( # type: ignore[overload-overlap] + def __call__( self, target: str, new: _T, @@ -579,7 +603,7 @@ class _patcher: patch: _patcher -class MagicMixin: +class MagicMixin(Base): def __init__(self, *args: Any, **kw: Any) -> None: ... class NonCallableMagicMock(MagicMixin, NonCallableMock): @@ -693,7 +717,7 @@ class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): """See :func:`.Mock.reset_mock()`""" ... -class MagicProxy: +class MagicProxy(Base): name: str parent: Any def __init__(self, name: str, parent: Any) -> None: ... @@ -796,6 +820,47 @@ class PropertyMock(Mock): def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... def __set__(self, obj: Any, val: Any) -> None: ... 
+if sys.version_info >= (3, 13): + class ThreadingMixin(Base): + DEFAULT_TIMEOUT: Final[float | None] = None + + def __init__(self, /, *args: Any, timeout: float | None | _SentinelObject = ..., **kwargs: Any) -> None: ... + # Same as `NonCallableMock.reset_mock.` + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: + """See :func:`.Mock.reset_mock()`""" + ... + def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: + """ + Wait until the mock object is called. + + `timeout` - time to wait for in seconds, waits forever otherwise. + Defaults to the constructor provided timeout. + Use None to block undefinetively. + """ + ... + def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: + """ + Wait until the mock object is called with given args. + + Waits for the timeout in seconds provided in the constructor. + """ + ... + + class ThreadingMock(ThreadingMixin, MagicMixin, Mock): + """ + A mock that can be used to wait until on calls happening + in a different thread. + + The constructor can take a `timeout` argument which + controls the timeout in seconds for all `wait` calls of the mock. + + You can change the default timeout of all instances via the + `ThreadingMock.DEFAULT_TIMEOUT` attribute. + + If no timeout is set, it will block undefinetively. + """ + ... + def seal(mock: Any) -> None: """ Disable the automatic generation of child mocks. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/result.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/result.pyi index 04f8b04..4225b68 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/result.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/result.pyi @@ -4,14 +4,14 @@ import sys import unittest.case from _typeshed import OptExcInfo from collections.abc import Callable -from typing import Any, TextIO, TypeVar +from typing import Any, Final, TextIO, TypeVar from typing_extensions import TypeAlias _F = TypeVar("_F", bound=Callable[..., Any]) _DurationsType: TypeAlias = list[tuple[str, float]] -STDOUT_LINE: str -STDERR_LINE: str +STDOUT_LINE: Final[str] +STDERR_LINE: Final[str] # undocumented def failfast(method: _F) -> _F: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/runner.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/runner.pyi index f186bae..60b7500 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/runner.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/runner.pyi @@ -4,14 +4,33 @@ import sys import unittest.case import unittest.result import unittest.suite -from _typeshed import Incomplete +from _typeshed import SupportsFlush, SupportsWrite from collections.abc import Callable, Iterable -from typing import TextIO -from typing_extensions import TypeAlias +from typing import Any, Generic, Protocol, TypeVar +from typing_extensions import Never, TypeAlias -_ResultClassType: TypeAlias = Callable[[TextIO, bool, int], unittest.result.TestResult] +_ResultClassType: TypeAlias = Callable[[_TextTestStream, bool, int], TextTestResult] -class TextTestResult(unittest.result.TestResult): +class _SupportsWriteAndFlush(SupportsWrite[str], SupportsFlush, Protocol): ... 
+ +# All methods used by unittest.runner.TextTestResult's stream +class _TextTestStream(_SupportsWriteAndFlush, Protocol): + def writeln(self, arg: str | None = None) -> str: ... + +# _WritelnDecorator should have all the same attrs as its stream param. +# But that's not feasible to do Generically +# We can expand the attributes if requested +class _WritelnDecorator(_TextTestStream): + """Used to decorate file-like objects with a handy 'writeln' method""" + def __init__(self, stream: _TextTestStream) -> None: ... + def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__ + # These attributes are prevented by __getattr__ + stream: Never + __getstate__: Never + +_StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator) + +class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): """ A test result class that can print formatted text results to a stream. @@ -22,11 +41,11 @@ class TextTestResult(unittest.result.TestResult): separator1: str separator2: str showAll: bool # undocumented - stream: TextIO # undocumented + stream: _StreamT # undocumented if sys.version_info >= (3, 12): durations: unittest.result._DurationsType | None def __init__( - self, stream: TextIO, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None + self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None ) -> None: """ Construct a TextTestResult. Subclasses should accept **kwargs @@ -34,7 +53,7 @@ class TextTestResult(unittest.result.TestResult): """ ... else: - def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... + def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ... def getDescription(self, test: unittest.case.TestCase) -> str: ... def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... 
@@ -47,9 +66,7 @@ class TextTestRunner: occur, and a summary of the results at the end of the test run. """ resultclass: _ResultClassType - # TODO: add `_WritelnDecorator` type - # stream: _WritelnDecorator - stream: Incomplete + stream: _WritelnDecorator descriptions: bool verbosity: int failfast: bool @@ -61,7 +78,7 @@ class TextTestRunner: durations: unittest.result._DurationsType | None def __init__( self, - stream: TextIO | None = None, + stream: _SupportsWriteAndFlush | None = None, descriptions: bool = True, verbosity: int = 1, failfast: bool = False, @@ -82,7 +99,7 @@ class TextTestRunner: else: def __init__( self, - stream: TextIO | None = None, + stream: _SupportsWriteAndFlush | None = None, descriptions: bool = True, verbosity: int = 1, failfast: bool = False, @@ -91,16 +108,9 @@ class TextTestRunner: warnings: str | None = None, *, tb_locals: bool = False, - ) -> None: - """ - Construct a TextTestRunner. - - Subclasses should accept **kwargs to ensure compatibility as the - interface changes. - """ - ... + ) -> None: ... - def _makeResult(self) -> unittest.result.TestResult: ... - def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: + def _makeResult(self) -> TextTestResult: ... + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: """Run the given test case or test suite.""" ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/util.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/util.pyi index 88a0420..f17a0b7 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/util.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/unittest/util.pyi @@ -1,18 +1,18 @@ """Various utility functions.""" from collections.abc import MutableSequence, Sequence -from typing import Any, TypeVar +from typing import Any, Final, TypeVar from typing_extensions import TypeAlias _T = TypeVar("_T") _Mismatch: TypeAlias = tuple[_T, _T, int] -_MAX_LENGTH: int -_PLACEHOLDER_LEN: int -_MIN_BEGIN_LEN: int -_MIN_END_LEN: int -_MIN_COMMON_LEN: int -_MIN_DIFF_LEN: int +_MAX_LENGTH: Final[int] +_PLACEHOLDER_LEN: Final[int] +_MIN_BEGIN_LEN: Final[int] +_MIN_END_LEN: Final[int] +_MIN_COMMON_LEN: Final[int] +_MIN_DIFF_LEN: Final[int] def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/parse.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/parse.pyi index d7040a2..6eba42d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/parse.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/parse.pyi @@ -584,28 +584,7 @@ else: @overload def urlsplit( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True - ) -> SplitResultBytes: - """ - Parse a URL into 5 components: - :///?# - - The result is a named 5-tuple with fields corresponding to the - above. It is either a SplitResult or SplitResultBytes object, - depending on the type of the url parameter. - - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. 
- - The scheme argument provides the default value of the scheme - component when no scheme is found in url. - - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query. - - Note that % escapes are not expanded. - """ - ... + ) -> SplitResultBytes: ... # Requires an iterable of length 6 @overload diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/request.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/request.pyi index afdd687..6fa1d64 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/request.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/urllib/request.pyi @@ -125,18 +125,6 @@ _DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | if sys.version_info >= (3, 13): def urlopen( url: str | Request, data: _DataType | None = None, timeout: float | None = ..., *, context: ssl.SSLContext | None = None - ) -> _UrlopenRet: ... - -else: - def urlopen( - url: str | Request, - data: _DataType | None = None, - timeout: float | None = ..., - *, - cafile: str | None = None, - capath: str | None = None, - cadefault: bool = False, - context: ssl.SSLContext | None = None, ) -> _UrlopenRet: """ Open the URL url, which can be either a string or a Request object. @@ -156,14 +144,6 @@ else: If *context* is specified, it must be a ssl.SSLContext instance describing the various SSL options. See HTTPSConnection for more details. - The optional *cafile* and *capath* parameters specify a set of trusted CA - certificates for HTTPS requests. cafile should point to a single file - containing a bundle of CA certificates, whereas capath should point to a - directory of hashed certificate files. More information can be found in - ssl.SSLContext.load_verify_locations(). - - The *cadefault* parameter is ignored. 
- This function always returns an object which can work as a context manager and has the properties url, headers, and status. @@ -189,6 +169,18 @@ else: """ ... +else: + def urlopen( + url: str | Request, + data: _DataType | None = None, + timeout: float | None = ..., + *, + cafile: str | None = None, + capath: str | None = None, + cadefault: bool = False, + context: ssl.SSLContext | None = None, + ) -> _UrlopenRet: ... + def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: """ @@ -226,6 +218,16 @@ def getproxies() -> dict[str, str]: or the registry. """ ... +def getproxies_environment() -> dict[str, str]: + """ + Return a dictionary of scheme -> proxy server URL mappings. + + Scan the environment for variables named _proxy; + this seems to be the standard convention. If you need a + different way, you can pass a proxies dictionary to the + [Fancy]URLopener constructor. + """ + ... def parse_http_list(s: str) -> list[str]: """ Parse lists as described by RFC 2068 Section 2. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uu.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uu.pyi index 30afa2f..324053e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uu.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uu.pyi @@ -1,10 +1,3 @@ -""" -Implementation of the UUencode and UUdecode functions. - -encode(in_file, out_file [,name, mode], *, backtick=False) -decode(in_file [, out_file, mode, quiet]) -""" - from typing import BinaryIO from typing_extensions import TypeAlias @@ -16,9 +9,5 @@ class Error(Exception): ... def encode( in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False -) -> None: - """Uuencode file""" - ... 
-def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: - """Decode uuencoded file""" - ... +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uuid.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uuid.pyi index c8b6077..d5b9283 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uuid.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/uuid.pyi @@ -224,9 +224,7 @@ if sys.version_info >= (3, 12): ... else: - def uuid3(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" - ... + def uuid3(namespace: UUID, name: str) -> UUID: ... def uuid4() -> UUID: """Generate a random UUID.""" @@ -238,9 +236,7 @@ if sys.version_info >= (3, 12): ... else: - def uuid5(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" - ... + def uuid5(namespace: UUID, name: str) -> UUID: ... NAMESPACE_DNS: UUID NAMESPACE_URL: UUID diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/venv/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/venv/__init__.pyi index fd88f5b..50224dc 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/venv/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/venv/__init__.pyi @@ -40,6 +40,8 @@ class EnvBuilder: environment :param prompt: Alternative terminal prefix for the environment. :param upgrade_deps: Update the base venv modules to the latest on PyPI + :param scm_ignore_files: Create ignore files for the SCMs specified by the + iterable. 
""" system_site_packages: bool clear: bool @@ -112,7 +114,11 @@ class EnvBuilder: def symlink_or_copy( self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False ) -> None: - """Try symlinking a file, and if that fails, fall back to copying.""" + """ + Try symlinking a file, and if that fails, fall back to copying. + (Unused on Windows, because we can't just copy a failed symlink file: we + switch to a different set of files instead.) + """ ... def setup_python(self, context: SimpleNamespace) -> None: """ @@ -175,7 +181,14 @@ class EnvBuilder: if sys.version_info >= (3, 9): def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... if sys.version_info >= (3, 13): - def create_git_ignore_file(self, context: SimpleNamespace) -> None: ... + def create_git_ignore_file(self, context: SimpleNamespace) -> None: + """ + Create a .gitignore file in the environment directory. + + The contents of the file cause the entire environment directory to be + ignored by git. + """ + ... if sys.version_info >= (3, 13): def create( @@ -188,7 +201,9 @@ if sys.version_info >= (3, 13): upgrade_deps: bool = False, *, scm_ignore_files: Iterable[str] = ..., - ) -> None: ... + ) -> None: + """Create a virtual environment in a directory.""" + ... elif sys.version_info >= (3, 9): def create( @@ -199,9 +214,7 @@ elif sys.version_info >= (3, 9): with_pip: bool = False, prompt: str | None = None, upgrade_deps: bool = False, - ) -> None: - """Create a virtual environment in a directory.""" - ... + ) -> None: ... 
else: def create( diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/warnings.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/warnings.pyi index 3224d4e..f6a9c1f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/warnings.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/warnings.pyi @@ -23,8 +23,10 @@ if sys.version_info >= (3, 13): _T = TypeVar("_T") _W = TypeVar("_W", bound=list[WarningMessage] | None) -_ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] - +if sys.version_info >= (3, 14): + _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] +else: + _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "all", "module", "once"] filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]] # undocumented, do not mutate def showwarning( @@ -175,39 +177,15 @@ class catch_warnings(Generic[_W]): ... else: @overload - def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: - """ - Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. - """ - ... + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None - ) -> None: - """ - Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. - """ - ... + ) -> None: ... 
@overload def __init__( self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = None - ) -> None: - """ - Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. - """ - ... + ) -> None: ... def __enter__(self) -> _W: ... def __exit__( @@ -216,6 +194,47 @@ class catch_warnings(Generic[_W]): if sys.version_info >= (3, 13): class deprecated: + """ + Indicate that a class, function or overload is deprecated. + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. 
+ """ message: LiteralString category: type[Warning] | None stacklevel: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/wave.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/wave.pyi index 54395bb..80d4e59 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/wave.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/wave.pyi @@ -22,9 +22,9 @@ This returns an instance of a class with the following public methods: getparams() -- returns a namedtuple consisting of all of the above in the above order getmarkers() -- returns None (for compatibility with the - aifc module) + old aifc module) getmark(id) -- raises an error since the mark does not - exist (for compatibility with the aifc module) + exist (for compatibility with the old aifc module) readframes(n) -- returns at most n frames of audio rewind() -- rewind to the beginning of the audio stream setpos(pos) -- seek to the specified position @@ -74,7 +74,7 @@ is destroyed. import sys from _typeshed import ReadableBuffer, Unused -from typing import IO, Any, BinaryIO, Literal, NamedTuple, NoReturn, overload +from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias, deprecated if sys.version_info >= (3, 9): @@ -86,7 +86,7 @@ _File: TypeAlias = str | IO[bytes] class Error(Exception): ... 
-WAVE_FORMAT_PCM: Literal[1] +WAVE_FORMAT_PCM: Final = 1 class _wave_params(NamedTuple): """_wave_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/weakref.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/weakref.pyi index 4482fd5..298784d 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/weakref.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/weakref.pyi @@ -8,20 +8,15 @@ https://peps.python.org/pep-0205/ import sys from _typeshed import SupportsKeysAndGetItem -from _weakref import ( - CallableProxyType as CallableProxyType, - ProxyType as ProxyType, - ReferenceType as ReferenceType, - getweakrefcount as getweakrefcount, - getweakrefs as getweakrefs, - proxy as proxy, - ref as ref, -) +from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy from _weakrefset import WeakSet as WeakSet from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping -from typing import Any, Generic, TypeVar, overload +from typing import Any, Generic, TypeVar, final, overload from typing_extensions import ParamSpec, Self +if sys.version_info >= (3, 9): + from types import GenericAlias + __all__ = [ "ref", "proxy", @@ -48,12 +43,55 @@ _P = ParamSpec("_P") ProxyTypes: tuple[type[Any], ...] +# These classes are implemented in C and imported from _weakref at runtime. However, +# they consider themselves to live in the weakref module for sys.version_info >= (3, 11), +# so defining their stubs here means we match their __module__ value. +# Prior to 3.11 they did not declare a module for themselves and ended up looking like they +# came from the builtin module at runtime, which was just wrong, and we won't attempt to +# duplicate that. 
+ +@final +class CallableProxyType(Generic[_CallableT]): # "weakcallableproxy" + def __eq__(self, value: object, /) -> bool: + """Return self==value.""" + ... + def __getattr__(self, attr: str) -> Any: ... + __call__: _CallableT + +@final +class ProxyType(Generic[_T]): # "weakproxy" + def __eq__(self, value: object, /) -> bool: + """Return self==value.""" + ... + def __getattr__(self, attr: str) -> Any: ... + +class ReferenceType(Generic[_T]): # "weakref" + __callback__: Callable[[ReferenceType[_T]], Any] + def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ..., /) -> Self: ... + def __call__(self) -> _T | None: + """Call self as a function.""" + ... + def __eq__(self, value: object, /) -> bool: + """Return self==value.""" + ... + def __hash__(self) -> int: + """Return hash(self).""" + ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585""" + ... + +ref = ReferenceType + +# everything below here is implemented in weakref.py + class WeakMethod(ref[_CallableT]): """ A custom `weakref.ref` subclass which simulates a weak reference to a bound method, working around the lifetime problem of bound methods. """ - def __new__(cls, meth: _CallableT, callback: Callable[[Self], object] | None = None) -> Self: ... + def __new__(cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/webbrowser.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/webbrowser.pyi index 9701a28..237cb70 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/webbrowser.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/webbrowser.pyi @@ -4,6 +4,7 @@ import sys from abc import abstractmethod from collections.abc import Callable, Sequence from typing import Literal +from typing_extensions import deprecated __all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] @@ -26,6 +27,9 @@ def open(url: str, new: int = 0, autoraise: bool = True) -> bool: - 1: a new browser window. - 2: a new browser page ("tab"). If possible, autoraise raises the window (the default) or not. + + If opening the browser succeeds, return True. + If there is a problem, return False. """ ... def open_new(url: str) -> bool: @@ -86,7 +90,6 @@ class Mozilla(UnixBrowser): if sys.version_info < (3, 12): class Galeon(UnixBrowser): - """Launcher class for Galeon/Epiphany browsers.""" raise_opts: list[str] class Grail(BaseBrowser): @@ -116,8 +119,10 @@ if sys.platform == "win32": def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "darwin": - class MacOSX(BaseBrowser): - def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + if sys.version_info < (3, 13): + @deprecated("Deprecated in 3.11, to be removed in 3.13.") + class MacOSX(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... 
class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` if sys.version_info >= (3, 11): diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/winsound.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/winsound.pyi index 86925dd..5b1cb43 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/winsound.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/winsound.pyi @@ -16,25 +16,25 @@ MessageBeep(type) - Call Windows MessageBeep. import sys from _typeshed import ReadableBuffer -from typing import Literal, overload +from typing import Final, Literal, overload if sys.platform == "win32": - SND_APPLICATION: Literal[128] - SND_FILENAME: Literal[131072] - SND_ALIAS: Literal[65536] - SND_LOOP: Literal[8] - SND_MEMORY: Literal[4] - SND_PURGE: Literal[64] - SND_ASYNC: Literal[1] - SND_NODEFAULT: Literal[2] - SND_NOSTOP: Literal[16] - SND_NOWAIT: Literal[8192] + SND_APPLICATION: Final = 128 + SND_FILENAME: Final = 131072 + SND_ALIAS: Final = 65536 + SND_LOOP: Final = 8 + SND_MEMORY: Final = 4 + SND_PURGE: Final = 64 + SND_ASYNC: Final = 1 + SND_NODEFAULT: Final = 2 + SND_NOSTOP: Final = 16 + SND_NOWAIT: Final = 8192 - MB_ICONASTERISK: Literal[64] - MB_ICONEXCLAMATION: Literal[48] - MB_ICONHAND: Literal[16] - MB_ICONQUESTION: Literal[32] - MB_OK: Literal[0] + MB_ICONASTERISK: Final = 64 + MB_ICONEXCLAMATION: Final = 48 + MB_ICONHAND: Final = 16 + MB_ICONQUESTION: Final = 32 + MB_OK: Final = 0 def Beep(frequency: int, duration: int) -> None: ... 
# Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xdrlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xdrlib.pyi index d440fda..78f3ece 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xdrlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xdrlib.pyi @@ -1,9 +1,3 @@ -""" -Implements (a subset of) Sun XDR -- eXternal Data Representation. - -See: RFC 1014 -""" - from collections.abc import Callable, Sequence from typing import TypeVar @@ -12,22 +6,12 @@ __all__ = ["Error", "Packer", "Unpacker", "ConversionError"] _T = TypeVar("_T") class Error(Exception): - """ - Exception class for this module. Use: - - except xdrlib.Error as var: - # var has the Error instance for the exception - - Public ivars: - msg -- contains the message - """ msg: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: - """Pack various data representations into a buffer.""" def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... @@ -49,7 +33,6 @@ class Packer: def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... class Unpacker: - """Unpacks various data representations from the given buffer.""" def __init__(self, data: bytes) -> None: ... def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/__init__.pyi index 2b4d9ce..fa39188 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/__init__.pyi @@ -14,7 +14,7 @@ pulldom -- DOM builder supporting on-demand tree-building for selected subtrees of the document. """ -from typing import Any +from typing import Any, Final from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation @@ -34,22 +34,22 @@ class Node: NOTATION_NODE: int # ExceptionCode -INDEX_SIZE_ERR: int -DOMSTRING_SIZE_ERR: int -HIERARCHY_REQUEST_ERR: int -WRONG_DOCUMENT_ERR: int -INVALID_CHARACTER_ERR: int -NO_DATA_ALLOWED_ERR: int -NO_MODIFICATION_ALLOWED_ERR: int -NOT_FOUND_ERR: int -NOT_SUPPORTED_ERR: int -INUSE_ATTRIBUTE_ERR: int -INVALID_STATE_ERR: int -SYNTAX_ERR: int -INVALID_MODIFICATION_ERR: int -NAMESPACE_ERR: int -INVALID_ACCESS_ERR: int -VALIDATION_ERR: int +INDEX_SIZE_ERR: Final[int] +DOMSTRING_SIZE_ERR: Final[int] +HIERARCHY_REQUEST_ERR: Final[int] +WRONG_DOCUMENT_ERR: Final[int] +INVALID_CHARACTER_ERR: Final[int] +NO_DATA_ALLOWED_ERR: Final[int] +NO_MODIFICATION_ALLOWED_ERR: Final[int] +NOT_FOUND_ERR: Final[int] +NOT_SUPPORTED_ERR: Final[int] +INUSE_ATTRIBUTE_ERR: Final[int] +INVALID_STATE_ERR: Final[int] +SYNTAX_ERR: Final[int] +INVALID_MODIFICATION_ERR: Final[int] +NAMESPACE_ERR: Final[int] +INVALID_ACCESS_ERR: Final[int] +VALIDATION_ERR: Final[int] class DOMException(Exception): """ @@ -84,8 +84,8 @@ class UserDataHandler: NODE_DELETED: int NODE_RENAMED: int -XML_NAMESPACE: str -XMLNS_NAMESPACE: str -XHTML_NAMESPACE: str -EMPTY_NAMESPACE: None -EMPTY_PREFIX: None +XML_NAMESPACE: Final[str] +XMLNS_NAMESPACE: Final[str] +XHTML_NAMESPACE: Final[str] +EMPTY_NAMESPACE: Final[None] 
+EMPTY_PREFIX: Final[None] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/pulldom.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/pulldom.pyi index 262055e..9455401 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/pulldom.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/dom/pulldom.pyi @@ -1,20 +1,20 @@ import sys from _typeshed import Incomplete, SupportsRead from collections.abc import Sequence -from typing import Literal +from typing import Final, Literal from typing_extensions import TypeAlias from xml.dom.minidom import Document, DOMImplementation, Element, Text from xml.sax.handler import ContentHandler from xml.sax.xmlreader import XMLReader -START_ELEMENT: Literal["START_ELEMENT"] -END_ELEMENT: Literal["END_ELEMENT"] -COMMENT: Literal["COMMENT"] -START_DOCUMENT: Literal["START_DOCUMENT"] -END_DOCUMENT: Literal["END_DOCUMENT"] -PROCESSING_INSTRUCTION: Literal["PROCESSING_INSTRUCTION"] -IGNORABLE_WHITESPACE: Literal["IGNORABLE_WHITESPACE"] -CHARACTERS: Literal["CHARACTERS"] +START_ELEMENT: Final = "START_ELEMENT" +END_ELEMENT: Final = "END_ELEMENT" +COMMENT: Final = "COMMENT" +START_DOCUMENT: Final = "START_DOCUMENT" +END_DOCUMENT: Final = "END_DOCUMENT" +PROCESSING_INSTRUCTION: Final = "PROCESSING_INSTRUCTION" +IGNORABLE_WHITESPACE: Final = "IGNORABLE_WHITESPACE" +CHARACTERS: Final = "CHARACTERS" _DocumentFactory: TypeAlias = DOMImplementation | None _Node: TypeAlias = Document | Element | Text diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi index cbba15d..5a15772 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi @@ -1,14 +1,15 @@ import sys 
from _typeshed import FileDescriptorOrPath from collections.abc import Callable +from typing import Final from xml.etree.ElementTree import Element -XINCLUDE: str -XINCLUDE_INCLUDE: str -XINCLUDE_FALLBACK: str +XINCLUDE: Final[str] +XINCLUDE_INCLUDE: Final[str] +XINCLUDE_FALLBACK: Final[str] if sys.version_info >= (3, 9): - DEFAULT_MAX_INCLUSION_DEPTH: int + DEFAULT_MAX_INCLUSION_DEPTH: Final = 6 class FatalIncludeError(SyntaxError): ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi index 1376ae8..00ca55e 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi @@ -37,7 +37,7 @@ import sys from _collections_abc import dict_keys from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence -from typing import Any, Literal, SupportsIndex, TypeVar, overload +from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload from typing_extensions import TypeAlias, TypeGuard, deprecated __all__ = [ @@ -76,7 +76,7 @@ _FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead _FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] _FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] -VERSION: str +VERSION: Final[str] class ParseError(SyntaxError): code: int @@ -550,9 +550,15 @@ def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: Return an ElementTree instance. """ ... 
-def iterparse( - source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None -) -> Iterator[tuple[str, Any]]: + +class _IterParseIterator(Iterator[tuple[str, Any]]): + def __next__(self) -> tuple[str, Any]: ... + if sys.version_info >= (3, 13): + def close(self) -> None: ... + if sys.version_info >= (3, 11): + def __del__(self) -> None: ... + +def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: """ Incrementally parse XML document into ElementTree. diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/parsers/expat/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/parsers/expat/__init__.pyi index 0b05588..c4ec99a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/parsers/expat/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/parsers/expat/__init__.pyi @@ -1,3 +1,9 @@ """Interface to the Expat non-validating XML parser.""" from pyexpat import * + +# This is actually implemented in the C module pyexpat, but considers itself to live here. +class ExpatError(Exception): + code: int + lineno: int + offset: int diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/sax/handler.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/sax/handler.pyi index ad1b318..f2e789f 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/sax/handler.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xml/sax/handler.pyi @@ -113,7 +113,7 @@ class ContentHandler: not guaranteed. """ ... - def endPrefixMapping(self, prefix) -> None: + def endPrefixMapping(self, prefix: str | None) -> None: """ End the scope of a prefix-URI mapping. 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/client.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/client.pyi index e130826..7578778 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/client.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/client.pyi @@ -48,7 +48,7 @@ from collections.abc import Callable, Iterable, Mapping from datetime import datetime from io import BytesIO from types import TracebackType -from typing import Any, Literal, Protocol, overload +from typing import Any, Final, Literal, Protocol, overload from typing_extensions import Self, TypeAlias class _SupportsTimeTuple(Protocol): @@ -76,22 +76,22 @@ _HostType: TypeAlias = tuple[str, dict[str, str]] | str def escape(s: str) -> str: ... # undocumented -MAXINT: int # undocumented -MININT: int # undocumented +MAXINT: Final[int] # undocumented +MININT: Final[int] # undocumented -PARSE_ERROR: int # undocumented -SERVER_ERROR: int # undocumented -APPLICATION_ERROR: int # undocumented -SYSTEM_ERROR: int # undocumented -TRANSPORT_ERROR: int # undocumented +PARSE_ERROR: Final[int] # undocumented +SERVER_ERROR: Final[int] # undocumented +APPLICATION_ERROR: Final[int] # undocumented +SYSTEM_ERROR: Final[int] # undocumented +TRANSPORT_ERROR: Final[int] # undocumented -NOT_WELLFORMED_ERROR: int # undocumented -UNSUPPORTED_ENCODING: int # undocumented -INVALID_ENCODING_CHAR: int # undocumented -INVALID_XMLRPC: int # undocumented -METHOD_NOT_FOUND: int # undocumented -INVALID_METHOD_PARAMS: int # undocumented -INTERNAL_ERROR: int # undocumented +NOT_WELLFORMED_ERROR: Final[int] # undocumented +UNSUPPORTED_ENCODING: Final[int] # undocumented +INVALID_ENCODING_CHAR: Final[int] # undocumented +INVALID_XMLRPC: Final[int] # undocumented +METHOD_NOT_FOUND: Final[int] # undocumented +INVALID_METHOD_PARAMS: Final[int] # undocumented +INTERNAL_ERROR: Final[int] # undocumented class 
Error(Exception): """Base class for client errors.""" @@ -152,7 +152,7 @@ class Binary: def _binary(data: ReadableBuffer) -> Binary: ... # undocumented -WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented +WRAPPERS: Final[tuple[type[DateTime], type[Binary]]] # undocumented class ExpatParser: # undocumented def __init__(self, target: Unmarshaller) -> None: ... @@ -320,7 +320,7 @@ def dumps( """ ... def loads( - data: str, use_datetime: bool = False, use_builtin_types: bool = False + data: str | ReadableBuffer, use_datetime: bool = False, use_builtin_types: bool = False ) -> tuple[tuple[_Marshallable, ...], str | None]: """ data -> unmarshalled data, method name diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/server.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/server.pyi index b595561..b51d890 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/server.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xmlrpc/server.pyi @@ -105,6 +105,7 @@ server.handle_request() import http.server import pydoc import socketserver +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable, Mapping from re import Pattern from typing import Any, ClassVar, Protocol @@ -228,8 +229,8 @@ class SimpleXMLRPCDispatcher: # undocumented ... 
def _marshaled_dispatch( self, - data: str, - dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, + data: str | ReadableBuffer, + dispatch_method: Callable[[str, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, path: Any | None = None, ) -> str: """ diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xxlimited.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xxlimited.pyi index 4217a62..6bae87a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xxlimited.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/xxlimited.pyi @@ -1,5 +1,3 @@ -"""This is a template module just for instruction.""" - import sys from typing import Any, final @@ -7,23 +5,12 @@ class Str(str): ... @final class Xxo: - """A class that explicitly stores attributes in an internal dict""" - def demo(self) -> None: - """demo(o) -> o""" - ... + def demo(self) -> None: ... if sys.version_info >= (3, 11) and sys.platform != "win32": x_exports: int -def foo(i: int, j: int, /) -> Any: - """ - foo(i,j) - - Return the sum of i and j. - """ - ... -def new() -> Xxo: - """new() -> new Xx object""" - ... +def foo(i: int, j: int, /) -> Any: ... +def new() -> Xxo: ... if sys.version_info >= (3, 10): class Error(Exception): ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/__init__.pyi index aa70eee..0217ba2 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/__init__.pyi @@ -11,7 +11,7 @@ from collections.abc import Callable, Iterable, Iterator from io import TextIOWrapper from os import PathLike from types import TracebackType -from typing import IO, Literal, Protocol, overload +from typing import IO, Final, Literal, Protocol, overload from typing_extensions import Self, TypeAlias __all__ = [ @@ -124,6 +124,20 @@ class ZipExtFile(io.BufferedIOBase): class _Writer(Protocol): def write(self, s: str, /) -> object: ... +class _ZipReadable(Protocol): + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def read(self, n: int = -1, /) -> bytes: ... + +class _ZipTellable(Protocol): + def tell(self) -> int: ... + +class _ZipReadableTellable(_ZipReadable, _ZipTellable, Protocol): ... + +class _ZipWritable(Protocol): + def flush(self) -> None: ... + def close(self) -> None: ... + def write(self, b: bytes, /) -> int: ... + class ZipFile: """ Class with methods to open, read, write, close, list zip files. @@ -157,18 +171,37 @@ class ZipFile: compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented pwd: bytes | None # undocumented + # metadata_encoding is new in 3.11 if sys.version_info >= (3, 11): @overload def __init__( self, file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: str | None = None, + ) -> None: + """ + Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', + or append 'a'. + """ + ... 
+ # metadata_encoding is only allowed for read mode + @overload + def __init__( + self, + file: StrPath | _ZipReadable, mode: Literal["r"] = "r", compression: int = 0, allowZip64: bool = True, compresslevel: int | None = None, *, strict_timestamps: bool = True, - metadata_encoding: str | None, + metadata_encoding: str | None = None, ) -> None: """ Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', @@ -178,8 +211,8 @@ class ZipFile: @overload def __init__( self, - file: StrPath | IO[bytes], - mode: _ZipFileMode = "r", + file: StrPath | _ZipWritable, + mode: Literal["w", "x"] = ..., compression: int = 0, allowZip64: bool = True, compresslevel: int | None = None, @@ -192,22 +225,68 @@ class ZipFile: or append 'a'. """ ... - else: + @overload def __init__( self, - file: StrPath | IO[bytes], - mode: _ZipFileMode = "r", + file: StrPath | _ZipReadableTellable, + mode: Literal["a"] = ..., compression: int = 0, allowZip64: bool = True, compresslevel: int | None = None, *, strict_timestamps: bool = True, + metadata_encoding: None = None, ) -> None: """ Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', or append 'a'. """ ... + else: + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipReadable, + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | _ZipWritable, + mode: Literal["w", "x"] = ..., + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... 
+ @overload + def __init__( + self, + file: StrPath | _ZipReadableTellable, + mode: Literal["a"] = ..., + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... def __enter__(self) -> Self: ... def __exit__( @@ -369,6 +448,9 @@ class ZipInfo: compress_size: int file_size: int orig_filename: str # undocumented + if sys.version_info >= (3, 13): + compress_level: int | None + def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ... @classmethod def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: @@ -400,122 +482,18 @@ if sys.version_info >= (3, 12): else: class CompleteDirs(ZipFile): - """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. - """ - def resolve_dir(self, name: str) -> str: - """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ - ... + def resolve_dir(self, name: str) -> str: ... @overload @classmethod - def make(cls, source: ZipFile) -> CompleteDirs: - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - ... + def make(cls, source: ZipFile) -> CompleteDirs: ... @overload @classmethod - def make(cls, source: StrPath | IO[bytes]) -> Self: - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - ... + def make(cls, source: StrPath | IO[bytes]) -> Self: ... class Path: - """ - A pathlib-compatible interface for zip files. - - Consider a zip file with this structure:: - - . 
- ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> root = Path(zf) - - From there, several path operations are available. - - Directory iteration (including the zip file itself): - - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') - - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text() - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. - - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ root: CompleteDirs - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: - """ - Construct a Path from a ZipFile or filename. - - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ - ... + at: str + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... @property def name(self) -> str: ... 
@property @@ -543,21 +521,9 @@ else: write_through: bool = ..., *, pwd: bytes | None = None, - ) -> TextIOWrapper: - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ - ... + ) -> TextIOWrapper: ... @overload - def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ - ... + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... else: def open( self, mode: _ReadWriteBinaryMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False @@ -603,10 +569,10 @@ def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: """ ... -ZIP_STORED: int -ZIP_DEFLATED: int -ZIP64_LIMIT: int -ZIP_FILECOUNT_LIMIT: int -ZIP_MAX_COMMENT: int -ZIP_BZIP2: int -ZIP_LZMA: int +ZIP_STORED: Final[int] +ZIP_DEFLATED: Final[int] +ZIP64_LIMIT: Final[int] +ZIP_FILECOUNT_LIMIT: Final[int] +ZIP_MAX_COMMENT: Final[int] +ZIP_BZIP2: Final[int] +ZIP_LZMA: Final[int] diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/_path.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/_path.pyi index ce2d47a..b3a7889 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/_path.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipfile/_path.pyi @@ -1,14 +1,25 @@ +""" +A Path-like interface for zipfiles. + +This codebase is shared between zipfile.Path in the stdlib +and zipp in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. 
+""" + import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence from io import TextIOWrapper from os import PathLike -from typing import IO, Literal, overload +from typing import IO, Literal, TypeVar, overload from typing_extensions import Self, TypeAlias from zipfile import ZipFile _ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] +_ZF = TypeVar("_ZF", bound=ZipFile) + if sys.version_info >= (3, 12): class InitializedState: """Mix-in to save the initialization state for pickling.""" @@ -48,85 +59,19 @@ if sys.version_info >= (3, 12): appropriate CompleteDirs subclass. """ ... + if sys.version_info >= (3, 13): + @classmethod + def inject(cls, zf: _ZF) -> _ZF: + """ + Given a writable zip file zf, inject directory entries for + any directories implied by the presence of children. + """ + ... class Path: - """ - A pathlib-compatible interface for zip files. - - Consider a zip file with this structure:: - - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> root = Path(zf) - - From there, several path operations are available. 
- - Directory iteration (including the zip file itself): - - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') - - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text(encoding='utf-8') - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. - - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ + "A :class:`importlib.resources.abc.Traversable` interface for zip files.\n\nImplements many of the features users enjoy from\n:class:`pathlib.Path`.\n\nConsider a zip file with this structure::\n\n .\n ├── a.txt\n └── b\n ├── c.txt\n └── d\n └── e.txt\n\n>>> data = io.BytesIO()\n>>> zf = ZipFile(data, 'w')\n>>> zf.writestr('a.txt', 'content of a')\n>>> zf.writestr('b/c.txt', 'content of c')\n>>> zf.writestr('b/d/e.txt', 'content of e')\n>>> zf.filename = 'mem/abcde.zip'\n\nPath accepts the zipfile object itself or a filename\n\n>>> path = Path(zf)\n\nFrom there, several path operations are available.\n\nDirectory iteration (including the zip file itself):\n\n>>> a, b = path.iterdir()\n>>> a\nPath('mem/abcde.zip', 'a.txt')\n>>> b\nPath('mem/abcde.zip', 'b/')\n\nname property:\n\n>>> b.name\n'b'\n\njoin with divide operator:\n\n>>> c = b / 'c.txt'\n>>> c\nPath('mem/abcde.zip', 'b/c.txt')\n>>> c.name\n'c.txt'\n\nRead text:\n\n>>> c.read_text(encoding='utf-8')\n'content of c'\n\nexistence:\n\n>>> c.exists()\nTrue\n>>> (b / 
'missing.txt').exists()\nFalse\n\nCoercion to string:\n\n>>> import os\n>>> str(c).replace(os.sep, posixpath.sep)\n'mem/abcde.zip/b/c.txt'\n\nAt the root, ``name``, ``filename``, and ``parent``\nresolve to the zipfile.\n\n>>> str(path)\n'mem/abcde.zip/'\n>>> path.name\n'abcde.zip'\n>>> path.filename == pathlib.Path('mem/abcde.zip')\nTrue\n>>> str(path.parent)\n'mem'\n\nIf the zipfile has no filename, such \ufeffattributes are not\nvalid and accessing them will raise an Exception.\n\n>>> zf.filename = None\n>>> path.name\nTraceback (most recent call last):\n...\nTypeError: ...\n\n>>> path.filename\nTraceback (most recent call last):\n...\nTypeError: ...\n\n>>> path.parent\nTraceback (most recent call last):\n...\nTypeError: ...\n\n# workaround python/cpython#106763\n>>> pass" root: CompleteDirs + at: str def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: """ Construct a Path from a ZipFile or filename. @@ -210,7 +155,7 @@ if sys.version_info >= (3, 12): def glob(self, pattern: str) -> Iterator[Self]: ... def rglob(self, pattern: str) -> Iterator[Self]: ... def is_symlink(self) -> Literal[False]: - """Return whether this path is a symlink. Always false (python/cpython#82102).""" + """Return whether this path is a symlink.""" ... def relative_to(self, other: Path, *extra: StrPath) -> str: ... def match(self, path_pattern: str) -> bool: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipimport.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipimport.pyi index 74d071b..070b58a 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipimport.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zipimport.pyi @@ -1,12 +1,10 @@ """ zipimport provides support for importing Python modules from Zip archives. -This module exports three objects: +This module exports two objects: - zipimporter: a class; its constructor takes a path to a Zip archive. 
- ZipImportError: exception raised by zipimporter objects. It's a subclass of ImportError, so it can be caught as ImportError, too. -- _zip_directory_cache: a dict, mapping archive paths to zip directory - info dicts, as used in zipimporter._files. It is usually not needed to use the zipimport module explicitly; it is used by the builtin import mechanism for sys.path items that are paths @@ -46,33 +44,8 @@ class zipimporter: def __init__(self, path: StrOrBytesPath) -> None: ... if sys.version_info < (3, 12): - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: - """ - find_loader(fullname, path=None) -> self, str or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, a string containing the - full path name if it's possibly a portion of a namespace package, - or None otherwise. The optional 'path' argument is ignored -- it's - there for compatibility with the importer protocol. - - Deprecated since Python 3.10. Use find_spec() instead. - """ - ... - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: - """ - find_module(fullname, path=None) -> self or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, or None if it wasn't. - The optional 'path' argument is ignored -- it's there for compatibility - with the importer protocol. - - Deprecated since Python 3.10. Use find_spec() instead. - """ - ... + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: """ @@ -99,12 +72,7 @@ class zipimporter: """ ... 
def get_resource_reader(self, fullname: str) -> ResourceReader | None: - """ - Return the ResourceReader for a package in a zip file. - - If 'fullname' is a package within the zip file, return the - 'ResourceReader' object for the package. Otherwise return None. - """ + """Return the ResourceReader for a module in a zip file.""" ... def get_source(self, fullname: str) -> str | None: """ @@ -135,6 +103,12 @@ class zipimporter: """ ... if sys.version_info >= (3, 10): + def exec_module(self, module: ModuleType) -> None: + """Execute the module.""" + ... + def create_module(self, spec: ModuleSpec) -> None: + """Use default semantics for module creation.""" + ... def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: """ Create a ModuleSpec for the specified module. @@ -143,5 +117,5 @@ class zipimporter: """ ... def invalidate_caches(self) -> None: - """Reload the file data of the archive path.""" + """Invalidates the cache of file data of the archive path.""" ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zlib.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zlib.pyi index 6496201..7973beb 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zlib.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zlib.pyi @@ -16,30 +16,30 @@ objects support decompress() and flush(). 
import sys from _typeshed import ReadableBuffer -from typing import Literal +from typing import Final -DEFLATED: Literal[8] +DEFLATED: Final = 8 DEF_MEM_LEVEL: int # can change -DEF_BUF_SIZE: Literal[16384] +DEF_BUF_SIZE: Final = 16384 MAX_WBITS: int ZLIB_VERSION: str # can change ZLIB_RUNTIME_VERSION: str # can change -Z_NO_COMPRESSION: Literal[0] -Z_PARTIAL_FLUSH: Literal[1] -Z_BEST_COMPRESSION: Literal[9] -Z_BEST_SPEED: Literal[1] -Z_BLOCK: Literal[5] -Z_DEFAULT_COMPRESSION: Literal[-1] -Z_DEFAULT_STRATEGY: Literal[0] -Z_FILTERED: Literal[1] -Z_FINISH: Literal[4] -Z_FIXED: Literal[4] -Z_FULL_FLUSH: Literal[3] -Z_HUFFMAN_ONLY: Literal[2] -Z_NO_FLUSH: Literal[0] -Z_RLE: Literal[3] -Z_SYNC_FLUSH: Literal[2] -Z_TREES: Literal[6] +Z_NO_COMPRESSION: Final = 0 +Z_PARTIAL_FLUSH: Final = 1 +Z_BEST_COMPRESSION: Final = 9 +Z_BEST_SPEED: Final = 1 +Z_BLOCK: Final = 5 +Z_DEFAULT_COMPRESSION: Final = -1 +Z_DEFAULT_STRATEGY: Final = 0 +Z_FILTERED: Final = 1 +Z_FINISH: Final = 4 +Z_FIXED: Final = 4 +Z_FULL_FLUSH: Final = 3 +Z_HUFFMAN_ONLY: Final = 2 +Z_NO_FLUSH: Final = 0 +Z_RLE: Final = 3 +Z_SYNC_FLUSH: Final = 2 +Z_TREES: Final = 6 class error(Exception): ... @@ -82,16 +82,7 @@ if sys.version_info >= (3, 11): ... else: - def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: - """ - Returns a bytes object containing compressed data. - - data - Binary data to be compressed. - level - Compression level, in 0-9 or -1. - """ - ... + def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: ... 
def compressobj( level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/__init__.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/__init__.pyi index 689d995..d7e6178 100644 --- a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/__init__.pyi +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/__init__.pyi @@ -1,66 +1,41 @@ -from _typeshed import StrPath -from collections.abc import Iterable, Sequence +import sys +from collections.abc import Iterable from datetime import datetime, timedelta, tzinfo -from typing import Any, Protocol from typing_extensions import Self -__all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] - -class _IOBytes(Protocol): - def read(self, size: int, /) -> bytes: ... - def seek(self, size: int, whence: int = ..., /) -> Any: ... - -class ZoneInfo(tzinfo): - @property - def key(self) -> str: ... - def __init__(self, key: str) -> None: ... - @classmethod - def no_cache(cls, key: str) -> Self: - """Get a new instance of ZoneInfo, bypassing the cache.""" - ... - @classmethod - def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: - """Create a ZoneInfo file from a file object.""" - ... - @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: - """Clear the ZoneInfo cache.""" - ... - def tzname(self, dt: datetime | None, /) -> str | None: - """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime.""" - ... - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the UTC offset in a zone at the given datetime.""" - ... 
- def dst(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime.""" - ... - -# Note: Both here and in clear_cache, the types allow the use of `str` where -# a sequence of strings is required. This should be remedied if a solution -# to this typing bug is found: https://github.com/python/typing/issues/256 -def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: - """Reset global TZPATH.""" - ... -def available_timezones() -> set[str]: - """ - Returns a set containing all available time zones. - - .. caution:: - - This may attempt to open a large number of files, since the best way to - determine if a given file on the time zone search path is to open it - and check for the "magic string" at the beginning. - """ - ... - -TZPATH: tuple[str, ...] - -class ZoneInfoNotFoundError(KeyError): - """Exception raised when a ZoneInfo key is not found.""" - ... -class InvalidTZPathWarning(RuntimeWarning): - """Warning raised if an invalid path is specified in PYTHONTZPATH.""" - ... - -def __dir__() -> list[str]: ... +# TODO: remove this version check +# In theory we shouldn't need this version check. Pyright complains about the imports +# from zoneinfo.* when run on 3.8 and 3.7 without this. Updates to typeshed's +# pyright test script are probably needed, see #11189 +if sys.version_info >= (3, 9): + from zoneinfo._common import ZoneInfoNotFoundError as ZoneInfoNotFoundError, _IOBytes + from zoneinfo._tzpath import ( + TZPATH as TZPATH, + InvalidTZPathWarning as InvalidTZPathWarning, + available_timezones as available_timezones, + reset_tzpath as reset_tzpath, + ) + + __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] + + class ZoneInfo(tzinfo): + @property + def key(self) -> str: ... + def __init__(self, key: str) -> None: ... + @classmethod + def no_cache(cls, key: str) -> Self: ... 
+ @classmethod + def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... + @classmethod + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... + def tzname(self, dt: datetime | None, /) -> str | None: + """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime.""" + ... + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: + """Retrieve a timedelta representing the UTC offset in a zone at the given datetime.""" + ... + def dst(self, dt: datetime | None, /) -> timedelta | None: + """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime.""" + ... + + def __dir__() -> list[str]: ... diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_common.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_common.pyi new file mode 100644 index 0000000..d40d841 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_common.pyi @@ -0,0 +1,15 @@ +import io +from typing import Any, Protocol + +class _IOBytes(Protocol): + def read(self, size: int, /) -> bytes: ... + def seek(self, size: int, whence: int = ..., /) -> Any: ... + +def load_tzdata(key: str) -> io.BufferedReader: ... +def load_data( + fobj: _IOBytes, +) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[str, ...], bytes | None]: ... + +class ZoneInfoNotFoundError(KeyError): + """Exception raised when a ZoneInfo key is not found.""" + ... 
diff --git a/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_tzpath.pyi b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_tzpath.pyi new file mode 100644 index 0000000..b379760 --- /dev/null +++ b/overwrites/node_modules/pyright/dist/typeshed-fallback/stdlib/zoneinfo/_tzpath.pyi @@ -0,0 +1,29 @@ +from _typeshed import StrPath +from collections.abc import Sequence + +# Note: Both here and in clear_cache, the types allow the use of `str` where +# a sequence of strings is required. This should be remedied if a solution +# to this typing bug is found: https://github.com/python/typing/issues/256 +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: + """Reset global TZPATH.""" + ... +def find_tzfile(key: str) -> str | None: + """Retrieve the path to a TZif file from a key.""" + ... +def available_timezones() -> set[str]: + """ + Returns a set containing all available time zones. + + .. caution:: + + This may attempt to open a large number of files, since the best way to + determine if a given file on the time zone search path is to open it + and check for the "magic string" at the beginning. + """ + ... + +TZPATH: tuple[str, ...] + +class InvalidTZPathWarning(RuntimeWarning): + """Warning raised if an invalid path is specified in PYTHONTZPATH.""" + ...