Updated typeshed stubs to the latest version.

Eric Traut 2024-06-26 16:30:53 -07:00
parent 05726120ad
commit bdb8eba712
132 changed files with 1534 additions and 1044 deletions

View File

@ -1 +1 @@
58f2a795bac5924367d21961af53a32af7bb5727 82199768bf2e651804cb718f4b570af6d41333d9

View File

@ -34,6 +34,7 @@ _dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8 _dummy_threading: 3.0-3.8
_heapq: 3.0- _heapq: 3.0-
_imp: 3.0- _imp: 3.0-
_interpchannels: 3.13-
_json: 3.0- _json: 3.0-
_locale: 3.0- _locale: 3.0-
_lsprof: 3.0- _lsprof: 3.0-
@ -65,9 +66,9 @@ array: 3.0-
ast: 3.0- ast: 3.0-
asynchat: 3.0-3.11 asynchat: 3.0-3.11
asyncio: 3.4- asyncio: 3.4-
asyncio.mixins: 3.10-
asyncio.exceptions: 3.8- asyncio.exceptions: 3.8-
asyncio.format_helpers: 3.7- asyncio.format_helpers: 3.7-
asyncio.mixins: 3.10-
asyncio.runners: 3.7- asyncio.runners: 3.7-
asyncio.staggered: 3.8- asyncio.staggered: 3.8-
asyncio.taskgroups: 3.11- asyncio.taskgroups: 3.11-
@ -270,6 +271,7 @@ threading: 3.0-
time: 3.0- time: 3.0-
timeit: 3.0- timeit: 3.0-
tkinter: 3.0- tkinter: 3.0-
tkinter.tix: 3.0-3.12
token: 3.0- token: 3.0-
tokenize: 3.0- tokenize: 3.0-
tomllib: 3.11- tomllib: 3.11-

View File

@ -11,7 +11,7 @@ if sys.version_info >= (3, 13):
PyCF_OPTIMIZED_AST: Literal[33792] PyCF_OPTIMIZED_AST: Literal[33792]
# Used for node end positions in constructor keyword arguments # Used for node end positions in constructor keyword arguments
_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None)  # noqa: Y023
_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None)
# Alias used for fields that must always be valid identifiers # Alias used for fields that must always be valid identifiers
# A string `x` counts as a valid identifier if both the following are True # A string `x` counts as a valid identifier if both the following are True

View File

@ -63,8 +63,7 @@ A_COLOR: int
A_DIM: int A_DIM: int
A_HORIZONTAL: int A_HORIZONTAL: int
A_INVIS: int A_INVIS: int
if sys.platform != "darwin":
    A_ITALIC: int
A_ITALIC: int
A_LEFT: int A_LEFT: int
A_LOW: int A_LOW: int
A_NORMAL: int A_NORMAL: int

View File

@ -0,0 +1,84 @@
from _typeshed import structseq
from typing import Final, Literal, SupportsIndex, final
from typing_extensions import Buffer, Self
class ChannelError(RuntimeError): ...
class ChannelClosedError(ChannelError): ...
class ChannelEmptyError(ChannelError): ...
class ChannelNotEmptyError(ChannelError): ...
class ChannelNotFoundError(ChannelError): ...
# Mark as final, since instantiating ChannelID is not supported.
@final
class ChannelID:
@property
def end(self) -> Literal["send", "recv", "both"]: ...
@property
def send(self) -> Self: ...
@property
def recv(self) -> Self: ...
def __eq__(self, other: object) -> bool: ...
def __ge__(self, other: ChannelID) -> bool: ...
def __gt__(self, other: ChannelID) -> bool: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __int__(self) -> int: ...
def __le__(self, other: ChannelID) -> bool: ...
def __lt__(self, other: ChannelID) -> bool: ...
def __ne__(self, other: object) -> bool: ...
@final
class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]):
__match_args__: Final = (
"open",
"closing",
"closed",
"count",
"num_interp_send",
"num_interp_send_released",
"num_interp_recv",
"num_interp_recv_released",
)
@property
def open(self) -> bool: ...
@property
def closing(self) -> bool: ...
@property
def closed(self) -> bool: ...
@property
def count(self) -> int: ... # type: ignore[override]
@property
def num_interp_send(self) -> int: ...
@property
def num_interp_send_released(self) -> int: ...
@property
def num_interp_recv(self) -> int: ...
@property
def num_interp_recv_released(self) -> int: ...
@property
def num_interp_both(self) -> int: ...
@property
def num_interp_both_recv_released(self) -> int: ...
@property
def num_interp_both_send_released(self) -> int: ...
@property
def num_interp_both_released(self) -> int: ...
@property
def recv_associated(self) -> bool: ...
@property
def recv_released(self) -> bool: ...
@property
def send_associated(self) -> bool: ...
@property
def send_released(self) -> bool: ...
def create() -> ChannelID: ...
def destroy(cid: SupportsIndex) -> None: ...
def list_all() -> list[ChannelID]: ...
def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
def recv(cid: SupportsIndex, default: object = ...) -> object: ...
def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
def get_info(cid: SupportsIndex) -> ChannelInfo: ...
def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...
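For reference, a minimal sketch of how the channel primitives stubbed above fit together. This is based only on the stubbed signatures; _interpchannels is a private CPython 3.13+ module, so availability and exact runtime behaviour are assumptions rather than documented API.

import sys

if sys.version_info >= (3, 13):
    import _interpchannels as channels  # private module; may change without notice

    cid = channels.create()                     # returns a ChannelID
    channels.send(cid, "hello", blocking=True)  # only shareable objects (str, bytes, int, ...) can be sent
    info = channels.get_info(cid)               # ChannelInfo structseq
    print(info.open, info.count)                # e.g. True 1
    print(channels.recv(cid))                   # 'hello'
    channels.destroy(cid)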

View File

@ -1,5 +1,7 @@
import sys import sys
from collections.abc import Callable
from typing import Any, ClassVar, Literal, final from typing import Any, ClassVar, Literal, final
from typing_extensions import TypeAlias
# _tkinter is meant to be only used internally by tkinter, but some tkinter # _tkinter is meant to be only used internally by tkinter, but some tkinter
# functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl # functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl
@ -30,6 +32,8 @@ class Tcl_Obj:
class TclError(Exception): ... class TclError(Exception): ...
_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object]
# This class allows running Tcl code. Tkinter uses it internally a lot, and # This class allows running Tcl code. Tkinter uses it internally a lot, and
# it's often handy to drop a piece of Tcl code into a tkinter program. Example: # it's often handy to drop a piece of Tcl code into a tkinter program. Example:
# #
@ -86,6 +90,9 @@ class TkappType:
def unsetvar(self, *args, **kwargs): ... def unsetvar(self, *args, **kwargs): ...
def wantobjects(self, *args, **kwargs): ... def wantobjects(self, *args, **kwargs): ...
def willdispatch(self): ... def willdispatch(self): ...
if sys.version_info >= (3, 12):
def gettrace(self, /) -> _TkinterTraceFunc | None: ...
def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...
# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
ALL_EVENTS: Literal[-3] ALL_EVENTS: Literal[-3]
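A hedged sketch of the new trace hooks (3.12+ per the stub): the callback receives each Tcl command invocation as a tuple of strings. Reaching the Tkapp object via root.tk and the need for an available display are assumptions, not something shown in this diff.

import sys
import tkinter

def trace(cmd: tuple[str, ...]) -> None:
    print("tcl:", " ".join(cmd))

if sys.version_info >= (3, 12):
    root = tkinter.Tk()          # requires a display
    root.tk.settrace(trace)      # every Tcl call is reported to the callback
    assert root.tk.gettrace() is trace
    root.tk.settrace(None)       # uninstall the trace
    root.destroy()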

View File

@ -21,8 +21,9 @@ class ProxyType(Generic[_T]): # "weakproxy"
def __getattr__(self, attr: str) -> Any: ... def __getattr__(self, attr: str) -> Any: ...
class ReferenceType(Generic[_T]): class ReferenceType(Generic[_T]):
__callback__: Callable[[ReferenceType[_T]], Any] __callback__: Callable[[Self], Any]
def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ..., /) -> Self: ... def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ...
def __init__(self, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> None: ...
def __call__(self) -> _T | None: ... def __call__(self) -> _T | None: ...
def __eq__(self, value: object, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ...
def __hash__(self) -> int: ... def __hash__(self) -> int: ...
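The callback parameter is narrowed from ReferenceType[_T] to Self; a small example of the contract the annotation describes, using plain stdlib weakref:

import weakref

class Resource:
    pass

def on_collect(ref: "weakref.ReferenceType[Resource]") -> None:
    # The callback is handed the (now dead) reference object itself.
    print("collected", ref)

obj = Resource()
r = weakref.ref(obj, on_collect)
del obj       # the referent goes away, so on_collect(r) runs
print(r())    # None once the referent has been collected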

View File

@ -32,6 +32,7 @@ _T = TypeVar("_T")
_ActionT = TypeVar("_ActionT", bound=Action) _ActionT = TypeVar("_ActionT", bound=Action)
_ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) _ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser)
_N = TypeVar("_N") _N = TypeVar("_N")
_ActionType: TypeAlias = Callable[[str], Any] | FileType | str
# more precisely, Literal["store", "store_const", "store_true", # more precisely, Literal["store", "store_const", "store_true",
# "store_false", "append", "append_const", "count", "help", "version", # "store_false", "append", "append_const", "count", "help", "version",
# "extend"], but using this would make it hard to annotate callers # "extend"], but using this would make it hard to annotate callers
@ -89,7 +90,7 @@ class _ActionsContainer:
nargs: int | _NArgsStr | _SUPPRESS_T | None = None, nargs: int | _NArgsStr | _SUPPRESS_T | None = None,
const: Any = ..., const: Any = ...,
default: Any = ..., default: Any = ...,
type: Callable[[str], _T] | FileType = ..., type: _ActionType = ...,
choices: Iterable[_T] | None = ..., choices: Iterable[_T] | None = ...,
required: bool = ..., required: bool = ...,
help: str | None = ..., help: str | None = ...,
@ -313,7 +314,7 @@ class Action(_AttributeHolder):
nargs: int | str | None nargs: int | str | None
const: Any const: Any
default: Any default: Any
type: Callable[[str], Any] | FileType | None type: _ActionType | None
choices: Iterable[Any] | None choices: Iterable[Any] | None
required: bool required: bool
help: str | None help: str | None
@ -699,6 +700,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]):
add_help: bool = ..., add_help: bool = ...,
allow_abbrev: bool = ..., allow_abbrev: bool = ...,
exit_on_error: bool = ..., exit_on_error: bool = ...,
**kwargs: Any, # Accepting any additional kwargs for custom parser classes
) -> _ArgumentParserT: ... ) -> _ArgumentParserT: ...
elif sys.version_info >= (3, 9): elif sys.version_info >= (3, 9):
def add_parser( def add_parser(
@ -721,6 +723,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]):
add_help: bool = ..., add_help: bool = ...,
allow_abbrev: bool = ..., allow_abbrev: bool = ...,
exit_on_error: bool = ..., exit_on_error: bool = ...,
**kwargs: Any, # Accepting any additional kwargs for custom parser classes
) -> _ArgumentParserT: ... ) -> _ArgumentParserT: ...
else: else:
def add_parser( def add_parser(
@ -742,6 +745,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]):
conflict_handler: str = ..., conflict_handler: str = ...,
add_help: bool = ..., add_help: bool = ...,
allow_abbrev: bool = ..., allow_abbrev: bool = ...,
**kwargs: Any, # Accepting any additional kwargs for custom parser classes
) -> _ArgumentParserT: ... ) -> _ArgumentParserT: ...
def _get_subactions(self) -> list[Action]: ... def _get_subactions(self) -> list[Action]: ...
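The new **kwargs on add_parser() (forwarded to the parser class) and the _ActionType alias for type= can be exercised as below; MyParser and its banner keyword are made-up illustrations, not part of argparse.

import argparse

class MyParser(argparse.ArgumentParser):
    def __init__(self, *args, banner: str = "", **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.banner = banner

parser = argparse.ArgumentParser()
sub = parser.add_subparsers(parser_class=MyParser)
# Extra keyword arguments are forwarded to the parser class, which is why
# the stub now accepts **kwargs on add_parser().
run = sub.add_parser("run", banner="hi")
run.add_argument("count", type=int)     # type= is a Callable[[str], Any]
args = parser.parse_args(["run", "3"])
print(args.count)                       # 3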

View File

@ -16,23 +16,40 @@ from .tasks import Task
from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport
from .unix_events import AbstractChildWatcher from .unix_events import AbstractChildWatcher
__all__ = ( if sys.version_info >= (3, 14):
"AbstractEventLoopPolicy", __all__ = (
"AbstractEventLoop", "AbstractEventLoopPolicy",
"AbstractServer", "AbstractEventLoop",
"Handle", "AbstractServer",
"TimerHandle", "Handle",
"get_event_loop_policy", "TimerHandle",
"set_event_loop_policy", "get_event_loop_policy",
"get_event_loop", "set_event_loop_policy",
"set_event_loop", "get_event_loop",
"new_event_loop", "set_event_loop",
"get_child_watcher", "new_event_loop",
"set_child_watcher", "_set_running_loop",
"_set_running_loop", "get_running_loop",
"get_running_loop", "_get_running_loop",
"_get_running_loop", )
) else:
__all__ = (
"AbstractEventLoopPolicy",
"AbstractEventLoop",
"AbstractServer",
"Handle",
"TimerHandle",
"get_event_loop_policy",
"set_event_loop_policy",
"get_event_loop",
"set_event_loop",
"new_event_loop",
"get_child_watcher",
"set_child_watcher",
"_set_running_loop",
"get_running_loop",
"_get_running_loop",
)
_T = TypeVar("_T") _T = TypeVar("_T")
_Ts = TypeVarTuple("_Ts") _Ts = TypeVarTuple("_Ts")
@ -541,18 +558,19 @@ class AbstractEventLoopPolicy:
@abstractmethod @abstractmethod
def new_event_loop(self) -> AbstractEventLoop: ... def new_event_loop(self) -> AbstractEventLoop: ...
# Child processes handling (Unix only). # Child processes handling (Unix only).
if sys.version_info >= (3, 12): if sys.version_info < (3, 14):
@abstractmethod if sys.version_info >= (3, 12):
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") @abstractmethod
def get_child_watcher(self) -> AbstractChildWatcher: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
@abstractmethod def get_child_watcher(self) -> AbstractChildWatcher: ...
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") @abstractmethod
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
else: def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ...
@abstractmethod else:
def get_child_watcher(self) -> AbstractChildWatcher: ... @abstractmethod
@abstractmethod def get_child_watcher(self) -> AbstractChildWatcher: ...
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... @abstractmethod
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ...
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta):
def get_event_loop(self) -> AbstractEventLoop: ... def get_event_loop(self) -> AbstractEventLoop: ...
@ -565,15 +583,16 @@ def get_event_loop() -> AbstractEventLoop: ...
def set_event_loop(loop: AbstractEventLoop | None) -> None: ... def set_event_loop(loop: AbstractEventLoop | None) -> None: ...
def new_event_loop() -> AbstractEventLoop: ... def new_event_loop() -> AbstractEventLoop: ...
if sys.version_info >= (3, 12): if sys.version_info < (3, 14):
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") if sys.version_info >= (3, 12):
def get_child_watcher() -> AbstractChildWatcher: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") def get_child_watcher() -> AbstractChildWatcher: ...
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
else: else:
def get_child_watcher() -> AbstractChildWatcher: ... def get_child_watcher() -> AbstractChildWatcher: ...
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: ... def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: ...
def _get_running_loop() -> AbstractEventLoop: ... def _get_running_loop() -> AbstractEventLoop: ...

View File

@ -70,7 +70,10 @@ _T4 = TypeVar("_T4")
_T5 = TypeVar("_T5") _T5 = TypeVar("_T5")
_T6 = TypeVar("_T6") _T6 = TypeVar("_T6")
_FT = TypeVar("_FT", bound=Future[Any]) _FT = TypeVar("_FT", bound=Future[Any])
_FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T]
if sys.version_info >= (3, 12):
    _FutureLike: TypeAlias = Future[_T] | Awaitable[_T]
else:
    _FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T]
_TaskYieldType: TypeAlias = Future[object] | None _TaskYieldType: TypeAlias = Future[object] | None
FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED
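The narrowed alias appears to reflect that 3.12 dropped support for generator-based coroutines; plain coroutines and Futures still work, e.g.:

import asyncio

async def answer() -> int:
    return 42

async def main() -> None:
    fut: asyncio.Future[int] = asyncio.get_running_loop().create_future()
    fut.set_result(1)
    results = await asyncio.gather(fut, answer())
    print(results)   # [1, 42]

asyncio.run(main())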

View File

@ -13,51 +13,54 @@ _Ts = TypeVarTuple("_Ts")
# This is also technically not available on Win, # This is also technically not available on Win,
# but other parts of typeshed need this definition. # but other parts of typeshed need this definition.
# So, it is special cased. # So, it is special cased.
if sys.version_info >= (3, 12): if sys.version_info < (3, 14):
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") if sys.version_info >= (3, 12):
class AbstractChildWatcher: @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
@abstractmethod class AbstractChildWatcher:
def add_child_handler( @abstractmethod
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] def add_child_handler(
) -> None: ... self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
@abstractmethod ) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ... @abstractmethod
@abstractmethod def remove_child_handler(self, pid: int) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... @abstractmethod
@abstractmethod def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
def close(self) -> None: ... @abstractmethod
@abstractmethod def close(self) -> None: ...
def __enter__(self) -> Self: ... @abstractmethod
@abstractmethod def __enter__(self) -> Self: ...
def __exit__( @abstractmethod
self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None def __exit__(
) -> None: ... self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
@abstractmethod ) -> None: ...
def is_active(self) -> bool: ... @abstractmethod
def is_active(self) -> bool: ...
else: else:
class AbstractChildWatcher: class AbstractChildWatcher:
@abstractmethod @abstractmethod
def add_child_handler( def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ... ) -> None: ...
@abstractmethod @abstractmethod
def remove_child_handler(self, pid: int) -> bool: ... def remove_child_handler(self, pid: int) -> bool: ...
@abstractmethod @abstractmethod
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
@abstractmethod @abstractmethod
def close(self) -> None: ... def close(self) -> None: ...
@abstractmethod @abstractmethod
def __enter__(self) -> Self: ... def __enter__(self) -> Self: ...
@abstractmethod @abstractmethod
def __exit__( def __exit__(
self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
) -> None: ... ) -> None: ...
@abstractmethod @abstractmethod
def is_active(self) -> bool: ... def is_active(self) -> bool: ...
if sys.platform != "win32": if sys.platform != "win32":
if sys.version_info >= (3, 9): if sys.version_info >= (3, 14):
__all__ = ("SelectorEventLoop", "DefaultEventLoopPolicy")
elif sys.version_info >= (3, 9):
__all__ = ( __all__ = (
"SelectorEventLoop", "SelectorEventLoop",
"AbstractChildWatcher", "AbstractChildWatcher",
@ -79,118 +82,137 @@ if sys.platform != "win32":
"DefaultEventLoopPolicy", "DefaultEventLoopPolicy",
) )
# Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. if sys.version_info < (3, 14):
# See discussion in #7412 if sys.version_info >= (3, 12):
class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub.
def close(self) -> None: ... # See discussion in #7412
def is_active(self) -> bool: ... class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta):
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... def close(self) -> None: ...
def is_active(self) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
if sys.version_info >= (3, 12): @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") class SafeChildWatcher(BaseChildWatcher):
class SafeChildWatcher(BaseChildWatcher): def __enter__(self) -> Self: ...
def __enter__(self) -> Self: ... def __exit__(
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None
def add_child_handler( ) -> None: ...
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] def add_child_handler(
) -> None: ... self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
def remove_child_handler(self, pid: int) -> bool: ... ) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
class FastChildWatcher(BaseChildWatcher): class FastChildWatcher(BaseChildWatcher):
def __enter__(self) -> Self: ... def __enter__(self) -> Self: ...
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def __exit__(
def add_child_handler( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: ...
) -> None: ... def add_child_handler(
def remove_child_handler(self, pid: int) -> bool: ... self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
else: else:
class SafeChildWatcher(BaseChildWatcher): # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub.
def __enter__(self) -> Self: ... # See discussion in #7412
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta):
def add_child_handler( def close(self) -> None: ...
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] def is_active(self) -> bool: ...
) -> None: ... def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
class FastChildWatcher(BaseChildWatcher): class SafeChildWatcher(BaseChildWatcher):
def __enter__(self) -> Self: ... def __enter__(self) -> Self: ...
def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def __exit__(
def add_child_handler( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: ...
) -> None: ... def add_child_handler(
def remove_child_handler(self, pid: int) -> bool: ... self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
class FastChildWatcher(BaseChildWatcher):
def __enter__(self) -> Self: ...
def __exit__(
self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None
) -> None: ...
def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
class _UnixSelectorEventLoop(BaseSelectorEventLoop): ... class _UnixSelectorEventLoop(BaseSelectorEventLoop): ...
class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy):
if sys.version_info >= (3, 12): if sys.version_info < (3, 14):
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") if sys.version_info >= (3, 12):
def get_child_watcher(self) -> AbstractChildWatcher: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") def get_child_watcher(self) -> AbstractChildWatcher: ...
def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
else: def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
def get_child_watcher(self) -> AbstractChildWatcher: ... else:
def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... def get_child_watcher(self) -> AbstractChildWatcher: ...
def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
SelectorEventLoop = _UnixSelectorEventLoop SelectorEventLoop = _UnixSelectorEventLoop
DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy
if sys.version_info >= (3, 12): if sys.version_info < (3, 14):
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14") if sys.version_info >= (3, 12):
class MultiLoopChildWatcher(AbstractChildWatcher): @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
def is_active(self) -> bool: ... class MultiLoopChildWatcher(AbstractChildWatcher):
def is_active(self) -> bool: ...
def close(self) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
) -> None: ...
def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
else:
class MultiLoopChildWatcher(AbstractChildWatcher):
def is_active(self) -> bool: ...
def close(self) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
) -> None: ...
def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
if sys.version_info < (3, 14):
class ThreadedChildWatcher(AbstractChildWatcher):
def is_active(self) -> Literal[True]: ...
def close(self) -> None: ... def close(self) -> None: ...
def __enter__(self) -> Self: ... def __enter__(self) -> Self: ...
def __exit__( def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
) -> None: ... ) -> None: ...
def __del__(self) -> None: ...
def add_child_handler( def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ... ) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ... def remove_child_handler(self, pid: int) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
else: if sys.version_info >= (3, 9):
class MultiLoopChildWatcher(AbstractChildWatcher): class PidfdChildWatcher(AbstractChildWatcher):
def is_active(self) -> bool: ... def __enter__(self) -> Self: ...
def close(self) -> None: ... def __exit__(
def __enter__(self) -> Self: ... self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
def __exit__( ) -> None: ...
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None def is_active(self) -> bool: ...
) -> None: ... def close(self) -> None: ...
def add_child_handler( def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] def add_child_handler(
) -> None: ... self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
def remove_child_handler(self, pid: int) -> bool: ... ) -> None: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... def remove_child_handler(self, pid: int) -> bool: ...
class ThreadedChildWatcher(AbstractChildWatcher):
def is_active(self) -> Literal[True]: ...
def close(self) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
) -> None: ...
def __del__(self) -> None: ...
def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
if sys.version_info >= (3, 9):
class PidfdChildWatcher(AbstractChildWatcher):
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
) -> None: ...
def is_active(self) -> bool: ...
def close(self) -> None: ...
def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
def add_child_handler(
self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts]
) -> None: ...
def remove_child_handler(self, pid: int) -> bool: ...
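All of the watcher classes above are gated to < 3.14 because the child-watcher machinery is deprecated in 3.12 and removed in 3.14. A hedged sketch of code that stays compatible by only touching the API where it still exists:

import asyncio
import sys
import warnings

async def run_child() -> int:
    # Subprocess support no longer requires configuring a watcher explicitly.
    proc = await asyncio.create_subprocess_exec(sys.executable, "-c", "pass")
    return await proc.wait()

if __name__ == "__main__":
    print(asyncio.run(run_child()))   # 0
    if sys.platform != "win32" and sys.version_info < (3, 14):
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            watcher = asyncio.get_child_watcher()   # gone entirely in 3.14
            print(type(watcher).__name__)           # e.g. ThreadedChildWatcher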

View File

@ -74,8 +74,9 @@ if sys.platform == "win32":
class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
_loop_factory: ClassVar[type[SelectorEventLoop]] _loop_factory: ClassVar[type[SelectorEventLoop]]
def get_child_watcher(self) -> NoReturn: ... if sys.version_info < (3, 14):
def set_child_watcher(self, watcher: Any) -> NoReturn: ... def get_child_watcher(self) -> NoReturn: ...
def set_child_watcher(self, watcher: Any) -> NoReturn: ...
class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
_loop_factory: ClassVar[type[ProactorEventLoop]] _loop_factory: ClassVar[type[ProactorEventLoop]]

View File

@ -1673,9 +1673,9 @@ def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) ->
@overload @overload
def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ...
@overload @overload
def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
@overload @overload
def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap]
@overload @overload
def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ...
@overload @overload

View File

@ -108,7 +108,7 @@ class _DefaultFactory(Protocol[_T_co]):
class Field(Generic[_T]): class Field(Generic[_T]):
name: str name: str
type: Type[_T]
type: Type[_T] | str | Any
default: _T | Literal[_MISSING_TYPE.MISSING] default: _T | Literal[_MISSING_TYPE.MISSING]
default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING]
repr: bool repr: bool
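Field.type is widened to Type[_T] | str | Any because, with postponed evaluation of annotations, the recorded type is the raw annotation string rather than a class object:

from __future__ import annotations
import dataclasses

@dataclasses.dataclass
class Point:
    x: int
    y: int

for f in dataclasses.fields(Point):
    print(f.name, repr(f.type))   # x 'int' / y 'int' -- strings, not types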

View File

@ -8,7 +8,7 @@ from string import Template
from time import struct_time from time import struct_time
from types import FrameType, TracebackType from types import FrameType, TracebackType
from typing import Any, ClassVar, Generic, Literal, Protocol, TextIO, TypeVar, overload from typing import Any, ClassVar, Generic, Literal, Protocol, TextIO, TypeVar, overload
from typing_extensions import Self, TypeAlias
from typing_extensions import Self, TypeAlias, deprecated
if sys.version_info >= (3, 11): if sys.version_info >= (3, 11):
from types import GenericAlias from types import GenericAlias
@ -574,11 +574,8 @@ def disable(level: int = 50) -> None: ...
def addLevelName(level: int, levelName: str) -> None: ... def addLevelName(level: int, levelName: str) -> None: ...
@overload @overload
def getLevelName(level: int) -> str: ... def getLevelName(level: int) -> str: ...
# The str -> int case is considered a mistake, but retained for backward
# compatibility. See
# https://docs.python.org/3/library/logging.html#logging.getLevelName.
@overload @overload
@deprecated("The str -> int case is considered a mistake.")
def getLevelName(level: str) -> Any: ... def getLevelName(level: str) -> Any: ...
if sys.version_info >= (3, 11): if sys.version_info >= (3, 11):
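The str -> int overload is now marked @deprecated instead of carrying the explanatory comment; both directions still work at runtime:

import logging

print(logging.getLevelName(20))         # 'INFO'   (int -> str)
print(logging.getLevelName("INFO"))     # 20       (str -> int, the deprecated direction)
print(logging.getLevelName("NO_SUCH"))  # 'Level NO_SUCH' for unknown names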

View File

@ -1,7 +1,7 @@
import sys import sys
from _typeshed import ReadableBuffer, Unused from _typeshed import ReadableBuffer, Unused
from collections.abc import Iterable, Iterator, Sized from collections.abc import Iterable, Iterator, Sized
from typing import NoReturn, overload
from typing import Final, NoReturn, overload
from typing_extensions import Self from typing_extensions import Self
ACCESS_DEFAULT: int ACCESS_DEFAULT: int
@ -113,3 +113,9 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win
if sys.version_info >= (3, 10) and sys.platform == "darwin": if sys.version_info >= (3, 10) and sys.platform == "darwin":
MADV_FREE_REUSABLE: int MADV_FREE_REUSABLE: int
MADV_FREE_REUSE: int MADV_FREE_REUSE: int
if sys.version_info >= (3, 13) and sys.platform != "win32":
MAP_32BIT: Final = 32768
if sys.version_info >= (3, 13) and sys.platform == "darwin":
MAP_TPRO: Final = 524288
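MAP_32BIT (non-Windows) and MAP_TPRO (macOS) only exist on 3.13+, so portable code should feature-test rather than rely on version checks alone. The anonymous mapping below is ordinary mmap usage, not something introduced by this change:

import mmap
import sys

print(getattr(mmap, "MAP_32BIT", None), getattr(mmap, "MAP_TPRO", None))

if sys.platform != "win32":
    flags = mmap.MAP_PRIVATE | mmap.MAP_ANONYMOUS
    if hasattr(mmap, "MAP_32BIT"):        # 3.13+ and platform-dependent
        flags |= mmap.MAP_32BIT
    with mmap.mmap(-1, mmap.PAGESIZE, flags=flags) as m:
        m[:5] = b"hello"
        print(bytes(m[:5]))               # b'hello'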

View File

@ -92,17 +92,21 @@ class BaseContext:
@overload @overload
def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ...
@overload @overload
def Array(
self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]
) -> SynchronizedArray[_T]: ...
@overload
def Array( def Array(
self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True
) -> SynchronizedString: ... ) -> SynchronizedString: ...
@overload @overload
def Array( def Array(
self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] self,
) -> SynchronizedArray[_CT]: ... typecode_or_type: type[_SimpleCData[_T]],
@overload size_or_initializer: int | Sequence[Any],
def Array( *,
self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True lock: Literal[True] | _LockLike = True,
) -> SynchronizedArray[_CT]: ... ) -> SynchronizedArray[_T]: ...
@overload @overload
def Array( def Array(
self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True

View File

@ -39,12 +39,20 @@ def Array(
) -> _CT: ... ) -> _CT: ...
@overload @overload
def Array( def Array(
typecode_or_type: type[_CT], typecode_or_type: type[c_char],
size_or_initializer: int | Sequence[Any], size_or_initializer: int | Sequence[Any],
*, *,
lock: Literal[True] | _LockLike = True, lock: Literal[True] | _LockLike = True,
ctx: BaseContext | None = None, ctx: BaseContext | None = None,
) -> SynchronizedArray[_CT]: ... ) -> SynchronizedString: ...
@overload
def Array(
typecode_or_type: type[_SimpleCData[_T]],
size_or_initializer: int | Sequence[Any],
*,
lock: Literal[True] | _LockLike = True,
ctx: BaseContext | None = None,
) -> SynchronizedArray[_T]: ...
@overload @overload
def Array( def Array(
typecode_or_type: str, typecode_or_type: str,
@ -65,9 +73,11 @@ def copy(obj: _CT) -> _CT: ...
@overload @overload
def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ...
@overload @overload
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... # type: ignore
@overload @overload
def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... def synchronized(
obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None
) -> SynchronizedArray[_T]: ...
@overload @overload
def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ...
@ -89,19 +99,30 @@ class SynchronizedBase(Generic[_CT]):
class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]): class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]):
value: _T value: _T
class SynchronizedArray(SynchronizedBase[ctypes.Array[_CT]], Generic[_CT]): class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generic[_T]):
def __len__(self) -> int: ... def __len__(self) -> int: ...
@overload @overload
def __getitem__(self, i: slice) -> list[_CT]: ... def __getitem__(self, i: slice) -> list[_T]: ...
@overload @overload
def __getitem__(self, i: int) -> _CT: ... def __getitem__(self, i: int) -> _T: ...
@overload @overload
def __setitem__(self, i: slice, value: Iterable[_CT]) -> None: ... def __setitem__(self, i: slice, value: Iterable[_T]) -> None: ...
@overload @overload
def __setitem__(self, i: int, value: _CT) -> None: ... def __setitem__(self, i: int, value: _T) -> None: ...
def __getslice__(self, start: int, stop: int) -> list[_CT]: ... def __getslice__(self, start: int, stop: int) -> list[_T]: ...
def __setslice__(self, start: int, stop: int, values: Iterable[_CT]) -> None: ... def __setslice__(self, start: int, stop: int, values: Iterable[_T]) -> None: ...
class SynchronizedString(SynchronizedArray[bytes]):
@overload # type: ignore[override]
def __getitem__(self, i: slice) -> bytes: ...
@overload # type: ignore[override]
def __getitem__(self, i: int) -> bytes: ...
@overload # type: ignore[override]
def __setitem__(self, i: slice, value: bytes) -> None: ...
@overload # type: ignore[override]
def __setitem__(self, i: int, value: bytes) -> None: ... # type: ignore[override]
def __getslice__(self, start: int, stop: int) -> bytes: ... # type: ignore[override]
def __setslice__(self, start: int, stop: int, values: bytes) -> None: ... # type: ignore[override]
class SynchronizedString(SynchronizedArray[c_char]):
value: bytes value: bytes
raw: bytes raw: bytes
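The reshuffled overloads distinguish c_char arrays (SynchronizedString) from other simple ctypes (SynchronizedArray[_T]). A small runtime illustration via the public multiprocessing.Array wrapper:

import ctypes
import multiprocessing

nums = multiprocessing.Array("i", [1, 2, 3])         # SynchronizedArray[int]
text = multiprocessing.Array(ctypes.c_char, b"abc")  # SynchronizedString
with nums.get_lock():
    nums[0] = 10
print(nums[:], text.value)   # [10, 2, 3] b'abc'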

View File

@ -914,8 +914,8 @@ if sys.platform != "win32":
def forkpty() -> tuple[int, int]: ... # some flavors of Unix def forkpty() -> tuple[int, int]: ... # some flavors of Unix
def killpg(pgid: int, signal: int, /) -> None: ... def killpg(pgid: int, signal: int, /) -> None: ...
def nice(increment: int, /) -> int: ... def nice(increment: int, /) -> int: ...
if sys.platform != "darwin":
    def plock(op: int, /) -> None: ...  # ???op is int?
if sys.platform != "darwin" and sys.platform != "linux":
    def plock(op: int, /) -> None: ...
class _wrap_close(_TextIOWrapper): class _wrap_close(_TextIOWrapper):
def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ...
@ -1141,16 +1141,16 @@ if sys.version_info >= (3, 10) and sys.platform == "linux":
if sys.version_info >= (3, 12) and sys.platform == "linux": if sys.version_info >= (3, 12) and sys.platform == "linux":
CLONE_FILES: int CLONE_FILES: int
CLONE_FS: int CLONE_FS: int
CLONE_NEWCGROUP: int CLONE_NEWCGROUP: int # Linux 4.6+
CLONE_NEWIPC: int CLONE_NEWIPC: int # Linux 2.6.19+
CLONE_NEWNET: int CLONE_NEWNET: int # Linux 2.6.24+
CLONE_NEWNS: int CLONE_NEWNS: int
CLONE_NEWPID: int CLONE_NEWPID: int # Linux 3.8+
CLONE_NEWTIME: int CLONE_NEWTIME: int # Linux 5.6+
CLONE_NEWUSER: int CLONE_NEWUSER: int # Linux 3.8+
CLONE_NEWUTS: int CLONE_NEWUTS: int # Linux 2.6.19+
CLONE_SIGHAND: int CLONE_SIGHAND: int
CLONE_SYSVSEM: int CLONE_SYSVSEM: int # Linux 2.6.26+
CLONE_THREAD: int CLONE_THREAD: int
CLONE_VM: int CLONE_VM: int
def unshare(flags: int) -> None: ... def unshare(flags: int) -> None: ...

View File

@ -77,11 +77,7 @@ pathsep: LiteralString
defpath: LiteralString defpath: LiteralString
devnull: LiteralString devnull: LiteralString
# Overloads are necessary to work around python/mypy#3644.
@overload
def abspath(path: PathLike[AnyStr]) -> AnyStr: ...
@overload
def abspath(path: AnyStr) -> AnyStr: ...
def abspath(path: PathLike[AnyStr] | AnyStr) -> AnyStr: ...
@overload @overload
def basename(p: PathLike[AnyStr]) -> AnyStr: ... def basename(p: PathLike[AnyStr]) -> AnyStr: ...
@overload @overload
@ -90,14 +86,8 @@ def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ...
def dirname(p: PathLike[AnyStr]) -> AnyStr: ... def dirname(p: PathLike[AnyStr]) -> AnyStr: ...
@overload @overload
def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ...
@overload
def expanduser(path: PathLike[AnyStr]) -> AnyStr: ...
@overload
def expanduser(path: AnyStr) -> AnyStr: ...
@overload
def expandvars(path: PathLike[AnyStr]) -> AnyStr: ...
@overload
def expandvars(path: AnyStr) -> AnyStr: ...
def expanduser(path: PathLike[AnyStr] | AnyStr) -> AnyStr: ...
def expandvars(path: PathLike[AnyStr] | AnyStr) -> AnyStr: ...
@overload @overload
def normcase(s: PathLike[AnyStr]) -> AnyStr: ... def normcase(s: PathLike[AnyStr]) -> AnyStr: ...
@overload @overload
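The collapsed signatures accept either an os.PathLike or a raw str/bytes path and return the matching string type:

import os.path
import pathlib

print(os.path.abspath("data/file.txt"))                 # str in, str out
print(os.path.abspath(pathlib.Path("data/file.txt")))   # PathLike[str] in, str out
print(os.path.expanduser(b"~/notes"))                   # bytes in, bytes out
print(os.path.expandvars("$HOME/notes"))                # unchanged if $HOME is unset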

View File

@ -36,6 +36,11 @@ if sys.platform != "win32":
def sp_expire(self) -> int: ... def sp_expire(self) -> int: ...
@property @property
def sp_flag(self) -> int: ... def sp_flag(self) -> int: ...
# Deprecated aliases below.
@property
def sp_nam(self) -> str: ...
@property
def sp_pwd(self) -> str: ...
def getspall() -> list[struct_spwd]: ... def getspall() -> list[struct_spwd]: ...
def getspnam(arg: str, /) -> struct_spwd: ... def getspnam(arg: str, /) -> struct_spwd: ...

View File

@ -889,6 +889,7 @@ if sys.version_info >= (3, 11):
start_new_session: bool = False, start_new_session: bool = False,
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
*, *,
encoding: str | None = None,
timeout: float | None = None, timeout: float | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
@ -920,6 +921,7 @@ elif sys.version_info >= (3, 10):
start_new_session: bool = False, start_new_session: bool = False,
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
*, *,
encoding: str | None = None,
timeout: float | None = None, timeout: float | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
@ -950,6 +952,7 @@ elif sys.version_info >= (3, 9):
start_new_session: bool = False, start_new_session: bool = False,
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
*, *,
encoding: str | None = None,
timeout: float | None = None, timeout: float | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
@ -978,6 +981,7 @@ else:
start_new_session: bool = False, start_new_session: bool = False,
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
*, *,
encoding: str | None = None,
timeout: float | None = None, timeout: float | None = None,
text: bool | None = None, text: bool | None = None,
) -> int: ... ) -> int: ...
@ -1005,6 +1009,7 @@ if sys.version_info >= (3, 11):
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
timeout: float | None = ..., timeout: float | None = ...,
*, *,
encoding: str | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
group: str | int | None = None, group: str | int | None = None,
@ -1036,6 +1041,7 @@ elif sys.version_info >= (3, 10):
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
timeout: float | None = ..., timeout: float | None = ...,
*, *,
encoding: str | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
group: str | int | None = None, group: str | int | None = None,
@ -1066,6 +1072,7 @@ elif sys.version_info >= (3, 9):
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
timeout: float | None = ..., timeout: float | None = ...,
*, *,
encoding: str | None = None,
text: bool | None = None, text: bool | None = None,
user: str | int | None = None, user: str | int | None = None,
group: str | int | None = None, group: str | int | None = None,
@ -1094,6 +1101,7 @@ else:
pass_fds: Collection[int] = ..., pass_fds: Collection[int] = ...,
timeout: float | None = ..., timeout: float | None = ...,
*, *,
encoding: str | None = None,
text: bool | None = None, text: bool | None = None,
) -> int: ... ) -> int: ...
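call() and check_call() now document the encoding keyword they forward to Popen; for example:

import subprocess
import sys

out = subprocess.check_output([sys.executable, "-c", "print('hi')"], encoding="utf-8")
print(out.strip())   # hi
rc = subprocess.call([sys.executable, "-c", "pass"], encoding="utf-8")
print(rc)            # 0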

View File

@ -103,10 +103,13 @@ PAX_NAME_FIELDS: set[str]
ENCODING: str ENCODING: str
_FileCreationModes: TypeAlias = Literal["a", "w", "x"]
@overload
def open( def open(
name: StrOrBytesPath | None = None, name: StrOrBytesPath | None = None,
mode: str = "r", mode: str = "r",
fileobj: IO[bytes] | None = None, # depends on mode fileobj: IO[bytes] | None = None,
bufsize: int = 10240, bufsize: int = 10240,
*, *,
format: int | None = ..., format: int | None = ...,
@ -121,6 +124,25 @@ def open(
compresslevel: int | None = ..., compresslevel: int | None = ...,
preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ..., preset: Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] | None = ...,
) -> TarFile: ... ) -> TarFile: ...
@overload
def open(
name: StrOrBytesPath | None = None,
mode: _FileCreationModes = ...,
fileobj: _Fileobj | None = None,
bufsize: int = 10240,
*,
format: int | None = ...,
tarinfo: type[TarInfo] | None = ...,
dereference: bool | None = ...,
ignore_zeros: bool | None = ...,
encoding: str | None = ...,
errors: str = ...,
pax_headers: Mapping[str, str] | None = ...,
debug: int | None = ...,
errorlevel: int | None = ...,
compresslevel: int | None = ...,
preset: int | None = ...,
) -> TarFile: ...
class ExFileObject(io.BufferedReader): class ExFileObject(io.BufferedReader):
def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ...
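The new overload covers the "a"/"w"/"x" creation modes, where fileobj must be writable. A quick in-memory example of that path, using ordinary tarfile calls:

import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:   # matches the new overload
    payload = b"hello"
    info = tarfile.TarInfo(name="hello.txt")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))
print(len(buf.getvalue()))   # padded to the record size, e.g. 10240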

View File

@ -88,6 +88,7 @@ NOOPT: bytes
class Telnet: class Telnet:
host: str | None # undocumented host: str | None # undocumented
sock: socket.socket | None # undocumented
def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ...
def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ...
def msg(self, msg: str, *args: Any) -> None: ... def msg(self, msg: str, *args: Any) -> None: ...

View File

@ -21,7 +21,7 @@ from types import (
TracebackType, TracebackType,
WrapperDescriptorType, WrapperDescriptorType,
) )
from typing_extensions import Never as _Never, ParamSpec as _ParamSpec
from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated
if sys.version_info >= (3, 9): if sys.version_info >= (3, 9):
from types import GenericAlias from types import GenericAlias
@ -991,11 +991,30 @@ class ForwardRef:
def __init__(self, arg: str, is_argument: bool = True) -> None: ... def __init__(self, arg: str, is_argument: bool = True) -> None: ...
if sys.version_info >= (3, 13): if sys.version_info >= (3, 13):
@overload
@deprecated(
"Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, "
"as it leads to incorrect behaviour when evaluating a stringified annotation "
"that references a PEP 695 type parameter. It will be disallowed in Python 3.15."
)
def _evaluate(
self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, *, recursive_guard: frozenset[str]
) -> Any | None: ...
@overload
def _evaluate( def _evaluate(
self, self,
globalns: dict[str, Any] | None, globalns: dict[str, Any] | None,
localns: dict[str, Any] | None, localns: dict[str, Any] | None,
type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ..., type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...],
*,
recursive_guard: frozenset[str],
) -> Any | None: ...
elif sys.version_info >= (3, 12):
def _evaluate(
self,
globalns: dict[str, Any] | None,
localns: dict[str, Any] | None,
type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None,
*, *,
recursive_guard: frozenset[str], recursive_guard: frozenset[str],
) -> Any | None: ... ) -> Any | None: ...

View File

@ -41,7 +41,10 @@ _P = ParamSpec("_P")
ProxyTypes: tuple[type[Any], ...] ProxyTypes: tuple[type[Any], ...]
class WeakMethod(ref[_CallableT]): class WeakMethod(ref[_CallableT]):
def __new__(cls, meth: _CallableT, callback: Callable[[Self], object] | None = None) -> Self: ...
# `ref` is implemented in `C` so positional-only arguments are enforced, but not in `WeakMethod`.
def __new__(  # pyright: ignore[reportInconsistentConstructor]
    cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None
) -> Self: ...
def __call__(self) -> _CallableT | None: ... def __call__(self) -> _CallableT | None: ...
def __eq__(self, other: object) -> bool: ... def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ...
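WeakMethod keeps only weak references to the bound method's instance and function, which is what the callback: Callable[[Self], Any] signature describes:

import weakref

class Greeter:
    def hello(self) -> str:
        return "hi"

g = Greeter()
m = weakref.WeakMethod(g.hello, lambda ref: print("method went away"))
print(m()())    # 'hi' -- m() rebinds the method while g is alive
del g           # instance collected, callback fires
print(m())      # None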

View File

@ -14,7 +14,7 @@ class ContentHandler:
def startDocument(self) -> None: ... def startDocument(self) -> None: ...
def endDocument(self) -> None: ... def endDocument(self) -> None: ...
def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ...
def endPrefixMapping(self, prefix) -> None: ...
def endPrefixMapping(self, prefix: str | None) -> None: ...
def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ...
def endElement(self, name: str) -> None: ... def endElement(self, name: str) -> None: ...
def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ...
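endPrefixMapping now takes prefix: str | None, matching startPrefixMapping: the default namespace is reported with prefix None. A minimal handler showing both calls:

import io
from xml.sax import make_parser
from xml.sax.handler import ContentHandler, feature_namespaces

class PrefixPrinter(ContentHandler):
    def startPrefixMapping(self, prefix, uri):   # prefix is None for xmlns="..."
        print("start", prefix, uri)

    def endPrefixMapping(self, prefix):          # str | None, per the updated stub
        print("end", prefix)

parser = make_parser()
parser.setFeature(feature_namespaces, True)
parser.setContentHandler(PrefixPrinter())
parser.parse(io.BytesIO(b'<doc xmlns="urn:example" xmlns:x="urn:other"/>'))
# start None urn:example / start x urn:other / end x / end None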

View File

@ -7,8 +7,8 @@ from typing_extensions import ParamSpec, TypeAlias
from flask import Flask from flask import Flask
from flask.testing import FlaskClient from flask.testing import FlaskClient
from .namespace import Namespace from .namespace import Namespace as Namespace
from .test_client import SocketIOTestClient from .test_client import SocketIOTestClient as SocketIOTestClient
_P = ParamSpec("_P") _P = ParamSpec("_P")
_R_co = TypeVar("_R_co", covariant=True) _R_co = TypeVar("_R_co", covariant=True)
@ -96,9 +96,9 @@ class SocketIO:
port: int | None = None, port: int | None = None,
*, *,
debug: bool = True, debug: bool = True,
use_reloader: bool, use_reloader: bool = ...,
reloader_options: dict[str, Incomplete] = {}, reloader_options: dict[str, Incomplete] = {},
log_output: bool, log_output: bool = ...,
allow_unsafe_werkzeug: bool = False, allow_unsafe_werkzeug: bool = False,
**kwargs, **kwargs,
) -> None: ... ) -> None: ...
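use_reloader and log_output now have defaults in the stub, so run() can be called without them. A hedged sketch, assuming flask and flask-socketio are installed:

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

if __name__ == "__main__":
    # Neither use_reloader nor log_output has to be passed any more.
    socketio.run(app, host="127.0.0.1", port=5000, debug=True)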

View File

@ -1,4 +1,4 @@
version = "23.2.*"
version = "24.1.*"
upstream_repository = "https://github.com/Tinche/aiofiles" upstream_repository = "https://github.com/Tinche/aiofiles"
[tool.stubtest] [tool.stubtest]

View File

@ -1,12 +1,11 @@
from collections.abc import Callable, Coroutine, Generator, Iterator
from types import CodeType, FrameType, TracebackType, coroutine
from collections.abc import Awaitable, Callable, Generator
from contextlib import AbstractAsyncContextManager
from types import TracebackType
from typing import Any, BinaryIO, Generic, TextIO, TypeVar from typing import Any, BinaryIO, Generic, TextIO, TypeVar
from typing_extensions import Self from typing_extensions import Self
_T = TypeVar("_T") _T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_V_co = TypeVar("_V_co", covariant=True) _V_co = TypeVar("_V_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
class AsyncBase(Generic[_T]): class AsyncBase(Generic[_T]):
def __init__(self, file: str, loop: Any, executor: Any) -> None: ... def __init__(self, file: str, loop: Any, executor: Any) -> None: ...
@ -16,22 +15,9 @@ class AsyncBase(Generic[_T]):
class AsyncIndirectBase(AsyncBase[_T]): class AsyncIndirectBase(AsyncBase[_T]):
def __init__(self, name: str, loop: Any, executor: Any, indirect: Callable[[], TextIO | BinaryIO]) -> None: ... def __init__(self, name: str, loop: Any, executor: Any, indirect: Callable[[], TextIO | BinaryIO]) -> None: ...
class AiofilesContextManager(Generic[_T_co, _T_contra, _V_co]): class AiofilesContextManager(Awaitable[_V_co], AbstractAsyncContextManager[_V_co]):
def __init__(self, coro: Coroutine[_T_co, _T_contra, _V_co]) -> None: ... def __init__(self, coro: Awaitable[_V_co]) -> None: ...
def send(self, value: _T_contra) -> _T_co: ... def __await__(self) -> Generator[Any, Any, _V_co]: ...
def throw(self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None) -> _T_co: ...
def close(self) -> None: ...
@property
def gi_frame(self) -> FrameType: ...
@property
def gi_running(self) -> bool: ...
@property
def gi_code(self) -> CodeType: ...
def __next__(self) -> _T_co: ...
@coroutine
def __iter__(self) -> Iterator[Coroutine[_T_co, _T_contra, _V_co]]: ...
def __await__(self) -> Generator[Any, None, _V_co]: ...
async def __anext__(self) -> _V_co: ...
async def __aenter__(self) -> _V_co: ... async def __aenter__(self) -> _V_co: ...
async def __aexit__( async def __aexit__(
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
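AiofilesContextManager is now modeled as an Awaitable plus async context manager; both usage patterns it supports, assuming the aiofiles package is installed:

import asyncio
import aiofiles

async def main() -> None:
    # As an async context manager ...
    async with aiofiles.open("demo.txt", "w") as f:
        await f.write("hello")
    # ... or awaited directly for the wrapped file object.
    f = await aiofiles.open("demo.txt")
    try:
        print(await f.read())   # hello
    finally:
        await f.close()

asyncio.run(main())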

View File

@ -27,6 +27,7 @@ __all__ = [
"scandir", "scandir",
"access", "access",
"wrap", "wrap",
"getcwd",
] ]
if sys.platform != "win32": if sys.platform != "win32":
@ -118,6 +119,7 @@ async def listdir(path: int, *, loop: AbstractEventLoop | None = ..., executor:
async def access( async def access(
path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True
) -> bool: ... ) -> bool: ...
async def getcwd() -> str: ...
if sys.platform != "win32": if sys.platform != "win32":
from os import statvfs_result from os import statvfs_result

View File

@ -1,7 +1,8 @@
from _typeshed import FileDescriptorOrPath from _typeshed import FileDescriptorOrPath
from asyncio.events import AbstractEventLoop from asyncio.events import AbstractEventLoop
from collections.abc import Awaitable, Callable from collections.abc import Awaitable, Callable
from typing import Any, TypeVar
from os import PathLike
from typing import Any, AnyStr, TypeVar
_R = TypeVar("_R") _R = TypeVar("_R")
@ -9,6 +10,8 @@ def wrap(func: Callable[..., _R]) -> Callable[..., Awaitable[_R]]: ...
async def exists(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... async def exists(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ...
async def isfile(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... async def isfile(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ...
async def isdir(s: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... async def isdir(s: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ...
async def islink(path: FileDescriptorOrPath) -> bool: ...
async def ismount(path: FileDescriptorOrPath) -> bool: ...
async def getsize(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> int: ... async def getsize(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> int: ...
async def getmtime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... async def getmtime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ...
async def getatime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... async def getatime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ...
@ -17,5 +20,4 @@ async def samefile(
f1: FileDescriptorOrPath, f2: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ... f1: FileDescriptorOrPath, f2: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...
) -> bool: ... ) -> bool: ...
async def sameopenfile(fp1: int, fp2: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... async def sameopenfile(fp1: int, fp2: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ...
async def islink(path: FileDescriptorOrPath) -> bool: ... async def abspath(path: PathLike[AnyStr] | AnyStr) -> AnyStr: ...
async def ismount(path: FileDescriptorOrPath) -> bool: ...
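Illustrative only (not part of the diff): a minimal sketch of calling the newly stubbed wrappers, assuming a recent aiofiles release exposes os.getcwd and ospath.abspath at runtime; the file name is a placeholder.

import asyncio

import aiofiles.os
import aiofiles.ospath

async def main() -> None:
    cwd = await aiofiles.os.getcwd()                      # newly stubbed, returns str
    cfg = await aiofiles.ospath.abspath("settings.toml")  # AnyStr in, AnyStr out
    print(cwd, cfg, await aiofiles.ospath.islink(cfg))

asyncio.run(main())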

@ -1,3 +1,4 @@
import sys
from _typeshed import ( from _typeshed import (
BytesPath, BytesPath,
Incomplete, Incomplete,
@ -10,77 +11,13 @@ from _typeshed import (
StrPath, StrPath,
) )
from asyncio import AbstractEventLoop from asyncio import AbstractEventLoop
from typing import AnyStr, Literal, TypeVar, overload from typing import AnyStr, Literal, overload
from ..base import AiofilesContextManager from ..base import AiofilesContextManager
from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO
from ..threadpool.text import AsyncTextIOWrapper from ..threadpool.text import AsyncTextIOWrapper
from .temptypes import AsyncTemporaryDirectory from .temptypes import AsyncTemporaryDirectory
_T_co = TypeVar("_T_co", covariant=True)
_V_co = TypeVar("_V_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
# Text mode: always returns AsyncTextIOWrapper
@overload
def NamedTemporaryFile(
mode: OpenTextMode,
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO
@overload
def NamedTemporaryFile(
mode: OpenBinaryMode,
buffering: Literal[0],
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeReading | OpenBinaryModeUpdating = "w+b",
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeWriting,
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ...
# Text mode: always returns AsyncTextIOWrapper # Text mode: always returns AsyncTextIOWrapper
@overload @overload
def TemporaryFile( def TemporaryFile(
@ -93,7 +30,7 @@ def TemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... ) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO # Unbuffered binary: returns a FileIO
@overload @overload
@ -107,7 +44,7 @@ def TemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncFileIO]: ... ) -> AiofilesContextManager[AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader # Buffered binary reading/updating: AsyncBufferedReader
@overload @overload
@ -121,7 +58,7 @@ def TemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... ) -> AiofilesContextManager[AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase # Buffered binary writing: AsyncBufferedIOBase
@overload @overload
@ -135,7 +72,134 @@ def TemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... ) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
# 3.12 added `delete_on_close`
if sys.version_info >= (3, 12):
# Text mode: always returns AsyncTextIOWrapper
@overload
def NamedTemporaryFile(
mode: OpenTextMode,
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
delete_on_close: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO
@overload
def NamedTemporaryFile(
mode: OpenBinaryMode,
buffering: Literal[0],
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
delete_on_close: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeReading | OpenBinaryModeUpdating = "w+b",
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
delete_on_close: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeWriting,
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
delete_on_close: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
else:
# Text mode: always returns AsyncTextIOWrapper
@overload
def NamedTemporaryFile(
mode: OpenTextMode,
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO
@overload
def NamedTemporaryFile(
mode: OpenBinaryMode,
buffering: Literal[0],
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeReading | OpenBinaryModeUpdating = "w+b",
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase
@overload
def NamedTemporaryFile(
mode: OpenBinaryModeWriting,
buffering: Literal[-1, 1] = -1,
encoding: None = None,
newline: None = None,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: StrOrBytesPath | None = None,
delete: bool = True,
loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None,
) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
# Text mode: always returns AsyncTextIOWrapper # Text mode: always returns AsyncTextIOWrapper
@overload @overload
@ -151,7 +215,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... ) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
@overload @overload
def SpooledTemporaryFile( def SpooledTemporaryFile(
max_size: int, max_size: int,
@ -164,7 +228,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... ) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO # Unbuffered binary: returns a FileIO
@overload @overload
@ -180,7 +244,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncFileIO]: ... ) -> AiofilesContextManager[AsyncFileIO]: ...
@overload @overload
def SpooledTemporaryFile( def SpooledTemporaryFile(
max_size: int, max_size: int,
@ -193,7 +257,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncFileIO]: ... ) -> AiofilesContextManager[AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader # Buffered binary reading/updating: AsyncBufferedReader
@overload @overload
@ -208,7 +272,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... ) -> AiofilesContextManager[AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase # Buffered binary writing: AsyncBufferedIOBase
@overload @overload
@ -224,7 +288,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... ) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
@overload @overload
def SpooledTemporaryFile( def SpooledTemporaryFile(
max_size: int, max_size: int,
@ -237,7 +301,7 @@ def SpooledTemporaryFile(
dir: StrOrBytesPath | None = None, dir: StrOrBytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... ) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
@overload @overload
def TemporaryDirectory( def TemporaryDirectory(
suffix: str | None = None, suffix: str | None = None,
@ -245,7 +309,7 @@ def TemporaryDirectory(
dir: StrPath | None = None, dir: StrPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManagerTempDir[None, None, AsyncTemporaryDirectory]: ... ) -> AiofilesContextManagerTempDir: ...
@overload @overload
def TemporaryDirectory( def TemporaryDirectory(
suffix: bytes | None = None, suffix: bytes | None = None,
@ -253,7 +317,7 @@ def TemporaryDirectory(
dir: BytesPath | None = None, dir: BytesPath | None = None,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManagerTempDir[None, None, AsyncTemporaryDirectory]: ... ) -> AiofilesContextManagerTempDir: ...
class AiofilesContextManagerTempDir(AiofilesContextManager[_T_co, _T_contra, _V_co]): class AiofilesContextManagerTempDir(AiofilesContextManager[AsyncTemporaryDirectory]):
async def __aenter__(self) -> str: ... # type: ignore[override] async def __aenter__(self) -> str: ... # type: ignore[override]
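For context, a hedged usage sketch of the 3.12-gated keyword captured above, assuming aiofiles forwards tempfile's delete_on_close at runtime; the written content is a placeholder.

import asyncio
import sys

import aiofiles.tempfile

async def main() -> None:
    if sys.version_info >= (3, 12):
        # Matches the 3.12+ overloads: keep the file when it is closed, drop it on context exit.
        async with aiofiles.tempfile.NamedTemporaryFile("w+", delete_on_close=False) as f:
            await f.write("scratch")
    else:
        async with aiofiles.tempfile.NamedTemporaryFile("w+") as f:
            await f.write("scratch")

asyncio.run(main())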

@ -32,7 +32,7 @@ def open(
*, *,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... ) -> AiofilesContextManager[AsyncTextIOWrapper]: ...
# Unbuffered binary: returns a FileIO # Unbuffered binary: returns a FileIO
@overload @overload
@ -48,7 +48,7 @@ def open(
*, *,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncFileIO]: ... ) -> AiofilesContextManager[AsyncFileIO]: ...
# Buffered binary reading/updating: AsyncBufferedReader # Buffered binary reading/updating: AsyncBufferedReader
@overload @overload
@ -64,7 +64,7 @@ def open(
*, *,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... ) -> AiofilesContextManager[AsyncBufferedReader]: ...
# Buffered binary writing: AsyncBufferedIOBase # Buffered binary writing: AsyncBufferedIOBase
@overload @overload
@ -80,7 +80,7 @@ def open(
*, *,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... ) -> AiofilesContextManager[AsyncBufferedIOBase]: ...
# Buffering cannot be determined: fall back to _UnknownAsyncBinaryIO # Buffering cannot be determined: fall back to _UnknownAsyncBinaryIO
@overload @overload
@ -96,7 +96,7 @@ def open(
*, *,
loop: AbstractEventLoop | None = None, loop: AbstractEventLoop | None = None,
executor: Incomplete | None = None, executor: Incomplete | None = None,
) -> AiofilesContextManager[None, None, _UnknownAsyncBinaryIO]: ... ) -> AiofilesContextManager[_UnknownAsyncBinaryIO]: ...
stdin: AsyncTextIndirectIOWrapper stdin: AsyncTextIndirectIOWrapper
stdout: AsyncTextIndirectIOWrapper stdout: AsyncTextIndirectIOWrapper

@ -1,7 +1,13 @@
from collections.abc import Callable, Iterable as _Iterable, Mapping
from typing import Any from typing import Any
from typing_extensions import Self from typing_extensions import Self
__tracebackhide__: bool __tracebackhide__: bool
class ExtractingMixin: class ExtractingMixin:
def extracting(self, *names: Any, **kwargs: dict[str, Any]) -> Self: ... def extracting(
self,
*names: str,
filter: str | Mapping[str, Any] | Callable[[Any], bool],
sort: str | _Iterable[str] | Callable[[Any], Any],
) -> Self: ...
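A short sketch of the keyword form the sharpened signature describes; the data is invented.

from assertpy import assert_that

users = [
    {"name": "Bea", "active": True},
    {"name": "Al", "active": False},
]
# `filter` narrows the rows, `sort` orders the extracted values.
assert_that(users).extracting("name", filter={"active": True}, sort="name").contains("Bea")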

@ -0,0 +1,8 @@
# Internal-use module for types shared by multiple modules.
# This does not match a module in docker-py.
from typing_extensions import TypeAlias
# Type alias for JSON, explained at:
# https://github.com/python/typing/issues/182#issuecomment-1320974824.
JSON: TypeAlias = dict[str, JSON] | list[JSON] | str | int | float | bool | None
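The alias above is internal to the stubs, but the same recursive-alias pattern works in application code; a minimal sketch with an invented JSONValue name.

from typing_extensions import TypeAlias

JSONValue: TypeAlias = "dict[str, JSONValue] | list[JSONValue] | str | int | float | bool | None"

def leaf_count(doc: JSONValue) -> int:
    # Count scalar leaves in an arbitrary JSON document.
    if isinstance(doc, dict):
        return sum(leaf_count(v) for v in doc.values())
    if isinstance(doc, list):
        return sum(leaf_count(v) for v in doc)
    return 1

print(leaf_count({"a": [1, 2, {"b": None}]}))  # 3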

@ -154,10 +154,10 @@ class ContainerApiMixin:
def rename(self, container: _Container, name: str) -> None: ... def rename(self, container: _Container, name: str) -> None: ...
def resize(self, container: _Container, height: int, width: int) -> None: ... def resize(self, container: _Container, height: int, width: int) -> None: ...
def restart(self, container: _Container, timeout: int = 10) -> None: ... def restart(self, container: _Container, timeout: int = 10) -> None: ...
def start(self, container: _Container, *args, **kwargs) -> None: ... def start(self, container: _Container) -> None: ...
def stats(self, container: _Container, decode: bool | None = None, stream: bool = True, one_shot: bool | None = None): ... def stats(self, container: _Container, decode: bool | None = None, stream: bool = True, one_shot: bool | None = None): ...
def stop(self, container: _Container, timeout: int | None = None) -> None: ... def stop(self, container: _Container, timeout: int | None = None) -> None: ...
def top(self, container: _Container, ps_args: str | None = None): ... def top(self, container: _Container, ps_args: str | None = None) -> str: ...
def unpause(self, container: _Container) -> None: ... def unpause(self, container: _Container) -> None: ...
def update_container( def update_container(
self, self,

@ -1,4 +1,5 @@
from _typeshed import Incomplete from _typeshed import Incomplete
from typing import Any
log: Incomplete log: Incomplete
@ -39,9 +40,9 @@ class ImageApiMixin:
repository: str, repository: str,
tag: str | None = None, tag: str | None = None,
stream: bool = False, stream: bool = False,
auth_config: Incomplete | None = None, auth_config: dict[str, Any] | None = None,
decode: bool = False, decode: bool = False,
platform: Incomplete | None = None, platform: str | None = None,
all_tags: bool = False, all_tags: bool = False,
): ... ): ...
def push( def push(

@ -1,28 +1,45 @@
from _typeshed import Incomplete from _typeshed import Incomplete
from typing import Any, Literal, TypedDict, type_check_only
from typing_extensions import TypeAlias
from docker.types import IPAMConfig
@type_check_only
class _HasId(TypedDict):
Id: str
@type_check_only
class _HasID(TypedDict):
ID: str
_Network: TypeAlias = _HasId | _HasID | str
_Container: TypeAlias = _HasId | _HasID | str
class NetworkApiMixin: class NetworkApiMixin:
def networks(self, names: Incomplete | None = None, ids: Incomplete | None = None, filters: Incomplete | None = None): ... def networks(self, names: Incomplete | None = None, ids: Incomplete | None = None, filters: Incomplete | None = None): ...
def create_network( def create_network(
self, self,
name, name: str,
driver: Incomplete | None = None, driver: str | None = None,
options: Incomplete | None = None, options: dict[str, Any] | None = None,
ipam: Incomplete | None = None, ipam: IPAMConfig | None = None,
check_duplicate: Incomplete | None = None, check_duplicate: bool | None = None,
internal: bool = False, internal: bool = False,
labels: Incomplete | None = None, labels: dict[str, Any] | None = None,
enable_ipv6: bool = False, enable_ipv6: bool = False,
attachable: Incomplete | None = None, attachable: bool | None = None,
scope: Incomplete | None = None, scope: Literal["local", "global", "swarm"] | None = None,
ingress: Incomplete | None = None, ingress: bool | None = None,
): ... ) -> dict[str, str]: ...
def prune_networks(self, filters: Incomplete | None = None): ... def prune_networks(self, filters: Incomplete | None = None): ...
def remove_network(self, net_id) -> None: ... def remove_network(self, net_id: _Network) -> None: ...
def inspect_network(self, net_id, verbose: Incomplete | None = None, scope: Incomplete | None = None): ... def inspect_network(
self, net_id: _Network, verbose: bool | None = None, scope: Literal["local", "global", "swarm"] | None = None
): ...
def connect_container_to_network( def connect_container_to_network(
self, self,
container, container: _Container,
net_id, net_id: str,
ipv4_address: Incomplete | None = None, ipv4_address: Incomplete | None = None,
ipv6_address: Incomplete | None = None, ipv6_address: Incomplete | None = None,
aliases: Incomplete | None = None, aliases: Incomplete | None = None,
@ -31,4 +48,4 @@ class NetworkApiMixin:
driver_opt: Incomplete | None = None, driver_opt: Incomplete | None = None,
mac_address: Incomplete | None = None, mac_address: Incomplete | None = None,
) -> None: ... ) -> None: ...
def disconnect_container_from_network(self, container, net_id, force: bool = False) -> None: ... def disconnect_container_from_network(self, container: _Container, net_id: str, force: bool = False) -> None: ...

@ -71,22 +71,37 @@ class Container(Model):
follow: bool | None = None, follow: bool | None = None,
until: datetime.datetime | float | None = None, until: datetime.datetime | float | None = None,
) -> bytes: ... ) -> bytes: ...
def pause(self): ... def pause(self) -> None: ...
def put_archive(self, path: str, data) -> bool: ... def put_archive(self, path: str, data) -> bool: ...
def remove(self, **kwargs) -> None: ... def remove(self, *, v: bool = False, link: bool = False, force: bool = False) -> None: ...
def rename(self, name: str): ... def rename(self, name: str): ...
def resize(self, height: int, width: int): ... def resize(self, height: int, width: int): ...
def restart(self, **kwargs): ... def restart(self, *, timeout: float | None = 10): ...
def start(self, **kwargs) -> None: ... def start(self) -> None: ...
def stats(self, **kwargs): ... def stats(self, **kwargs): ...
def stop(self, *, timeout: float | None = None) -> None: ... def stop(self, *, timeout: float | None = None) -> None: ...
def top(self, **kwargs): ... def top(self, *, ps_args: str | None = None) -> str: ...
def unpause(self): ... def unpause(self): ...
def update(self, **kwargs): ... def update(
self,
*,
blkio_weight: int | None = None,
cpu_period: int | None = None,
cpu_quota: int | None = None,
cpu_shares: int | None = None,
cpuset_cpus: str | None = None,
cpuset_mems: str | None = None,
mem_limit: float | str | None = None,
mem_reservation: float | str | None = None,
memswap_limit: int | str | None = None,
kernel_memory: int | str | None = None,
restart_policy: Incomplete | None = None,
): ...
def wait(self, *, timeout: float | None = None, condition: Literal["not-running", "next-exit", "removed"] | None = None): ... def wait(self, *, timeout: float | None = None, condition: Literal["not-running", "next-exit", "removed"] | None = None): ...
class ContainerCollection(Collection[Container]): class ContainerCollection(Collection[Container]):
model: type[Container] model: type[Container]
@overload
def run( def run(
self, self,
image: str | Image, image: str | Image,
@ -94,8 +109,22 @@ class ContainerCollection(Collection[Container]):
stdout: bool = True, stdout: bool = True,
stderr: bool = False, stderr: bool = False,
remove: bool = False, remove: bool = False,
*,
detach: Literal[False] = False,
**kwargs, **kwargs,
): ... ) -> bytes: ...
@overload
def run(
self,
image: str | Image,
command: str | list[str] | None = None,
stdout: bool = True,
stderr: bool = False,
remove: bool = False,
*,
detach: Literal[True],
**kwargs,
) -> Container: ...
def create(self, image: str, command: str | list[str] | None = None, **kwargs) -> Container: ... # type:ignore[override] def create(self, image: str, command: str | list[str] | None = None, **kwargs) -> Container: ... # type:ignore[override]
def get(self, container_id: str) -> Container: ... def get(self, container_id: str) -> Container: ...
def list( def list(
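Hedged sketch of what the new run() overloads express: without detach the call is typed as the captured bytes, with detach=True it is typed as a Container. Image names and commands are placeholders, and a reachable Docker daemon is assumed.

import docker

client = docker.from_env()
output: bytes = client.containers.run("alpine:3.19", "echo hi", remove=True)  # detach defaults to False
container = client.containers.run("alpine:3.19", "sleep 30", detach=True)     # detached -> Container
container.stop(timeout=1)
container.remove()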

@ -1,11 +1,21 @@
from collections.abc import Iterator from collections.abc import Iterator
from typing import Any, Literal, overload from io import StringIO
from typing import IO, Any, Literal, TypedDict, overload, type_check_only
from typing_extensions import TypeAlias from typing_extensions import TypeAlias
from docker._types import JSON
from .resource import Collection, Model from .resource import Collection, Model
_ImageList: TypeAlias = list[Image] # To resolve conflicts with a method called "list" _ImageList: TypeAlias = list[Image] # To resolve conflicts with a method called "list"
@type_check_only
class _ContainerLimits(TypedDict, total=False):
memory: int
memswap: int
cpushares: int
cpusetcpus: str
class Image(Model): class Image(Model):
@property @property
def labels(self) -> dict[str, Any]: ... def labels(self) -> dict[str, Any]: ...
@ -31,17 +41,69 @@ class RegistryData(Model):
class ImageCollection(Collection[Image]): class ImageCollection(Collection[Image]):
model: type[Image] model: type[Image]
def build(self, **kwargs) -> tuple[Image, Iterator[Any]]: ... def build(
self,
*,
path: str | None = None,
fileobj: StringIO | IO[bytes] | None = None,
tag: str | None = None,
quiet: bool = False,
nocache: bool = False,
rm: bool = False,
timeout: int | None = None,
custom_context: bool = False,
encoding: str | None = None,
pull: bool = False,
forcerm: bool = False,
dockerfile: str | None = None,
buildargs: dict[str, Any] | None = None,
container_limits: _ContainerLimits | None = None,
shmsize: int | None = None,
labels: dict[str, Any] | None = None,
# need to use list, because the type must be json serializable
cache_from: list[str] | None = None,
target: str | None = None,
network_mode: str | None = None,
squash: bool | None = None,
extra_hosts: list[str] | dict[str, str] | None = None,
platform: str | None = None,
isolation: str | None = None,
use_config_proxy: bool = True,
) -> tuple[Image, Iterator[JSON]]: ...
def get(self, name: str) -> Image: ... def get(self, name: str) -> Image: ...
def get_registry_data(self, name, auth_config: dict[str, Any] | None = None) -> RegistryData: ... def get_registry_data(self, name, auth_config: dict[str, Any] | None = None) -> RegistryData: ...
def list(self, name: str | None = None, all: bool = False, filters: dict[str, Any] | None = None) -> _ImageList: ... def list(self, name: str | None = None, all: bool = False, filters: dict[str, Any] | None = None) -> _ImageList: ...
def load(self, data: bytes) -> _ImageList: ... def load(self, data: bytes) -> _ImageList: ...
@overload @overload
def pull(self, repository: str, tag: str | None = None, all_tags: Literal[False] = False, **kwargs) -> Image: ... def pull(
self,
repository: str,
tag: str | None = None,
all_tags: Literal[False] = False,
*,
platform: str | None = None,
auth_config: dict[str, Any] | None = None,
) -> Image: ...
@overload @overload
def pull(self, repository: str, tag: str | None = None, *, all_tags: Literal[True], **kwargs) -> _ImageList: ... def pull(
self,
repository: str,
tag: str | None = None,
*,
all_tags: Literal[True],
auth_config: dict[str, Any] | None = None,
platform: str | None = None,
) -> _ImageList: ...
@overload @overload
def pull(self, repository: str, tag: str | None, all_tags: Literal[True], **kwargs) -> _ImageList: ... def pull(
self,
repository: str,
tag: str | None,
all_tags: Literal[True],
*,
auth_config: dict[str, Any] | None = None,
platform: str | None = None,
) -> _ImageList: ...
def push(self, repository: str, tag: str | None = None, **kwargs): ... def push(self, repository: str, tag: str | None = None, **kwargs): ...
def remove(self, *args, **kwargs) -> None: ... def remove(self, *args, **kwargs) -> None: ...
def search(self, *args, **kwargs): ... def search(self, *args, **kwargs): ...
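Likewise for the pull() overloads above, a hedged sketch; the repository, tag, and platform values are placeholders.

import docker

client = docker.from_env()
image = client.images.pull("alpine", tag="3.19", platform="linux/amd64")  # single Image
variants = client.images.pull("alpine", all_tags=True)                    # list[Image]
print(image.id, len(variants))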

@ -1,4 +1,6 @@
from typing import Any from typing import Any, Literal
from docker.types import IPAMConfig
from .containers import Container from .containers import Container
from .resource import Collection, Model from .resource import Collection, Model
@ -14,7 +16,22 @@ class Network(Model):
class NetworkCollection(Collection[Network]): class NetworkCollection(Collection[Network]):
model: type[Network] model: type[Network]
def create(self, name: str, *args, **kwargs) -> Network: ... # type:ignore[override] def create( # type:ignore[override]
def get(self, network_id: str, *args, **kwargs) -> Network: ... # type:ignore[override] self,
name: str,
driver: str | None = None,
options: dict[str, Any] | None = None,
ipam: IPAMConfig | None = None,
check_duplicate: bool | None = None,
internal: bool = False,
labels: dict[str, Any] | None = None,
enable_ipv6: bool = False,
attachable: bool | None = None,
scope: Literal["local", "global", "swarm"] | None = None,
ingress: bool | None = None,
) -> Network: ...
def get(
self, network_id: str, verbose: bool | None = None, scope: Literal["local", "global", "swarm"] | None = None
) -> Network: ... # type:ignore[override]
def list(self, *args, **kwargs) -> list[Network]: ... def list(self, *args, **kwargs) -> list[Network]: ...
def prune(self, filters: dict[str, Any] | None = None) -> dict[str, Any]: ... def prune(self, filters: dict[str, Any] | None = None) -> dict[str, Any]: ...
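A hedged sketch of create() with the now-typed ipam and driver options; the network name and subnet are placeholders.

import docker
from docker.types import IPAMConfig, IPAMPool

client = docker.from_env()
ipam = IPAMConfig(pool_configs=[IPAMPool(subnet="10.42.0.0/24")])
net = client.networks.create("demo-net", driver="bridge", ipam=ipam, labels={"env": "dev"})
print(net.id)
net.remove()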

@ -1,6 +1,14 @@
from _typeshed import Incomplete from _typeshed import Incomplete
from pathlib import Path
from typing import Literal, TypedDict, type_check_only
from .base import DictType from .base import DictType
from .services import Mount
@type_check_only
class ContainerWeightDevice(TypedDict):
Path: Path
Weight: int
class LogConfigTypesEnum: class LogConfigTypesEnum:
JSON: Incomplete JSON: Incomplete
@ -63,71 +71,71 @@ class DeviceRequest(DictType):
class HostConfig(dict[str, Incomplete]): class HostConfig(dict[str, Incomplete]):
def __init__( def __init__(
self, self,
version, version: str,
binds: Incomplete | None = None, binds: Incomplete | None = None,
port_bindings: Incomplete | None = None, port_bindings: Incomplete | None = None,
lxc_conf: Incomplete | None = None, lxc_conf: dict[Incomplete, Incomplete] | None = None,
publish_all_ports: bool = False, publish_all_ports: bool = False,
links: Incomplete | None = None, links: dict[str, str | None] | None = None,
privileged: bool = False, privileged: bool = False,
dns: Incomplete | None = None, dns: list[Incomplete] | None = None,
dns_search: Incomplete | None = None, dns_search: list[Incomplete] | None = None,
volumes_from: Incomplete | None = None, volumes_from: list[str] | None = None,
network_mode: Incomplete | None = None, network_mode: str | None = None,
restart_policy: Incomplete | None = None, restart_policy: dict[Incomplete, Incomplete] | None = None,
cap_add: Incomplete | None = None, cap_add: list[str] | None = None,
cap_drop: Incomplete | None = None, cap_drop: list[str] | None = None,
devices: Incomplete | None = None, devices: list[str] | None = None,
extra_hosts: Incomplete | None = None, extra_hosts: dict[Incomplete, Incomplete] | None = None,
read_only: Incomplete | None = None, read_only: bool | None = None,
pid_mode: Incomplete | None = None, pid_mode: str | None = None,
ipc_mode: Incomplete | None = None, ipc_mode: str | None = None,
security_opt: Incomplete | None = None, security_opt: list[str] | None = None,
ulimits: Incomplete | None = None, ulimits: list[Ulimit] | None = None,
log_config: Incomplete | None = None, log_config: LogConfig | None = None,
mem_limit: Incomplete | None = None, mem_limit: str | int | None = None,
memswap_limit: Incomplete | None = None, memswap_limit: str | int | None = None,
mem_reservation: Incomplete | None = None, mem_reservation: str | int | None = None,
kernel_memory: Incomplete | None = None, kernel_memory: str | int | None = None,
mem_swappiness: Incomplete | None = None, mem_swappiness: int | None = None,
cgroup_parent: Incomplete | None = None, cgroup_parent: str | None = None,
group_add: Incomplete | None = None, group_add: list[str | int] | None = None,
cpu_quota: Incomplete | None = None, cpu_quota: int | None = None,
cpu_period: Incomplete | None = None, cpu_period: int | None = None,
blkio_weight: Incomplete | None = None, blkio_weight: int | None = None,
blkio_weight_device: Incomplete | None = None, blkio_weight_device: list[ContainerWeightDevice] | None = None,
device_read_bps: Incomplete | None = None, device_read_bps: Incomplete | None = None,
device_write_bps: Incomplete | None = None, device_write_bps: Incomplete | None = None,
device_read_iops: Incomplete | None = None, device_read_iops: Incomplete | None = None,
device_write_iops: Incomplete | None = None, device_write_iops: Incomplete | None = None,
oom_kill_disable: bool = False, oom_kill_disable: bool = False,
shm_size: Incomplete | None = None, shm_size: str | int | None = None,
sysctls: Incomplete | None = None, sysctls: dict[Incomplete, Incomplete] | None = None,
tmpfs: Incomplete | None = None, tmpfs: dict[str, str] | None = None,
oom_score_adj: Incomplete | None = None, oom_score_adj: int | None = None,
dns_opt: Incomplete | None = None, dns_opt: list[Incomplete] | None = None,
cpu_shares: Incomplete | None = None, cpu_shares: int | None = None,
cpuset_cpus: Incomplete | None = None, cpuset_cpus: str | None = None,
userns_mode: Incomplete | None = None, userns_mode: str | None = None,
uts_mode: Incomplete | None = None, uts_mode: str | None = None,
pids_limit: Incomplete | None = None, pids_limit: int | None = None,
isolation: Incomplete | None = None, isolation: str | None = None,
auto_remove: bool = False, auto_remove: bool = False,
storage_opt: Incomplete | None = None, storage_opt: dict[Incomplete, Incomplete] | None = None,
init: Incomplete | None = None, init: bool | None = None,
init_path: Incomplete | None = None, init_path: str | None = None,
volume_driver: Incomplete | None = None, volume_driver: str | None = None,
cpu_count: Incomplete | None = None, cpu_count: int | None = None,
cpu_percent: Incomplete | None = None, cpu_percent: int | None = None,
nano_cpus: Incomplete | None = None, nano_cpus: int | None = None,
cpuset_mems: Incomplete | None = None, cpuset_mems: str | None = None,
runtime: Incomplete | None = None, runtime: str | None = None,
mounts: Incomplete | None = None, mounts: list[Mount] | None = None,
cpu_rt_period: Incomplete | None = None, cpu_rt_period: int | None = None,
cpu_rt_runtime: Incomplete | None = None, cpu_rt_runtime: int | None = None,
device_cgroup_rules: Incomplete | None = None, device_cgroup_rules: list[Incomplete] | None = None,
device_requests: Incomplete | None = None, device_requests: list[DeviceRequest] | None = None,
cgroupns: Incomplete | None = None, cgroupns: Literal["private", "host"] | None = None,
) -> None: ... ) -> None: ...
def host_config_type_error(param, param_value, expected): ... def host_config_type_error(param, param_value, expected): ...
@ -138,27 +146,27 @@ def host_config_incompatible_error(param, param_value, incompatible_param): ...
class ContainerConfig(dict[str, Incomplete]): class ContainerConfig(dict[str, Incomplete]):
def __init__( def __init__(
self, self,
version, version: str,
image, image,
command, command: str | list[str],
hostname: Incomplete | None = None, hostname: str | None = None,
user: Incomplete | None = None, user: str | int | None = None,
detach: bool = False, detach: bool = False,
stdin_open: bool = False, stdin_open: bool = False,
tty: bool = False, tty: bool = False,
ports: Incomplete | None = None, ports: dict[str, int | None] | None = None,
environment: Incomplete | None = None, environment: dict[str, str] | list[str] | None = None,
volumes: Incomplete | None = None, volumes: str | list[str] | None = None,
network_disabled: bool = False, network_disabled: bool = False,
entrypoint: Incomplete | None = None, entrypoint: str | list[str] | None = None,
working_dir: Incomplete | None = None, working_dir: str | None = None,
domainname: Incomplete | None = None, domainname: str | None = None,
host_config: Incomplete | None = None, host_config: Incomplete | None = None,
mac_address: Incomplete | None = None, mac_address: str | None = None,
labels: Incomplete | None = None, labels: dict[str, str] | list[str] | None = None,
stop_signal: Incomplete | None = None, stop_signal: str | None = None,
networking_config: Incomplete | None = None, networking_config: Incomplete | None = None,
healthcheck: Incomplete | None = None, healthcheck: Incomplete | None = None,
stop_timeout: Incomplete | None = None, stop_timeout: int | None = None,
runtime: Incomplete | None = None, runtime: str | None = None,
) -> None: ... ) -> None: ...

@ -1,17 +1,14 @@
import json import json
from collections.abc import Callable, Generator, Iterator from collections.abc import Callable, Generator, Iterator
from typing import Any from typing import Any
from typing_extensions import TypeAlias
from docker._types import JSON
json_decoder: json.JSONDecoder json_decoder: json.JSONDecoder
# Type alias for JSON, explained at:
# https://github.com/python/typing/issues/182#issuecomment-1320974824.
_JSON: TypeAlias = dict[str, _JSON] | list[_JSON] | str | int | float | bool | None
def stream_as_text(stream: Iterator[str | bytes]) -> Generator[str, None, None]: ... def stream_as_text(stream: Iterator[str | bytes]) -> Generator[str, None, None]: ...
def json_splitter(buffer: str) -> tuple[_JSON, str] | None: ... def json_splitter(buffer: str) -> tuple[JSON, str] | None: ...
def json_stream(stream: Iterator[str]) -> Generator[_JSON, None, None]: ... def json_stream(stream: Iterator[str]) -> Generator[JSON, None, None]: ...
def line_splitter(buffer: str, separator: str = "\n") -> tuple[str, str] | None: ... def line_splitter(buffer: str, separator: str = "\n") -> tuple[str, str] | None: ...
def split_buffer( def split_buffer(
stream: Iterator[str | bytes], splitter: Callable[[str], tuple[str, str]] | None = None, decoder: Callable[[str], Any] = ... stream: Iterator[str | bytes], splitter: Callable[[str], tuple[str, str]] | None = None, decoder: Callable[[str], Any] = ...

@ -1,3 +1,3 @@
version = "7.0.*" version = "7.1.*"
upstream_repository = "https://github.com/pycqa/flake8" upstream_repository = "https://github.com/pycqa/flake8"
requires = ["types-pyflakes"] requires = ["types-pyflakes"]

@ -25,9 +25,11 @@ from .enums import (
RenderStyle, RenderStyle,
TableBordersLayout, TableBordersLayout,
TableCellFillMode, TableCellFillMode,
TableHeadingsDisplay,
TextDirection, TextDirection,
TextMarkupType, TextMarkupType,
TextMode as TextMode, TextMode as TextMode,
VAlign,
WrapMode as WrapMode, WrapMode as WrapMode,
XPos as XPos, XPos as XPos,
YPos as YPos, YPos as YPos,
@ -41,14 +43,14 @@ from .image_datastructures import (
ImageInfo as ImageInfo, ImageInfo as ImageInfo,
RasterImageInfo as RasterImageInfo, RasterImageInfo as RasterImageInfo,
VectorImageInfo as VectorImageInfo, VectorImageInfo as VectorImageInfo,
_AlignLiteral, _TextAlign,
) )
from .output import OutputProducer, PDFPage from .output import OutputProducer, PDFPage
from .recorder import FPDFRecorder from .recorder import FPDFRecorder
from .structure_tree import StructureTreeBuilder from .structure_tree import StructureTreeBuilder
from .syntax import DestinationXYZ from .syntax import DestinationXYZ
from .table import Table from .table import Table
from .util import _Unit from .util import Padding, _Unit
__all__ = [ __all__ = [
"FPDF", "FPDF",
@ -489,7 +491,7 @@ class FPDF(GraphicsStateMixin):
ncols: int = 1, ncols: int = 1,
gutter: float = 10, gutter: float = 10,
balance: bool = False, balance: bool = False,
text_align: Align | _AlignLiteral = "LEFT", text_align: str | _TextAlign | tuple[_TextAlign | str, ...] = "LEFT",
line_height: float = 1, line_height: float = 1,
l_margin: float | None = None, l_margin: float | None = None,
r_margin: float | None = None, r_margin: float | None = None,
@ -570,17 +572,26 @@ class FPDF(GraphicsStateMixin):
self, self,
rows: Iterable[Incomplete] = (), rows: Iterable[Incomplete] = (),
*, *,
align: str | Align = "CENTER", # Keep in sync with `fpdf.table.Table`:
align: str | _TextAlign = "CENTER",
v_align: str | VAlign = "MIDDLE",
borders_layout: str | TableBordersLayout = ..., borders_layout: str | TableBordersLayout = ...,
cell_fill_color: int | tuple[Incomplete, ...] | DeviceGray | DeviceRGB | None = None, cell_fill_color: int | tuple[Incomplete, ...] | DeviceGray | DeviceRGB | None = None,
cell_fill_mode: str | TableCellFillMode = ..., cell_fill_mode: str | TableCellFillMode = ...,
col_widths: int | tuple[int, ...] | None = None, col_widths: int | tuple[int, ...] | None = None,
first_row_as_headings: bool = True, first_row_as_headings: bool = True,
gutter_height: float = 0,
gutter_width: float = 0,
headings_style: FontFace = ..., headings_style: FontFace = ...,
line_height: int | None = None, line_height: int | None = None,
markdown: bool = False, markdown: bool = False,
text_align: str | Align = "JUSTIFY", text_align: str | _TextAlign | tuple[str | _TextAlign, ...] = "JUSTIFY",
width: int | None = None, width: int | None = None,
wrapmode: WrapMode = ...,
padding: float | Padding | None = None,
outer_border_width: float | None = None,
num_heading_rows: int = 1,
repeat_headings: TableHeadingsDisplay | int = 1,
) -> _GeneratorContextManager[Table]: ... ) -> _GeneratorContextManager[Table]: ...
@overload @overload
def output( # type: ignore[overload-overlap] def output( # type: ignore[overload-overlap]
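A hedged sketch exercising the widened table() signature above (per-column text_align, numeric padding); the content and output file name are placeholders.

from fpdf import FPDF

pdf = FPDF()
pdf.add_page()
pdf.set_font("Helvetica", size=10)
rows = [("Item", "Qty"), ("Widget", "3"), ("Gadget", "12")]
with pdf.table(text_align=("LEFT", "RIGHT"), padding=2) as table:
    for data_row in rows:
        row = table.row()
        for text in data_row:
            row.cell(text)
pdf.output("table.pdf")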

@ -31,6 +31,7 @@ _AlignLiteral: TypeAlias = Literal[
"r", "r",
"j", "j",
] ]
_TextAlign: TypeAlias = Align | _AlignLiteral
class ImageInfo(dict[str, Any]): class ImageInfo(dict[str, Any]):
@property @property
@ -43,7 +44,7 @@ class ImageInfo(dict[str, Any]):
def rendered_height(self) -> int: ... def rendered_height(self) -> int: ...
def scale_inside_box(self, x: float, y: float, w: float, h: float) -> tuple[float, float, float, float]: ... def scale_inside_box(self, x: float, y: float, w: float, h: float) -> tuple[float, float, float, float]: ...
@staticmethod @staticmethod
def x_by_align(x: Align | _AlignLiteral, w: float, pdf: FPDF, keep_aspect_ratio: Literal[False]) -> float: ... def x_by_align(x: _TextAlign, w: float, pdf: FPDF, keep_aspect_ratio: Literal[False]) -> float: ...
class RasterImageInfo(ImageInfo): class RasterImageInfo(ImageInfo):
def size_in_document_units(self, w: float, h: float, scale=1) -> tuple[float, float]: ... def size_in_document_units(self, w: float, h: float, scale=1) -> tuple[float, float]: ...

@ -10,6 +10,7 @@ from .drawing import DeviceGray, DeviceRGB
from .enums import Align, TableBordersLayout, TableCellFillMode, TableHeadingsDisplay, TableSpan, VAlign, WrapMode from .enums import Align, TableBordersLayout, TableCellFillMode, TableHeadingsDisplay, TableSpan, VAlign, WrapMode
from .fonts import FontFace from .fonts import FontFace
from .fpdf import FPDF from .fpdf import FPDF
from .image_datastructures import _TextAlign
from .util import Padding from .util import Padding
DEFAULT_HEADINGS_STYLE: FontFace DEFAULT_HEADINGS_STYLE: FontFace
@ -22,7 +23,8 @@ class Table:
fpdf: FPDF, fpdf: FPDF,
rows: Iterable[str] = (), rows: Iterable[str] = (),
*, *,
align: str | Align = "CENTER", # Keep in sync with `fpdf.fpdf.FPDF.table`:
align: str | _TextAlign = "CENTER",
v_align: str | VAlign = "MIDDLE", v_align: str | VAlign = "MIDDLE",
borders_layout: str | TableBordersLayout = ..., borders_layout: str | TableBordersLayout = ...,
cell_fill_color: int | tuple[Incomplete, ...] | DeviceGray | DeviceRGB | None = None, cell_fill_color: int | tuple[Incomplete, ...] | DeviceGray | DeviceRGB | None = None,
@ -34,7 +36,7 @@ class Table:
headings_style: FontFace = ..., headings_style: FontFace = ...,
line_height: int | None = None, line_height: int | None = None,
markdown: bool = False, markdown: bool = False,
text_align: str | Align = "JUSTIFY", text_align: str | _TextAlign | tuple[str | _TextAlign, ...] = "JUSTIFY",
width: int | None = None, width: int | None = None,
wrapmode: WrapMode = ..., wrapmode: WrapMode = ...,
padding: float | Padding | None = None, padding: float | Padding | None = None,

@ -4,7 +4,7 @@ from typing import NamedTuple
from typing_extensions import Self from typing_extensions import Self
from .enums import Align, WrapMode from .enums import Align, WrapMode
from .image_datastructures import RasterImageInfo, VectorImageInfo, _AlignLiteral from .image_datastructures import RasterImageInfo, VectorImageInfo, _TextAlign
class Extents(NamedTuple): class Extents(NamedTuple):
left: float left: float
@ -24,7 +24,7 @@ class LineWrapper(NamedTuple):
class Paragraph: class Paragraph:
pdf: Incomplete pdf: Incomplete
text_align: Incomplete text_align: Align
line_height: Incomplete line_height: Incomplete
top_margin: Incomplete top_margin: Incomplete
bottom_margin: Incomplete bottom_margin: Incomplete
@ -34,7 +34,7 @@ class Paragraph:
def __init__( def __init__(
self, self,
region, region,
text_align: Incomplete | None = None, text_align: _TextAlign | None = None,
line_height: Incomplete | None = None, line_height: Incomplete | None = None,
top_margin: float = 0, top_margin: float = 0,
bottom_margin: float = 0, bottom_margin: float = 0,
@ -67,7 +67,7 @@ class ImageParagraph:
self, self,
region, region,
name, name,
align: Align | _AlignLiteral | None = None, align: _TextAlign | None = None,
width: float | None = None, width: float | None = None,
height: float | None = None, height: float | None = None,
fill_width: bool = False, fill_width: bool = False,
@ -93,7 +93,7 @@ class ParagraphCollectorMixin:
pdf, pdf,
*args, *args,
text: str | None = None, text: str | None = None,
text_align: Align | _AlignLiteral = "LEFT", text_align: _TextAlign = "LEFT",
line_height: float = 1.0, line_height: float = 1.0,
print_sh: bool = False, print_sh: bool = False,
skip_leading_spaces: bool = False, skip_leading_spaces: bool = False,
@ -108,7 +108,7 @@ class ParagraphCollectorMixin:
def ln(self, h: float | None = None) -> None: ... def ln(self, h: float | None = None) -> None: ...
def paragraph( def paragraph(
self, self,
text_align: Incomplete | None = None, text_align: _TextAlign | None = None,
line_height: Incomplete | None = None, line_height: Incomplete | None = None,
skip_leading_spaces: bool = False, skip_leading_spaces: bool = False,
top_margin: int = 0, top_margin: int = 0,
@ -119,7 +119,7 @@ class ParagraphCollectorMixin:
def image( def image(
self, self,
name, name,
align: Align | _AlignLiteral | None = None, align: _TextAlign | None = None,
width: float | None = None, width: float | None = None,
height: float | None = None, height: float | None = None,
fill_width: bool = False, fill_width: bool = False,

@ -1,2 +1,2 @@
version = "2.20.*" version = "2.21.*"
# upstream_repository = closed-source # upstream_repository = closed-source

@ -38,6 +38,7 @@ class Connection:
def rollback(self) -> None: ... def rollback(self) -> None: ...
def setautocommit(self, auto: bool = ...) -> None: ... def setautocommit(self, auto: bool = ...) -> None: ...
def setclientinfo(self, key: str, value: str | None = ...) -> None: ... def setclientinfo(self, key: str, value: str | None = ...) -> None: ...
def ontrace(self) -> None: ...
connect = Connection connect = Connection

@ -1,3 +1,3 @@
version = "2.2.*" version = "2.3.*"
upstream_repository = "https://github.com/hvac/hvac" upstream_repository = "https://github.com/hvac/hvac"
requires = ["types-requests"] requires = ["types-requests"]

@ -1,6 +1,8 @@
from _typeshed import Incomplete from _typeshed import Incomplete
from typing import Any
from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin
from requests import Response
class Raft(SystemBackendMixin): class Raft(SystemBackendMixin):
def join_raft_cluster( def join_raft_cluster(
@ -16,3 +18,10 @@ class Raft(SystemBackendMixin):
def take_raft_snapshot(self): ... def take_raft_snapshot(self): ...
def restore_raft_snapshot(self, snapshot): ... def restore_raft_snapshot(self, snapshot): ...
def force_restore_raft_snapshot(self, snapshot): ... def force_restore_raft_snapshot(self, snapshot): ...
def read_raft_auto_snapshot_status(self, name: str) -> Response: ...
def read_raft_auto_snapshot_config(self, name: str) -> Response: ...
def list_raft_auto_snapshot_configs(self) -> Response: ...
def create_or_update_raft_auto_snapshot_config(
self, name: str, interval: str, storage_type: str, retain: int = 1, **kwargs: Any
) -> Response: ...
def delete_raft_auto_snapshot_config(self, name: str) -> Response: ...
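Hedged sketch of the new Raft auto-snapshot helpers; the Vault address and token are placeholders, the feature requires Vault Enterprise, and the path_prefix option is an assumption passed through **kwargs rather than part of the stubbed signature.

import hvac

client = hvac.Client(url="https://vault.example.com:8200", token="s.placeholder")
client.sys.create_or_update_raft_auto_snapshot_config(
    name="hourly",
    interval="1h",
    storage_type="local",
    retain=24,
    path_prefix="/var/vault/snapshots",  # backend-specific kwarg; assumption
)
print(client.sys.read_raft_auto_snapshot_status("hourly").json())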

@ -4,3 +4,4 @@ from hvac.api.system_backend.system_backend_mixin import SystemBackendMixin
class Wrapping(SystemBackendMixin): class Wrapping(SystemBackendMixin):
def unwrap(self, token: Incomplete | None = None): ... def unwrap(self, token: Incomplete | None = None): ...
def wrap(self, payload: dict[Incomplete, Incomplete] | None = None, ttl: int = 60): ...

@ -1,4 +1,4 @@
version = "1.43.*" version = "1.44.*"
upstream_repository = "https://github.com/influxdata/influxdb-client-python" upstream_repository = "https://github.com/influxdata/influxdb-client-python"
# requires a version of urllib3 with a py.typed file # requires a version of urllib3 with a py.typed file
requires = ["urllib3>=2"] requires = ["urllib3>=2"]

@ -0,0 +1,37 @@
from collections.abc import Callable
from typing import Any, Generic, Protocol, TypeVar
from typing_extensions import Self
class _HasId(Protocol):
@property
def id(self) -> str | None: ...
_R = TypeVar("_R", default=Any)
_T = TypeVar("_T", bound=_HasId)
class _Page(Generic[_T]):
has_next: bool
values: list[_T]
next_after: str | None
def __init__(self, values: list[_T], has_next: bool, next_after: str | None) -> None: ...
@staticmethod
def empty() -> _Page[_T]: ...
@staticmethod
def initial(after: str | None) -> _Page[_T]: ...
class _PageIterator(Generic[_T]):
page: _Page[_T]
get_next_page: Callable[[_Page[_T]], _Page[_T]]
def __init__(self, page: _Page[_T], get_next_page: Callable[[_Page[_T]], _Page[_T]]) -> None: ...
def __iter__(self) -> Self: ...
def __next__(self) -> _T: ...
class _Paginated(Generic[_T, _R]):
paginated_getter: Callable[..., _R] # Gets passed additional kwargs to find_iter().
pluck_page_resources_from_response: Callable[[_R], list[_T]]
def __init__(
self, paginated_getter: Callable[..., _R], pluck_page_resources_from_response: Callable[[_R], list[_T]]
) -> None: ...
def find_iter(self, *, after: str | None = None, **kwargs: Any) -> _PageIterator[_T]: ...

@ -1,6 +1,7 @@
from _typeshed import Incomplete from _typeshed import Incomplete
from influxdb_client import Bucket from ..domain.bucket import Bucket
from ._pages import _PageIterator
class BucketsApi: class BucketsApi:
def __init__(self, influxdb_client) -> None: ... def __init__(self, influxdb_client) -> None: ...
@ -18,3 +19,6 @@ class BucketsApi:
def find_bucket_by_id(self, id): ... def find_bucket_by_id(self, id): ...
def find_bucket_by_name(self, bucket_name): ... def find_bucket_by_name(self, bucket_name): ...
def find_buckets(self, **kwargs): ... def find_buckets(self, **kwargs): ...
def find_buckets_iter(
self, *, name: str = ..., org: str = ..., org_id: str = ..., after: str | None = None, limit: int = ...
) -> _PageIterator[Bucket]: ...
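A hedged sketch of the new paginating helper; the connection details are placeholders.

from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    buckets_api = client.buckets_api()
    for bucket in buckets_api.find_buckets_iter(limit=20):  # pages are fetched lazily
        print(bucket.id, bucket.name)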

@ -1,9 +1,10 @@
from collections.abc import Iterator
from datetime import datetime from datetime import datetime
from influxdb_client import LabelResponse, LogEvent, Run, TaskCreateRequest, TaskUpdateRequest from influxdb_client import LabelResponse, LogEvent, Run, TaskCreateRequest, TaskUpdateRequest
from influxdb_client.domain.task import Task from influxdb_client.domain.task import Task
from ._pages import _PageIterator
class TasksApi: class TasksApi:
def __init__(self, influxdb_client) -> None: ... def __init__(self, influxdb_client) -> None: ...
def find_task_by_id(self, task_id) -> Task: ... def find_task_by_id(self, task_id) -> Task: ...
@ -11,8 +12,8 @@ class TasksApi:
self, *, name: str = ..., after: str = ..., user: str = ..., org: str = ..., org_id: str = ..., limit: int = ..., **kwargs self, *, name: str = ..., after: str = ..., user: str = ..., org: str = ..., org_id: str = ..., limit: int = ..., **kwargs
) -> list[Task]: ... ) -> list[Task]: ...
def find_tasks_iter( def find_tasks_iter(
self, *, name: str = ..., after: str = ..., user: str = ..., org: str = ..., org_id: str = ..., limit: int = ..., **kwargs self, *, name: str = ..., after: str | None = None, user: str = ..., org: str = ..., org_id: str = ..., limit: int = ...
) -> Iterator[Task]: ... ) -> _PageIterator[Task]: ...
def create_task(self, task: Task | None = None, task_create_request: TaskCreateRequest | None = None) -> Task: ... def create_task(self, task: Task | None = None, task_create_request: TaskCreateRequest | None = None) -> Task: ...
def create_task_every(self, name, flux, every, organization) -> Task: ... def create_task_every(self, name, flux, every, organization) -> Task: ...
def create_task_cron(self, name: str, flux: str, cron: str, org_id: str) -> Task: ... def create_task_cron(self, name: str, flux: str, cron: str, org_id: str) -> Task: ...

@ -7,11 +7,11 @@ class Bucket:
def __init__( def __init__(
self, self,
links: Incomplete | None = None, links: Incomplete | None = None,
id: Incomplete | None = None, id: str | None = None,
type: str = "user", type: str = "user",
name: Incomplete | None = None, name: Incomplete | None = None,
description: Incomplete | None = None, description: Incomplete | None = None,
org_id: Incomplete | None = None, org_id: str | None = None,
rp: Incomplete | None = None, rp: Incomplete | None = None,
schema_type: Incomplete | None = None, schema_type: Incomplete | None = None,
created_at: Incomplete | None = None, created_at: Incomplete | None = None,
@ -24,9 +24,9 @@ class Bucket:
@links.setter @links.setter
def links(self, links) -> None: ... def links(self, links) -> None: ...
@property @property
def id(self): ... def id(self) -> str | None: ...
@id.setter @id.setter
def id(self, id) -> None: ... def id(self, id: str) -> None: ...
@property @property
def type(self): ... def type(self): ...
@type.setter @type.setter
@ -40,9 +40,9 @@ class Bucket:
@description.setter @description.setter
def description(self, description) -> None: ... def description(self, description) -> None: ...
@property @property
def org_id(self): ... def org_id(self) -> str | None: ...
@org_id.setter @org_id.setter
def org_id(self, org_id) -> None: ... def org_id(self, org_id: str) -> None: ...
@property @property
def rp(self): ... def rp(self): ...
@rp.setter @rp.setter

@ -6,8 +6,8 @@ class Task:
discriminator: Incomplete discriminator: Incomplete
def __init__( def __init__(
self, self,
id: Incomplete | None = None, id: str | None = None,
org_id: Incomplete | None = None, org_id: str | None = None,
org: Incomplete | None = None, org: Incomplete | None = None,
name: Incomplete | None = None, name: Incomplete | None = None,
owner_id: Incomplete | None = None, owner_id: Incomplete | None = None,
@ -27,13 +27,13 @@ class Task:
links: Incomplete | None = None, links: Incomplete | None = None,
) -> None: ... ) -> None: ...
@property @property
def id(self): ... def id(self) -> str | None: ...
@id.setter @id.setter
def id(self, id) -> None: ... def id(self, id: str) -> None: ...
@property @property
def org_id(self): ... def org_id(self) -> str | None: ...
@org_id.setter @org_id.setter
def org_id(self, org_id) -> None: ... def org_id(self, org_id: str) -> None: ...
@property @property
def org(self): ... def org(self): ...
@org.setter @org.setter

@ -1,6 +1,8 @@
version = "3.2.1" version = "3.2.1"
upstream_repository = "https://github.com/networkx/networkx" upstream_repository = "https://github.com/networkx/networkx"
requires = ["numpy"] # requires a version of numpy with a `py.typed` file
# TODO: Lots of stubtest errors when using numpy 2
requires = ["numpy>=1.20,<2"]
partial_stub = true partial_stub = true
[tool.stubtest] [tool.stubtest]

View File

@ -4,8 +4,8 @@ import io
import logging import logging
import traceback import traceback
from collections.abc import Sequence from collections.abc import Sequence
from typing import IO from typing import IO, AnyStr, Generic
from typing_extensions import Self from typing_extensions import Self, TypeAlias
__date__: str __date__: str
__version__: str __version__: str
@ -137,7 +137,7 @@ class OleMetadata:
DOCSUM_ATTRIBS: list[str] DOCSUM_ATTRIBS: list[str]
def __init__(self) -> None: ... def __init__(self) -> None: ...
def parse_properties(self, ole_file: OleFileIO) -> None: ... def parse_properties(self, ole_file: OleFileIO[AnyStr]) -> None: ...
def dump(self) -> None: ... def dump(self) -> None: ...
class OleFileIONotClosed(RuntimeWarning): class OleFileIONotClosed(RuntimeWarning):
@ -153,29 +153,34 @@ class OleStream(io.BytesIO):
sectorsize: int, sectorsize: int,
fat: list[int], fat: list[int],
filesize: int, filesize: int,
olefileio: OleFileIO, olefileio: OleFileIO[AnyStr],
) -> None: ... ) -> None: ...
class OleDirectoryEntry: class OleDirectoryEntry(Generic[AnyStr]):
STRUCT_DIRENTRY: str STRUCT_DIRENTRY: str
DIRENTRY_SIZE: int DIRENTRY_SIZE: int
clsid: str
def __init__(self, entry: bytes, sid: int, ole_file: OleFileIO) -> None: ... def __init__(self, entry: bytes, sid: int, ole_file: OleFileIO[AnyStr]) -> None: ...
def build_sect_chain(self, ole_file: OleFileIO) -> None: ... def build_sect_chain(self, ole_file: OleFileIO[AnyStr]) -> None: ...
def build_storage_tree(self) -> None: ... def build_storage_tree(self) -> None: ...
def append_kids(self, child_sid: int) -> None: ... def append_kids(self, child_sid: int) -> None: ...
def __eq__(self, other: OleDirectoryEntry) -> bool: ... # type: ignore[override] def __eq__(self, other: OleDirectoryEntry[AnyStr]) -> bool: ... # type: ignore[override]
def __lt__(self, other: OleDirectoryEntry) -> bool: ... # type: ignore[override] def __lt__(self, other: OleDirectoryEntry[AnyStr]) -> bool: ... # type: ignore[override]
def __ne__(self, other: OleDirectoryEntry) -> bool: ... # type: ignore[override] def __ne__(self, other: OleDirectoryEntry[AnyStr]) -> bool: ... # type: ignore[override]
def __le__(self, other: OleDirectoryEntry) -> bool: ... # type: ignore[override] def __le__(self, other: OleDirectoryEntry[AnyStr]) -> bool: ... # type: ignore[override]
def dump(self, tab: int = 0) -> None: ... def dump(self, tab: int = 0) -> None: ...
def getmtime(self) -> datetime.datetime | None: ... def getmtime(self) -> datetime.datetime | None: ...
def getctime(self) -> datetime.datetime | None: ... def getctime(self) -> datetime.datetime | None: ...
class OleFileIO: _Property: TypeAlias = int | str | bytes | bool | None
class OleFileIO(Generic[AnyStr]):
root: OleDirectoryEntry[AnyStr] | None
def __init__( def __init__(
self, self,
filename: IO[bytes] | bytes | str | None = None, filename: IO[bytes] | AnyStr | None = None,
raise_defects: int = 40, raise_defects: int = 40,
write_mode: bool = False, write_mode: bool = False,
debug: bool = False, debug: bool = False,
@ -187,8 +192,8 @@ class OleFileIO:
def _raise_defect( def _raise_defect(
self, defect_level: int, message: str, exception_type: type[Exception] = OleFileError # noqa: Y011 self, defect_level: int, message: str, exception_type: type[Exception] = OleFileError # noqa: Y011
) -> None: ... ) -> None: ...
def _decode_utf16_str(self, utf16_str: bytes, errors: str = "replace") -> bytes: ... def _decode_utf16_str(self, utf16_str: bytes, errors: str = "replace") -> str | bytes: ...
def open(self, filename: IO[bytes] | bytes | str, write_mode: bool = False) -> None: ... def open(self, filename: IO[bytes] | AnyStr, write_mode: bool = False) -> None: ...
def close(self) -> None: ... def close(self) -> None: ...
def _close(self, warn: bool = False) -> None: ... def _close(self, warn: bool = False) -> None: ...
def _check_duplicate_stream(self, first_sect: int, minifat: bool = False) -> None: ... def _check_duplicate_stream(self, first_sect: int, minifat: bool = False) -> None: ...
@ -202,36 +207,41 @@ class OleFileIO:
def write_sect(self, sect: int, data: bytes, padding: bytes = b"\x00") -> None: ... def write_sect(self, sect: int, data: bytes, padding: bytes = b"\x00") -> None: ...
def _write_mini_sect(self, fp_pos: int, data: bytes, padding: bytes = b"\x00") -> None: ... def _write_mini_sect(self, fp_pos: int, data: bytes, padding: bytes = b"\x00") -> None: ...
def loaddirectory(self, sect: int) -> None: ... def loaddirectory(self, sect: int) -> None: ...
def _load_direntry(self, sid: int) -> OleDirectoryEntry: ... def _load_direntry(self, sid: int) -> OleDirectoryEntry[AnyStr]: ...
def dumpdirectory(self) -> None: ... def dumpdirectory(self) -> None: ...
def _open(self, start: int, size: int = 0x7FFFFFFF, force_FAT: bool = False) -> OleStream: ... def _open(self, start: int, size: int = 0x7FFFFFFF, force_FAT: bool = False) -> OleStream: ...
def _list( def _list(
self, files: list[list[bytes]], prefix: list[bytes], node: OleDirectoryEntry, streams: bool = True, storages: bool = False self,
files: list[list[AnyStr]],
prefix: list[AnyStr],
node: OleDirectoryEntry[AnyStr],
streams: bool = True,
storages: bool = False,
) -> None: ... ) -> None: ...
def listdir(self, streams: bool = True, storages: bool = False) -> list[list[bytes]]: ... def listdir(self, streams: bool = True, storages: bool = False) -> list[list[AnyStr]]: ...
def _find(self, filename: str | Sequence[str]) -> int: ... def _find(self, filename: str | Sequence[str]) -> int: ...
def openstream(self, filename: str | Sequence[str]) -> OleStream: ... def openstream(self, filename: AnyStr | Sequence[AnyStr]) -> OleStream: ...
def _write_mini_stream(self, entry: OleDirectoryEntry, data_to_write: bytes) -> None: ... def _write_mini_stream(self, entry: OleDirectoryEntry[AnyStr], data_to_write: bytes) -> None: ...
def write_stream(self, stream_name: str | Sequence[str], data: bytes) -> None: ... def write_stream(self, stream_name: str | Sequence[str], data: bytes) -> None: ...
def get_type(self, filename: str | Sequence[str]) -> bool | int: ... def get_type(self, filename: AnyStr | Sequence[AnyStr]) -> bool | int: ...
def getclsid(self, filename: str | Sequence[str]) -> str: ... def getclsid(self, filename: AnyStr | Sequence[AnyStr]) -> str: ...
def getmtime(self, filename: str | Sequence[str]) -> datetime.datetime | None: ... def getmtime(self, filename: AnyStr | Sequence[AnyStr]) -> datetime.datetime | None: ...
def getctime(self, filename: str | Sequence[str]) -> datetime.datetime | None: ... def getctime(self, filename: AnyStr | Sequence[AnyStr]) -> datetime.datetime | None: ...
def exists(self, filename: str | Sequence[str]) -> bool: ... def exists(self, filename: AnyStr | Sequence[AnyStr]) -> bool: ...
def get_size(self, filename: str | Sequence[str]) -> int: ... def get_size(self, filename: AnyStr | Sequence[AnyStr]) -> int: ...
def get_rootentry_name(self) -> bytes: ... def get_rootentry_name(self) -> bytes: ...
def getproperties( def getproperties(
self, filename: str | Sequence[str], convert_time: bool = False, no_conversion: list[int] | None = None self, filename: AnyStr | Sequence[AnyStr], convert_time: bool = False, no_conversion: list[int] | None = None
) -> dict[int, list[int | str | bytes | bool | None]]: ... ) -> dict[int, list[_Property] | _Property]: ...
def _parse_property( def _parse_property(
self, s: bytes, offset: int, property_id: int, property_type: int, convert_time: bool, no_conversion: list[int] self, s: bytes, offset: int, property_id: int, property_type: int, convert_time: bool, no_conversion: list[int]
) -> list[int | str | bytes | bool | None] | None: ... ) -> list[_Property] | _Property: ...
def _parse_property_basic( def _parse_property_basic(
self, s: bytes, offset: int, property_id: int, property_type: int, convert_time: bool, no_conversion: list[int] self, s: bytes, offset: int, property_id: int, property_type: int, convert_time: bool, no_conversion: list[int]
) -> tuple[int | str | bytes | bool | None, int]: ... ) -> tuple[_Property, int]: ...
def get_metadata(self) -> OleMetadata: ... def get_metadata(self) -> OleMetadata: ...
def get_userdefined_properties( def get_userdefined_properties(
self, filename: str | Sequence[str], convert_time: bool = False, no_conversion: list[int] | None = None self, filename: AnyStr | Sequence[AnyStr], convert_time: bool = False, no_conversion: list[int] | None = None
) -> list[dict[str, bytes | int | None]]: ... ) -> list[dict[str, bytes | int | None]]: ...
def main() -> None: ... def main() -> None: ...
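
The olefile stubs are now generic over `AnyStr`, so the path type used to open a file flows through to `listdir()`, `openstream()` and friends. A usage sketch, assuming the olefile API; the document name is a placeholder:

```python
import olefile

if olefile.isOleFile("document.doc"):
    ole = olefile.OleFileIO("document.doc")     # inferred as OleFileIO[str]
    try:
        for entry in ole.listdir(streams=True, storages=False):
            print("/".join(entry))              # entry: list[str], not list[bytes]
        if ole.exists("WordDocument"):
            data = ole.openstream("WordDocument").read()
    finally:
        ole.close()
```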

View File

@ -1,2 +1,2 @@
version = "3.1.2" version = "3.1.4"
upstream_repository = "https://foss.heptapod.net/openpyxl/openpyxl" upstream_repository = "https://foss.heptapod.net/openpyxl/openpyxl"

View File

@ -5,6 +5,7 @@ from typing_extensions import Self
from openpyxl.cell.text import InlineFont from openpyxl.cell.text import InlineFont
from openpyxl.descriptors import Strict, String, Typed from openpyxl.descriptors import Strict, String, Typed
from openpyxl.descriptors.serialisable import _ChildSerialisableTreeElement from openpyxl.descriptors.serialisable import _ChildSerialisableTreeElement
from openpyxl.xml.functions import Element
class TextBlock(Strict): class TextBlock(Strict):
font: Typed[InlineFont, Literal[False]] font: Typed[InlineFont, Literal[False]]
@ -12,6 +13,7 @@ class TextBlock(Strict):
def __init__(self, font: InlineFont, text: str) -> None: ... def __init__(self, font: InlineFont, text: str) -> None: ...
def __eq__(self, other: TextBlock) -> bool: ... # type: ignore[override] def __eq__(self, other: TextBlock) -> bool: ... # type: ignore[override]
def to_tree(self) -> Element: ...
class CellRichText(list[str | TextBlock]): class CellRichText(list[str | TextBlock]):
@overload @overload
@ -24,3 +26,4 @@ class CellRichText(list[str | TextBlock]):
def append(self, arg: str | TextBlock) -> None: ... def append(self, arg: str | TextBlock) -> None: ...
def extend(self, arg: Iterable[str | TextBlock]) -> None: ... def extend(self, arg: Iterable[str | TextBlock]) -> None: ...
def as_list(self) -> list[str]: ... def as_list(self) -> list[str]: ...
def to_tree(self) -> Element: ...
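
`to_tree()` is newly exposed on `TextBlock` and `CellRichText`, but typical code only builds the rich string and assigns it to a cell. A short sketch, assuming openpyxl's rich-text API (openpyxl >= 3.1); the file name is a placeholder:

```python
from openpyxl import Workbook
from openpyxl.cell.rich_text import CellRichText, TextBlock
from openpyxl.cell.text import InlineFont

wb = Workbook()
ws = wb.active
# Mixed bold / plain runs inside a single cell.
ws["A1"] = CellRichText(TextBlock(InlineFont(b=True), "Totals"), " (unaudited)")
wb.save("rich_text_demo.xlsx")
```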

View File

@ -10,8 +10,7 @@ from openpyxl.descriptors.base import Alias, String, Typed, _ConvertibleToBool
from openpyxl.descriptors.excel import ExtensionList from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.nested import NestedBool, NestedFloat, NestedInteger, NestedSet from openpyxl.descriptors.nested import NestedBool, NestedFloat, NestedInteger, NestedSet
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.xml._functions_overloads import _HasTagAndGet
from ..xml._functions_overloads import _HasTagAndGet
_TrendlineTrendlineType: TypeAlias = Literal["exp", "linear", "log", "movingAvg", "poly", "power"] _TrendlineTrendlineType: TypeAlias = Literal["exp", "linear", "log", "movingAvg", "poly", "power"]

View File

@ -12,7 +12,7 @@ from openpyxl.worksheet.cell_range import CellRange, MultiCellRange
_T = TypeVar("_T") _T = TypeVar("_T")
_P = TypeVar("_P", str, ReadableBuffer) _P = TypeVar("_P", str, ReadableBuffer)
_N = TypeVar("_N", bound=bool) _N = TypeVar("_N", bound=bool, default=Literal[False])
_L = TypeVar("_L", bound=Sized) _L = TypeVar("_L", bound=Sized)
_M = TypeVar("_M", int, float) _M = TypeVar("_M", int, float)

View File

@ -0,0 +1,18 @@
from typing import TypeVar
from typing_extensions import Self
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.xml.functions import Element
_T = TypeVar("_T", bound=Serialisable)
# Abstract base class.
class ElementList(list[_T]):
@property
def tagname(self) -> str: ... # abstract
@property
def expected_type(self) -> type[_T]: ... # abstract
@classmethod
def from_tree(cls, tree: Element) -> Self: ...
def to_tree(self) -> Element: ...
def append(self, value: _T) -> None: ...

View File

@ -11,6 +11,7 @@ from openpyxl.xml.functions import Element
from .base import Alias, Descriptor from .base import Alias, Descriptor
_T = TypeVar("_T") _T = TypeVar("_T")
_ContainerT = TypeVar("_ContainerT")
class _SupportsFromTree(Protocol): class _SupportsFromTree(Protocol):
@classmethod @classmethod
@ -19,22 +20,27 @@ class _SupportsFromTree(Protocol):
class _SupportsToTree(Protocol): class _SupportsToTree(Protocol):
def to_tree(self) -> Element: ... def to_tree(self) -> Element: ...
class Sequence(Descriptor[Incomplete]): # `_ContainerT` is the internal container type (which defaults to `list`), or
expected_type: type[Incomplete] # `IndexedList` if unique is `True`.
seq_types: tuple[type, ...] class Sequence(Descriptor[_ContainerT]):
expected_type: type[Any] # expected type of the sequence elements
seq_types: tuple[type, ...] # allowed settable sequence types, defaults to `list`, `tuple`
idx_base: int idx_base: int
unique: bool unique: bool
container: type container: type # internal container type, defaults to `list`
def __set__(self, instance: Serialisable | Strict, seq) -> None: ... # seq must be an instance of any of the declared `seq_types`.
def __set__(self, instance: Serialisable | Strict, seq: Any) -> None: ...
def to_tree( def to_tree(
self, tagname: str | None, obj: Iterable[object], namespace: str | None = None self, tagname: str | None, obj: Iterable[object], namespace: str | None = None
) -> Generator[Element, None, None]: ... ) -> Generator[Element, None, None]: ...
class UniqueSequence(Sequence): # `_T` is the type of the elements in the sequence.
seq_types: tuple[type, ...] class UniqueSequence(Sequence[set[_T]]):
container: type seq_types: tuple[type[list[_T]], type[tuple[_T, ...]], type[set[_T]]]
container: type[set[_T]]
class ValueSequence(Sequence): # See `Sequence` for the meaning of `_ContainerT`.
class ValueSequence(Sequence[_ContainerT]):
attribute: str attribute: str
def to_tree( def to_tree(
self, tagname: str, obj: Iterable[object], namespace: str | None = None # type: ignore[override] self, tagname: str, obj: Iterable[object], namespace: str | None = None # type: ignore[override]
@ -43,7 +49,8 @@ class ValueSequence(Sequence):
class _NestedSequenceToTreeObj(Sized, Iterable[_SupportsToTree], Protocol): ... class _NestedSequenceToTreeObj(Sized, Iterable[_SupportsToTree], Protocol): ...
class NestedSequence(Sequence): # See `Sequence` for the meaning of `_ContainerT`.
class NestedSequence(Sequence[_ContainerT]):
count: bool count: bool
expected_type: type[_SupportsFromTree] expected_type: type[_SupportsFromTree]
def to_tree( # type: ignore[override] def to_tree( # type: ignore[override]
@ -53,8 +60,9 @@ class NestedSequence(Sequence):
# Which can really be anything given the wildly different, and sometimes generic, from_tree return types # Which can really be anything given the wildly different, and sometimes generic, from_tree return types
def from_tree(self, node: Iterable[_SerialisableTreeElement]) -> list[Any]: ... def from_tree(self, node: Iterable[_SerialisableTreeElement]) -> list[Any]: ...
class MultiSequence(Sequence): # `_T` is the type of the elements in the sequence.
def __set__(self, instance: Serialisable | Strict, seq) -> None: ... class MultiSequence(Sequence[list[_T]]):
def __set__(self, instance: Serialisable | Strict, seq: tuple[_T, ...] | list[_T]) -> None: ...
def to_tree( def to_tree(
self, tagname: Unused, obj: Iterable[_SupportsToTree], namespace: str | None = None # type: ignore[override] self, tagname: Unused, obj: Iterable[_SupportsToTree], namespace: str | None = None # type: ignore[override]
) -> Generator[Element, None, None]: ... ) -> Generator[Element, None, None]: ...

View File

@ -52,7 +52,7 @@ CLASS_MAPPING: Final[dict[type[_MappingPropertyType], str]]
XML_MAPPING: Final[dict[str, type[_MappingPropertyType]]] XML_MAPPING: Final[dict[str, type[_MappingPropertyType]]]
class CustomPropertyList(Strict, Generic[_T]): class CustomPropertyList(Strict, Generic[_T]):
props: Sequence props: Sequence[list[_TypedProperty[_T]]]
def __init__(self) -> None: ... def __init__(self) -> None: ...
@classmethod @classmethod
def from_tree(cls, tree: _ChildSerialisableTreeElement) -> Self: ... def from_tree(cls, tree: _ChildSerialisableTreeElement) -> Self: ...

View File

@ -76,8 +76,8 @@ class ExtendedProperties(Serialisable):
HLinks: Unused = None, HLinks: Unused = None,
HyperlinksChanged: object = None, HyperlinksChanged: object = None,
DigSig: Unused = None, DigSig: Unused = None,
Application: object = "Microsoft Excel", Application: Unused = None,
AppVersion: object = None, AppVersion: str | None = None,
DocSecurity: ConvertibleToInt | None = None, DocSecurity: ConvertibleToInt | None = None,
) -> None: ... ) -> None: ...
def to_tree(self) -> Element: ... # type: ignore[override] def to_tree(self) -> Element: ... # type: ignore[override]

View File

@ -4,11 +4,11 @@ from typing import ClassVar, Literal, TypeVar, overload
from zipfile import ZipFile from zipfile import ZipFile
from openpyxl.descriptors.base import Alias, String from openpyxl.descriptors.base import Alias, String
from openpyxl.descriptors.container import ElementList
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.pivot.cache import CacheDefinition from openpyxl.pivot.cache import CacheDefinition
from openpyxl.pivot.record import RecordList from openpyxl.pivot.record import RecordList
from openpyxl.pivot.table import TableDefinition from openpyxl.pivot.table import TableDefinition
from openpyxl.xml.functions import Element
_SerialisableT = TypeVar("_SerialisableT", bound=Serialisable) _SerialisableT = TypeVar("_SerialisableT", bound=Serialisable)
_SerialisableRelTypeT = TypeVar("_SerialisableRelTypeT", bound=CacheDefinition | RecordList | TableDefinition) _SerialisableRelTypeT = TypeVar("_SerialisableRelTypeT", bound=CacheDefinition | RecordList | TableDefinition)
@ -32,16 +32,11 @@ class Relationship(Serialisable):
self, Id: str, Type: str, type: None = None, Target: str | None = None, TargetMode: str | None = None self, Id: str, Type: str, type: None = None, Target: str | None = None, TargetMode: str | None = None
) -> None: ... ) -> None: ...
class RelationshipList(Serialisable): class RelationshipList(ElementList[Relationship]):
tagname: ClassVar[str] expected_type: type[Relationship]
Relationship: Incomplete def find(self, content_type: str) -> Generator[Relationship, None, None]: ...
def __init__(self, Relationship=()) -> None: ... def get(self, key: str) -> Relationship: ...
def append(self, value) -> None: ... def to_dict(self) -> dict[Incomplete, Relationship]: ...
def __len__(self) -> int: ...
def __bool__(self) -> bool: ...
def find(self, content_type) -> Generator[Incomplete, None, None]: ...
def __getitem__(self, key): ...
def to_tree(self) -> Element: ... # type: ignore[override]
def get_rels_path(path): ... def get_rels_path(path): ...
def get_dependents(archive: ZipFile, filename: str) -> RelationshipList: ... def get_dependents(archive: ZipFile, filename: str) -> RelationshipList: ...
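
`RelationshipList` is now modelled as an `ElementList[Relationship]`, i.e. a plain list of `Relationship` objects with `find`, `get` and `to_dict` layered on top. A hedged sketch of reading a workbook's relationships, assuming openpyxl's packaging helpers as the updated stub describes them; the archive name is a placeholder:

```python
from zipfile import ZipFile

from openpyxl.packaging.relationship import get_dependents, get_rels_path

with ZipFile("book.xlsx") as archive:
    rels = get_dependents(archive, get_rels_path("xl/workbook.xml"))
    for rel in rels:                     # iterating the list of Relationship objects
        print(rel.Id, rel.Type, rel.Target)
```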

View File

@ -3,9 +3,10 @@ from datetime import datetime
from typing import ClassVar, Literal, overload from typing import ClassVar, Literal, overload
from typing_extensions import TypeAlias from typing_extensions import TypeAlias
from openpyxl.descriptors.base import Bool, DateTime, Float, Integer, Set, String, Typed, _ConvertibleToBool from openpyxl.descriptors.base import Bool, DateTime, Float, Integer, NoneSet, Set, String, Typed, _ConvertibleToBool
from openpyxl.descriptors.excel import ExtensionList from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.nested import NestedInteger from openpyxl.descriptors.nested import NestedInteger
from openpyxl.descriptors.sequence import NestedSequence
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.pivot.fields import Error, Missing, Number, Text, TupleList from openpyxl.pivot.fields import Error, Missing, Number, Text, TupleList
from openpyxl.pivot.table import PivotArea from openpyxl.pivot.table import PivotArea
@ -43,10 +44,10 @@ class CalculatedMember(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
name: String[Literal[False]] name: String[Literal[False]]
mdx: String[Literal[False]] mdx: String[Literal[False]]
memberName: String[Literal[False]] memberName: String[Literal[True]]
hierarchy: String[Literal[False]] hierarchy: String[Literal[True]]
parent: String[Literal[False]] parent: String[Literal[True]]
solveOrder: Integer[Literal[False]] solveOrder: Integer[Literal[True]]
set: Bool[Literal[False]] set: Bool[Literal[False]]
extLst: Typed[ExtensionList, Literal[True]] extLst: Typed[ExtensionList, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
@ -84,15 +85,6 @@ class ServerFormat(Serialisable):
format: String[Literal[True]] format: String[Literal[True]]
def __init__(self, culture: str | None = None, format: str | None = None) -> None: ... def __init__(self, culture: str | None = None, format: str | None = None) -> None: ...
class ServerFormatList(Serialisable):
tagname: ClassVar[str]
serverFormat: Incomplete
__elements__: ClassVar[tuple[str, ...]]
__attrs__: ClassVar[tuple[str, ...]]
def __init__(self, count: Incomplete | None = None, serverFormat: Incomplete | None = None) -> None: ...
@property
def count(self) -> int: ...
class Query(Serialisable): class Query(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
mdx: String[Literal[False]] mdx: String[Literal[False]]
@ -100,13 +92,6 @@ class Query(Serialisable):
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__(self, mdx: str, tpls: TupleList | None = None) -> None: ... def __init__(self, mdx: str, tpls: TupleList | None = None) -> None: ...
class QueryCache(Serialisable):
tagname: ClassVar[str]
count: Integer[Literal[False]]
query: Typed[Query, Literal[False]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, query: Query) -> None: ...
class OLAPSet(Serialisable): class OLAPSet(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
count: Integer[Literal[False]] count: Integer[Literal[False]]
@ -128,82 +113,59 @@ class OLAPSet(Serialisable):
sortByTuple: TupleList | None = None, sortByTuple: TupleList | None = None,
) -> None: ... ) -> None: ...
class OLAPSets(Serialisable):
count: Integer[Literal[False]]
set: Typed[OLAPSet, Literal[False]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, set: OLAPSet) -> None: ...
class PCDSDTCEntries(Serialisable): class PCDSDTCEntries(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
count: Integer[Literal[False]] count: Integer[Literal[True]]
m: Typed[Missing, Literal[False]] m: Typed[Missing, Literal[True]]
n: Typed[Number, Literal[False]] n: Typed[Number, Literal[True]]
e: Typed[Error, Literal[False]] e: Typed[Error, Literal[True]]
s: Typed[Text, Literal[False]] s: Typed[Text, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, m: Missing, n: Number, e: Error, s: Text) -> None: ... def __init__(self, count: ConvertibleToInt, m: Missing, n: Number, e: Error, s: Text) -> None: ...
class TupleCache(Serialisable): class TupleCache(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
entries: Typed[PCDSDTCEntries, Literal[True]] entries: Typed[PCDSDTCEntries, Literal[True]]
sets: Typed[OLAPSets, Literal[True]] sets: NestedSequence[list[OLAPSet]]
queryCache: Typed[QueryCache, Literal[True]] queryCache: NestedSequence[list[Query]]
serverFormats: Typed[ServerFormatList, Literal[True]] serverFormats: NestedSequence[list[ServerFormat]]
extLst: Typed[ExtensionList, Literal[True]] extLst: Typed[ExtensionList, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__( def __init__(
self, self,
entries: PCDSDTCEntries | None = None, entries: PCDSDTCEntries | None = None,
sets: OLAPSets | None = None, sets: list[OLAPSet] | tuple[OLAPSet, ...] = (),
queryCache: QueryCache | None = None, queryCache: list[Query] | tuple[Query, ...] = (),
serverFormats: ServerFormatList | None = None, serverFormats: list[ServerFormat] | tuple[ServerFormat, ...] = (),
extLst: ExtensionList | None = None, extLst: ExtensionList | None = None,
) -> None: ... ) -> None: ...
class PCDKPI(Serialisable): class OLAPKPI(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
uniqueName: String[Literal[False]] uniqueName: String[Literal[False]]
caption: String[Literal[True]] caption: String[Literal[True]]
displayFolder: String[Literal[False]] displayFolder: String[Literal[True]]
measureGroup: String[Literal[False]] measureGroup: String[Literal[True]]
parent: String[Literal[False]] parent: String[Literal[True]]
value: String[Literal[False]] value: String[Literal[False]]
goal: String[Literal[False]] goal: String[Literal[True]]
status: String[Literal[False]] status: String[Literal[True]]
trend: String[Literal[False]] trend: String[Literal[True]]
weight: String[Literal[False]] weight: String[Literal[True]]
time: String[Literal[False]] time: String[Literal[True]]
@overload
def __init__( def __init__(
self, self,
uniqueName: str, uniqueName: str | None = None,
caption: str | None = None, caption: str | None = None,
*, displayFolder: str | None = None,
displayFolder: str, measureGroup: str | None = None,
measureGroup: str, parent: str | None = None,
parent: str, value: str | None = None,
value: str, goal: str | None = None,
goal: str, status: str | None = None,
status: str, trend: str | None = None,
trend: str, weight: str | None = None,
weight: str, time: str | None = None,
time: str,
) -> None: ...
@overload
def __init__(
self,
uniqueName: str,
caption: str | None,
displayFolder: str,
measureGroup: str,
parent: str,
value: str,
goal: str,
status: str,
trend: str,
weight: str,
time: str,
) -> None: ... ) -> None: ...
class GroupMember(Serialisable): class GroupMember(Serialisable):
@ -212,12 +174,6 @@ class GroupMember(Serialisable):
group: Bool[Literal[False]] group: Bool[Literal[False]]
def __init__(self, uniqueName: str, group: _ConvertibleToBool = None) -> None: ... def __init__(self, uniqueName: str, group: _ConvertibleToBool = None) -> None: ...
class GroupMembers(Serialisable):
count: Integer[Literal[False]]
groupMember: Typed[GroupMember, Literal[False]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, groupMember: GroupMember) -> None: ...
class LevelGroup(Serialisable): class LevelGroup(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
name: String[Literal[False]] name: String[Literal[False]]
@ -225,26 +181,25 @@ class LevelGroup(Serialisable):
caption: String[Literal[False]] caption: String[Literal[False]]
uniqueParent: String[Literal[False]] uniqueParent: String[Literal[False]]
id: Integer[Literal[False]] id: Integer[Literal[False]]
groupMembers: Typed[GroupMembers, Literal[False]] groupMembers: NestedSequence[list[GroupMember]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__( def __init__(
self, name: str, uniqueName: str, caption: str, uniqueParent: str, id: ConvertibleToInt, groupMembers: GroupMembers self,
name: str,
uniqueName: str,
caption: str,
uniqueParent: str,
id: ConvertibleToInt,
groupMembers: list[GroupMember] | tuple[GroupMember, ...] = (),
) -> None: ... ) -> None: ...
class Groups(Serialisable):
tagname: ClassVar[str]
count: Integer[Literal[False]]
group: Typed[LevelGroup, Literal[False]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, group: LevelGroup) -> None: ...
class GroupLevel(Serialisable): class GroupLevel(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
uniqueName: String[Literal[False]] uniqueName: String[Literal[False]]
caption: String[Literal[False]] caption: String[Literal[False]]
user: Bool[Literal[False]] user: Bool[Literal[False]]
customRollUp: Bool[Literal[False]] customRollUp: Bool[Literal[False]]
groups: Typed[Groups, Literal[True]] groups: NestedSequence[list[LevelGroup]]
extLst: Typed[ExtensionList, Literal[True]] extLst: Typed[ExtensionList, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__( def __init__(
@ -253,27 +208,15 @@ class GroupLevel(Serialisable):
caption: str, caption: str,
user: _ConvertibleToBool = None, user: _ConvertibleToBool = None,
customRollUp: _ConvertibleToBool = None, customRollUp: _ConvertibleToBool = None,
groups: Groups | None = None, groups: list[LevelGroup] | tuple[LevelGroup, ...] = (),
extLst: ExtensionList | None = None, extLst: ExtensionList | None = None,
) -> None: ... ) -> None: ...
class GroupLevels(Serialisable):
count: Integer[Literal[False]]
groupLevel: Typed[GroupLevel, Literal[False]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, groupLevel: GroupLevel) -> None: ...
class FieldUsage(Serialisable): class FieldUsage(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
x: Integer[Literal[False]] x: Integer[Literal[False]]
def __init__(self, x: ConvertibleToInt) -> None: ... def __init__(self, x: ConvertibleToInt) -> None: ...
class FieldsUsage(Serialisable):
count: Integer[Literal[False]]
fieldUsage: Typed[FieldUsage, Literal[True]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: ConvertibleToInt, fieldUsage: FieldUsage | None = None) -> None: ...
class CacheHierarchy(Serialisable): class CacheHierarchy(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
uniqueName: String[Literal[False]] uniqueName: String[Literal[False]]
@ -298,8 +241,8 @@ class CacheHierarchy(Serialisable):
unbalanced: Bool[Literal[True]] unbalanced: Bool[Literal[True]]
unbalancedGroup: Bool[Literal[True]] unbalancedGroup: Bool[Literal[True]]
hidden: Bool[Literal[False]] hidden: Bool[Literal[False]]
fieldsUsage: Typed[FieldsUsage, Literal[True]] fieldsUsage: NestedSequence[list[FieldUsage]]
groupLevels: Typed[GroupLevels, Literal[True]] groupLevels: NestedSequence[list[GroupLevel]]
extLst: Typed[ExtensionList, Literal[True]] extLst: Typed[ExtensionList, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
@overload @overload
@ -328,8 +271,8 @@ class CacheHierarchy(Serialisable):
unbalanced: _ConvertibleToBool | None = None, unbalanced: _ConvertibleToBool | None = None,
unbalancedGroup: _ConvertibleToBool | None = None, unbalancedGroup: _ConvertibleToBool | None = None,
hidden: _ConvertibleToBool = None, hidden: _ConvertibleToBool = None,
fieldsUsage: FieldsUsage | None = None, fieldsUsage: list[FieldUsage] | tuple[FieldUsage, ...] = (),
groupLevels: GroupLevels | None = None, groupLevels: list[FieldUsage] | tuple[FieldUsage, ...] = (),
extLst: ExtensionList | None = None, extLst: ExtensionList | None = None,
) -> None: ... ) -> None: ...
@overload @overload
@ -357,8 +300,8 @@ class CacheHierarchy(Serialisable):
unbalanced: _ConvertibleToBool | None = None, unbalanced: _ConvertibleToBool | None = None,
unbalancedGroup: _ConvertibleToBool | None = None, unbalancedGroup: _ConvertibleToBool | None = None,
hidden: _ConvertibleToBool = None, hidden: _ConvertibleToBool = None,
fieldsUsage: FieldsUsage | None = None, fieldsUsage: list[FieldUsage] | tuple[FieldUsage, ...] = (),
groupLevels: GroupLevels | None = None, groupLevels: list[FieldUsage] | tuple[FieldUsage, ...] = (),
extLst: ExtensionList | None = None, extLst: ExtensionList | None = None,
) -> None: ... ) -> None: ...
@ -376,20 +319,11 @@ class GroupItems(Serialisable):
@property @property
def count(self) -> int: ... def count(self) -> int: ...
class DiscretePr(Serialisable):
tagname: ClassVar[str]
count: Integer[Literal[False]]
x: NestedInteger[Literal[True]]
__elements__: ClassVar[tuple[str, ...]]
def __init__(
self, count: ConvertibleToInt, x: _HasTagAndGet[ConvertibleToInt | None] | ConvertibleToInt | None = None
) -> None: ...
class RangePr(Serialisable): class RangePr(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
autoStart: Bool[Literal[True]] autoStart: Bool[Literal[True]]
autoEnd: Bool[Literal[True]] autoEnd: Bool[Literal[True]]
groupBy: Set[_RangePrGroupBy] groupBy: NoneSet[_RangePrGroupBy]
startNum: Float[Literal[True]] startNum: Float[Literal[True]]
endNum: Float[Literal[True]] endNum: Float[Literal[True]]
startDate: DateTime[Literal[True]] startDate: DateTime[Literal[True]]
@ -412,7 +346,7 @@ class FieldGroup(Serialisable):
par: Integer[Literal[True]] par: Integer[Literal[True]]
base: Integer[Literal[True]] base: Integer[Literal[True]]
rangePr: Typed[RangePr, Literal[True]] rangePr: Typed[RangePr, Literal[True]]
discretePr: Typed[DiscretePr, Literal[True]] discretePr: NestedSequence[list[NestedInteger[Literal[False]]]]
groupItems: Typed[GroupItems, Literal[True]] groupItems: Typed[GroupItems, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__( def __init__(
@ -420,7 +354,7 @@ class FieldGroup(Serialisable):
par: ConvertibleToInt | None = None, par: ConvertibleToInt | None = None,
base: ConvertibleToInt | None = None, base: ConvertibleToInt | None = None,
rangePr: RangePr | None = None, rangePr: RangePr | None = None,
discretePr: DiscretePr | None = None, discretePr: list[NestedInteger[Literal[False]]] | tuple[NestedInteger[Literal[False]], ...] = (),
groupItems: GroupItems | None = None, groupItems: GroupItems | None = None,
) -> None: ... ) -> None: ...
@ -569,21 +503,18 @@ class PageItem(Serialisable):
name: String[Literal[False]] name: String[Literal[False]]
def __init__(self, name: str) -> None: ... def __init__(self, name: str) -> None: ...
class Page(Serialisable):
tagname: ClassVar[str]
pageItem: Incomplete
__elements__: ClassVar[tuple[str, ...]]
def __init__(self, count: Incomplete | None = None, pageItem: Incomplete | None = None) -> None: ...
@property
def count(self) -> int: ...
class Consolidation(Serialisable): class Consolidation(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
autoPage: Bool[Literal[True]] autoPage: Bool[Literal[True]]
pages: Incomplete pages: NestedSequence[list[PageItem]]
rangeSets: Incomplete rangeSets: NestedSequence[list[RangeSet]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__(self, autoPage: _ConvertibleToBool | None = None, pages=(), rangeSets=()) -> None: ... def __init__(
self,
autoPage: _ConvertibleToBool | None = None,
pages: list[PageItem] | tuple[PageItem, ...] = (),
rangeSets: list[RangeSet] | tuple[RangeSet, ...] = (),
) -> None: ...
class WorksheetSource(Serialisable): class WorksheetSource(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
@ -629,13 +560,13 @@ class CacheDefinition(Serialisable):
minRefreshableVersion: Integer[Literal[True]] minRefreshableVersion: Integer[Literal[True]]
recordCount: Integer[Literal[True]] recordCount: Integer[Literal[True]]
upgradeOnRefresh: Bool[Literal[True]] upgradeOnRefresh: Bool[Literal[True]]
tupleCache: Typed[TupleCache, Literal[True]]
supportSubquery: Bool[Literal[True]] supportSubquery: Bool[Literal[True]]
supportAdvancedDrill: Bool[Literal[True]] supportAdvancedDrill: Bool[Literal[True]]
cacheSource: Typed[CacheSource, Literal[True]] cacheSource: Typed[CacheSource, Literal[True]]
cacheFields: Incomplete cacheFields: Incomplete
cacheHierarchies: Incomplete cacheHierarchies: Incomplete
kpis: Incomplete kpis: NestedSequence[list[OLAPKPI]]
tupleCache: Typed[TupleCache, Literal[True]]
calculatedItems: Incomplete calculatedItems: Incomplete
calculatedMembers: Incomplete calculatedMembers: Incomplete
dimensions: Incomplete dimensions: Incomplete
@ -669,7 +600,7 @@ class CacheDefinition(Serialisable):
cacheSource: CacheSource, cacheSource: CacheSource,
cacheFields=(), cacheFields=(),
cacheHierarchies=(), cacheHierarchies=(),
kpis=(), kpis: list[OLAPKPI] | tuple[OLAPKPI, ...] = (),
calculatedItems=(), calculatedItems=(),
calculatedMembers=(), calculatedMembers=(),
dimensions=(), dimensions=(),

View File

@ -11,8 +11,8 @@ class Index(Serialisable):
def __init__(self, v: ConvertibleToInt | None = 0) -> None: ... def __init__(self, v: ConvertibleToInt | None = 0) -> None: ...
class Tuple(Serialisable): class Tuple(Serialisable):
fld: Integer[Literal[False]] fld: Integer[Literal[True]]
hier: Integer[Literal[False]] hier: Integer[Literal[True]]
item: Integer[Literal[False]] item: Integer[Literal[False]]
def __init__(self, fld: ConvertibleToInt, hier: ConvertibleToInt, item: ConvertibleToInt) -> None: ... def __init__(self, fld: ConvertibleToInt, hier: ConvertibleToInt, item: ConvertibleToInt) -> None: ...

View File

@ -16,7 +16,6 @@ vertical_aligments: Final[tuple[_VerticalAlignmentsType, ...]]
class Alignment(Serialisable): class Alignment(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
__fields__: ClassVar[tuple[str, ...]]
horizontal: NoneSet[_HorizontalAlignmentsType] horizontal: NoneSet[_HorizontalAlignmentsType]
vertical: NoneSet[_VerticalAlignmentsType] vertical: NoneSet[_VerticalAlignmentsType]
textRotation: NoneSet[int] textRotation: NoneSet[int]

View File

@ -39,7 +39,6 @@ BORDER_THICK: Final = "thick"
BORDER_THIN: Final = "thin" BORDER_THIN: Final = "thin"
class Side(Serialisable): class Side(Serialisable):
__fields__: ClassVar[tuple[str, ...]]
color: ColorDescriptor[Literal[True]] color: ColorDescriptor[Literal[True]]
style: NoneSet[_SideStyle] style: NoneSet[_SideStyle]
border_style: Alias border_style: Alias
@ -52,7 +51,6 @@ class Side(Serialisable):
class Border(Serialisable): class Border(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
__fields__: ClassVar[tuple[str, ...]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
start: Typed[Side, Literal[True]] start: Typed[Side, Literal[True]]
end: Typed[Side, Literal[True]] end: Typed[Side, Literal[True]]

View File

@ -88,9 +88,9 @@ class Stop(Serialisable):
color: Incomplete color: Incomplete
def __init__(self, color, position: ConvertibleToFloat) -> None: ... def __init__(self, color, position: ConvertibleToFloat) -> None: ...
class StopList(Sequence): class StopList(Sequence[list[Stop]]):
expected_type: type[Incomplete] expected_type: type[Stop]
def __set__(self, obj: Serialisable | Strict, values) -> None: ... def __set__(self, obj: Serialisable | Strict, values: list[Stop] | tuple[Stop, ...]) -> None: ...
class GradientFill(Fill): class GradientFill(Fill):
tagname: ClassVar[str] tagname: ClassVar[str]

View File

@ -4,6 +4,7 @@ from typing import ClassVar, Literal
from openpyxl.descriptors.base import Bool, Integer, String, Typed, _ConvertibleToBool from openpyxl.descriptors.base import Bool, Integer, String, Typed, _ConvertibleToBool
from openpyxl.descriptors.excel import ExtensionList from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.sequence import Sequence
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.styles.alignment import Alignment from openpyxl.styles.alignment import Alignment
from openpyxl.styles.borders import Border from openpyxl.styles.borders import Border
@ -22,8 +23,6 @@ class NamedStyle(Serialisable):
protection: Typed[Protection, Literal[False]] protection: Typed[Protection, Literal[False]]
builtinId: Integer[Literal[True]] builtinId: Integer[Literal[True]]
hidden: Bool[Literal[True]] hidden: Bool[Literal[True]]
# Overwritten by property below
# xfId: Integer
name: String[Literal[False]] name: String[Literal[False]]
def __init__( def __init__(
self, self,
@ -36,12 +35,9 @@ class NamedStyle(Serialisable):
protection: Protection | None = None, protection: Protection | None = None,
builtinId: ConvertibleToInt | None = None, builtinId: ConvertibleToInt | None = None,
hidden: _ConvertibleToBool | None = False, hidden: _ConvertibleToBool | None = False,
xfId: Unused = None,
) -> None: ... ) -> None: ...
def __setattr__(self, attr: str, value) -> None: ... def __setattr__(self, attr: str, value) -> None: ...
def __iter__(self) -> Iterator[tuple[str, str]]: ... def __iter__(self) -> Iterator[tuple[str, str]]: ...
@property
def xfId(self) -> int | None: ...
def bind(self, wb: Workbook) -> None: ... def bind(self, wb: Workbook) -> None: ...
def as_tuple(self) -> StyleArray: ... def as_tuple(self) -> StyleArray: ...
def as_xf(self) -> CellStyle: ... def as_xf(self) -> CellStyle: ...
@ -77,11 +73,10 @@ class _NamedCellStyle(Serialisable):
class _NamedCellStyleList(Serialisable): class _NamedCellStyleList(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
# Overwritten by property below # Overwritten by property below
# count: Integer # count: Integer[Literal[True]]
cellStyle: Incomplete cellStyle: Sequence[list[_NamedCellStyle]]
__attrs__: ClassVar[tuple[str, ...]] __attrs__: ClassVar[tuple[str, ...]]
def __init__(self, count: Unused = None, cellStyle=()) -> None: ... def __init__(self, count: Unused = None, cellStyle: list[_NamedCellStyle] | tuple[_NamedCellStyle, ...] = ()) -> None: ...
@property @property
def count(self) -> int: ... def count(self) -> int: ...
@property def remove_duplicates(self) -> list[_NamedCellStyle]: ...
def names(self) -> NamedStyleList: ...

View File

@ -17,8 +17,8 @@ SHEETRANGE_RE: Final[Pattern[str]]
def get_column_interval(start: str | int, end: str | int) -> list[str]: ... def get_column_interval(start: str | int, end: str | int) -> list[str]: ...
def coordinate_from_string(coord_string: str) -> tuple[str, int]: ... def coordinate_from_string(coord_string: str) -> tuple[str, int]: ...
def absolute_coordinate(coord_string: str) -> str: ... def absolute_coordinate(coord_string: str) -> str: ...
def get_column_letter(idx: int) -> str: ... def get_column_letter(col_idx: int) -> str: ...
def column_index_from_string(str_col: str) -> int: ... def column_index_from_string(col: str) -> int: ...
def range_boundaries(range_string: str) -> _RangeBoundariesTuple: ... def range_boundaries(range_string: str) -> _RangeBoundariesTuple: ...
def rows_from_range(range_string: str) -> Generator[tuple[str, ...], None, None]: ... def rows_from_range(range_string: str) -> Generator[tuple[str, ...], None, None]: ...
def cols_from_range(range_string: str) -> Generator[tuple[str, ...], None, None]: ... def cols_from_range(range_string: str) -> Generator[tuple[str, ...], None, None]: ...
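
Only the parameter names changed here, to match the runtime signatures; positional calls are unaffected. A tiny check, assuming openpyxl's utils:

```python
from openpyxl.utils import column_index_from_string, get_column_letter

assert get_column_letter(28) == "AB"           # parameter is now named col_idx
assert column_index_from_string("AB") == 28    # parameter is now named col
```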

View File

@ -7,6 +7,6 @@ class IndexedList(list[_T]):
clean: bool clean: bool
def __init__(self, iterable: Iterable[_T] | None = None) -> None: ... def __init__(self, iterable: Iterable[_T] | None = None) -> None: ...
def __contains__(self, value: object) -> bool: ... def __contains__(self, value: object) -> bool: ...
def index(self, value: _T): ... # type: ignore[override] def index(self, value: _T) -> int: ... # type: ignore[override]
def append(self, value: _T) -> None: ... def append(self, value: _T) -> None: ...
def add(self, value: _T): ... def add(self, value: _T) -> int: ...
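
`add()` and `index()` on `IndexedList` now declare their `int` return (the position of the value). A tiny sketch, assuming that helper:

```python
from openpyxl.utils.indexed_list import IndexedList

fonts = IndexedList()
pos = fonts.add("Calibri")            # returns the position of the value
assert fonts.index("Calibri") == pos  # looking it up again yields the same int
```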

View File

@ -65,7 +65,7 @@ class DefinedNameDict(dict[str, DefinedName]):
class DefinedNameList(Serialisable): class DefinedNameList(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
definedName: Sequence definedName: Sequence[list[DefinedName]]
def __init__(self, definedName=()) -> None: ... def __init__(self, definedName: list[DefinedName] | tuple[DefinedName, ...] = ()) -> None: ...
def by_sheet(self) -> defaultdict[int, DefinedNameDict]: ... def by_sheet(self) -> defaultdict[int, DefinedNameDict]: ...
def __len__(self) -> int: ... def __len__(self) -> int: ...

View File

@ -1,5 +1,6 @@
from _typeshed import SupportsGetItem from _typeshed import SupportsGetItem
from collections.abc import Generator from collections.abc import Generator, Iterator
from typing import Any, overload
from openpyxl import _VisibilityType from openpyxl import _VisibilityType
from openpyxl.cell import _CellValue from openpyxl.cell import _CellValue
@ -21,8 +22,15 @@ class ReadOnlyWorksheet:
# https://github.com/python/mypy/issues/6700 # https://github.com/python/mypy/issues/6700
@property @property
def rows(self) -> Generator[tuple[Cell, ...], None, None]: ... def rows(self) -> Generator[tuple[Cell, ...], None, None]: ...
__getitem__ = Worksheet.__getitem__ # From Worksheet.__getitem__
__iter__ = Worksheet.__iter__ @overload
def __getitem__(self, key: int) -> tuple[Cell, ...]: ...
@overload
def __getitem__(self, key: slice) -> tuple[Any, ...]: ... # tuple[AnyOf[Cell, tuple[Cell, ...]]]
@overload
def __getitem__(self, key: str) -> Any: ... # AnyOf[Cell, tuple[Cell, ...], tuple[tuple[Cell, ...], ...]]
# From Worksheet.__iter__
def __iter__(self) -> Iterator[tuple[Cell, ...]]: ...
parent: Workbook parent: Workbook
title: str title: str
sheet_state: _VisibilityType sheet_state: _VisibilityType
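
The read-only worksheet now spells out its own `__getitem__` overloads instead of aliasing `Worksheet.__getitem__`, so integer keys are typed as a row tuple while string keys stay `Any` (they may address a cell, a column or a range). A sketch, assuming openpyxl's read-only mode; the file name is a placeholder:

```python
from openpyxl import load_workbook

wb = load_workbook("report.xlsx", read_only=True)
ws = wb.active
row = ws[3]            # tuple of cells for row 3
cell = ws["B2"]        # Any: a single cell in this case
for cells in ws:       # __iter__ yields row tuples
    print(len(cells))
wb.close()
```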

View File

@ -72,7 +72,7 @@ class DataValidation(Serialisable):
showErrorMessage: _ConvertibleToBool | None = False, showErrorMessage: _ConvertibleToBool | None = False,
showInputMessage: _ConvertibleToBool | None = False, showInputMessage: _ConvertibleToBool | None = False,
showDropDown: _ConvertibleToBool | None = False, showDropDown: _ConvertibleToBool | None = False,
allowBlank: _ConvertibleToBool | None = False, allowBlank: _ConvertibleToBool = False,
sqref: _ConvertibleToMultiCellRange = (), sqref: _ConvertibleToMultiCellRange = (),
promptTitle: str | None = None, promptTitle: str | None = None,
errorStyle: _DataValidationErrorStyle | Literal["none"] | None = None, errorStyle: _DataValidationErrorStyle | Literal["none"] | None = None,
@ -81,7 +81,7 @@ class DataValidation(Serialisable):
errorTitle: str | None = None, errorTitle: str | None = None,
imeMode: _DataValidationImeMode | Literal["none"] | None = None, imeMode: _DataValidationImeMode | Literal["none"] | None = None,
operator: _DataValidationOperator | Literal["none"] | None = None, operator: _DataValidationOperator | Literal["none"] | None = None,
allow_blank: Incomplete | None = False, allow_blank: _ConvertibleToBool | None = None,
) -> None: ... ) -> None: ...
def add(self, cell) -> None: ... def add(self, cell) -> None: ...
def __contains__(self, cell: _HasCoordinate | str | CellRange) -> bool: ... def __contains__(self, cell: _HasCoordinate | str | CellRange) -> bool: ...
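
`allowBlank` now only takes a convertible bool (no `None`), while the lowercase `allow_blank` keyword defaults to `None`. A short sketch, assuming openpyxl's data-validation API; the sheet contents are placeholders:

```python
from openpyxl import Workbook
from openpyxl.worksheet.datavalidation import DataValidation

wb = Workbook()
ws = wb.active
dv = DataValidation(type="list", formula1='"yes,no"', allow_blank=True)
ws.add_data_validation(dv)
dv.add("A1:A10")       # restrict these cells to the list values
```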

View File

@ -1,6 +1,6 @@
from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, Unused from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, Unused
from collections.abc import Callable, Iterator from collections.abc import Callable, Iterator
from typing import ClassVar, Generic, Literal, TypeVar from typing import ClassVar, Literal, TypeVar
from typing_extensions import Self from typing_extensions import Self
from openpyxl.descriptors import Strict from openpyxl.descriptors import Strict
@ -12,6 +12,7 @@ from openpyxl.utils.cell import _RangeBoundariesTuple
from openpyxl.worksheet.worksheet import Worksheet from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.xml.functions import Element from openpyxl.xml.functions import Element
_DimKeyT = TypeVar("_DimKeyT", bound=str | int)
_DimT = TypeVar("_DimT", bound=Dimension) _DimT = TypeVar("_DimT", bound=Dimension)
class Dimension(Strict, StyleableObject): class Dimension(Strict, StyleableObject):
@ -101,9 +102,11 @@ class ColumnDimension(Dimension):
@property @property
def customWidth(self) -> bool: ... def customWidth(self) -> bool: ...
def reindex(self) -> None: ... def reindex(self) -> None: ...
@property
def range(self) -> str: ...
def to_tree(self) -> Element | None: ... def to_tree(self) -> Element | None: ...
class DimensionHolder(BoundDictionary[str, _DimT], Generic[_DimT]): class DimensionHolder(BoundDictionary[_DimKeyT, _DimT]):
worksheet: Worksheet worksheet: Worksheet
max_outline: int | None max_outline: int | None
default_factory: Callable[[], _DimT] | None default_factory: Callable[[], _DimT] | None
@ -111,7 +114,7 @@ class DimensionHolder(BoundDictionary[str, _DimT], Generic[_DimT]):
def __init__( def __init__(
self, worksheet: Worksheet, reference: str = "index", default_factory: Callable[[], _DimT] | None = None self, worksheet: Worksheet, reference: str = "index", default_factory: Callable[[], _DimT] | None = None
) -> None: ... ) -> None: ...
def group(self, start: str, end: str | None = None, outline_level: int = 1, hidden: bool = False) -> None: ... def group(self, start: _DimKeyT, end: _DimKeyT | None = None, outline_level: int = 1, hidden: bool = False) -> None: ...
def to_tree(self) -> Element | None: ... def to_tree(self) -> Element | None: ...
class SheetFormatProperties(Serialisable): class SheetFormatProperties(Serialisable):
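
`DimensionHolder` gains a key TypeVar, so the row and column holders can be keyed by `int` and `str` respectively (see the matching `Worksheet` change further down). A small sketch, assuming openpyxl's dimension API:

```python
from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.column_dimensions["B"].width = 24                 # DimensionHolder[str, ColumnDimension]
ws.row_dimensions[2].height = 30                     # DimensionHolder[int, RowDimension]
ws.column_dimensions.group("C", "E", hidden=True)    # group() keys follow the holder's key type
wb.save("dimensions_demo.xlsx")
```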

View File

@ -1,14 +1,11 @@
from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, Unused from _typeshed import ConvertibleToFloat, ConvertibleToInt, Incomplete, Unused
from datetime import datetime from datetime import datetime
from re import Pattern from typing import ClassVar, Final, Literal, overload
from typing import ClassVar, Literal, overload
from typing_extensions import TypeAlias from typing_extensions import TypeAlias
from openpyxl.descriptors import Strict
from openpyxl.descriptors.base import ( from openpyxl.descriptors.base import (
Alias, Alias,
Bool, Bool,
Convertible,
DateTime, DateTime,
Float, Float,
Integer, Integer,
@ -22,8 +19,6 @@ from openpyxl.descriptors.base import (
from openpyxl.descriptors.excel import ExtensionList from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
from ..descriptors.base import _N
_SortConditionSortBy: TypeAlias = Literal["value", "cellColor", "fontColor", "icon"] _SortConditionSortBy: TypeAlias = Literal["value", "cellColor", "fontColor", "icon"]
_IconSet: TypeAlias = Literal[ _IconSet: TypeAlias = Literal[
"3Arrows", "3Arrows",
@ -48,6 +43,7 @@ _SortStateSortMethod: TypeAlias = Literal["stroke", "pinYin"]
_CustomFilterOperator: TypeAlias = Literal[ _CustomFilterOperator: TypeAlias = Literal[
"equal", "lessThan", "lessThanOrEqual", "notEqual", "greaterThanOrEqual", "greaterThan" "equal", "lessThan", "lessThanOrEqual", "notEqual", "greaterThanOrEqual", "greaterThan"
] ]
_StringFilterOperator: TypeAlias = Literal["contains", "startswith", "endswith", "wildcard"]
_FiltersCalendarType: TypeAlias = Literal[ _FiltersCalendarType: TypeAlias = Literal[
"gregorian", "gregorian",
"gregorianUs", "gregorianUs",
@ -170,24 +166,32 @@ class DynamicFilter(Serialisable):
maxValIso: datetime | str | None = None, maxValIso: datetime | str | None = None,
) -> None: ... ) -> None: ...
class CustomFilterValueDescriptor(Convertible[float | str, _N]):
pattern: Pattern[str]
expected_type: type[float | str]
@overload # type: ignore[override] # Different restrictions
def __set__(
self: CustomFilterValueDescriptor[Literal[True]], instance: Serialisable | Strict, value: str | ConvertibleToFloat | None
) -> None: ...
@overload
def __set__(
self: CustomFilterValueDescriptor[Literal[False]], instance: Serialisable | Strict, value: str | ConvertibleToFloat
) -> None: ...
class CustomFilter(Serialisable): class CustomFilter(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
operator: NoneSet[_CustomFilterOperator] val: String[Literal[False]]
val: Incomplete operator: Set[_CustomFilterOperator]
def __init__(self, operator: _CustomFilterOperator = "equal", val: str | None = None) -> None: ...
def convert(self) -> BlankFilter | NumberFilter | StringFilter: ...
class BlankFilter(CustomFilter):
def __init__(self, **kw: Unused) -> None: ...
@property
def operator(self) -> Literal["notEqual"]: ... # type: ignore[override]
@property
def val(self) -> Literal[" "]: ... # type: ignore[override]
class NumberFilter(CustomFilter):
val: Float[Literal[False]] # type: ignore[assignment]
def __init__(self, operator: _CustomFilterOperator = "equal", val: ConvertibleToFloat | None = None) -> None: ...
string_format_mapping: Final[dict[_StringFilterOperator, str]]
class StringFilter(CustomFilter):
operator: Set[_StringFilterOperator] # type: ignore[assignment]
val: String[Literal[False]]
exclude: Bool[Literal[False]]
def __init__( def __init__(
self, operator: _CustomFilterOperator | Literal["none"] | None = None, val: Incomplete | None = None self, operator: _StringFilterOperator = "contains", val: str | None = None, exclude: _ConvertibleToBool = False
) -> None: ... ) -> None: ...
class CustomFilters(Serialisable): class CustomFilters(Serialisable):
@ -195,7 +199,7 @@ class CustomFilters(Serialisable):
_and: Bool[Literal[True]] # Not private. Avoids name clash _and: Bool[Literal[True]] # Not private. Avoids name clash
customFilter: Incomplete customFilter: Incomplete
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__(self, _and: _ConvertibleToBool | None = False, customFilter=()) -> None: ... def __init__(self, _and: _ConvertibleToBool | None = None, customFilter=()) -> None: ...
class Top10(Serialisable): class Top10(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
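
The custom-filter stubs now model specialised classes (`BlankFilter`, `NumberFilter`, `StringFilter`) plus a `convert()` helper that picks among them. A hedged sketch, assuming the runtime API the updated stubs reflect; the values are placeholders and `convert()`'s exact behaviour belongs to openpyxl, not this sketch:

```python
from openpyxl.worksheet.filters import CustomFilter, CustomFilters

flt = CustomFilter(operator="greaterThan", val="100")
specialised = flt.convert()    # typed as BlankFilter | NumberFilter | StringFilter
filters = CustomFilters(_and=True, customFilter=[flt])
```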

View File

@ -2,6 +2,7 @@ from _typeshed import Incomplete
from typing import ClassVar, Literal from typing import ClassVar, Literal
from openpyxl.descriptors.base import String from openpyxl.descriptors.base import String
from openpyxl.descriptors.sequence import Sequence
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
class Hyperlink(Serialisable): class Hyperlink(Serialisable):
@ -25,8 +26,5 @@ class Hyperlink(Serialisable):
class HyperlinkList(Serialisable): class HyperlinkList(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
hyperlink: Incomplete hyperlink: Sequence[list[Hyperlink]]
def __init__(self, hyperlink=()) -> None: ... def __init__(self, hyperlink: list[Hyperlink] | tuple[Hyperlink, ...] = ()) -> None: ...
def __bool__(self) -> bool: ...
def __len__(self) -> int: ...
def append(self, value) -> None: ...

View File

@ -4,6 +4,7 @@ from typing_extensions import TypeAlias
from openpyxl.descriptors.base import Bool, Float, Integer, NoneSet, Set, String, Typed, _ConvertibleToBool from openpyxl.descriptors.base import Bool, Float, Integer, NoneSet, Set, String, Typed, _ConvertibleToBool
from openpyxl.descriptors.excel import ExtensionList from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.sequence import Sequence
from openpyxl.descriptors.serialisable import Serialisable from openpyxl.descriptors.serialisable import Serialisable
_Pane: TypeAlias = Literal["bottomRight", "topRight", "bottomLeft", "topLeft"] _Pane: TypeAlias = Literal["bottomRight", "topRight", "bottomLeft", "topLeft"]
@ -90,7 +91,9 @@ class SheetView(Serialisable):
class SheetViewList(Serialisable): class SheetViewList(Serialisable):
tagname: ClassVar[str] tagname: ClassVar[str]
sheetView: Incomplete sheetView: Sequence[list[SheetView]]
extLst: Typed[ExtensionList, Literal[True]] extLst: Typed[ExtensionList, Literal[True]]
__elements__: ClassVar[tuple[str, ...]] __elements__: ClassVar[tuple[str, ...]]
def __init__(self, sheetView: Incomplete | None = None, extLst: Unused = None) -> None: ... def __init__(self, sheetView: SheetView | None = None, extLst: Unused = None) -> None: ...
@property
def active(self) -> SheetView: ...

View File

@ -51,8 +51,8 @@ class Worksheet(_WorkbookChild):
ORIENTATION_PORTRAIT: Final = "portrait" ORIENTATION_PORTRAIT: Final = "portrait"
ORIENTATION_LANDSCAPE: Final = "landscape" ORIENTATION_LANDSCAPE: Final = "landscape"
row_dimensions: DimensionHolder[RowDimension] row_dimensions: DimensionHolder[int, RowDimension]
column_dimensions: DimensionHolder[ColumnDimension] column_dimensions: DimensionHolder[str, ColumnDimension]
row_breaks: RowBreak row_breaks: RowBreak
col_breaks: ColBreak col_breaks: ColBreak
merged_cells: MultiCellRange merged_cells: MultiCellRange
@ -190,6 +190,8 @@ class Worksheet(_WorkbookChild):
) -> Generator[tuple[Cell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None]: ... ) -> Generator[tuple[Cell, ...], None, None] | Generator[tuple[str | float | datetime | None, ...], None, None]: ...
@property @property
def columns(self) -> Generator[tuple[Cell, ...], None, None]: ... def columns(self) -> Generator[tuple[Cell, ...], None, None]: ...
@property
def column_groups(self) -> list[str]: ...
def set_printer_settings( def set_printer_settings(
self, paper_size: int | None, orientation: Literal["default", "portrait", "landscape"] | None self, paper_size: int | None, orientation: Literal["default", "portrait", "landscape"] | None
) -> None: ... ) -> None: ...
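
Illustrative sketch (not part of the diff): the DimensionHolder change above encodes that row dimensions are keyed by row number (int) while column dimensions are keyed by column letter (str), and column_groups is newly typed as list[str].

# Sketch of the keying difference captured by DimensionHolder[int, ...] vs DimensionHolder[str, ...].
from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.row_dimensions[1].height = 30        # rows keyed by int
ws.column_dimensions["A"].width = 20    # columns keyed by letter (str)
print(ws.column_groups)                 # outlined column ranges, per the new property above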

View File

@ -1,4 +1,4 @@
version = "3.17.3" version = "3.17.5"
upstream_repository = "https://github.com/coleifer/peewee" upstream_repository = "https://github.com/coleifer/peewee"
# We're not providing stubs for all playhouse modules right now # We're not providing stubs for all playhouse modules right now
# https://github.com/python/typeshed/pull/11731#issuecomment-2065729058 # https://github.com/python/typeshed/pull/11731#issuecomment-2065729058

View File

@ -459,7 +459,12 @@ class ForUpdate(Node):
def __init__(self, expr, of: Incomplete | None = ..., nowait: Incomplete | None = ...) -> None: ... def __init__(self, expr, of: Incomplete | None = ..., nowait: Incomplete | None = ...) -> None: ...
def __sql__(self, ctx): ... def __sql__(self, ctx): ...
def Case(predicate, expression_tuples, default: Incomplete | None = ...): ... class Case(ColumnBase):
predicate: Incomplete
expression_tuples: Incomplete
default: Incomplete | None
def __init__(self, predicate, expression_tuples, default: Incomplete | None = None) -> None: ...
def __sql__(self, ctx): ...
class NodeList(ColumnBase): class NodeList(ColumnBase):
nodes: Incomplete nodes: Incomplete
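
Illustrative sketch (not part of the diff): Case is now stubbed as a ColumnBase subclass rather than a bare function, matching peewee, so instances compose like any other column expression. The model below is hypothetical; the Case signature (predicate, expression_tuples, default) is peewee's.

# Hedged sketch; "Register" is a made-up model, Case/alias/sql are peewee API.
from peewee import Case, CharField, IntegerField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")

class Register(Model):
    name = CharField()
    status = IntegerField()
    class Meta:
        database = db

db.create_tables([Register])

# CASE WHEN status = 1 THEN 'open' WHEN status = 2 THEN 'closed' ELSE 'unknown' END
label = Case(Register.status, ((1, "open"), (2, "closed")), "unknown")
query = Register.select(Register.name, label.alias("status_label"))
print(query.sql())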

View File

@ -1,8 +1,8 @@
# Whenever you update version here, PROTOBUF_VERSION should be updated # Whenever you update version here, PROTOBUF_VERSION should be updated
# in scripts/generate_proto_stubs.sh and vice-versa. # in scripts/generate_proto_stubs.sh and vice-versa.
version = "5.26.*" version = "5.27.*"
upstream_repository = "https://github.com/protocolbuffers/protobuf" upstream_repository = "https://github.com/protocolbuffers/protobuf"
extra_description = "Generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 25.1 on [protobuf v26.1](https://github.com/protocolbuffers/protobuf/releases/tag/v26.1) (python protobuf==5.26.1)" extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 26.1 on [protobuf v27.1](https://github.com/protocolbuffers/protobuf/releases/tag/v27.1) (python protobuf==5.27.1)."
partial_stub = true partial_stub = true
[tool.stubtest] [tool.stubtest]

View File

@ -35,6 +35,10 @@ class _EditionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTy
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
EDITION_UNKNOWN: _Edition.ValueType # 0 EDITION_UNKNOWN: _Edition.ValueType # 0
"""A placeholder for an unknown edition value.""" """A placeholder for an unknown edition value."""
EDITION_LEGACY: _Edition.ValueType # 900
"""A placeholder edition for specifying default behaviors *before* a feature
was first introduced. This is effectively an "infinite past".
"""
EDITION_PROTO2: _Edition.ValueType # 998 EDITION_PROTO2: _Edition.ValueType # 998
"""Legacy syntax "editions". These pre-date editions, but behave much like """Legacy syntax "editions". These pre-date editions, but behave much like
distinct editions. These can't be used to specify the edition of proto distinct editions. These can't be used to specify the edition of proto
@ -67,6 +71,10 @@ class Edition(_Edition, metaclass=_EditionEnumTypeWrapper):
EDITION_UNKNOWN: Edition.ValueType # 0 EDITION_UNKNOWN: Edition.ValueType # 0
"""A placeholder for an unknown edition value.""" """A placeholder for an unknown edition value."""
EDITION_LEGACY: Edition.ValueType # 900
"""A placeholder edition for specifying default behaviors *before* a feature
was first introduced. This is effectively an "infinite past".
"""
EDITION_PROTO2: Edition.ValueType # 998 EDITION_PROTO2: Edition.ValueType # 998
"""Legacy syntax "editions". These pre-date editions, but behave much like """Legacy syntax "editions". These pre-date editions, but behave much like
distinct editions. These can't be used to specify the edition of proto distinct editions. These can't be used to specify the edition of proto
@ -898,12 +906,16 @@ class FileOptions(google.protobuf.message.Message):
java_generate_equals_and_hash: builtins.bool java_generate_equals_and_hash: builtins.bool
"""This option does nothing.""" """This option does nothing."""
java_string_check_utf8: builtins.bool java_string_check_utf8: builtins.bool
"""If set true, then the Java2 code generator will generate code that """A proto2 file can set this to true to opt in to UTF-8 checking for Java,
throws an exception whenever an attempt is made to assign a non-UTF-8 which will throw an exception if invalid UTF-8 is parsed from the wire or
byte sequence to a string field. assigned to a string field.
Message reflection will do the same.
However, an extension field still accepts non-UTF-8 byte sequences. TODO: clarify exactly what kinds of field types this option
This option has no effect on when used with the lite runtime. applies to, and update these docs accordingly.
Proto3 files already perform these checks. Setting the option explicitly to
false has no effect: it cannot be used to opt proto3 files out of UTF-8
checks.
""" """
optimize_for: global___FileOptions.OptimizeMode.ValueType optimize_for: global___FileOptions.OptimizeMode.ValueType
go_package: builtins.str go_package: builtins.str
@ -1238,6 +1250,45 @@ class FieldOptions(google.protobuf.message.Message):
def HasField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> builtins.bool: ... def HasField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> None: ... def ClearField(self, field_name: typing.Literal["edition", b"edition", "value", b"value"]) -> None: ...
@typing.final
class FeatureSupport(google.protobuf.message.Message):
"""Information about the support window of a feature."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
EDITION_INTRODUCED_FIELD_NUMBER: builtins.int
EDITION_DEPRECATED_FIELD_NUMBER: builtins.int
DEPRECATION_WARNING_FIELD_NUMBER: builtins.int
EDITION_REMOVED_FIELD_NUMBER: builtins.int
edition_introduced: global___Edition.ValueType
"""The edition that this feature was first available in. In editions
earlier than this one, the default assigned to EDITION_LEGACY will be
used, and proto files will not be able to override it.
"""
edition_deprecated: global___Edition.ValueType
"""The edition this feature becomes deprecated in. Using this after this
edition may trigger warnings.
"""
deprecation_warning: builtins.str
"""The deprecation warning text if this feature is used after the edition it
was marked deprecated in.
"""
edition_removed: global___Edition.ValueType
"""The edition this feature is no longer available in. In editions after
this one, the last default assigned will be used, and proto files will
not be able to override it.
"""
def __init__(
self,
*,
edition_introduced: global___Edition.ValueType | None = ...,
edition_deprecated: global___Edition.ValueType | None = ...,
deprecation_warning: builtins.str | None = ...,
edition_removed: global___Edition.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing.Literal["deprecation_warning", b"deprecation_warning", "edition_deprecated", b"edition_deprecated", "edition_introduced", b"edition_introduced", "edition_removed", b"edition_removed"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["deprecation_warning", b"deprecation_warning", "edition_deprecated", b"edition_deprecated", "edition_introduced", b"edition_introduced", "edition_removed", b"edition_removed"]) -> None: ...
CTYPE_FIELD_NUMBER: builtins.int CTYPE_FIELD_NUMBER: builtins.int
PACKED_FIELD_NUMBER: builtins.int PACKED_FIELD_NUMBER: builtins.int
JSTYPE_FIELD_NUMBER: builtins.int JSTYPE_FIELD_NUMBER: builtins.int
@ -1250,6 +1301,7 @@ class FieldOptions(google.protobuf.message.Message):
TARGETS_FIELD_NUMBER: builtins.int TARGETS_FIELD_NUMBER: builtins.int
EDITION_DEFAULTS_FIELD_NUMBER: builtins.int EDITION_DEFAULTS_FIELD_NUMBER: builtins.int
FEATURES_FIELD_NUMBER: builtins.int FEATURES_FIELD_NUMBER: builtins.int
FEATURE_SUPPORT_FIELD_NUMBER: builtins.int
UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int
ctype: global___FieldOptions.CType.ValueType ctype: global___FieldOptions.CType.ValueType
"""The ctype option instructs the C++ code generator to use a different """The ctype option instructs the C++ code generator to use a different
@ -1331,6 +1383,8 @@ class FieldOptions(google.protobuf.message.Message):
def features(self) -> global___FeatureSet: def features(self) -> global___FeatureSet:
"""Any features defined in the specific edition.""" """Any features defined in the specific edition."""
@property
def feature_support(self) -> global___FieldOptions.FeatureSupport: ...
@property @property
def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]:
"""The parser stores options it doesn't recognize here. See above.""" """The parser stores options it doesn't recognize here. See above."""
@ -1350,10 +1404,11 @@ class FieldOptions(google.protobuf.message.Message):
targets: collections.abc.Iterable[global___FieldOptions.OptionTargetType.ValueType] | None = ..., targets: collections.abc.Iterable[global___FieldOptions.OptionTargetType.ValueType] | None = ...,
edition_defaults: collections.abc.Iterable[global___FieldOptions.EditionDefault] | None = ..., edition_defaults: collections.abc.Iterable[global___FieldOptions.EditionDefault] | None = ...,
features: global___FeatureSet | None = ..., features: global___FeatureSet | None = ...,
feature_support: global___FieldOptions.FeatureSupport | None = ...,
uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ...,
) -> None: ... ) -> None: ...
def HasField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> builtins.bool: ... def HasField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "edition_defaults", b"edition_defaults", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "targets", b"targets", "uninterpreted_option", b"uninterpreted_option", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> None: ... def ClearField(self, field_name: typing.Literal["ctype", b"ctype", "debug_redact", b"debug_redact", "deprecated", b"deprecated", "edition_defaults", b"edition_defaults", "feature_support", b"feature_support", "features", b"features", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "retention", b"retention", "targets", b"targets", "uninterpreted_option", b"uninterpreted_option", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> None: ...
global___FieldOptions = FieldOptions global___FieldOptions = FieldOptions
@ -1438,6 +1493,7 @@ class EnumValueOptions(google.protobuf.message.Message):
DEPRECATED_FIELD_NUMBER: builtins.int DEPRECATED_FIELD_NUMBER: builtins.int
FEATURES_FIELD_NUMBER: builtins.int FEATURES_FIELD_NUMBER: builtins.int
DEBUG_REDACT_FIELD_NUMBER: builtins.int DEBUG_REDACT_FIELD_NUMBER: builtins.int
FEATURE_SUPPORT_FIELD_NUMBER: builtins.int
UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int
deprecated: builtins.bool deprecated: builtins.bool
"""Is this enum value deprecated? """Is this enum value deprecated?
@ -1454,6 +1510,10 @@ class EnumValueOptions(google.protobuf.message.Message):
def features(self) -> global___FeatureSet: def features(self) -> global___FeatureSet:
"""Any features defined in the specific edition.""" """Any features defined in the specific edition."""
@property
def feature_support(self) -> global___FieldOptions.FeatureSupport:
"""Information about the support window of a feature value."""
@property @property
def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]:
"""The parser stores options it doesn't recognize here. See above.""" """The parser stores options it doesn't recognize here. See above."""
@ -1464,10 +1524,11 @@ class EnumValueOptions(google.protobuf.message.Message):
deprecated: builtins.bool | None = ..., deprecated: builtins.bool | None = ...,
features: global___FeatureSet | None = ..., features: global___FeatureSet | None = ...,
debug_redact: builtins.bool | None = ..., debug_redact: builtins.bool | None = ...,
feature_support: global___FieldOptions.FeatureSupport | None = ...,
uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ...,
) -> None: ... ) -> None: ...
def HasField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "features", b"features"]) -> builtins.bool: ... def HasField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... def ClearField(self, field_name: typing.Literal["debug_redact", b"debug_redact", "deprecated", b"deprecated", "feature_support", b"feature_support", "features", b"features", "uninterpreted_option", b"uninterpreted_option"]) -> None: ...
global___EnumValueOptions = EnumValueOptions global___EnumValueOptions = EnumValueOptions
@ -1799,18 +1860,26 @@ class FeatureSetDefaults(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor DESCRIPTOR: google.protobuf.descriptor.Descriptor
EDITION_FIELD_NUMBER: builtins.int EDITION_FIELD_NUMBER: builtins.int
FEATURES_FIELD_NUMBER: builtins.int OVERRIDABLE_FEATURES_FIELD_NUMBER: builtins.int
FIXED_FEATURES_FIELD_NUMBER: builtins.int
edition: global___Edition.ValueType edition: global___Edition.ValueType
@property @property
def features(self) -> global___FeatureSet: ... def overridable_features(self) -> global___FeatureSet:
"""Defaults of features that can be overridden in this edition."""
@property
def fixed_features(self) -> global___FeatureSet:
"""Defaults of features that can't be overridden in this edition."""
def __init__( def __init__(
self, self,
*, *,
edition: global___Edition.ValueType | None = ..., edition: global___Edition.ValueType | None = ...,
features: global___FeatureSet | None = ..., overridable_features: global___FeatureSet | None = ...,
fixed_features: global___FeatureSet | None = ...,
) -> None: ... ) -> None: ...
def HasField(self, field_name: typing.Literal["edition", b"edition", "features", b"features"]) -> builtins.bool: ... def HasField(self, field_name: typing.Literal["edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["edition", b"edition", "features", b"features"]) -> None: ... def ClearField(self, field_name: typing.Literal["edition", b"edition", "fixed_features", b"fixed_features", "overridable_features", b"overridable_features"]) -> None: ...
DEFAULTS_FIELD_NUMBER: builtins.int DEFAULTS_FIELD_NUMBER: builtins.int
MINIMUM_EDITION_FIELD_NUMBER: builtins.int MINIMUM_EDITION_FIELD_NUMBER: builtins.int
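
Illustrative sketch (not part of the diff): the hunks above add FieldOptions.FeatureSupport and split FeatureSetDefaults into overridable_features/fixed_features. A hedged example of constructing the new message via the usual protobuf keyword constructors, assuming a protobuf >= 5.27 runtime; the field values are arbitrary.

# Sketch: building the newly-stubbed FeatureSupport message.
from google.protobuf import descriptor_pb2

support = descriptor_pb2.FieldOptions.FeatureSupport(
    edition_introduced=descriptor_pb2.Edition.EDITION_2023,
    edition_deprecated=descriptor_pb2.Edition.EDITION_2024,
    deprecation_warning="prefer the replacement feature",
)
print(support.HasField("edition_removed"))  # False: optional field left unset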

View File

@ -1,4 +1,4 @@
version = "5.9.*" version = "6.0.*"
upstream_repository = "https://github.com/giampaolo/psutil" upstream_repository = "https://github.com/giampaolo/psutil"
[tool.stubtest] [tool.stubtest]

View File

@ -3,7 +3,7 @@ from _typeshed import Incomplete
from collections.abc import Callable, Iterable, Iterator from collections.abc import Callable, Iterable, Iterator
from contextlib import AbstractContextManager from contextlib import AbstractContextManager
from typing import Any, Literal, overload from typing import Any, Literal, overload
from typing_extensions import Self, TypeAlias from typing_extensions import Self, TypeAlias, deprecated
from psutil._common import ( from psutil._common import (
AIX as AIX, AIX as AIX,
@ -216,6 +216,7 @@ class Process:
def memory_full_info(self) -> pfullmem: ... def memory_full_info(self) -> pfullmem: ...
def memory_percent(self, memtype: str = "rss") -> float: ... def memory_percent(self, memtype: str = "rss") -> float: ...
def open_files(self) -> list[popenfile]: ... def open_files(self) -> list[popenfile]: ...
@deprecated('use "net_connections" method instead')
def connections(self, kind: str = "inet") -> list[pconn]: ... def connections(self, kind: str = "inet") -> list[pconn]: ...
def send_signal(self, sig: int) -> None: ... def send_signal(self, sig: int) -> None: ...
def suspend(self) -> None: ... def suspend(self) -> None: ...
@ -223,6 +224,7 @@ class Process:
def terminate(self) -> None: ... def terminate(self) -> None: ...
def kill(self) -> None: ... def kill(self) -> None: ...
def wait(self, timeout: float | None = None) -> int: ... def wait(self, timeout: float | None = None) -> int: ...
def net_connections(self, kind: str = "inet") -> list[pconn]: ...
class Popen(Process): class Popen(Process):
def __init__(self, *args, **kwargs) -> None: ... def __init__(self, *args, **kwargs) -> None: ...
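
Illustrative sketch (not part of the diff): the stubs now mark Process.connections() with @deprecated and add Process.net_connections(), mirroring the rename in psutil 6.0. A small migration example:

# Sketch: psutil 6.0 prefers net_connections(); the old connections() name still works
# but is flagged by the @deprecated marker above.
import psutil

proc = psutil.Process()                      # current process
for conn in proc.net_connections(kind="inet"):
    print(conn.laddr, conn.status)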

View File

@ -89,8 +89,6 @@ class sdiskpart(NamedTuple):
mountpoint: str mountpoint: str
fstype: str fstype: str
opts: str opts: str
maxfile: int
maxpath: int
class snetio(NamedTuple): class snetio(NamedTuple):
bytes_sent: int bytes_sent: int
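
Illustrative sketch (not part of the diff): sdiskpart loses its maxfile/maxpath fields (removed in psutil 6.0), leaving device, mountpoint, fstype and opts.

# Sketch: iterating the remaining sdiskpart fields after the removal above.
import psutil

for part in psutil.disk_partitions(all=False):
    print(part.device, part.mountpoint, part.fstype, part.opts)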

View File

@ -20,7 +20,6 @@ xrange = range
unicode = str unicode = str
basestring = str basestring = str
def u(s): ...
def b(s): ... def b(s): ...
SubprocessTimeoutExpired = TimeoutExpired SubprocessTimeoutExpired = TimeoutExpired

View File

@ -85,7 +85,7 @@ class Process:
def create_time(self): ... def create_time(self): ...
def num_threads(self): ... def num_threads(self): ...
def threads(self): ... def threads(self): ...
def connections(self, kind: str = ...): ... def net_connections(self, kind: str = ...): ...
def nice_get(self): ... def nice_get(self): ...
def nice_set(self, value): ... def nice_set(self, value): ...
def ppid(self): ... def ppid(self): ...

View File

@ -136,7 +136,7 @@ class Process:
def num_threads(self): ... def num_threads(self): ...
def num_ctx_switches(self): ... def num_ctx_switches(self): ...
def threads(self): ... def threads(self): ...
def connections(self, kind: str = ...): ... def net_connections(self, kind: str = ...): ...
def wait(self, timeout: Incomplete | None = ...): ... def wait(self, timeout: Incomplete | None = ...): ...
def nice_get(self): ... def nice_get(self): ...
def nice_set(self, value): ... def nice_set(self, value): ...

View File

@ -148,7 +148,7 @@ net_if_addrs: Any
class _Ipv6UnsupportedError(Exception): ... class _Ipv6UnsupportedError(Exception): ...
class Connections: class NetConnections:
tmap: Any tmap: Any
def __init__(self) -> None: ... def __init__(self) -> None: ...
def get_proc_inodes(self, pid): ... def get_proc_inodes(self, pid): ...
@ -220,7 +220,7 @@ class Process:
def rlimit(self, resource_, limits: Incomplete | None = ...): ... def rlimit(self, resource_, limits: Incomplete | None = ...): ...
def status(self): ... def status(self): ...
def open_files(self): ... def open_files(self): ...
def connections(self, kind: str = ...): ... def net_connections(self, kind: str = ...): ...
def num_fds(self): ... def num_fds(self): ...
def ppid(self): ... def ppid(self): ...
def uids(self, _uids_re=...): ... def uids(self, _uids_re=...): ...

View File

@ -99,7 +99,7 @@ class Process:
def num_ctx_switches(self): ... def num_ctx_switches(self): ...
def num_threads(self): ... def num_threads(self): ...
def open_files(self): ... def open_files(self): ...
def connections(self, kind: str = ...): ... def net_connections(self, kind: str = ...): ...
def num_fds(self): ... def num_fds(self): ...
def wait(self, timeout: Incomplete | None = ...): ... def wait(self, timeout: Incomplete | None = ...): ...
def nice_get(self): ... def nice_get(self): ...

View File

@ -114,7 +114,7 @@ class Process:
def status(self): ... def status(self): ...
def threads(self): ... def threads(self): ...
def open_files(self): ... def open_files(self): ...
def connections(self, kind: str = ...): ... def net_connections(self, kind: str = ...): ...
class nt_mmap_grouped(NamedTuple): class nt_mmap_grouped(NamedTuple):
path: Incomplete path: Incomplete

Some files were not shown because too many files have changed in this diff