import abc
import collections
import collections.abc
import functools
import inspect
import operator
import sys
import types
as _types
import typing
import warnings
# Public API of this module; mirrors (and extends) typing's exports.
__all__ = [
    # Super-special typing primitives.
    'Any',
    'ClassVar',
    'Concatenate',
    'Final',
    'LiteralString',
    'ParamSpec',
    'ParamSpecArgs',
    'ParamSpecKwargs',
    'Self',
    'Type',
    'TypeVar',
    'TypeVarTuple',
    'Unpack',

    # ABCs (from collections.abc).
    'Awaitable',
    'AsyncIterator',
    'AsyncIterable',
    'Coroutine',
    'AsyncGenerator',
    'AsyncContextManager',
    'Buffer',
    'ChainMap',

    # Concrete collection types.
    'ContextManager',
    'Counter',
    'Deque',
    'DefaultDict',
    'NamedTuple',
    'OrderedDict',
    'TypedDict',

    # Structural checks, a.k.a. protocols.
    'SupportsAbs',
    'SupportsBytes',
    'SupportsComplex',
    'SupportsFloat',
    'SupportsIndex',
    'SupportsInt',
    'SupportsRound',

    # One-off things.
    'Annotated',
    'assert_never',
    'assert_type',
    'clear_overloads',
    'dataclass_transform',
    'deprecated',
    'get_overloads',
    'final',
    'get_args',
    'get_origin',
    'get_original_bases',
    'get_protocol_members',
    'get_type_hints',
    'IntVar',
    'is_protocol',
    'is_typeddict',
    'Literal',
    'NewType',
    'overload',
    'override',
    'Protocol',
    'reveal_type',
    'runtime',
    'runtime_checkable',
    'Text',
    'TypeAlias',
    'TypeAliasType',
    'TypeGuard',
    'TYPE_CHECKING',
    'Never',
    'NoReturn',
    'Required',
    'NotRequired',

    # Pure aliases, have always been in typing
    'AbstractSet',
    'AnyStr',
    'BinaryIO',
    'Callable',
    'Collection',
    'Container',
    'Dict',
    'ForwardRef',
    'FrozenSet',
    'Generator',
    'Generic',
    'Hashable',
    'IO',
    'ItemsView',
    'Iterable',
    'Iterator',
    'KeysView',
    'List',
    'Mapping',
    'MappingView',
    'Match',
    'MutableMapping',
    'MutableSequence',
    'MutableSet',
    'Optional',
    'Pattern',
    'Reversible',
    'Sequence',
    'Set',
    'Sized',
    'TextIO',
    'Tuple',
    'Union',
    'ValuesView',
    'cast',
    'no_type_check',
    'no_type_check_decorator',
]

# for backward compatibility
PEP_560 = True
GenericMeta = type
# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.


class _Sentinel:
    # Unique placeholder object used as a default value to detect
    # "argument not supplied" (distinct from None, which callers may
    # legitimately pass, e.g. in _check_generic below).
    def __repr__(self):
        return ""


# Single shared sentinel instance used throughout this module.
_marker = _Sentinel()
def _check_generic(cls, parameters, elen=_marker):
    """Check correct count for parameters of a generic cls (internal helper).

    This gives a nice error message in case of count mismatch.
    """
    if not elen:
        raise TypeError(f"{cls} is not a generic class")
    if elen is _marker:
        # Expected length not supplied: derive it from the class itself.
        if not hasattr(cls, "__parameters__") or not cls.__parameters__:
            raise TypeError(f"{cls} is not a generic class")
        elen = len(cls.__parameters__)
    alen = len(parameters)
    if alen != elen:
        if hasattr(cls, "__parameters__"):
            # PEP 646: a TypeVarTuple absorbs a variable number of
            # arguments, so an exact count match is not required.
            parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
            num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
            if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
                return
        raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
                        f" actual {alen}, expected {elen}")
# Decide whether a type expression can carry __parameters__ that
# _collect_type_vars should recurse into; the set of alias types
# grew across Python versions (types.GenericAlias in 3.9,
# types.UnionType in 3.10).
if sys.version_info >= (3, 10):
    def _should_collect_from_parameters(t):
        return isinstance(
            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
        )
elif sys.version_info >= (3, 9):
    def _should_collect_from_parameters(t):
        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
else:
    def _should_collect_from_parameters(t):
        # On 3.7/3.8 special forms are also _GenericAlias; skip them.
        return isinstance(t, typing._GenericAlias) and not t._special
def _collect_type_vars(types, typevar_types=None):
    """Collect all type variable contained in types in order of
    first appearance (lexicographic order).

    For example::

        _collect_type_vars((T, List[S, T])) == (T, S)
    """
    if typevar_types is None:
        typevar_types = typing.TypeVar
    tvars = []
    for t in types:
        if (
            isinstance(t, typevar_types) and
            t not in tvars and
            not _is_unpack(t)
        ):
            tvars.append(t)
        if _should_collect_from_parameters(t):
            # Recurse into generic aliases, keeping first-seen order
            # and avoiding duplicates.
            tvars.extend([t for t in t.__parameters__ if t not in tvars])
    return tuple(tvars)
NoReturn = typing.NoReturn

# Some unconstrained type variables.  These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T')  # Any type.
KT = typing.TypeVar('KT')  # Key type.
VT = typing.TypeVar('VT')  # Value type.
T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
if sys.version_info >= (3, 11):
    from typing import Any
else:
    class _AnyMeta(type):
        # Metaclass so that the class Any itself rejects isinstance()
        # while concrete subclasses of Any still behave normally.
        def __instancecheck__(self, obj):
            if self is Any:
                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
            return super().__instancecheck__(obj)

        def __repr__(self):
            if self is Any:
                return "typing_extensions.Any"
            return super().__repr__()

    class Any(metaclass=_AnyMeta):
        """Special type indicating an unconstrained type.

        - Any is compatible with every type.
        - Any assumed to have all methods.
        - All values assumed to be instances of Any.

        Note that all the above statements are true from the point of view of
        static type checkers. At runtime, Any should not be used with instance
        checks.
        """
        def __new__(cls, *args, **kwargs):
            # Any itself cannot be instantiated, but subclasses may be.
            if cls is Any:
                raise TypeError("Any cannot be instantiated")
            return super().__new__(cls, *args, **kwargs)
ClassVar = typing.ClassVar


class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
    # Common base for special forms defined here; only customizes repr()
    # so forms display as "typing_extensions.<Name>".
    def __repr__(self):
        return 'typing_extensions.' + self._name
# On older versions of typing there is an internal class named "Final".
# 3.8+
if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
    Final = typing.Final
# 3.7
else:
    class _FinalForm(_ExtensionsSpecialForm, _root=True):
        # Subscription: Final[X] accepts exactly one type argument.
        def __getitem__(self, parameters):
            item = typing._type_check(parameters,
                                      f'{self._name} accepts only a single type.')
            return typing._GenericAlias(self, (item,))

    Final = _FinalForm('Final',
                       doc="""A special typing construct to indicate that a name
                       cannot be re-assigned or overridden in a subclass.
                       For example:

                           MAX_SIZE: Final = 9000
                           MAX_SIZE += 1  # Error reported by type checker

                           class Connection:
                               TIMEOUT: Final[int] = 10
                           class FastConnector(Connection):
                               TIMEOUT = 1  # Error reported by type checker

                       There is no runtime checking of these properties.""")
if sys.version_info >= (3, 11):
    final = typing.final
else:
    # @final exists in 3.8+, but we backport it for all versions
    # before 3.11 to keep support for the __final__ attribute.
    # See https://bugs.python.org/issue46342
    def final(f):
        """This decorator can be used to indicate to type checkers that
        the decorated method cannot be overridden, and decorated class
        cannot be subclassed. For example:

            class Base:
                @final
                def done(self) -> None:
                    ...
            class Sub(Base):
                def done(self) -> None:  # Error reported by type checker
                    ...
            @final
            class Leaf:
                ...
            class Other(Leaf):  # Error reported by type checker
                ...

        There is no runtime checking of these properties. The decorator
        sets the ``__final__`` attribute to ``True`` on the decorated object
        to allow runtime introspection.
        """
        try:
            f.__final__ = True
        except (AttributeError, TypeError):
            # Skip the attribute silently if it is not writable.
            # AttributeError happens if the object has __slots__ or a
            # read-only property, TypeError if it's a builtin class.
            pass
        return f
def IntVar(name):
    """Legacy helper kept for backward compatibility.

    Simply creates and returns an ordinary ``typing.TypeVar`` called *name*.
    """
    type_var = typing.TypeVar(name)
    return type_var
# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
if sys.version_info >= (3, 10, 1):
    Literal = typing.Literal
else:
    def _flatten_literal_params(parameters):
        """An internal helper for Literal creation: flatten Literals among parameters"""
        params = []
        for p in parameters:
            if isinstance(p, _LiteralGenericAlias):
                # Literal[Literal[a, b], c] -> Literal[a, b, c]
                params.extend(p.__args__)
            else:
                params.append(p)
        return tuple(params)

    def _value_and_type_iter(params):
        # Yield (value, type) pairs so that e.g. 0 and False (equal but
        # different types) are kept distinct.
        for p in params:
            yield p, type(p)

    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
        # Equality/hash compare args as (value, type) sets, ignoring order
        # and duplicates.
        def __eq__(self, other):
            if not isinstance(other, _LiteralGenericAlias):
                return NotImplemented
            these_args_deduped = set(_value_and_type_iter(self.__args__))
            other_args_deduped = set(_value_and_type_iter(other.__args__))
            return these_args_deduped == other_args_deduped

        def __hash__(self):
            return hash(frozenset(_value_and_type_iter(self.__args__)))

    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
        def __init__(self, doc: str):
            self._name = 'Literal'
            self._doc = self.__doc__ = doc

        def __getitem__(self, parameters):
            if not isinstance(parameters, tuple):
                parameters = (parameters,)

            parameters = _flatten_literal_params(parameters)

            val_type_pairs = list(_value_and_type_iter(parameters))
            try:
                deduped_pairs = set(val_type_pairs)
            except TypeError:
                # unhashable parameters
                pass
            else:
                # similar logic to typing._deduplicate on Python 3.9+
                if len(deduped_pairs) < len(val_type_pairs):
                    # Keep only the first occurrence of each (value, type)
                    # pair, preserving original ordering.
                    new_parameters = []
                    for pair in val_type_pairs:
                        if pair in deduped_pairs:
                            new_parameters.append(pair[0])
                            deduped_pairs.remove(pair)
                    assert not deduped_pairs, deduped_pairs
                    parameters = tuple(new_parameters)
            return _LiteralGenericAlias(self, parameters)

    Literal = _LiteralForm(doc="""\
                           A type that can be used to indicate to type checkers
                           that the corresponding value has a value literally equivalent
                           to the provided parameter. For example:

                               var: Literal[4] = 4

                           The type checker understands that 'var' is literally equal to
                           the value 4 and no other value.

                           Literal[...] cannot be subclassed. There is no runtime
                           checking verifying that the parameter is actually a value
                           instead of a type.""")
_overload_dummy = typing._overload_dummy


if hasattr(typing, "get_overloads"):
    # 3.11+
    overload = typing.overload
    get_overloads = typing.get_overloads
    clear_overloads = typing.clear_overloads
else:
    # {module: {qualname: {firstlineno: func}}}
    _overload_registry = collections.defaultdict(
        functools.partial(collections.defaultdict, dict)
    )

    def overload(func):
        """Decorator for overloaded functions/methods.

        In a stub file, place two or more stub definitions for the same
        function in a row, each decorated with @overload. For example:

            @overload
            def utf8(value: None) -> None: ...
            @overload
            def utf8(value: bytes) -> bytes: ...
            @overload
            def utf8(value: str) -> bytes: ...

        In a non-stub file (i.e. a regular .py file), do the same but
        follow it with an implementation. The implementation should *not*
        be decorated with @overload. For example:

            @overload
            def utf8(value: None) -> None: ...
            @overload
            def utf8(value: bytes) -> bytes: ...
            @overload
            def utf8(value: str) -> bytes: ...
            def utf8(value):
                # implementation goes here

        The overloads for a function can be retrieved at runtime using the
        get_overloads() function.
        """
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        try:
            _overload_registry[f.__module__][f.__qualname__][
                f.__code__.co_firstlineno
            ] = func
        except AttributeError:
            # Not a normal function; ignore.
            pass
        return _overload_dummy

    def get_overloads(func):
        """Return all defined overloads for *func* as a sequence."""
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        if f.__module__ not in _overload_registry:
            return []
        mod_dict = _overload_registry[f.__module__]
        if f.__qualname__ not in mod_dict:
            return []
        return list(mod_dict[f.__qualname__].values())

    def clear_overloads():
        """Clear all overloads in the registry."""
        _overload_registry.clear()
# This is not a real generic class.  Don't use outside annotations.
Type = typing.Type

# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
Deque = typing.Deque
ContextManager = typing.ContextManager
AsyncContextManager = typing.AsyncContextManager
DefaultDict = typing.DefaultDict

# 3.7.2+
if hasattr(typing, 'OrderedDict'):
    OrderedDict = typing.OrderedDict
# 3.7.0-3.7.2
else:
    OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))

Counter = typing.Counter
ChainMap = typing.ChainMap
AsyncGenerator = typing.AsyncGenerator
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
# Classes (by module) that protocols are allowed to inherit from even
# though they are not themselves protocols; consulted by
# _ProtocolMeta.__new__ when validating protocol bases.
_PROTO_ALLOWLIST = {
    'collections.abc': [
        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
    ],
    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
    'typing_extensions': ['Buffer'],
}
# Bookkeeping attribute names that are never treated as protocol members
# by _get_protocol_attrs below.
_EXCLUDED_ATTRS = {
    "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol",
    "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__",
    "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__",
    "__subclasshook__", "__orig_class__", "__init__", "__new__",
    "__protocol_attrs__", "__callable_proto_members_only__",
}

if sys.version_info < (3, 8):
    # Old-typing internals present on 3.7 generic classes.
    _EXCLUDED_ATTRS |= {
        "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__",
        "__origin__"
    }

if sys.version_info >= (3, 9):
    _EXCLUDED_ATTRS.add("__class_getitem__")

if sys.version_info >= (3, 12):
    _EXCLUDED_ATTRS.add("__type_params__")

# Freeze once fully built.
_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS)
def _get_protocol_attrs(cls):
    """Collect the protocol member names of *cls*.

    Walks every class in the MRO (excluding the trailing ``object``),
    skipping ``Protocol``/``Generic`` themselves, and gathers names found
    either in the class dict or in ``__annotations__`` — minus abc
    internals (``_abc_*``) and the ``_EXCLUDED_ATTRS`` bookkeeping names.
    """
    members = set()
    for base in cls.__mro__[:-1]:  # without object
        if base.__name__ in {'Protocol', 'Generic'}:
            continue
        annotations = getattr(base, '__annotations__', {})
        members.update(
            candidate
            for candidate in (*base.__dict__, *annotations)
            if not candidate.startswith('_abc_')
            and candidate not in _EXCLUDED_ATTRS
        )
    return members
def _maybe_adjust_parameters(cls):
    """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.

    The contents of this function are very similar
    to logic found in typing.Generic.__init_subclass__
    on the CPython main branch.
    """
    tvars = []
    if '__orig_bases__' in cls.__dict__:
        tvars = _collect_type_vars(cls.__orig_bases__)
        # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
        # If found, tvars must be a subset of it.
        # If not found, tvars is it.
        # Also check for and reject plain Generic,
        # and reject multiple Generic[...] and/or Protocol[...].
        gvars = None
        for base in cls.__orig_bases__:
            if (isinstance(base, typing._GenericAlias) and
                    base.__origin__ in (typing.Generic, Protocol)):
                # for error messages
                the_base = base.__origin__.__name__
                if gvars is not None:
                    raise TypeError(
                        "Cannot inherit from Generic[...]"
                        " and/or Protocol[...] multiple types.")
                gvars = base.__parameters__
        if gvars is None:
            gvars = tvars
        else:
            tvarset = set(tvars)
            gvarset = set(gvars)
            if not tvarset <= gvarset:
                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
                s_args = ', '.join(str(g) for g in gvars)
                raise TypeError(f"Some type variables ({s_vars}) are"
                                f" not listed in {the_base}[{s_args}]")
            tvars = gvars
    cls.__parameters__ = tuple(tvars)
def _caller(depth=2):
try:
return sys._getframe(depth).f_globals.get(
'__name__',
'__main__')
except (AttributeError, ValueError):
# For platforms without _getframe()
return None
# The performance of runtime-checkable protocols is significantly improved on Python 3.12,
# so we backport the 3.12 version of Protocol to Python <=3.11
if sys.version_info >= (3, 12):
    Protocol = typing.Protocol
else:
    def _allow_reckless_class_checks(depth=3):
        """Allow instance and class checks for special stdlib modules.

        The abc and functools modules indiscriminately call isinstance() and
        issubclass() on the whole MRO of a user class, which may contain protocols.
        """
        return _caller(depth) in {'abc', 'functools', None}

    def _no_init(self, *args, **kwargs):
        # Installed as __init__ on protocol classes so they cannot be
        # instantiated directly (concrete subclasses get a real __init__).
        if type(self)._is_protocol:
            raise TypeError('Protocols cannot be instantiated')

    if sys.version_info >= (3, 8):
        # Inheriting from typing._ProtocolMeta isn't actually desirable,
        # but is necessary to allow typing.Protocol and typing_extensions.Protocol
        # to mix without getting TypeErrors about "metaclass conflict"
        _typing_Protocol = typing.Protocol
        _ProtocolMetaBase = type(_typing_Protocol)
    else:
        _typing_Protocol = _marker
        _ProtocolMetaBase = abc.ABCMeta

    class _ProtocolMeta(_ProtocolMetaBase):
        # This metaclass is somewhat unfortunate,
        # but is necessary for several reasons...
        #
        # NOTE: DO NOT call super() in any methods in this class
        # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
        # and those are slow
        def __new__(mcls, name, bases, namespace, **kwargs):
            if name == "Protocol" and len(bases) < 2:
                # The Protocol base class itself; nothing to validate.
                pass
            elif {Protocol, _typing_Protocol} & set(bases):
                # Subclassing Protocol: every other base must itself be a
                # protocol (or an allow-listed ABC).
                for base in bases:
                    if not (
                        base in {object, typing.Generic, Protocol, _typing_Protocol}
                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
                        or is_protocol(base)
                    ):
                        raise TypeError(
                            f"Protocols can only inherit from other protocols, "
                            f"got {base!r}"
                        )
            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)

        def __init__(cls, *args, **kwargs):
            abc.ABCMeta.__init__(cls, *args, **kwargs)
            if getattr(cls, "_is_protocol", False):
                # Cache member names once at class creation time.
                cls.__protocol_attrs__ = _get_protocol_attrs(cls)
                # PEP 544 prohibits using issubclass()
                # with protocols that have non-method members.
                cls.__callable_proto_members_only__ = all(
                    callable(getattr(cls, attr, None))
                    for attr in cls.__protocol_attrs__
                )

        def __subclasscheck__(cls, other):
            if cls is Protocol:
                return type.__subclasscheck__(cls, other)
            if (
                getattr(cls, '_is_protocol', False)
                and not _allow_reckless_class_checks()
            ):
                if not isinstance(other, type):
                    # Same error message as for issubclass(1, int).
                    raise TypeError('issubclass() arg 1 must be a class')
                if (
                    not cls.__callable_proto_members_only__
                    and cls.__dict__.get("__subclasshook__") is _proto_hook
                ):
                    raise TypeError(
                        "Protocols with non-method members don't support issubclass()"
                    )
                if not getattr(cls, '_is_runtime_protocol', False):
                    raise TypeError(
                        "Instance and class checks can only be used with "
                        "@runtime_checkable protocols"
                    )
            return abc.ABCMeta.__subclasscheck__(cls, other)

        def __instancecheck__(cls, instance):
            # We need this method for situations where attributes are
            # assigned in __init__.
            if cls is Protocol:
                return type.__instancecheck__(cls, instance)
            if not getattr(cls, "_is_protocol", False):
                # i.e., it's a concrete subclass of a protocol
                return abc.ABCMeta.__instancecheck__(cls, instance)
            if (
                not getattr(cls, '_is_runtime_protocol', False) and
                not _allow_reckless_class_checks()
            ):
                raise TypeError("Instance and class checks can only be used with"
                                " @runtime_checkable protocols")
            if abc.ABCMeta.__instancecheck__(cls, instance):
                return True
            # Structural check: every protocol member must be present on the
            # instance (and non-None when the protocol defines it as a method).
            for attr in cls.__protocol_attrs__:
                try:
                    val = inspect.getattr_static(instance, attr)
                except AttributeError:
                    break
                if val is None and callable(getattr(cls, attr, None)):
                    break
            else:
                return True
            return False

        def __eq__(cls, other):
            # Hack so that typing.Generic.__class_getitem__
            # treats typing_extensions.Protocol
            # as equivalent to typing.Protocol on Python 3.8+
            if abc.ABCMeta.__eq__(cls, other) is True:
                return True
            return (
                cls is Protocol
                and other is getattr(typing, "Protocol", object())
            )

        # This has to be defined, or the abc-module cache
        # complains about classes with this metaclass being unhashable,
        # if we define only __eq__!
        def __hash__(cls) -> int:
            return type.__hash__(cls)

    @classmethod
    def _proto_hook(cls, other):
        # Default __subclasshook__ installed on protocol classes: a class is
        # a structural subclass iff it provides every protocol member.
        if not cls.__dict__.get('_is_protocol', False):
            return NotImplemented
        for attr in cls.__protocol_attrs__:
            for base in other.__mro__:
                # Check if the members appears in the class dictionary...
                if attr in base.__dict__:
                    if base.__dict__[attr] is None:
                        return NotImplemented
                    break
                # ...or in annotations, if it is a sub-protocol.
                annotations = getattr(base, '__annotations__', {})
                if (
                    isinstance(annotations, collections.abc.Mapping)
                    and attr in annotations
                    and is_protocol(other)
                ):
                    break
            else:
                return NotImplemented
        return True

    if sys.version_info >= (3, 8):
        class Protocol(typing.Generic, metaclass=_ProtocolMeta):
            __doc__ = typing.Protocol.__doc__
            __slots__ = ()
            _is_protocol = True
            _is_runtime_protocol = False

            def __init_subclass__(cls, *args, **kwargs):
                super().__init_subclass__(*args, **kwargs)

                # Determine if this is a protocol or a concrete subclass.
                if not cls.__dict__.get('_is_protocol', False):
                    cls._is_protocol = any(b is Protocol for b in cls.__bases__)

                # Set (or override) the protocol subclass hook.
                if '__subclasshook__' not in cls.__dict__:
                    cls.__subclasshook__ = _proto_hook

                # Prohibit instantiation for protocol classes
                if cls._is_protocol and cls.__init__ is Protocol.__init__:
                    cls.__init__ = _no_init

    else:
        class Protocol(metaclass=_ProtocolMeta):
            # There is quite a lot of overlapping code with typing.Generic.
            # Unfortunately it is hard to avoid this on Python <3.8,
            # as the typing module on Python 3.7 doesn't let us subclass typing.Generic!
            """Base class for protocol classes. Protocol classes are defined as::

                class Proto(Protocol):
                    def meth(self) -> int:
                        ...

            Such classes are primarily used with static type checkers that recognize
            structural subtyping (static duck-typing), for example::

                class C:
                    def meth(self) -> int:
                        return 0

                def func(x: Proto) -> int:
                    return x.meth()

                func(C())  # Passes static type check

            See PEP 544 for details. Protocol classes decorated with
            @typing_extensions.runtime_checkable act
            as simple-minded runtime-checkable protocols that check
            only the presence of given attributes, ignoring their type signatures.

            Protocol classes can be generic, they are defined as::

                class GenProto(Protocol[T]):
                    def meth(self) -> T:
                        ...
            """
            __slots__ = ()
            _is_protocol = True
            _is_runtime_protocol = False

            def __new__(cls, *args, **kwds):
                if cls is Protocol:
                    raise TypeError("Type Protocol cannot be instantiated; "
                                    "it can only be used as a base class")
                return super().__new__(cls)

            @typing._tp_cache
            def __class_getitem__(cls, params):
                if not isinstance(params, tuple):
                    params = (params,)
                if not params and cls is not typing.Tuple:
                    raise TypeError(
                        f"Parameter list to {cls.__qualname__}[...] cannot be empty")
                msg = "Parameters to generic types must be types."
                params = tuple(typing._type_check(p, msg) for p in params)
                if cls is Protocol:
                    # Generic can only be subscripted with unique type variables.
                    if not all(isinstance(p, typing.TypeVar) for p in params):
                        # Find the first offending parameter for the message.
                        i = 0
                        while isinstance(params[i], typing.TypeVar):
                            i += 1
                        raise TypeError(
                            "Parameters to Protocol[...] must all be type variables."
                            f" Parameter {i + 1} is {params[i]}")
                    if len(set(params)) != len(params):
                        raise TypeError(
                            "Parameters to Protocol[...] must all be unique")
                else:
                    # Subscripting a regular Generic subclass.
                    _check_generic(cls, params, len(cls.__parameters__))
                return typing._GenericAlias(cls, params)

            def __init_subclass__(cls, *args, **kwargs):
                if '__orig_bases__' in cls.__dict__:
                    error = typing.Generic in cls.__orig_bases__
                else:
                    error = typing.Generic in cls.__bases__
                if error:
                    raise TypeError("Cannot inherit from plain Generic")
                _maybe_adjust_parameters(cls)

                # Determine if this is a protocol or a concrete subclass.
                if not cls.__dict__.get('_is_protocol', None):
                    cls._is_protocol = any(b is Protocol for b in cls.__bases__)

                # Set (or override) the protocol subclass hook.
                if '__subclasshook__' not in cls.__dict__:
                    cls.__subclasshook__ = _proto_hook

                # Prohibit instantiation for protocol classes
                if cls._is_protocol and cls.__init__ is Protocol.__init__:
                    cls.__init__ = _no_init
if sys.version_info >= (3, 8):
    runtime_checkable = typing.runtime_checkable
else:
    def runtime_checkable(cls):
        """Mark a protocol class as a runtime protocol, so that it
        can be used with isinstance() and issubclass().
        Raise TypeError if applied to a non-protocol class.

        This allows a simple-minded structural check very similar to the
        one-offs in collections.abc such as Hashable.
        """
        if not (
            (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic))
            and getattr(cls, "_is_protocol", False)
        ):
            raise TypeError('@runtime_checkable can be only applied to protocol classes,'
                            f' got {cls!r}')
        cls._is_runtime_protocol = True
        return cls


# Exists for backwards compatibility.
runtime = runtime_checkable
# Our version of runtime-checkable protocols is faster on Python 3.7-3.11
if sys.version_info >= (3, 12):
    SupportsInt = typing.SupportsInt
    SupportsFloat = typing.SupportsFloat
    SupportsComplex = typing.SupportsComplex
    SupportsBytes = typing.SupportsBytes
    SupportsIndex = typing.SupportsIndex
    SupportsAbs = typing.SupportsAbs
    SupportsRound = typing.SupportsRound
else:
    @runtime_checkable
    class SupportsInt(Protocol):
        """An ABC with one abstract method __int__."""
        __slots__ = ()

        @abc.abstractmethod
        def __int__(self) -> int:
            pass

    @runtime_checkable
    class SupportsFloat(Protocol):
        """An ABC with one abstract method __float__."""
        __slots__ = ()

        @abc.abstractmethod
        def __float__(self) -> float:
            pass

    @runtime_checkable
    class SupportsComplex(Protocol):
        """An ABC with one abstract method __complex__."""
        __slots__ = ()

        @abc.abstractmethod
        def __complex__(self) -> complex:
            pass

    @runtime_checkable
    class SupportsBytes(Protocol):
        """An ABC with one abstract method __bytes__."""
        __slots__ = ()

        @abc.abstractmethod
        def __bytes__(self) -> bytes:
            pass

    @runtime_checkable
    class SupportsIndex(Protocol):
        """An ABC with one abstract method __index__."""
        __slots__ = ()

        @abc.abstractmethod
        def __index__(self) -> int:
            pass

    @runtime_checkable
    class SupportsAbs(Protocol[T_co]):
        """
        An ABC with one abstract method __abs__ that is covariant
        in its return type.
        """
        __slots__ = ()

        @abc.abstractmethod
        def __abs__(self) -> T_co:
            pass

    @runtime_checkable
    class SupportsRound(Protocol[T_co]):
        """
        An ABC with one abstract method __round__ that is covariant
        in its return type.
        """
        __slots__ = ()

        @abc.abstractmethod
        def __round__(self, ndigits: int = 0) -> T_co:
            pass
def _ensure_subclassable(mro_entries):
def inner(func):
if sys.implementation.name ==
"pypy" and sys.version_info < (3, 9):
cls_dict = {
"__call__": staticmethod(func),
"__mro_entries__": staticmethod(mro_entries)
}
t = type(func.__name__, (), cls_dict)
return functools.update_wrapper(t(), func)
else:
func.__mro_entries__ = mro_entries
return func
return inner
if sys.version_info >= (3, 13):
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
# The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
# keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
# The standard library TypedDict below Python 3.11 does not store runtime
# information about optional and required keys when using Required or NotRequired.
# Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
# Aaaand on 3.12 we add __orig_bases__ to TypedDict
# to enable better runtime introspection.
# On 3.13 we deprecate some odd ways of creating TypedDicts.
TypedDict = typing.TypedDict
_TypedDictMeta = typing._TypedDictMeta
is_typeddict = typing.is_typeddict
else:
# 3.10.0 and later
_TAKES_MODULE =
"module" in inspect.signature(typing._type_check).parameters
if sys.version_info >= (3, 8):
_fake_name =
"Protocol"
else:
_fake_name =
"_Protocol"
class _TypedDictMeta(type):
def __new__(cls, name, bases, ns, total=
True):
"""Create new typed dict class object.
This method
is called when TypedDict
is subclassed,
or when TypedDict
is instantiated. This way
TypedDict supports all three syntax forms described
in its docstring.
Subclasses
and instances of TypedDict
return actual dictionaries.
"""
for base
in bases:
if type(base)
is not _TypedDictMeta
and base
is not typing.Generic:
raise TypeError(
'cannot inherit from both a TypedDict type '
'and a non-TypedDict base class')
if any(issubclass(b, typing.Generic)
for b
in bases):
generic_base = (typing.Generic,)
else:
generic_base = ()
# typing.py generally doesn't let you inherit from plain Generic, unless
# the name of the class happens to be "Protocol" (or "_Protocol" on 3.7).
tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns)
tp_dict.__name__ = name
if tp_dict.__qualname__ == _fake_name:
tp_dict.__qualname__ = name
if not hasattr(tp_dict,
'__orig_bases__'):
tp_dict.__orig_bases__ = bases
annotations = {}
own_annotations = ns.get(
'__annotations__', {})
msg =
"TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
if _TAKES_MODULE:
own_annotations = {
n: typing._type_check(tp, msg, module=tp_dict.__module__)
for n, tp
in own_annotations.items()
}
else:
own_annotations = {
n: typing._type_check(tp, msg)
for n, tp
in own_annotations.items()
}
required_keys = set()
optional_keys = set()
for base
in bases:
annotations.update(base.__dict__.get(
'__annotations__', {}))
required_keys.update(base.__dict__.get(
'__required_keys__', ()))
optional_keys.update(base.__dict__.get(
'__optional_keys__', ()))
annotations.update(own_annotations)
for annotation_key, annotation_type
in own_annotations.items():
annotation_origin = get_origin(annotation_type)
if annotation_origin
is Annotated:
annotation_args = get_args(annotation_type)
if annotation_args:
annotation_type = annotation_args[0]
annotation_origin = get_origin(annotation_type)
if annotation_origin
is Required:
required_keys.add(annotation_key)
elif annotation_origin
is NotRequired:
optional_keys.add(annotation_key)
elif total:
required_keys.add(annotation_key)
else:
optional_keys.add(annotation_key)
tp_dict.__annotations__ = annotations
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
if not hasattr(tp_dict,
'__total__'):
tp_dict.__total__ = total
return tp_dict
__call__ = dict
# static method
def __subclasscheck__(cls, other):
# Typed dicts are only for static structural subtyping.
raise TypeError(
'TypedDict does not support instance and class checks')
__instancecheck__ = __subclasscheck__
_TypedDict = type.__new__(_TypedDictMeta,
'TypedDict', (), {})
@_ensure_subclassable(
lambda bases: (_TypedDict,))
def TypedDict(__typename, __fields=_marker, *, total=
True, **kwargs):
"""A simple typed namespace. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type such that a type checker will expect all
instances to have a certain set of keys, where each key
is
associated
with a value of a consistent type. This expectation
is not checked at runtime.
Usage::
class Point2D(TypedDict):
x: int
y: int
label: str
a: Point2D = {
'x': 1,
'y': 2,
'label':
'good'}
# OK
b: Point2D = {
'z': 3,
'label':
'bad'}
# Fails type check
assert Point2D(x=1, y=2, label=
'first') == dict(x=1, y=2, label=
'first')
The type info can be accessed via the Point2D.__annotations__ dict,
and
the Point2D.__required_keys__
and Point2D.__optional_keys__ frozensets.
TypedDict supports an additional equivalent form::
Point2D = TypedDict(
'Point2D', {
'x': int,
'y': int,
'label': str})
By default, all keys must be present
in a TypedDict. It
is possible
to override this by specifying totality::
class Point2D(TypedDict, total=
False):
x: int
y: int
This means that a Point2D TypedDict can have any of the keys omitted. A type
checker
is only expected to support a literal
False or True as the value of
the total argument.
True is the default,
and makes all items defined
in the
class body be required.
The Required
and NotRequired special forms can also be used to mark
individual keys
as being required
or not required::
class Point2D(TypedDict):
x: int
# the "x" key must always be present (Required is the default)
y: NotRequired[int]
# the "y" key can be omitted
See PEP 655
for more details on Required
and NotRequired.
"""
if __fields
is _marker
or __fields
is None:
if __fields
is _marker:
deprecated_thing =
"Failing to pass a value for the 'fields' parameter"
else:
deprecated_thing =
"Passing `None` as the 'fields' parameter"
example = f
"`{__typename} = TypedDict({__typename!r}, {{}})`"
deprecation_msg = (
f
"{deprecated_thing} is deprecated and will be disallowed in "
"Python 3.15. To create a TypedDict class with 0 fields "
"using the functional syntax, pass an empty dictionary, e.g. "
) + example +
"."
warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
__fields = kwargs
elif kwargs:
raise TypeError(
"TypedDict takes either a dict or keyword arguments,"
" but not both")
if kwargs:
warnings.warn(
"The kwargs-based syntax for TypedDict definitions is deprecated "
"in Python 3.11, will be removed in Python 3.13, and may not be "
"understood by third-party type checkers.",
DeprecationWarning,
stacklevel=2,
)
ns = {
'__annotations__': dict(__fields)}
module = _caller()
if module
is not None:
# Setting correct module is necessary to make typed dict classes pickleable.
ns[
'__module__'] = module
td = _TypedDictMeta(__typename, (), ns, total=total)
td.__orig_bases__ = (TypedDict,)
return td
if hasattr(typing,
"_TypedDictMeta"):
_TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
else:
_TYPEDDICT_TYPES = (_TypedDictMeta,)
def is_typeddict(tp):
    """Check if an annotation is a TypedDict class.

    For example::

        class Film(TypedDict):
            title: str
            year: int

        is_typeddict(Film)              # => True
        is_typeddict(Union[list, str])  # => False
    """
    # On 3.8, typing.TypedDict itself would otherwise be reported as a
    # TypedDict class; the special form is not one, so filter it out first.
    stdlib_td = getattr(typing, "TypedDict", None)
    if stdlib_td is not None and tp is stdlib_td:
        return False
    return isinstance(tp, _TYPEDDICT_TYPES)
if hasattr(typing, "assert_type"):
    # Python 3.11+ ships assert_type; reuse it directly.
    assert_type = typing.assert_type
else:
    def assert_type(__val, __typ):
        """Assert (to the type checker) that the value is of the given type.

        When the type checker encounters a call to assert_type(), it
        emits an error if the value is not of the specified type::

            def greet(name: str) -> None:
                assert_type(name, str)  # ok
                assert_type(name, int)  # type checker error

        At runtime this returns the first argument unchanged and otherwise
        does nothing.
        """
        return __val
if hasattr(typing, "Required"):
    # Python 3.11+: the stdlib get_type_hints already understands
    # Required/NotRequired, so it can be reused unchanged.
    get_type_hints = typing.get_type_hints
else:
    # replaces _strip_annotations()
    def _strip_extras(t):
        """Strips Annotated, Required and NotRequired from a given type."""
        if isinstance(t, _AnnotatedAlias):
            return _strip_extras(t.__origin__)
        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
            return _strip_extras(t.__args__[0])
        if isinstance(t, typing._GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                # Nothing was stripped below this node; reuse the object.
                return t
            return t.copy_with(stripped_args)
        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return _types.GenericAlias(t.__origin__, stripped_args)
        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            # X | Y unions have no copy_with; rebuild them with the | operator.
            return functools.reduce(operator.or_, stripped_args)

        return t

    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
        """Return type hints for an object.

        This is often the same as obj.__annotations__, but it handles
        forward references encoded as string literals, adds Optional[t] if a
        default value equal to None is set and recursively replaces all
        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
        (unless 'include_extras=True').

        The argument may be a module, class, method, or function. The annotations
        are returned as a dictionary. For classes, annotations include also
        inherited members.

        TypeError is raised if the argument is not of a type that can contain
        annotations, and an empty dictionary is returned if no annotations are
        present.

        BEWARE -- the behavior of globalns and localns is counterintuitive
        (unless you are familiar with how eval() and exec() work).  The
        search order is locals first, then globals.

        - If no dict arguments are passed, an attempt is made to use the
          globals from obj (or the respective module's globals for classes),
          and these are also used as the locals.  If the object does not appear
          to have globals, an empty dictionary is used.

        - If one dict argument is passed, it is used for both globals and
          locals.

        - If two dict arguments are passed, they specify globals and
          locals, respectively.
        """
        if hasattr(typing, "Annotated"):
            # Ask the stdlib for un-stripped hints, then strip below, so this
            # module's Required/NotRequired forms are also removed.
            hint = typing.get_type_hints(
                obj, globalns=globalns, localns=localns, include_extras=True
            )
        else:
            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
        if include_extras:
            return hint
        return {k: _strip_extras(t) for k, t in hint.items()}
# Python 3.9+ has PEP 593 (Annotated)
if hasattr(typing, 'Annotated'):
    Annotated = typing.Annotated
    # Not exported and not a public API, but needed for get_origin() and get_args()
    # to work.
    _AnnotatedAlias = typing._AnnotatedAlias
# 3.7-3.8
else:
    class _AnnotatedAlias(typing._GenericAlias, _root=True):
        """Runtime representation of an annotated type.

        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
        with extra annotations. The alias behaves like a normal typing alias,
        instantiating is the same as instantiating the underlying type, binding
        it to types is also the same.
        """

        def __init__(self, origin, metadata):
            if isinstance(origin, _AnnotatedAlias):
                # Flatten nested Annotated: merge the inner metadata in front.
                metadata = origin.__metadata__ + metadata
                origin = origin.__origin__
            # origin is passed twice: as the origin and as the sole type arg.
            super().__init__(origin, origin)
            self.__metadata__ = metadata

        def copy_with(self, params):
            # Substitution only ever replaces the single underlying type;
            # the metadata is carried over untouched.
            assert len(params) == 1
            new_type = params[0]
            return _AnnotatedAlias(new_type, self.__metadata__)

        def __repr__(self):
            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
                    f"{', '.join(repr(a) for a in self.__metadata__)}]")

        def __reduce__(self):
            # Pickle as a re-subscription of Annotated.
            return operator.getitem, (
                Annotated, (self.__origin__,) + self.__metadata__
            )

        def __eq__(self, other):
            if not isinstance(other, _AnnotatedAlias):
                return NotImplemented
            if self.__origin__ != other.__origin__:
                return False
            return self.__metadata__ == other.__metadata__

        def __hash__(self):
            return hash((self.__origin__, self.__metadata__))

    class Annotated:
        """Add context specific metadata to a type.

        Example: Annotated[int, runtime_check.Unsigned] indicates to the
        hypothetical runtime_check module that this type is an unsigned int.
        Every other consumer of this type can ignore this metadata and treat
        this type as int.

        The first argument to Annotated must be a valid type (and will be in
        the __origin__ field), the remaining arguments are kept as a tuple in
        the __extra__ field.

        Details:

        - It's an error to call `Annotated` with less than two arguments.
        - Nested Annotated are flattened::

            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]

        - Instantiating an annotated type is equivalent to instantiating the
          underlying type::

            Annotated[C, Ann1](5) == C(5)

        - Annotated can be used as a generic type alias::

            Optimized = Annotated[T, runtime.Optimize()]
            Optimized[int] == Annotated[int, runtime.Optimize()]

            OptimizedList = Annotated[List[T], runtime.Optimize()]
            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
        """

        __slots__ = ()

        def __new__(cls, *args, **kwargs):
            raise TypeError("Type Annotated cannot be instantiated.")

        @typing._tp_cache
        def __class_getitem__(cls, params):
            if not isinstance(params, tuple) or len(params) < 2:
                raise TypeError("Annotated[...] should be used "
                                "with at least two arguments (a type and an "
                                "annotation).")
            # ClassVar/Final may legally wrap the annotated type, so skip the
            # usual "must be a type" check for them.
            allowed_special_forms = (ClassVar, Final)
            if get_origin(params[0]) in allowed_special_forms:
                origin = params[0]
            else:
                msg = "Annotated[t, ...]: t must be a type."
                origin = typing._type_check(params[0], msg)
            metadata = tuple(params[1:])
            return _AnnotatedAlias(origin, metadata)

        def __init_subclass__(cls, *args, **kwargs):
            raise TypeError(
                f"Cannot subclass {cls.__module__}.Annotated"
            )
# Python 3.8 has get_origin() and get_args() but those implementations aren't
# Annotated-aware, so we can't use those. Python 3.9's versions don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
if sys.version_info[:2] >= (3, 10):
get_origin = typing.get_origin
get_args = typing.get_args
# 3.7-3.9
else:
try:
# 3.9+
from typing
import _BaseGenericAlias
except ImportError:
_BaseGenericAlias = typing._GenericAlias
try:
# 3.9+
from typing
import GenericAlias
as _typing_GenericAlias
except ImportError:
_typing_GenericAlias = typing._GenericAlias
def get_origin(tp):
"""Get the unsubscripted version of a type.
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
and Annotated.
Return None for unsupported types. Examples::
get_origin(Literal[42])
is Literal
get_origin(int)
is None
get_origin(ClassVar[int])
is ClassVar
get_origin(Generic)
is Generic
get_origin(Generic[T])
is Generic
get_origin(Union[T, int])
is Union
get_origin(List[Tuple[T, T]][int]) == list
get_origin(P.args)
is P
"""
if isinstance(tp, _AnnotatedAlias):
return Annotated
if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
ParamSpecArgs, ParamSpecKwargs)):
return tp.__origin__
if tp
is typing.Generic:
return typing.Generic
return None
def get_args(tp):
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
return (tp.__origin__,) + tp.__metadata__
if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
if getattr(tp,
"_special",
False):
return ()
res = tp.__args__
if get_origin(tp)
is collections.abc.Callable
and res[0]
is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return ()
# 3.10+
if hasattr(typing, 'TypeAlias'):
    TypeAlias = typing.TypeAlias
# 3.9
elif sys.version_info[:2] >= (3, 9):
    @_ExtensionsSpecialForm
    def TypeAlias(self, parameters):
        """Special marker indicating that an assignment should
        be recognized as a proper type alias definition by type
        checkers.

        For example::

            Predicate: TypeAlias = Callable[..., bool]

        It's invalid when used anywhere except as in the example above.
        """
        # TypeAlias is a bare marker; subscripting it is always an error.
        raise TypeError(f"{self} is not subscriptable")
# 3.7-3.8
else:
    TypeAlias = _ExtensionsSpecialForm(
        'TypeAlias',
        doc="""Special marker indicating that an assignment should
        be recognized as a proper type alias definition by type
        checkers.

        For example::

            Predicate: TypeAlias = Callable[..., bool]

        It's invalid when used anywhere except as in the example
        above."""
    )
def _set_default(type_param, default):
if isinstance(default, (tuple, list)):
type_param.__default__ = tuple((typing._type_check(d,
"Default must be a type")
for d
in default))
elif default != _marker:
type_param.__default__ = typing._type_check(default,
"Default must be a type")
else:
type_param.__default__ =
None
def _set_module(typevarlike):
    """Stamp the defining module onto *typevarlike* so it can be pickled."""
    # for pickling:
    # depth=3: skip this helper and the wrapper __new__ to reach the caller.
    def_mod = _caller(depth=3)
    if def_mod != 'typing_extensions':
        typevarlike.__module__ = def_mod
class _DefaultMixin:
    """Mixin for TypeVarLike defaults."""

    __slots__ = ()
    # Reuse the module-level helper directly: __init__(self, default)
    # type-checks the default and stores it on self.__default__.
    __init__ = _set_default
# Classes using this metaclass must provide a _backported_typevarlike ClassVar
class _TypeVarLikeMeta(type):
    def __instancecheck__(cls, __instance: Any) -> bool:
        # Instances produced by the wrapper classes below are really
        # instances of the stdlib construct (typing.TypeVar/ParamSpec),
        # so delegate isinstance() checks to that type.
        return isinstance(__instance, cls._backported_typevarlike)
# Add default and infer_variance parameters from PEP 696 and 695
class TypeVar(metaclass=_TypeVarLikeMeta):
    """Type variable."""

    # Consumed by _TypeVarLikeMeta.__instancecheck__: instances returned by
    # __new__ are genuine typing.TypeVar objects.
    _backported_typevarlike = typing.TypeVar

    def __new__(cls, name, *constraints, bound=None,
                covariant=False, contravariant=False,
                default=_marker, infer_variance=False):
        if hasattr(typing, "TypeAliasType"):
            # PEP 695 implemented, can pass infer_variance to typing.TypeVar
            tv = typing.TypeVar(name, *constraints, bound=bound,
                                covariant=covariant,
                                contravariant=contravariant,
                                infer_variance=infer_variance)
        else:
            tv = typing.TypeVar(name, *constraints, bound=bound,
                                covariant=covariant,
                                contravariant=contravariant)
            if infer_variance and (covariant or contravariant):
                raise ValueError("Variance cannot be specified with infer_variance.")
            tv.__infer_variance__ = infer_variance
        _set_default(tv, default)
        _set_module(tv)
        return tv

    def __init_subclass__(cls) -> None:
        raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
# Python 3.10+ has PEP 612
if hasattr(typing, 'ParamSpecArgs'):
    ParamSpecArgs = typing.ParamSpecArgs
    ParamSpecKwargs = typing.ParamSpecKwargs
# 3.7-3.9
else:
    class _Immutable:
        """Mixin to indicate that object should not be copied."""

        __slots__ = ()

        def __copy__(self):
            return self

        def __deepcopy__(self, memo):
            return self

    class ParamSpecArgs(_Immutable):
        """The args for a ParamSpec object.

        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.

        ParamSpecArgs objects have a reference back to their ParamSpec:

            P.args.__origin__ is P

        This type is meant for runtime introspection and has no special
        meaning to static type checkers.
        """

        def __init__(self, origin):
            self.__origin__ = origin

        def __repr__(self):
            return f"{self.__origin__.__name__}.args"

        def __eq__(self, other):
            if isinstance(other, ParamSpecArgs):
                return self.__origin__ == other.__origin__
            return NotImplemented

    class ParamSpecKwargs(_Immutable):
        """The kwargs for a ParamSpec object.

        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.

        ParamSpecKwargs objects have a reference back to their ParamSpec:

            P.kwargs.__origin__ is P

        This type is meant for runtime introspection and has no special
        meaning to static type checkers.
        """

        def __init__(self, origin):
            self.__origin__ = origin

        def __repr__(self):
            return f"{self.__origin__.__name__}.kwargs"

        def __eq__(self, other):
            if isinstance(other, ParamSpecKwargs):
                return self.__origin__ == other.__origin__
            return NotImplemented
# 3.10+
if hasattr(typing, 'ParamSpec'):

    # Add default parameter - PEP 696
    class ParamSpec(metaclass=_TypeVarLikeMeta):
        """Parameter specification."""

        # Consumed by _TypeVarLikeMeta.__instancecheck__: __new__ returns
        # genuine typing.ParamSpec objects.
        _backported_typevarlike = typing.ParamSpec

        def __new__(cls, name, *, bound=None,
                    covariant=False, contravariant=False,
                    infer_variance=False, default=_marker):
            if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented, can pass infer_variance to typing.TypeVar
                paramspec = typing.ParamSpec(name, bound=bound,
                                             covariant=covariant,
                                             contravariant=contravariant,
                                             infer_variance=infer_variance)
            else:
                paramspec = typing.ParamSpec(name, bound=bound,
                                             covariant=covariant,
                                             contravariant=contravariant)
                paramspec.__infer_variance__ = infer_variance

            _set_default(paramspec, default)
            _set_module(paramspec)
            return paramspec

        def __init_subclass__(cls) -> None:
            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")

# 3.7-3.9
else:

    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
    class ParamSpec(list, _DefaultMixin):
        """Parameter specification variable.

        Usage::

           P = ParamSpec('P')

        Parameter specification variables exist primarily for the benefit of static
        type checkers.  They are used to forward the parameter types of one
        callable to another callable, a pattern commonly found in higher order
        functions and decorators.  They are only valid when used in ``Concatenate``,
        or as the first argument to ``Callable``. In Python 3.10 and higher,
        they are also supported in user-defined Generics at runtime.
        See class Generic for more information on generic types.  An
        example for annotating a decorator::

           T = TypeVar('T')
           P = ParamSpec('P')

           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
               '''A type-safe decorator to add logging to a function.'''
               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
                   logging.info(f'{f.__name__} was called')
                   return f(*args, **kwargs)
               return inner

           @add_logging
           def add_two(x: float, y: float) -> float:
               '''Add two numbers together.'''
               return x + y

        Parameter specification variables defined with covariant=True or
        contravariant=True can be used to declare covariant or contravariant
        generic types.  These keyword arguments are valid, but their actual semantics
        are yet to be decided.  See PEP 612 for details.

        Parameter specification variables can be introspected. e.g.:

           P.__name__ == 'P'
           P.__bound__ == None
           P.__covariant__ == False
           P.__contravariant__ == False

        Note that only parameter specification variables defined in global scope can
        be pickled.
        """

        # Trick Generic __parameters__.
        __class__ = typing.TypeVar

        @property
        def args(self):
            return ParamSpecArgs(self)

        @property
        def kwargs(self):
            return ParamSpecKwargs(self)

        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
                     infer_variance=False, default=_marker):
            # The list contains only self, so Generic's machinery sees this
            # object as its own single "type argument".
            super().__init__([self])
            self.__name__ = name
            self.__covariant__ = bool(covariant)
            self.__contravariant__ = bool(contravariant)
            self.__infer_variance__ = bool(infer_variance)
            if bound:
                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
            else:
                self.__bound__ = None
            _DefaultMixin.__init__(self, default)

            # for pickling:
            def_mod = _caller()
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

        def __repr__(self):
            if self.__infer_variance__:
                prefix = ''
            elif self.__covariant__:
                prefix = '+'
            elif self.__contravariant__:
                prefix = '-'
            else:
                prefix = '~'
            return prefix + self.__name__

        def __hash__(self):
            # Identity-based hash/eq: two ParamSpecs with the same name are
            # still distinct variables.
            return object.__hash__(self)

        def __eq__(self, other):
            return self is other

        def __reduce__(self):
            # Pickle by name; only globally-defined ParamSpecs round-trip.
            return self.__name__

        # Hack to get typing._type_check to pass.
        def __call__(self, *args, **kwargs):
            pass
# 3.7-3.9
if not hasattr(typing, 'Concatenate'):
    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
    class _ConcatenateGenericAlias(list):

        # Trick Generic into looking into this for __parameters__.
        __class__ = typing._GenericAlias

        # Flag in 3.8.
        _special = False

        def __init__(self, origin, args):
            # Keep args both in the list itself (for the Callable workaround)
            # and on __args__ (the typing-style introspection attribute).
            super().__init__(args)
            self.__origin__ = origin
            self.__args__ = args

        def __repr__(self):
            _type_repr = typing._type_repr
            return (f'{_type_repr(self.__origin__)}'
                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')

        def __hash__(self):
            return hash((self.__origin__, self.__args__))

        # Hack to get typing._type_check to pass in Generic.
        def __call__(self, *args, **kwargs):
            pass

        @property
        def __parameters__(self):
            # Free parameters are the contained TypeVars and ParamSpecs.
            return tuple(
                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
            )
# 3.7-3.9
@typing._tp_cache
def _concatenate_getitem(self, parameters):
    """Validate Concatenate[...] subscription and build the backported alias."""
    if parameters == ():
        raise TypeError("Cannot take a Concatenate of no types.")
    if not isinstance(parameters, tuple):
        parameters = (parameters,)
    # The trailing argument must always be a ParamSpec variable.
    if not isinstance(parameters[-1], ParamSpec):
        raise TypeError("The last parameter to Concatenate should be a "
                        "ParamSpec variable.")
    msg = "Concatenate[arg, ...]: each arg must be a type."
    checked = tuple(typing._type_check(p, msg) for p in parameters)
    return _ConcatenateGenericAlias(self, checked)
# 3.10+
if hasattr(typing,
'Concatenate'):
Concatenate = typing.Concatenate
_ConcatenateGenericAlias = typing._ConcatenateGenericAlias
# noqa: F811
# 3.9
elif sys.version_info[:2] >= (3, 9):
@_ExtensionsSpecialForm
def Concatenate(self, parameters):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes
or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612
for detailed information.
"""
return _concatenate_getitem(self, parameters)
# 3.7-8
else:
class _ConcatenateForm(_ExtensionsSpecialForm, _root=
True):
def __getitem__(self, parameters):
return _concatenate_getitem(self, parameters)
Concatenate = _ConcatenateForm(
'Concatenate',
doc=
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes
or transforms parameters of a
callable.
For example::
--> --------------------
--> maximum size reached
--> --------------------