Commit | Line | Data |
---|---|---|
e8f4ebf5 SM |
1 | import abc |
2 | from abc import abstractmethod, abstractproperty | |
3 | import collections | |
4 | import contextlib | |
5 | import functools | |
6 | import re as stdlib_re # Avoid confusion with the re we export. | |
7 | import sys | |
8 | import types | |
9 | try: | |
10 | import collections.abc as collections_abc | |
11 | except ImportError: | |
12 | import collections as collections_abc # Fallback for PY3.2. | |
13 | if sys.version_info[:2] >= (3, 6): | |
14 | import _collections_abc # Needed for private function _check_methods # noqa | |
15 | try: | |
16 | from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType | |
17 | except ImportError: | |
18 | WrapperDescriptorType = type(object.__init__) | |
19 | MethodWrapperType = type(object().__str__) | |
20 | MethodDescriptorType = type(str.join) | |
21 | ||
22 | ||
23 | # Please keep __all__ alphabetized within each category. | |
24 | __all__ = [ | |
25 | # Super-special typing primitives. | |
26 | 'Any', | |
27 | 'Callable', | |
28 | 'ClassVar', | |
29 | 'Generic', | |
30 | 'Optional', | |
31 | 'Tuple', | |
32 | 'Type', | |
33 | 'TypeVar', | |
34 | 'Union', | |
35 | ||
36 | # ABCs (from collections.abc). | |
37 | 'AbstractSet', # collections.abc.Set. | |
38 | 'GenericMeta', # subclass of abc.ABCMeta and a metaclass | |
39 | # for 'Generic' and ABCs below. | |
40 | 'ByteString', | |
41 | 'Container', | |
42 | 'ContextManager', | |
43 | 'Hashable', | |
44 | 'ItemsView', | |
45 | 'Iterable', | |
46 | 'Iterator', | |
47 | 'KeysView', | |
48 | 'Mapping', | |
49 | 'MappingView', | |
50 | 'MutableMapping', | |
51 | 'MutableSequence', | |
52 | 'MutableSet', | |
53 | 'Sequence', | |
54 | 'Sized', | |
55 | 'ValuesView', | |
56 | # The following are added depending on presence | |
57 | # of their non-generic counterparts in stdlib: | |
58 | # Awaitable, | |
59 | # AsyncIterator, | |
60 | # AsyncIterable, | |
61 | # Coroutine, | |
62 | # Collection, | |
63 | # AsyncGenerator, | |
64 | # AsyncContextManager | |
65 | ||
66 | # Structural checks, a.k.a. protocols. | |
67 | 'Reversible', | |
68 | 'SupportsAbs', | |
69 | 'SupportsBytes', | |
70 | 'SupportsComplex', | |
71 | 'SupportsFloat', | |
72 | 'SupportsIndex', | |
73 | 'SupportsInt', | |
74 | 'SupportsRound', | |
75 | ||
76 | # Concrete collection types. | |
77 | 'Counter', | |
78 | 'Deque', | |
79 | 'Dict', | |
80 | 'DefaultDict', | |
81 | 'List', | |
82 | 'Set', | |
83 | 'FrozenSet', | |
84 | 'NamedTuple', # Not really a type. | |
85 | 'Generator', | |
86 | ||
87 | # One-off things. | |
88 | 'AnyStr', | |
89 | 'cast', | |
90 | 'get_type_hints', | |
91 | 'NewType', | |
92 | 'no_type_check', | |
93 | 'no_type_check_decorator', | |
94 | 'NoReturn', | |
95 | 'overload', | |
96 | 'Text', | |
97 | 'TYPE_CHECKING', | |
98 | ] | |
99 | ||
100 | # The pseudo-submodules 're' and 'io' are part of the public | |
101 | # namespace, but excluded from __all__ because they might stomp on | |
102 | # legitimate imports of those modules. | |
103 | ||
104 | ||
105 | def _qualname(x): | |
106 | if sys.version_info[:2] >= (3, 3): | |
107 | return x.__qualname__ | |
108 | else: | |
109 | # Fall back to just name. | |
110 | return x.__name__ | |
111 | ||
112 | ||
113 | def _trim_name(nm): | |
114 | whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') | |
115 | if nm.startswith('_') and nm not in whitelist: | |
116 | nm = nm[1:] | |
117 | return nm | |
118 | ||
119 | ||
class TypingMeta(type):
    """Metaclass for most types defined in typing module
    (not a part of public API).

    This overrides __new__() to require an extra keyword parameter
    '_root', which serves as a guard against naive subclassing of the
    typing classes.  Any legitimate class defined using a metaclass
    derived from TypingMeta must pass _root=True.

    This also defines a dummy constructor (all the work for most typing
    constructs is done in __new__) and a nicer repr().
    """

    # Overridden to True by the protocol metaclass; consulted during
    # structural subclass checks elsewhere in the module.
    _is_protocol = False

    def __new__(cls, name, bases, namespace, *, _root=False):
        # Plain `class X(SomeTypingClass)` arrives here with the default
        # _root=False and is rejected; internal definitions opt in.
        if not _root:
            raise TypeError("Cannot subclass %s" %
                            (', '.join(map(_type_repr, bases)) or '()'))
        return super().__new__(cls, name, bases, namespace)

    def __init__(self, *args, **kwds):
        # Deliberately a no-op: all setup happens in __new__, and this
        # also swallows the extra _root keyword that type.__init__
        # would otherwise reject.
        pass

    def _eval_type(self, globalns, localns):
        """Override this in subclasses to interpret forward references.

        For example, List['C'] is internally stored as
        List[_ForwardRef('C')], which should evaluate to List[C],
        where C is an object found in globalns or localns (searching
        localns first, of course).
        """
        return self

    def _get_type_vars(self, tvars):
        # Default: contributes no type variables; generic subclasses override.
        pass

    def __repr__(self):
        # e.g. 'typing.Union' rather than "<class 'typing._Union'>".
        qname = _trim_name(_qualname(self))
        return '%s.%s' % (self.__module__, qname)
160 | ||
161 | ||
class _TypingBase(metaclass=TypingMeta, _root=True):
    """Internal indicator of special typing constructs."""

    # No per-instance __dict__; only weak references are allowed.
    __slots__ = ('__weakref__',)

    def __init__(self, *args, **kwds):
        # Accept and ignore arbitrary constructor arguments; subclasses
        # that need them do their own work in __init__/__new__.
        pass

    def __new__(cls, *args, **kwds):
        """Constructor.

        This only exists to give a better error message in case
        someone tries to subclass a special typing object (not a good idea).
        """
        if (len(args) == 3 and
                isinstance(args[0], str) and
                isinstance(args[1], tuple)):
            # Close enough.
            # (A metaclass-style (name, bases, namespace) call means
            # someone wrote `class X(SpecialForm): ...`.)
            raise TypeError("Cannot subclass %r" % cls)
        return super().__new__(cls)

    # Things that are not classes also need these.
    def _eval_type(self, globalns, localns):
        # Default: nothing to resolve; forward-reference holders override.
        return self

    def _get_type_vars(self, tvars):
        # Default: contributes no type variables.
        pass

    def __repr__(self):
        # e.g. 'typing.Optional' for the _Optional instance.
        cls = type(self)
        qname = _trim_name(_qualname(cls))
        return '%s.%s' % (cls.__module__, qname)

    def __call__(self, *args, **kwds):
        raise TypeError("Cannot instantiate %r" % type(self))
197 | ||
198 | ||
class _FinalTypingBase(_TypingBase, _root=True):
    """Internal mix-in class to prevent instantiation.

    Prevents instantiation unless _root=True is given in class call.
    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
    """

    __slots__ = ()

    def __new__(cls, *args, _root=False, **kwds):
        self = super().__new__(cls, *args, **kwds)
        if _root is True:
            return self
        raise TypeError("Cannot instantiate %r" % cls)

    def __reduce__(self):
        # Returning a bare string makes pickle treat it as a global name,
        # so unpickling yields the module-level singleton (e.g. 'Any').
        return _trim_name(type(self).__name__)
216 | ||
217 | ||
class _ForwardRef(_TypingBase, _root=True):
    """Internal wrapper to hold a forward reference."""

    __slots__ = ('__forward_arg__', '__forward_code__',
                 '__forward_evaluated__', '__forward_value__')

    def __init__(self, arg):
        # Compile eagerly so a malformed reference fails at annotation
        # time rather than at first evaluation.
        super().__init__(arg)
        if not isinstance(arg, str):
            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
        try:
            code = compile(arg, '<string>', 'eval')
        except SyntaxError:
            raise SyntaxError('Forward reference must be an expression -- got %r' %
                              (arg,))
        self.__forward_arg__ = arg
        self.__forward_code__ = code
        self.__forward_evaluated__ = False
        self.__forward_value__ = None

    def _eval_type(self, globalns, localns):
        # Re-evaluate when never evaluated, or when distinct namespaces
        # are supplied (the cached value may belong to other namespaces).
        if not self.__forward_evaluated__ or localns is not globalns:
            if globalns is None and localns is None:
                globalns = localns = {}
            elif globalns is None:
                globalns = localns
            elif localns is None:
                localns = globalns
            self.__forward_value__ = _type_check(
                eval(self.__forward_code__, globalns, localns),
                "Forward references must evaluate to types.")
            self.__forward_evaluated__ = True
        return self.__forward_value__

    def __eq__(self, other):
        if not isinstance(other, _ForwardRef):
            return NotImplemented
        # Once both sides are evaluated, the resolved values must agree too.
        if self.__forward_evaluated__ and other.__forward_evaluated__:
            return (self.__forward_arg__ == other.__forward_arg__ and
                    self.__forward_value__ == other.__forward_value__)
        return self.__forward_arg__ == other.__forward_arg__

    def __hash__(self):
        return hash(self.__forward_arg__)

    def __instancecheck__(self, obj):
        raise TypeError("Forward references cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Forward references cannot be used with issubclass().")

    def __repr__(self):
        return '_ForwardRef(%r)' % (self.__forward_arg__,)
271 | ||
272 | ||
class _TypeAlias(_TypingBase, _root=True):
    """Internal helper class for defining generic variants of concrete types.

    Note that this is not a type; let's call it a pseudo-type.  It cannot
    be used in instance and subclass checks in parameterized form, i.e.
    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
    ``False``.
    """

    __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')

    def __init__(self, name, type_var, impl_type, type_checker):
        """Initializer.

        Args:
            name: The name, e.g. 'Pattern'.
            type_var: The type parameter, e.g. AnyStr, or the
                specific type, e.g. str.
            impl_type: The implementation type.
            type_checker: Function that takes an impl_type instance
                and returns a value that should be a type_var instance.
        """
        assert isinstance(name, str), repr(name)
        assert isinstance(impl_type, type), repr(impl_type)
        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
        self.name = name
        self.type_var = type_var
        self.impl_type = impl_type
        self.type_checker = type_checker

    def __repr__(self):
        return "%s[%s]" % (self.name, _type_repr(self.type_var))

    def __getitem__(self, parameter):
        # Only an alias still parameterized by a TypeVar may be
        # subscripted, and only once; constrained TypeVars restrict the
        # acceptable substitutions.
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("%s cannot be further parameterized." % self)
        if self.type_var.__constraints__ and isinstance(parameter, type):
            if not issubclass(parameter, self.type_var.__constraints__):
                raise TypeError("%s is not a valid substitution for %s." %
                                (parameter, self.type_var))
        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
            raise TypeError("%s cannot be re-parameterized." % self)
        return self.__class__(self.name, parameter,
                              self.impl_type, self.type_checker)

    def __eq__(self, other):
        if not isinstance(other, _TypeAlias):
            return NotImplemented
        return self.name == other.name and self.type_var == other.type_var

    def __hash__(self):
        return hash((self.name, self.type_var))

    def __instancecheck__(self, obj):
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("Parameterized type aliases cannot be used "
                            "with isinstance().")
        # The unparameterized alias delegates to the implementation type.
        return isinstance(obj, self.impl_type)

    def __subclasscheck__(self, cls):
        if not isinstance(self.type_var, TypeVar):
            raise TypeError("Parameterized type aliases cannot be used "
                            "with issubclass().")
        return issubclass(cls, self.impl_type)
338 | ||
339 | ||
340 | def _get_type_vars(types, tvars): | |
341 | for t in types: | |
342 | if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): | |
343 | t._get_type_vars(tvars) | |
344 | ||
345 | ||
def _type_vars(types):
    """Return a tuple of the unique type variables found in *types*."""
    collected = []
    _get_type_vars(types, collected)
    return tuple(collected)
350 | ||
351 | ||
def _eval_type(t, globalns, localns):
    """Resolve forward references inside *t* if it is a typing construct;
    any other object is returned unchanged."""
    is_typing_construct = isinstance(t, (TypingMeta, _TypingBase))
    return t._eval_type(globalns, localns) if is_typing_construct else t
356 | ||
357 | ||
def _type_check(arg, msg):
    """Check that the argument is a type, and return it (internal helper).

    As a special case, accept None and return type(None) instead.
    Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.

    The msg argument is a human-readable error message, e.g.

        "Union[arg, ...]: arg should be a type."

    We append the repr() of the actual value (truncated to 100 chars).
    """
    if arg is None:
        return type(None)
    if isinstance(arg, str):
        # A bare string is an implicit forward reference.
        arg = _ForwardRef(arg)
    # Reject ClassVar (only valid at class-body top level) and anything
    # that is neither a type, a typing construct, nor callable.
    if (
        isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
        not isinstance(arg, (type, _TypingBase)) and not callable(arg)
    ):
        raise TypeError(msg + " Got %.100r." % (arg,))
    # Bare Union etc. are not valid as type arguments
    if (
        type(arg).__name__ in ('_Union', '_Optional') and
        not getattr(arg, '__origin__', None) or
        isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol)
    ):
        raise TypeError("Plain %s is not valid as type argument" % arg)
    return arg
387 | ||
388 | ||
389 | def _type_repr(obj): | |
390 | """Return the repr() of an object, special-casing types (internal helper). | |
391 | ||
392 | If obj is a type, we return a shorter version than the default | |
393 | type.__repr__, based on the module and qualified name, which is | |
394 | typically enough to uniquely identify a type. For everything | |
395 | else, we fall back on repr(obj). | |
396 | """ | |
397 | if isinstance(obj, type) and not isinstance(obj, TypingMeta): | |
398 | if obj.__module__ == 'builtins': | |
399 | return _qualname(obj) | |
400 | return '%s.%s' % (obj.__module__, _qualname(obj)) | |
401 | if obj is ...: | |
402 | return('...') | |
403 | if isinstance(obj, types.FunctionType): | |
404 | return obj.__name__ | |
405 | return repr(obj) | |
406 | ||
407 | ||
class _Any(_FinalTypingBase, _root=True):
    """The pseudo-type of an unconstrained value.

    From a static type checker's point of view, Any is compatible with
    every type, is assumed to support any attribute or method, and every
    value is assumed to be an instance of Any.  At runtime it is a
    singleton that rejects isinstance()/issubclass() checks outright.
    """

    __slots__ = ()

    def __instancecheck__(self, instance):
        raise TypeError("Any cannot be used with isinstance().")

    def __subclasscheck__(self, subclass):
        raise TypeError("Any cannot be used with issubclass().")


# The canonical singleton; _root=True bypasses _FinalTypingBase's guard.
Any = _Any(_root=True)
430 | ||
431 | ||
class _NoReturn(_FinalTypingBase, _root=True):
    """The pseudo-type of functions that never return normally.

    Example::

        from typing import NoReturn

        def stop() -> NoReturn:
            raise Exception('no way')

    Valid only in a return-annotation position; static checkers reject
    it elsewhere (e.g. ``List[NoReturn]``).  At runtime it is a
    singleton that rejects isinstance()/issubclass() checks.
    """

    __slots__ = ()

    def __instancecheck__(self, instance):
        raise TypeError("NoReturn cannot be used with isinstance().")

    def __subclasscheck__(self, subclass):
        raise TypeError("NoReturn cannot be used with issubclass().")


# The canonical singleton; _root=True bypasses _FinalTypingBase's guard.
NoReturn = _NoReturn(_root=True)
455 | ||
456 | ||
class TypeVar(_TypingBase, _root=True):
    """Type variable.

    Usage::

      T = TypeVar('T')  # Can be anything
      A = TypeVar('A', str, bytes)  # Must be str or bytes

    Type variables exist primarily for the benefit of static type
    checkers.  They serve as the parameters for generic types as well
    as for generic function definitions.  See class Generic for more
    information on generic types.  Generic functions work as follows:

      def repeat(x: T, n: int) -> List[T]:
          '''Return a list containing n references to x.'''
          return [x]*n

      def longest(x: A, y: A) -> A:
          '''Return the longest of two strings.'''
          return x if len(x) >= len(y) else y

    The latter example's signature is essentially the overloading
    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
    that if the arguments are instances of some subclass of str,
    the return type is still plain str.

    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.

    Type variables defined with covariant=True or contravariant=True
    can be used to declare covariant or contravariant generic types.
    See PEP 484 for more details.  By default generic types are invariant
    in all type variables.

    Type variables can be introspected, e.g.:

      T.__name__ == 'T'
      T.__constraints__ == ()
      T.__covariant__ == False
      T.__contravariant__ == False
      A.__constraints__ == (str, bytes)
    """

    __slots__ = ('__name__', '__bound__', '__constraints__',
                 '__covariant__', '__contravariant__')

    def __init__(self, name, *constraints, bound=None,
                 covariant=False, contravariant=False):
        super().__init__(name, *constraints, bound=bound,
                         covariant=covariant, contravariant=contravariant)
        self.__name__ = name
        if covariant and contravariant:
            raise ValueError("Bivariant types are not supported.")
        self.__covariant__ = bool(covariant)
        self.__contravariant__ = bool(contravariant)
        # Constraints and a bound are mutually exclusive, and a single
        # constraint is disallowed (use bound=... for that).
        if constraints and bound is not None:
            raise TypeError("Constraints cannot be combined with bound=...")
        if constraints and len(constraints) == 1:
            raise TypeError("A single constraint is not allowed")
        msg = "TypeVar(name, constraint, ...): constraints must be types."
        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
        if bound:
            self.__bound__ = _type_check(bound, "Bound must be a type.")
        else:
            self.__bound__ = None

    def _get_type_vars(self, tvars):
        # A type variable contributes itself (once) to the collected list.
        if self not in tvars:
            tvars.append(self)

    def __repr__(self):
        # '+' marks covariant, '-' contravariant, '~' invariant.
        if self.__covariant__:
            prefix = '+'
        elif self.__contravariant__:
            prefix = '-'
        else:
            prefix = '~'
        return prefix + self.__name__

    def __instancecheck__(self, instance):
        raise TypeError("Type variables cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Type variables cannot be used with issubclass().")
540 | ||
541 | ||
# Some unconstrained type variables.  These are used by the container types.
# (These are not for export.)
T = TypeVar('T')  # Any type.
KT = TypeVar('KT')  # Key type.
VT = TypeVar('VT')  # Value type.
T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.

# A useful type variable with constraints.  This represents string types.
# (This one *is* for export!)
AnyStr = TypeVar('AnyStr', bytes, str)
555 | ||
556 | ||
def _replace_arg(arg, tvars, args):
    """An internal helper function: replace arg if it is a type variable
    found in tvars with corresponding substitution from args or
    with corresponding substitution sub-tree if arg is a generic type.
    """
    tvars = [] if tvars is None else tvars
    # Generic constructs delegate to their own substitution tree.
    # (hasattr is checked first, mirroring attribute-probe semantics.)
    if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
        return arg._subs_tree(tvars, args)
    if isinstance(arg, TypeVar):
        # Positional lookup: tvars[i] is replaced by args[i].
        for idx, tvar in enumerate(tvars):
            if arg == tvar:
                return args[idx]
    return arg
572 | ||
573 | ||
574 | # Special typing constructs Union, Optional, Generic, Callable and Tuple | |
575 | # use three special attributes for internal bookkeeping of generic types: | |
576 | # * __parameters__ is a tuple of unique free type parameters of a generic | |
577 | # type, for example, Dict[T, T].__parameters__ == (T,); | |
578 | # * __origin__ keeps a reference to a type that was subscripted, | |
579 | # e.g., Union[T, int].__origin__ == Union; | |
580 | # * __args__ is a tuple of all arguments used in subscripting, | |
581 | # e.g., Dict[T, int].__args__ == (T, int). | |
582 | ||
583 | ||
584 | def _subs_tree(cls, tvars=None, args=None): | |
585 | """An internal helper function: calculate substitution tree | |
586 | for generic cls after replacing its type parameters with | |
587 | substitutions in tvars -> args (if any). | |
588 | Repeat the same following __origin__'s. | |
589 | ||
590 | Return a list of arguments with all possible substitutions | |
591 | performed. Arguments that are generic classes themselves are represented | |
592 | as tuples (so that no new classes are created by this function). | |
593 | For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] | |
594 | """ | |
595 | ||
596 | if cls.__origin__ is None: | |
597 | return cls | |
598 | # Make of chain of origins (i.e. cls -> cls.__origin__) | |
599 | current = cls.__origin__ | |
600 | orig_chain = [] | |
601 | while current.__origin__ is not None: | |
602 | orig_chain.append(current) | |
603 | current = current.__origin__ | |
604 | # Replace type variables in __args__ if asked ... | |
605 | tree_args = [] | |
606 | for arg in cls.__args__: | |
607 | tree_args.append(_replace_arg(arg, tvars, args)) | |
608 | # ... then continue replacing down the origin chain. | |
609 | for ocls in orig_chain: | |
610 | new_tree_args = [] | |
611 | for arg in ocls.__args__: | |
612 | new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) | |
613 | tree_args = new_tree_args | |
614 | return tree_args | |
615 | ||
616 | ||
def _remove_dups_flatten(parameters):
    """An internal helper for Union creation and substitution: flatten Union's
    among parameters, then remove duplicates and strict subclasses.
    """

    # Flatten out Union[Union[...], ...].
    # Unions may appear as _Union instances or, inside substitution
    # trees, as tuples whose first element is Union.
    params = []
    for p in parameters:
        if isinstance(p, _Union) and p.__origin__ is Union:
            params.extend(p.__args__)
        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
            params.extend(p[1:])
        else:
            params.append(p)
    # Weed out strict duplicates, preserving the first of each occurrence.
    all_params = set(params)
    if len(all_params) < len(params):
        new_params = []
        for t in params:
            if t in all_params:
                new_params.append(t)
                # Removing from the set marks this value as already seen.
                all_params.remove(t)
        params = new_params
        assert not all_params, all_params
    # Weed out subclasses.
    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
    # If object is present it will be sole survivor among proper classes.
    # Never discard type variables.
    # (In particular, Union[str, AnyStr] != AnyStr.)
    all_params = set(params)
    for t1 in params:
        if not isinstance(t1, type):
            continue
        # Parameterized generics (origin set) never absorb other members.
        if any(isinstance(t2, type) and issubclass(t1, t2)
               for t2 in all_params - {t1}
               if not (isinstance(t2, GenericMeta) and
                       t2.__origin__ is not None)):
            all_params.remove(t1)
    # Preserve original ordering of the survivors.
    return tuple(t for t in params if t in all_params)
656 | ||
657 | ||
658 | def _check_generic(cls, parameters): | |
659 | # Check correct count for parameters of a generic cls (internal helper). | |
660 | if not cls.__parameters__: | |
661 | raise TypeError("%s is not a generic class" % repr(cls)) | |
662 | alen = len(parameters) | |
663 | elen = len(cls.__parameters__) | |
664 | if alen != elen: | |
665 | raise TypeError("Too %s parameters for %s; actual %s, expected %s" % | |
666 | ("many" if alen > elen else "few", repr(cls), alen, elen)) | |
667 | ||
668 | ||
669 | _cleanups = [] | |
670 | ||
671 | ||
672 | def _tp_cache(func): | |
673 | """Internal wrapper caching __getitem__ of generic types with a fallback to | |
674 | original function for non-hashable arguments. | |
675 | """ | |
676 | ||
677 | cached = functools.lru_cache()(func) | |
678 | _cleanups.append(cached.cache_clear) | |
679 | ||
680 | @functools.wraps(func) | |
681 | def inner(*args, **kwds): | |
682 | try: | |
683 | return cached(*args, **kwds) | |
684 | except TypeError: | |
685 | pass # All real errors (not unhashable args) are raised below. | |
686 | return func(*args, **kwds) | |
687 | return inner | |
688 | ||
689 | ||
class _Union(_FinalTypingBase, _root=True):
    """Union type; Union[X, Y] means either X or Y.

    To define a union, use e.g. Union[int, str].  Details:

    - The arguments must be types and there must be at least one.

    - None as an argument is a special case and is replaced by
      type(None).

    - Unions of unions are flattened, e.g.::

        Union[Union[int, str], float] == Union[int, str, float]

    - Unions of a single argument vanish, e.g.::

        Union[int] == int  # The constructor actually returns int

    - Redundant arguments are skipped, e.g.::

        Union[int, str, int] == Union[int, str]

    - When comparing unions, the argument order is ignored, e.g.::

        Union[int, str] == Union[str, int]

    - When two arguments have a subclass relationship, the least
      derived argument is kept, e.g.::

        class Employee: pass
        class Manager(Employee): pass
        Union[int, Employee, Manager] == Union[int, Employee]
        Union[Manager, int, Employee] == Union[int, Employee]
        Union[Employee, Manager] == Employee

    - Similar for object::

        Union[int, object] == object

    - You cannot subclass or instantiate a union.

    - You can use Optional[X] as a shorthand for Union[X, None].
    """

    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')

    def __new__(cls, parameters=None, origin=None, *args, _root=False):
        # All state is set up in __new__ (not __init__) because the
        # flattening step below may return an existing object instead of
        # a fresh one.
        self = super().__new__(cls, parameters, origin, *args, _root=_root)
        if origin is None:
            # The unsubscripted Union singleton itself.
            self.__parameters__ = None
            self.__args__ = None
            self.__origin__ = None
            self.__tree_hash__ = hash(frozenset(('Union',)))
            return self
        if not isinstance(parameters, tuple):
            raise TypeError("Expected parameters=<tuple>")
        if origin is Union:
            parameters = _remove_dups_flatten(parameters)
            # It's not a union if there's only one type left.
            if len(parameters) == 1:
                return parameters[0]
        self.__parameters__ = _type_vars(parameters)
        self.__args__ = parameters
        self.__origin__ = origin
        # Pre-calculate the __hash__ on instantiation.
        # This improves speed for complex substitutions.
        subs_tree = self._subs_tree()
        if isinstance(subs_tree, tuple):
            self.__tree_hash__ = hash(frozenset(subs_tree))
        else:
            self.__tree_hash__ = hash(subs_tree)
        return self

    def _eval_type(self, globalns, localns):
        # Resolve forward references in every argument and in the origin;
        # rebuild only if something actually changed.
        if self.__args__ is None:
            return self
        ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
        ev_origin = _eval_type(self.__origin__, globalns, localns)
        if ev_args == self.__args__ and ev_origin == self.__origin__:
            # Everything is already evaluated.
            return self
        return self.__class__(ev_args, ev_origin, _root=True)

    def _get_type_vars(self, tvars):
        if self.__origin__ and self.__parameters__:
            _get_type_vars(self.__parameters__, tvars)

    def __repr__(self):
        if self.__origin__ is None:
            return super().__repr__()
        tree = self._subs_tree()
        if not isinstance(tree, tuple):
            # The union collapsed to a single type.
            return repr(tree)
        return tree[0]._tree_repr(tree)

    def _tree_repr(self, tree):
        # Render a substitution tree (see _subs_tree()) as 'Union[...]';
        # nested generics appear as sub-tuples and recurse.
        arg_list = []
        for arg in tree[1:]:
            if not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        return super().__repr__() + '[%s]' % ', '.join(arg_list)

    @_tp_cache
    def __getitem__(self, parameters):
        if parameters == ():
            raise TypeError("Cannot take a Union of no types.")
        if not isinstance(parameters, tuple):
            parameters = (parameters,)
        if self.__origin__ is None:
            msg = "Union[arg, ...]: each arg must be a type."
        else:
            msg = "Parameters to generic types must be types."
        parameters = tuple(_type_check(p, msg) for p in parameters)
        if self is not Union:
            # Subscripting an already-parameterized union substitutes its
            # type variables, so the arity must match.
            _check_generic(self, parameters)
        return self.__class__(parameters, origin=self, _root=True)

    def _subs_tree(self, tvars=None, args=None):
        if self is Union:
            return Union  # Nothing to substitute
        tree_args = _subs_tree(self, tvars, args)
        tree_args = _remove_dups_flatten(tree_args)
        if len(tree_args) == 1:
            return tree_args[0]  # Union of a single type is that type
        return (Union,) + tree_args

    def __eq__(self, other):
        # Equal tree hashes imply equal substitution trees (see __new__).
        if isinstance(other, _Union):
            return self.__tree_hash__ == other.__tree_hash__
        elif self is not Union:
            return self._subs_tree() == other
        else:
            return self is other

    def __hash__(self):
        return self.__tree_hash__

    def __instancecheck__(self, obj):
        raise TypeError("Unions cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Unions cannot be used with issubclass().")


Union = _Union(_root=True)
837 | ||
838 | ||
class _Optional(_FinalTypingBase, _root=True):
    """Optional type.

    Optional[X] is simply shorthand for Union[X, None], and the
    subscription below delegates accordingly.
    """

    __slots__ = ()

    @_tp_cache
    def __getitem__(self, item):
        item = _type_check(item, "Optional[t] requires a single type.")
        return Union[item, type(None)]


# The canonical singleton; _root=True bypasses _FinalTypingBase's guard.
Optional = _Optional(_root=True)
854 | ||
855 | ||
def _next_in_mro(cls):
    """Helper for Generic.__new__.

    Returns the class after the last occurrence of Generic or
    Generic[...] in cls.__mro__.
    """
    successor = object
    # Pair each MRO entry with the class that follows it and remember
    # the follower of the *last* Generic (or Generic[...]) occurrence.
    for current, following in zip(cls.__mro__, cls.__mro__[1:]):
        if isinstance(current, GenericMeta) and current._gorg is Generic:
            successor = following
    return successor
868 | ||
869 | ||
def _make_subclasshook(cls):
    """Construct a __subclasshook__ callable that incorporates
    the associated __extra__ class in subclass checks performed
    against cls.

    NOTE: GenericMeta.__new__ recognizes the returned hook by its
    __name__ ('__extrahook__') so it can re-install a fresh hook when
    the class is subclassed or re-created.
    """
    if isinstance(cls.__extra__, abc.ABCMeta):
        # The logic mirrors that of ABCMeta.__subclasscheck__.
        # Registered classes need not be checked here because
        # cls and its extra share the same _abc_registry.
        def __extrahook__(subclass):
            # Give the extra's own hook first say (it may return
            # True/False definitively).
            res = cls.__extra__.__subclasshook__(subclass)
            if res is not NotImplemented:
                return res
            if cls.__extra__ in subclass.__mro__:
                return True
            # Walk direct subclasses of the extra, skipping typing's own
            # generic aliases to avoid recursing back into GenericMeta.
            for scls in cls.__extra__.__subclasses__():
                if isinstance(scls, GenericMeta):
                    continue
                if issubclass(subclass, scls):
                    return True
            return NotImplemented
    else:
        # For non-ABC extras we'll just call issubclass().
        def __extrahook__(subclass):
            if cls.__extra__ and issubclass(subclass, cls.__extra__):
                return True
            return NotImplemented
    return __extrahook__
898 | ||
899 | ||
900 | def _no_slots_copy(dct): | |
901 | """Internal helper: copy class __dict__ and clean slots class variables. | |
902 | (They will be re-created if necessary by normal class machinery.) | |
903 | """ | |
904 | dict_copy = dict(dct) | |
905 | if '__slots__' in dict_copy: | |
906 | for slot in dict_copy['__slots__']: | |
907 | dict_copy.pop(slot, None) | |
908 | return dict_copy | |
909 | ||
910 | ||
class GenericMeta(TypingMeta, abc.ABCMeta):
    """Metaclass for generic types.

    This is a metaclass for typing.Generic and generic ABCs defined in
    typing module. User defined subclasses of GenericMeta can override
    __new__ and invoke super().__new__. Note that GenericMeta.__new__
    has strict rules on what is allowed in its bases argument:
    * plain Generic is disallowed in bases;
    * Generic[...] should appear in bases at most once;
    * if Generic[...] is present, then it should list all type variables
      that appear in other bases.
    In addition, type of all generic bases is erased, e.g., C[int] is
    stripped to plain C.
    """

    def __new__(cls, name, bases, namespace,
                tvars=None, args=None, origin=None, extra=None, orig_bases=None):
        """Create a new generic class. GenericMeta.__new__ accepts
        keyword arguments that are used for internal bookkeeping, therefore
        an override should pass unused keyword arguments to super().
        """
        if tvars is not None:
            # Called from __getitem__() below.
            assert origin is not None
            assert all(isinstance(t, TypeVar) for t in tvars), tvars
        else:
            # Called from class statement.
            assert tvars is None, tvars
            assert args is None, args
            assert origin is None, origin

            # Get the full set of tvars from the bases.
            tvars = _type_vars(bases)
            # Look for Generic[T1, ..., Tn].
            # If found, tvars must be a subset of it.
            # If not found, tvars is it.
            # Also check for and reject plain Generic,
            # and reject multiple Generic[...].
            gvars = None
            for base in bases:
                if base is Generic:
                    raise TypeError("Cannot inherit from plain Generic")
                if (isinstance(base, GenericMeta) and
                        base.__origin__ is Generic):
                    if gvars is not None:
                        raise TypeError(
                            "Cannot inherit from Generic[...] multiple types.")
                    gvars = base.__parameters__
            if gvars is None:
                gvars = tvars
            else:
                tvarset = set(tvars)
                gvarset = set(gvars)
                if not tvarset <= gvarset:
                    raise TypeError(
                        "Some type variables (%s) "
                        "are not listed in Generic[%s]" %
                        (", ".join(str(t) for t in tvars if t not in gvarset),
                         ", ".join(str(g) for g in gvars)))
                tvars = gvars

        # Remember the caller-supplied bases before type erasure, so
        # __orig_bases__ can preserve them for introspection.
        initial_bases = bases
        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
            bases = (extra,) + bases
        bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)

        # remove bare Generic from bases if there are other generic bases
        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
            bases = tuple(b for b in bases if b is not Generic)
        namespace.update({'__origin__': origin, '__extra__': extra,
                          '_gorg': None if not origin else origin._gorg})
        self = super().__new__(cls, name, bases, namespace, _root=True)
        # _gorg must bypass our own __setattr__ proxying, hence the
        # explicit super() call.
        super(GenericMeta, self).__setattr__('_gorg',
                                             self if not origin else origin._gorg)
        self.__parameters__ = tvars
        # Be prepared that GenericMeta will be subclassed by TupleMeta
        # and CallableMeta, those two allow ..., (), or [] in __args__.
        self.__args__ = tuple(... if a is _TypingEllipsis else
                              () if a is _TypingEmpty else
                              a for a in args) if args else None
        # Speed hack (https://github.com/python/typing/issues/196).
        self.__next_in_mro__ = _next_in_mro(self)
        # Preserve base classes on subclassing (__bases__ are type erased now).
        if orig_bases is None:
            self.__orig_bases__ = initial_bases

        # This allows unparameterized generic collections to be used
        # with issubclass() and isinstance() in the same way as their
        # collections.abc counterparts (e.g., isinstance([], Iterable)).
        # The hook is (re-)installed either when an extra is given and no
        # explicit __subclasshook__ was defined, or when re-creating a
        # class whose current hook is one of ours (named '__extrahook__').
        if (
            '__subclasshook__' not in namespace and extra or
            # allow overriding
            getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
        ):
            self.__subclasshook__ = _make_subclasshook(self)
        # Share the ABC registry/cache with the extra (or the origin) so
        # registrations made on either side are visible on both.
        if isinstance(extra, abc.ABCMeta):
            self._abc_registry = extra._abc_registry
            self._abc_cache = extra._abc_cache
        elif origin is not None:
            self._abc_registry = origin._abc_registry
            self._abc_cache = origin._abc_cache

        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
            self.__qualname__ = origin.__qualname__
        self.__tree_hash__ = (hash(self._subs_tree()) if origin else
                              super(GenericMeta, self).__hash__())
        return self

    # _abc_negative_cache and _abc_negative_cache_version
    # realised as descriptors, since GenClass[t1, t2, ...] always
    # share subclass info with GenClass.
    # This is an important memory optimization.
    @property
    def _abc_negative_cache(self):
        if isinstance(self.__extra__, abc.ABCMeta):
            return self.__extra__._abc_negative_cache
        return self._gorg._abc_generic_negative_cache

    @_abc_negative_cache.setter
    def _abc_negative_cache(self, value):
        # Only the original (unsubscripted) class stores the cache.
        if self.__origin__ is None:
            if isinstance(self.__extra__, abc.ABCMeta):
                self.__extra__._abc_negative_cache = value
            else:
                self._abc_generic_negative_cache = value

    @property
    def _abc_negative_cache_version(self):
        if isinstance(self.__extra__, abc.ABCMeta):
            return self.__extra__._abc_negative_cache_version
        return self._gorg._abc_generic_negative_cache_version

    @_abc_negative_cache_version.setter
    def _abc_negative_cache_version(self, value):
        # Only the original (unsubscripted) class stores the version.
        if self.__origin__ is None:
            if isinstance(self.__extra__, abc.ABCMeta):
                self.__extra__._abc_negative_cache_version = value
            else:
                self._abc_generic_negative_cache_version = value

    def _get_type_vars(self, tvars):
        # Collect free type variables of a subscripted generic into tvars.
        if self.__origin__ and self.__parameters__:
            _get_type_vars(self.__parameters__, tvars)

    def _eval_type(self, globalns, localns):
        # Resolve forward references in origin and args; re-create the
        # class only if anything actually changed.
        ev_origin = (self.__origin__._eval_type(globalns, localns)
                     if self.__origin__ else None)
        ev_args = tuple(_eval_type(a, globalns, localns) for a
                        in self.__args__) if self.__args__ else None
        if ev_origin == self.__origin__ and ev_args == self.__args__:
            return self
        return self.__class__(self.__name__,
                              self.__bases__,
                              _no_slots_copy(self.__dict__),
                              tvars=_type_vars(ev_args) if ev_args else None,
                              args=ev_args,
                              origin=ev_origin,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __repr__(self):
        if self.__origin__ is None:
            return super().__repr__()
        return self._tree_repr(self._subs_tree())

    def _tree_repr(self, tree):
        # Render a substitution tree as Name[arg, ...], recursing into
        # nested subscripted types.
        arg_list = []
        for arg in tree[1:]:
            if arg == ():
                arg_list.append('()')
            elif not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        return super().__repr__() + '[%s]' % ', '.join(arg_list)

    def _subs_tree(self, tvars=None, args=None):
        if self.__origin__ is None:
            return self
        tree_args = _subs_tree(self, tvars, args)
        return (self._gorg,) + tuple(tree_args)

    def __eq__(self, other):
        # Unsubscripted classes compare by identity; subscripted ones by
        # their cached substitution-tree hashes.
        if not isinstance(other, GenericMeta):
            return NotImplemented
        if self.__origin__ is None or other.__origin__ is None:
            return self is other
        return self.__tree_hash__ == other.__tree_hash__

    def __hash__(self):
        return self.__tree_hash__

    @_tp_cache
    def __getitem__(self, params):
        """Subscription entry point: C[int], Generic[T], etc. (cached)."""
        if not isinstance(params, tuple):
            params = (params,)
        if not params and self._gorg is not Tuple:
            raise TypeError(
                "Parameter list to %s[...] cannot be empty" % _qualname(self))
        msg = "Parameters to generic types must be types."
        params = tuple(_type_check(p, msg) for p in params)
        if self is Generic:
            # Generic can only be subscripted with unique type variables.
            if not all(isinstance(p, TypeVar) for p in params):
                raise TypeError(
                    "Parameters to Generic[...] must all be type variables")
            if len(set(params)) != len(params):
                raise TypeError(
                    "Parameters to Generic[...] must all be unique")
            tvars = params
            args = params
        elif self in (Tuple, Callable):
            tvars = _type_vars(params)
            args = params
        elif self is _Protocol:
            # _Protocol is internal, don't check anything.
            tvars = params
            args = params
        elif self.__origin__ in (Generic, _Protocol):
            # Can't subscript Generic[...] or _Protocol[...].
            raise TypeError("Cannot subscript already-subscripted %s" %
                            repr(self))
        else:
            # Subscripting a regular Generic subclass.
            _check_generic(self, params)
            tvars = _type_vars(params)
            args = params

        prepend = (self,) if self.__origin__ is None else ()
        return self.__class__(self.__name__,
                              prepend + self.__bases__,
                              _no_slots_copy(self.__dict__),
                              tvars=tvars,
                              args=args,
                              origin=self,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __subclasscheck__(self, cls):
        if self.__origin__ is not None:
            # Internal machinery in abc/functools is allowed to probe
            # parameterized generics; user code is not (hence the frame
            # inspection hack).
            if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
                raise TypeError("Parameterized generics cannot be used with class "
                                "or instance checks")
            return False
        if self is Generic:
            raise TypeError("Class %r cannot be used with class "
                            "or instance checks" % self)
        return super().__subclasscheck__(cls)

    def __instancecheck__(self, instance):
        # Since we extend ABC.__subclasscheck__ and
        # ABC.__instancecheck__ inlines the cache checking done by the
        # latter, we must extend __instancecheck__ too. For simplicity
        # we just skip the cache check -- instance checks for generic
        # classes are supposed to be rare anyways.
        return issubclass(instance.__class__, self)

    def __setattr__(self, attr, value):
        # We consider all the subscripted generics as proxies for original class
        if (
            attr.startswith('__') and attr.endswith('__') or
            attr.startswith('_abc_') or
            self._gorg is None  # The class is not fully created, see #typing/506
        ):
            super(GenericMeta, self).__setattr__(attr, value)
        else:
            # Redirect the write to the original (unsubscripted) class.
            super(GenericMeta, self._gorg).__setattr__(attr, value)
1178 | ||
1179 | ||
# Placeholder so that identity checks against Generic inside
# GenericMeta.__new__ don't crash while the real Generic class below
# is being created.
Generic = None
1182 | ||
1183 | ||
1184 | def _generic_new(base_cls, cls, *args, **kwds): | |
1185 | # Assure type is erased on instantiation, | |
1186 | # but attempt to store it in __orig_class__ | |
1187 | if cls.__origin__ is None: | |
1188 | if (base_cls.__new__ is object.__new__ and | |
1189 | cls.__init__ is not object.__init__): | |
1190 | return base_cls.__new__(cls) | |
1191 | else: | |
1192 | return base_cls.__new__(cls, *args, **kwds) | |
1193 | else: | |
1194 | origin = cls._gorg | |
1195 | if (base_cls.__new__ is object.__new__ and | |
1196 | cls.__init__ is not object.__init__): | |
1197 | obj = base_cls.__new__(origin) | |
1198 | else: | |
1199 | obj = base_cls.__new__(origin, *args, **kwds) | |
1200 | try: | |
1201 | obj.__orig_class__ = cls | |
1202 | except AttributeError: | |
1203 | pass | |
1204 | obj.__init__(*args, **kwds) | |
1205 | return obj | |
1206 | ||
1207 | ||
class Generic(metaclass=GenericMeta):
    """Abstract base class for generic types.

    A generic type is typically declared by inheriting from
    this class parameterized with one or more type variables.
    For example, a generic mapping type might be defined as::

      class Mapping(Generic[KT, VT]):
          def __getitem__(self, key: KT) -> VT:
              ...
          # Etc.

    This class can then be used as follows::

      def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
          try:
              return mapping[key]
          except KeyError:
              return default
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if cls._gorg is Generic:
            raise TypeError("Type Generic cannot be instantiated; "
                            "it can be used only as a base class")
        # __next_in_mro__ skips past Generic so a real base (usually
        # object) performs the allocation; see _next_in_mro().
        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
1236 | ||
1237 | ||
class _TypingEmpty:
    """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
    to allow empty list/tuple in specific places, without allowing them
    to sneak in where prohibited.
    """
    # Marker class only; never instantiated.
1243 | ||
1244 | ||
class _TypingEllipsis:
    """Internal placeholder for ... (ellipsis)."""
    # Marker class only; never instantiated.
1247 | ||
1248 | ||
class TupleMeta(GenericMeta):
    """Metaclass for Tuple (internal)."""

    @_tp_cache
    def __getitem__(self, parameters):
        if self.__origin__ is not None or self._gorg is not Tuple:
            # Normal generic rules apply if this is not the first subscription
            # or a subscription of a subclass.
            return super().__getitem__(parameters)
        if parameters == ():
            # Tuple[()] -- the empty tuple type, via the internal placeholder.
            return super().__getitem__((_TypingEmpty,))
        if not isinstance(parameters, tuple):
            parameters = (parameters,)
        if len(parameters) == 2 and parameters[1] is ...:
            # Tuple[t, ...] -- homogeneous variable-length tuple.
            msg = "Tuple[t, ...]: t must be a type."
            p = _type_check(parameters[0], msg)
            return super().__getitem__((p, _TypingEllipsis))
        msg = "Tuple[t0, t1, ...]: each t must be a type."
        parameters = tuple(_type_check(p, msg) for p in parameters)
        return super().__getitem__(parameters)

    def __instancecheck__(self, obj):
        if self.__args__ is None:
            # Bare Tuple behaves like plain tuple for isinstance().
            return isinstance(obj, tuple)
        raise TypeError("Parameterized Tuple cannot be used "
                        "with isinstance().")

    def __subclasscheck__(self, cls):
        if self.__args__ is None:
            # Bare Tuple behaves like plain tuple for issubclass().
            return issubclass(cls, tuple)
        raise TypeError("Parameterized Tuple cannot be used "
                        "with issubclass().")
1281 | ||
1282 | ||
class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.

    Example: Tuple[T1, T2] is a tuple of two elements corresponding
    to type variables T1 and T2. Tuple[int, float, str] is a tuple
    of an int, a float and a string.

    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if cls._gorg is Tuple:
            raise TypeError("Type Tuple cannot be instantiated; "
                            "use tuple() instead")
        # Subclasses (and parameterized forms) instantiate as real tuples.
        return _generic_new(tuple, cls, *args, **kwds)
1300 | ||
1301 | ||
class CallableMeta(GenericMeta):
    """Metaclass for Callable (internal)."""

    def __repr__(self):
        if self.__origin__ is None:
            return super().__repr__()
        return self._tree_repr(self._subs_tree())

    def _tree_repr(self, tree):
        if self._gorg is not Callable:
            return super()._tree_repr(tree)
        # For actual Callable (not its subclass) we override
        # super()._tree_repr() for nice formatting.
        arg_list = []
        for arg in tree[1:]:
            if not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        if arg_list[0] == '...':
            return repr(tree[0]) + '[..., %s]' % arg_list[1]
        return (repr(tree[0]) +
                '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))

    def __getitem__(self, parameters):
        """A thin wrapper around __getitem_inner__ to provide the latter
        with hashable arguments to improve speed.
        """

        if self.__origin__ is not None or self._gorg is not Callable:
            return super().__getitem__(parameters)
        if not isinstance(parameters, tuple) or len(parameters) != 2:
            raise TypeError("Callable must be used as "
                            "Callable[[arg, ...], result].")
        args, result = parameters
        if args is Ellipsis:
            parameters = (Ellipsis, result)
        else:
            if not isinstance(args, list):
                raise TypeError("Callable[args, result]: args must be a list."
                                " Got %.100r." % (args,))
            # Convert the (unhashable) argument list to a tuple so the
            # result can be cached by @_tp_cache on __getitem_inner__.
            parameters = (tuple(args), result)
        return self.__getitem_inner__(parameters)

    @_tp_cache
    def __getitem_inner__(self, parameters):
        args, result = parameters
        msg = "Callable[args, result]: result must be a type."
        result = _type_check(result, msg)
        if args is Ellipsis:
            # Callable[..., result]: arbitrary argument list.
            return super().__getitem__((_TypingEllipsis, result))
        msg = "Callable[[arg, ...], result]: each arg must be a type."
        args = tuple(_type_check(arg, msg) for arg in args)
        # Flatten to (arg0, arg1, ..., result) for the generic machinery.
        parameters = args + (result,)
        return super().__getitem__(parameters)
1357 | ||
1358 | ||
class Callable(extra=collections_abc.Callable, metaclass=CallableMeta):
    """Callable type; Callable[[int], str] is a function of (int) -> str.

    The subscription syntax must always be used with exactly two
    values: the argument list and the return type. The argument list
    must be a list of types or ellipsis; the return type must be a single type.

    There is no syntax to indicate optional or keyword arguments,
    such function types are rarely used as callback types.
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        if cls._gorg is Callable:
            raise TypeError("Type Callable cannot be instantiated; "
                            "use a non-abstract subclass instead")
        # Concrete subclasses allocate via the class after Generic in
        # the MRO; see _generic_new().
        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
1377 | ||
1378 | ||
class _ClassVar(_FinalTypingBase, _root=True):
    """Special type construct to mark class variables.

    An annotation wrapped in ClassVar indicates that a given
    attribute is intended to be used as a class variable and
    should not be set on instances of that class. Usage::

      class Starship:
          stats: ClassVar[Dict[str, int]] = {} # class variable
          damage: int = 10 # instance variable

    ClassVar accepts only types and cannot be further subscribed.

    Note that ClassVar is not a class itself, and should not
    be used with isinstance() or issubclass().
    """

    __slots__ = ('__type__',)

    def __init__(self, tp=None, **kwds):
        # __type__ is None for the bare ClassVar singleton; subscription
        # creates a new instance carrying the wrapped type.
        self.__type__ = tp

    def __getitem__(self, item):
        cls = type(self)
        if self.__type__ is None:
            # cls.__name__[1:] strips the leading underscore for the
            # user-facing name: '_ClassVar' -> 'ClassVar'.
            return cls(_type_check(item,
                       '{} accepts only single type.'.format(cls.__name__[1:])),
                       _root=True)
        raise TypeError('{} cannot be further subscripted'
                        .format(cls.__name__[1:]))

    def _eval_type(self, globalns, localns):
        # Resolve forward references inside the wrapped type; re-wrap
        # only if it changed.
        new_tp = _eval_type(self.__type__, globalns, localns)
        if new_tp == self.__type__:
            return self
        return type(self)(new_tp, _root=True)

    def __repr__(self):
        r = super().__repr__()
        if self.__type__ is not None:
            r += '[{}]'.format(_type_repr(self.__type__))
        return r

    def __hash__(self):
        return hash((type(self).__name__, self.__type__))

    def __eq__(self, other):
        if not isinstance(other, _ClassVar):
            return NotImplemented
        if self.__type__ is not None:
            return self.__type__ == other.__type__
        return self is other


ClassVar = _ClassVar(_root=True)
1434 | ||
1435 | ||
def cast(typ, val):
    """Cast a value to a type.

    Returns *val* unchanged.  To a static type checker this signals
    that the returned value has the designated type *typ*; at runtime
    we intentionally perform no check whatsoever (casts must be as
    fast as possible).
    """
    return val
1445 | ||
1446 | ||
1447 | def _get_defaults(func): | |
1448 | """Internal helper to extract the default arguments, by name.""" | |
1449 | try: | |
1450 | code = func.__code__ | |
1451 | except AttributeError: | |
1452 | # Some built-in functions don't have __code__, __defaults__, etc. | |
1453 | return {} | |
1454 | pos_count = code.co_argcount | |
1455 | arg_names = code.co_varnames | |
1456 | arg_names = arg_names[:pos_count] | |
1457 | defaults = func.__defaults__ or () | |
1458 | kwdefaults = func.__kwdefaults__ | |
1459 | res = dict(kwdefaults) if kwdefaults else {} | |
1460 | pos_offset = pos_count - len(defaults) | |
1461 | for name, value in zip(arg_names[pos_offset:], defaults): | |
1462 | assert name not in res | |
1463 | res[name] = value | |
1464 | return res | |
1465 | ||
1466 | ||
# Object kinds for which get_type_hints() returns an empty dict (rather
# than raising TypeError) when no __annotations__ are present.
_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
                  types.MethodType, types.ModuleType,
                  WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
1470 | ||
1471 | ||
def get_type_hints(obj, globalns=None, localns=None):
    """Return type hints for an object.

    This is often the same as obj.__annotations__, but it handles
    forward references encoded as string literals, and if necessary
    adds Optional[t] if a default value equal to None is set.

    The argument may be a module, class, method, or function. The annotations
    are returned as a dictionary. For classes, annotations include also
    inherited members.

    TypeError is raised if the argument is not of a type that can contain
    annotations, and an empty dictionary is returned if no annotations are
    present.

    BEWARE -- the behavior of globalns and localns is counterintuitive
    (unless you are familiar with how eval() and exec() work).  The
    search order is locals first, then globals.

    - If no dict arguments are passed, an attempt is made to use the
      globals from obj (or the respective module's globals for classes),
      and these are also used as the locals.  If the object does not appear
      to have globals, an empty dictionary is used.

    - If one dict argument is passed, it is used for both globals and
      locals.

    - If two dict arguments are passed, they specify globals and
      locals, respectively.
    """

    if getattr(obj, '__no_type_check__', None):
        return {}
    # Classes require a special treatment.
    if isinstance(obj, type):
        hints = {}
        # Walk the MRO base-first so subclasses override inherited hints.
        for base in reversed(obj.__mro__):
            if globalns is None:
                # Evaluate each base's annotations in that base's own module.
                base_globals = sys.modules[base.__module__].__dict__
            else:
                base_globals = globalns
            ann = base.__dict__.get('__annotations__', {})
            for name, value in ann.items():
                if value is None:
                    # A bare None annotation means type(None).
                    value = type(None)
                if isinstance(value, str):
                    # Strings are forward references; resolve lazily below.
                    value = _ForwardRef(value)
                value = _eval_type(value, base_globals, localns)
                hints[name] = value
        return hints

    if globalns is None:
        if isinstance(obj, types.ModuleType):
            globalns = obj.__dict__
        else:
            globalns = getattr(obj, '__globals__', {})
        if localns is None:
            localns = globalns
    elif localns is None:
        localns = globalns
    hints = getattr(obj, '__annotations__', None)
    if hints is None:
        # Return empty annotations for something that _could_ have them.
        if isinstance(obj, _allowed_types):
            return {}
        else:
            raise TypeError('{!r} is not a module, class, method, '
                            'or function.'.format(obj))
    defaults = _get_defaults(obj)
    hints = dict(hints)
    for name, value in hints.items():
        if value is None:
            value = type(None)
        if isinstance(value, str):
            value = _ForwardRef(value)
        value = _eval_type(value, globalns, localns)
        if name in defaults and defaults[name] is None:
            # A default of None makes the argument implicitly Optional.
            value = Optional[value]
        hints[name] = value
    return hints
1552 | ||
1553 | ||
def no_type_check(arg):
    """Decorator to indicate that annotations are not type hints.

    The argument must be a class or function; if it is a class, it
    applies recursively to all methods and classes defined in that class
    (but not to methods defined in its superclasses or subclasses).

    This mutates the function(s) or class(es) in place.
    """
    if isinstance(arg, type):
        # Work on a copy of the class dict, skipping entries that are
        # the class itself or one of its bases.
        members = dict(arg.__dict__)
        for name, value in arg.__dict__.items():
            if value in arg.__bases__ + (arg,):
                members.pop(name)
        for member in members.values():
            if isinstance(member, types.FunctionType):
                member.__no_type_check__ = True
            if isinstance(member, type):
                # Recurse into classes defined inside the class body.
                no_type_check(member)
    try:
        arg.__no_type_check__ = True
    except TypeError:
        # Built-in classes don't accept new attributes.
        pass
    return arg
1578 | ||
1579 | ||
def no_type_check_decorator(decorator):
    """Decorator to give another decorator the @no_type_check effect.

    This wraps the decorator with something that wraps the decorated
    function in @no_type_check.
    """

    @functools.wraps(decorator)
    def wrapped_decorator(*args, **kwds):
        # Apply the wrapped decorator first, then mark its result.
        return no_type_check(decorator(*args, **kwds))

    return wrapped_decorator
1594 | ||
1595 | ||
1596 | def _overload_dummy(*args, **kwds): | |
1597 | """Helper for @overload to raise when called.""" | |
1598 | raise NotImplementedError( | |
1599 | "You should not call an overloaded function. " | |
1600 | "A series of @overload-decorated functions " | |
1601 | "outside a stub module should always be followed " | |
1602 | "by an implementation that is not @overload-ed.") | |
1603 | ||
1604 | ||
def overload(func):
    """Decorator for overloaded functions/methods.

    In a stub file, place two or more stub definitions for the same
    function in a row, each decorated with @overload.  For example:

      @overload
      def utf8(value: None) -> None: ...
      @overload
      def utf8(value: bytes) -> bytes: ...
      @overload
      def utf8(value: str) -> bytes: ...

    In a non-stub file (i.e. a regular .py file), do the same but
    follow it with an implementation.  The implementation should *not*
    be decorated with @overload.  For example:

      @overload
      def utf8(value: None) -> None: ...
      @overload
      def utf8(value: bytes) -> bytes: ...
      @overload
      def utf8(value: str) -> bytes: ...
      def utf8(value):
          # implementation goes here
    """
    # All overload stubs share a single dummy that raises if called;
    # the real signatures only matter to static type checkers.
    return _overload_dummy
1632 | ||
1633 | ||
class _ProtocolMeta(GenericMeta):
    """Internal metaclass for _Protocol.

    This exists so _Protocol classes can be generic without deriving
    from Generic.
    """

    def __instancecheck__(self, obj):
        # Only direct subclasses of _Protocol (the protocol classes
        # themselves) forbid isinstance(); anything else defers to the
        # normal GenericMeta/ABC instance check.
        if _Protocol not in self.__bases__:
            return super().__instancecheck__(obj)
        raise TypeError("Protocols cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        if not self._is_protocol:
            # No structural checks since this isn't a protocol.
            return NotImplemented

        if self is _Protocol:
            # Every class is a subclass of the empty protocol.
            return True

        # Find all attributes defined in the protocol.
        attrs = self._get_protocol_attrs()

        # Structural check: cls conforms iff every protocol attribute
        # appears somewhere in its MRO.
        for attr in attrs:
            if not any(attr in d.__dict__ for d in cls.__mro__):
                return False
        return True

    def _get_protocol_attrs(self):
        # Return the set of attribute names that make up this protocol.
        # Get all Protocol base classes.
        protocol_bases = []
        for c in self.__mro__:
            if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
                protocol_bases.append(c)

        # Get attributes included in protocol.
        attrs = set()
        for base in protocol_bases:
            for attr in base.__dict__.keys():
                # Include attributes not defined in any non-protocol bases.
                for c in self.__mro__:
                    if (c is not base and attr in c.__dict__ and
                            not getattr(c, '_is_protocol', False)):
                        break
                else:
                    # for/else: reached only when no non-protocol class
                    # defines attr.  Filter out bookkeeping names added
                    # by abc/GenericMeta machinery so they don't become
                    # part of the structural contract.
                    if (not attr.startswith('_abc_') and
                            attr != '__abstractmethods__' and
                            attr != '__annotations__' and
                            attr != '__weakref__' and
                            attr != '_is_protocol' and
                            attr != '_gorg' and
                            attr != '__dict__' and
                            attr != '__args__' and
                            attr != '__slots__' and
                            attr != '_get_protocol_attrs' and
                            attr != '__next_in_mro__' and
                            attr != '__parameters__' and
                            attr != '__origin__' and
                            attr != '__orig_bases__' and
                            attr != '__extra__' and
                            attr != '__tree_hash__' and
                            attr != '__module__'):
                        attrs.add(attr)

        return attrs
1700 | ||
1701 | ||
class _Protocol(metaclass=_ProtocolMeta):
    """Internal base class for protocol classes.

    This implements a simple-minded structural issubclass check
    (similar but more general than the one-offs in collections.abc
    such as Hashable).
    """

    __slots__ = ()

    # Checked by _ProtocolMeta.__subclasscheck__ to enable the
    # structural check for subclasses.
    _is_protocol = True
1713 | ||
1714 | ||
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.

Hashable = collections_abc.Hashable  # Not generic; re-exported unchanged.
1719 | ||
1720 | ||
if hasattr(collections_abc, 'Awaitable'):
    # Generic version of collections.abc.Awaitable (the real ABC is
    # attached via the extra= keyword, consumed by GenericMeta).
    class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
        __slots__ = ()

    __all__.append('Awaitable')
1726 | ||
1727 | ||
if hasattr(collections_abc, 'Coroutine'):
    # Generic version of collections.abc.Coroutine.
    class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co],
                    extra=collections_abc.Coroutine):
        __slots__ = ()

    __all__.append('Coroutine')
1734 | ||
1735 | ||
if hasattr(collections_abc, 'AsyncIterable'):

    # Generic versions of the async iteration ABCs; exported only when
    # the stdlib counterparts exist.
    class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable):
        __slots__ = ()

    class AsyncIterator(AsyncIterable[T_co],
                        extra=collections_abc.AsyncIterator):
        __slots__ = ()

    __all__.append('AsyncIterable')
    __all__.append('AsyncIterator')
1747 | ||
1748 | ||
# Generic version of collections.abc.Iterable.
class Iterable(Generic[T_co], extra=collections_abc.Iterable):
    __slots__ = ()


# Generic version of collections.abc.Iterator.
class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
    __slots__ = ()
1755 | ||
1756 | ||
# Structural protocol: any class defining __int__ passes issubclass().
class SupportsInt(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __int__(self) -> int:
        pass


# Structural protocol: any class defining __float__.
class SupportsFloat(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __float__(self) -> float:
        pass


# Structural protocol: any class defining __complex__.
class SupportsComplex(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __complex__(self) -> complex:
        pass


# Structural protocol: any class defining __bytes__.
class SupportsBytes(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __bytes__(self) -> bytes:
        pass


# Structural protocol: any class defining __index__.
class SupportsIndex(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __index__(self) -> int:
        pass


# Generic structural protocol: any class defining __abs__.
class SupportsAbs(_Protocol[T_co]):
    __slots__ = ()

    @abstractmethod
    def __abs__(self) -> T_co:
        pass


# Generic structural protocol: any class defining __round__.
class SupportsRound(_Protocol[T_co]):
    __slots__ = ()

    @abstractmethod
    def __round__(self, ndigits: int = 0) -> T_co:
        pass
1811 | ||
1812 | ||
if hasattr(collections_abc, 'Reversible'):
    # Stdlib ABC available: wrap it as a generic alias.
    class Reversible(Iterable[T_co], extra=collections_abc.Reversible):
        __slots__ = ()
else:
    # No stdlib ABC: fall back to a structural protocol that just
    # requires __reversed__.
    class Reversible(_Protocol[T_co]):
        __slots__ = ()

        @abstractmethod
        def __reversed__(self) -> 'Iterator[T_co]':
            pass
1823 | ||
1824 | ||
Sized = collections_abc.Sized  # Not generic; re-exported unchanged.


# Generic version of collections.abc.Container.
class Container(Generic[T_co], extra=collections_abc.Container):
    __slots__ = ()
1830 | ||
1831 | ||
if hasattr(collections_abc, 'Collection'):
    # Generic version of collections.abc.Collection; exported only when
    # the stdlib ABC exists.
    class Collection(Sized, Iterable[T_co], Container[T_co],
                     extra=collections_abc.Collection):
        __slots__ = ()

    __all__.append('Collection')
1838 | ||
1839 | ||
1840 | # Callable was defined earlier. | |
1841 | ||
if hasattr(collections_abc, 'Collection'):
    # When Collection exists, derive through it.
    class AbstractSet(Collection[T_co],
                      extra=collections_abc.Set):
        __slots__ = ()
else:
    # Otherwise spell out Collection's three bases by hand.
    class AbstractSet(Sized, Iterable[T_co], Container[T_co],
                      extra=collections_abc.Set):
        __slots__ = ()
1850 | ||
1851 | ||
# Generic version of collections.abc.MutableSet (invariant in T).
class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
    __slots__ = ()


# NOTE: It is only covariant in the value type.
if hasattr(collections_abc, 'Collection'):
    class Mapping(Collection[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()
else:
    # Pre-Collection fallback with the same bases spelled out.
    class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()


# Generic version of collections.abc.MutableMapping (invariant in both
# parameters, unlike the read-only Mapping above).
class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
    __slots__ = ()
1869 | ||
1870 | ||
# Generic version of collections.abc.Sequence, picking the richest set
# of bases the running stdlib provides.
if hasattr(collections_abc, 'Reversible'):
    if hasattr(collections_abc, 'Collection'):
        class Sequence(Reversible[T_co], Collection[T_co],
                       extra=collections_abc.Sequence):
            __slots__ = ()
    else:
        class Sequence(Sized, Reversible[T_co], Container[T_co],
                       extra=collections_abc.Sequence):
            __slots__ = ()
else:
    class Sequence(Sized, Iterable[T_co], Container[T_co],
                   extra=collections_abc.Sequence):
        __slots__ = ()
1884 | ||
1885 | ||
# Generic version of collections.abc.MutableSequence (invariant in T).
class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
    __slots__ = ()


# ByteString is not parameterizable; it is a Sequence[int] alias for
# bytes-like types.
class ByteString(Sequence[int], extra=collections_abc.ByteString):
    __slots__ = ()
1892 | ||
1893 | ||
class List(list, MutableSequence[T], extra=list):
    """Generic version of list; may be subclassed but not instantiated."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # _gorg (set by GenericMeta) is the unsubscripted origin class,
        # so this rejects List() and List[int]() but allows subclasses.
        if cls._gorg is List:
            raise TypeError("Type List cannot be instantiated; "
                            "use list() instead")
        return _generic_new(list, cls, *args, **kwds)
1903 | ||
1904 | ||
class Deque(collections.deque, MutableSequence[T], extra=collections.deque):
    """Generic version of collections.deque."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Unlike List/Set/Dict, instantiating the plain generic is
        # allowed and simply returns a real collections.deque.
        if cls._gorg is Deque:
            return collections.deque(*args, **kwds)
        return _generic_new(collections.deque, cls, *args, **kwds)
1913 | ||
1914 | ||
class Set(set, MutableSet[T], extra=set):
    """Generic version of set; may be subclassed but not instantiated."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Reject direct instantiation of Set / Set[T]; see List.__new__.
        if cls._gorg is Set:
            raise TypeError("Type Set cannot be instantiated; "
                            "use set() instead")
        return _generic_new(set, cls, *args, **kwds)
1924 | ||
1925 | ||
class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
    """Generic version of frozenset; may be subclassed but not instantiated."""
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Reject direct instantiation; see List.__new__.
        if cls._gorg is FrozenSet:
            raise TypeError("Type FrozenSet cannot be instantiated; "
                            "use frozenset() instead")
        return _generic_new(frozenset, cls, *args, **kwds)
1934 | ||
1935 | ||
# Generic versions of the dict-view ABCs from collections.abc.

class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
    __slots__ = ()


class KeysView(MappingView[KT], AbstractSet[KT],
               extra=collections_abc.KeysView):
    __slots__ = ()


# Items iterate as (key, value) pairs, hence the Tuple[KT, VT_co]
# element type on both view bases.
class ItemsView(MappingView[Tuple[KT, VT_co]],
                AbstractSet[Tuple[KT, VT_co]],
                Generic[KT, VT_co],
                extra=collections_abc.ItemsView):
    __slots__ = ()


# Values are not a set (duplicates allowed), so only MappingView.
class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
    __slots__ = ()
1954 | ||
1955 | ||
if hasattr(contextlib, 'AbstractContextManager'):
    # Reuse the stdlib ABC when available.
    class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
        __slots__ = ()
else:
    # Fallback: hand-rolled equivalent of contextlib.AbstractContextManager.
    class ContextManager(Generic[T_co]):
        __slots__ = ()

        def __enter__(self):
            # Default: the context manager itself is the managed value.
            return self

        @abc.abstractmethod
        def __exit__(self, exc_type, exc_value, traceback):
            return None

        @classmethod
        def __subclasshook__(cls, C):
            # Structural check so issubclass() accepts any class whose
            # MRO defines both __enter__ and __exit__.
            if cls is ContextManager:
                # In Python 3.6+, it is possible to set a method to None to
                # explicitly indicate that the class does not implement an ABC
                # (https://bugs.python.org/issue25958), but we do not support
                # that pattern here because this fallback class is only used
                # in Python 3.5 and earlier.
                if (any("__enter__" in B.__dict__ for B in C.__mro__) and
                        any("__exit__" in B.__dict__ for B in C.__mro__)):
                    return True
            return NotImplemented
1982 | ||
1983 | ||
if hasattr(contextlib, 'AbstractAsyncContextManager'):
    # Reuse the stdlib ABC when available.
    class AsyncContextManager(Generic[T_co],
                              extra=contextlib.AbstractAsyncContextManager):
        __slots__ = ()

    __all__.append('AsyncContextManager')
elif sys.version_info[:2] >= (3, 5):
    # 'async def' is a syntax error on older interpreters, so the
    # fallback class is compiled lazily via exec() to keep this module
    # importable there.
    exec("""
class AsyncContextManager(Generic[T_co]):
    __slots__ = ()

    async def __aenter__(self):
        return self

    @abc.abstractmethod
    async def __aexit__(self, exc_type, exc_value, traceback):
        return None

    @classmethod
    def __subclasshook__(cls, C):
        if cls is AsyncContextManager:
            if sys.version_info[:2] >= (3, 6):
                return _collections_abc._check_methods(C, "__aenter__", "__aexit__")
            if (any("__aenter__" in B.__dict__ for B in C.__mro__) and
                    any("__aexit__" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented

__all__.append('AsyncContextManager')
""")
2014 | ||
2015 | ||
class Dict(dict, MutableMapping[KT, VT], extra=dict):
    """Generic version of dict; may be subclassed but not instantiated."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Reject direct instantiation of Dict / Dict[K, V]; see List.
        if cls._gorg is Dict:
            raise TypeError("Type Dict cannot be instantiated; "
                            "use dict() instead")
        return _generic_new(dict, cls, *args, **kwds)
2025 | ||
2026 | ||
class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
                  extra=collections.defaultdict):
    """Generic version of collections.defaultdict."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Instantiating the plain generic builds a real defaultdict.
        if cls._gorg is DefaultDict:
            return collections.defaultdict(*args, **kwds)
        return _generic_new(collections.defaultdict, cls, *args, **kwds)
2036 | ||
2037 | ||
class Counter(collections.Counter, Dict[T, int], extra=collections.Counter):
    """Generic version of collections.Counter (values are always int)."""

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Instantiating the plain generic builds a real Counter.
        if cls._gorg is Counter:
            return collections.Counter(*args, **kwds)
        return _generic_new(collections.Counter, cls, *args, **kwds)
2046 | ||
2047 | ||
if hasattr(collections, 'ChainMap'):
    # ChainMap only exists in 3.3+
    __all__.append('ChainMap')

    class ChainMap(collections.ChainMap, MutableMapping[KT, VT],
                   extra=collections.ChainMap):
        """Generic version of collections.ChainMap."""

        __slots__ = ()

        def __new__(cls, *args, **kwds):
            # Instantiating the plain generic builds a real ChainMap.
            if cls._gorg is ChainMap:
                return collections.ChainMap(*args, **kwds)
            return _generic_new(collections.ChainMap, cls, *args, **kwds)
2061 | ||
2062 | ||
# Determine what base class to use for Generator.
if hasattr(collections_abc, 'Generator'):
    # Sufficiently recent versions of 3.5 have a Generator ABC.
    _G_base = collections_abc.Generator
else:
    # Fall back on the exact type of generator objects.
    _G_base = types.GeneratorType
2070 | ||
2071 | ||
class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
                extra=_G_base):
    """Generic generator type; usable only via subclassing."""
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Generator objects come from generator functions, not from
        # calling this class, so direct instantiation is rejected.
        if cls._gorg is Generator:
            raise TypeError("Type Generator cannot be instantiated; "
                            "create a subclass instead")
        return _generic_new(_G_base, cls, *args, **kwds)
2081 | ||
2082 | ||
if hasattr(collections_abc, 'AsyncGenerator'):
    # Generic version of collections.abc.AsyncGenerator; exported only
    # when the stdlib ABC exists.
    class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra],
                         extra=collections_abc.AsyncGenerator):
        __slots__ = ()

    __all__.append('AsyncGenerator')
2089 | ||
2090 | ||
# Internal type variable used for Type[]; bound=type restricts it to
# class objects.
CT_co = TypeVar('CT_co', covariant=True, bound=type)


# This is not a real generic class.  Don't use outside annotations.
class Type(Generic[CT_co], extra=type):
    """A special construct usable to annotate class objects.

    For example, suppose we have the following classes::

      class User: ...  # Abstract base for User classes
      class BasicUser(User): ...
      class ProUser(User): ...
      class TeamUser(User): ...

    And a function that takes a class argument that's a subclass of
    User and returns an instance of the corresponding class::

      U = TypeVar('U', bound=User)
      def new_user(user_class: Type[U]) -> U:
          user = user_class()
          # (Here we could write the user object to a database)
          return user

      joe = new_user(BasicUser)

    At this point the type checker knows that joe has type BasicUser.
    """

    __slots__ = ()
2121 | ||
2122 | ||
def _make_nmtuple(name, types):
    """Build an annotated collections.namedtuple subclass.

    *types* is an iterable of (field_name, type) pairs; each type is
    validated via _type_check.
    """
    msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    types = [(n, _type_check(t, msg)) for n, t in types]
    nm_tpl = collections.namedtuple(name, [n for n, t in types])
    # Prior to PEP 526, only _field_types attribute was assigned.
    # Now, both __annotations__ and _field_types are used to maintain compatibility.
    nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types)
    try:
        # Best-effort: attribute the class to the caller's module, two
        # frames up (past this helper and past its NamedTuple caller).
        nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return nm_tpl
2135 | ||
2136 | ||
# True on Python 3.6+ (PEP 526 variable annotations / class syntax).
_PY36 = sys.version_info[:2] >= (3, 6)

# attributes prohibited to set in NamedTuple class syntax
_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__',
               '_fields', '_field_defaults', '_field_types',
               '_make', '_replace', '_asdict', '_source')

# Namespace keys that may appear in a class body but are never copied
# onto the generated namedtuple (see NamedTupleMeta.__new__).
_special = ('__module__', '__name__', '__qualname__', '__annotations__')
2145 | ||
2146 | ||
class NamedTupleMeta(type):
    """Metaclass implementing the class syntax for NamedTuple."""

    def __new__(cls, typename, bases, ns):
        if ns.get('_root', False):
            # The NamedTuple base class itself; leave it untransformed.
            return super().__new__(cls, typename, bases, ns)
        if not _PY36:
            raise TypeError("Class syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        # Field names and types come from the class body's annotations.
        types = ns.get('__annotations__', {})
        nm_tpl = _make_nmtuple(typename, types.items())
        # Collect default values; once one field has a default, every
        # following field must too (mirrors function-argument rules).
        defaults = []
        defaults_dict = {}
        for field_name in types:
            if field_name in ns:
                default_value = ns[field_name]
                defaults.append(default_value)
                defaults_dict[field_name] = default_value
            elif defaults:
                raise TypeError("Non-default namedtuple field {field_name} cannot "
                                "follow default field(s) {default_names}"
                                .format(field_name=field_name,
                                        default_names=', '.join(defaults_dict.keys())))
        nm_tpl.__new__.__annotations__ = collections.OrderedDict(types)
        nm_tpl.__new__.__defaults__ = tuple(defaults)
        nm_tpl._field_defaults = defaults_dict
        # update from user namespace without overriding special namedtuple attributes
        for key in ns:
            if key in _prohibited:
                raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
            elif key not in _special and key not in nm_tpl._fields:
                setattr(nm_tpl, key, ns[key])
        return nm_tpl
2179 | ||
2180 | ||
class NamedTuple(metaclass=NamedTupleMeta):
    """Typed version of namedtuple.

    Usage in Python versions >= 3.6::

        class Employee(NamedTuple):
            name: str
            id: int

    This is equivalent to::

        Employee = collections.namedtuple('Employee', ['name', 'id'])

    The resulting class has extra __annotations__ and _field_types
    attributes, giving an ordered dict mapping field names to types.
    __annotations__ should be preferred, while _field_types
    is kept to maintain pre PEP 526 compatibility. (The field names
    are in the _fields attribute, which is part of the namedtuple
    API.) Alternative equivalent keyword syntax is also accepted::

        Employee = NamedTuple('Employee', name=str, id=int)

    In Python versions <= 3.5 use::

        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
    """
    _root = True  # Tells NamedTupleMeta not to transform this base class.

    def __new__(*args, **kwargs):
        # Deliberately written without named 'cls'/'typename' parameters
        # so those names stay available as field keywords; positionals
        # are peeled off args one at a time instead.
        if kwargs and not _PY36:
            raise TypeError("Keyword syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        if not args:
            raise TypeError('NamedTuple.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword be passed
        if args:
            typename, args = args[0], args[1:]  # allow the "typename" keyword be passed
        elif 'typename' in kwargs:
            typename = kwargs.pop('typename')
            import warnings
            warnings.warn("Passing 'typename' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            raise TypeError("NamedTuple.__new__() missing 1 required positional "
                            "argument: 'typename'")
        if args:
            try:
                fields, = args  # allow the "fields" keyword be passed
            except ValueError:
                raise TypeError('NamedTuple.__new__() takes from 2 to 3 '
                                'positional arguments but {} '
                                'were given'.format(len(args) + 2))
        elif 'fields' in kwargs and len(kwargs) == 1:
            fields = kwargs.pop('fields')
            import warnings
            warnings.warn("Passing 'fields' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            fields = None

        if fields is None:
            # Keyword syntax: remaining kwargs are the field definitions.
            fields = kwargs.items()
        elif kwargs:
            raise TypeError("Either list of fields or keywords"
                            " can be provided to NamedTuple, not both")
        return _make_nmtuple(typename, fields)

    # Present a conventional signature to inspect/help despite the
    # *args-only definition above.
    __new__.__text_signature__ = '($cls, typename, fields=None, /, **kwargs)'
2249 | ||
2250 | ||
def NewType(name, tp):
    """Create a distinct type with (almost) zero runtime overhead.

    Static type checkers treat NewType(name, tp) as a subtype of tp;
    at runtime the result is just an identity function.  Usage::

        UserId = NewType('UserId', int)

        def name_by_id(user_id: UserId) -> str:
            ...

        UserId('user')          # Fails type check

        name_by_id(42)          # Fails type check
        name_by_id(UserId(42))  # OK

        num = UserId(5) + 1     # type: int
    """

    def new_type(x):
        return x

    # The checker-visible identity lives in these two attributes.
    new_type.__supertype__ = tp
    new_type.__name__ = name
    return new_type
2276 | ||
2277 | ||
# Python-version-specific alias (Python 2: unicode; Python 3: str).
Text = str


# Constant that's True when type checking, but False here.
# Useful for guarding imports needed only for annotations.
TYPE_CHECKING = False
2284 | ||
2285 | ||
class IO(Generic[AnyStr]):
    """Generic base class for TextIO and BinaryIO.

    This is an abstract, generic version of the return of open().

    NOTE: This does not distinguish between the different possible
    classes (text vs. binary, read vs. write vs. read/write,
    append-only, unbuffered). The TextIO and BinaryIO subclasses
    below capture the distinctions between text vs. binary, which is
    pervasive in the interface; however we currently do not offer a
    way to track the other distinctions in the type system.
    """

    __slots__ = ()

    @abstractproperty
    def mode(self) -> str:
        pass

    @abstractproperty
    def name(self) -> str:
        pass

    @abstractmethod
    def close(self) -> None:
        pass

    @abstractproperty
    def closed(self) -> bool:
        pass

    @abstractmethod
    def fileno(self) -> int:
        pass

    @abstractmethod
    def flush(self) -> None:
        pass

    @abstractmethod
    def isatty(self) -> bool:
        pass

    @abstractmethod
    def read(self, n: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readable(self) -> bool:
        pass

    @abstractmethod
    def readline(self, limit: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readlines(self, hint: int = -1) -> List[AnyStr]:
        pass

    @abstractmethod
    def seek(self, offset: int, whence: int = 0) -> int:
        pass

    @abstractmethod
    def seekable(self) -> bool:
        pass

    @abstractmethod
    def tell(self) -> int:
        pass

    # Annotation fixed: the default is None, so size is Optional[int].
    @abstractmethod
    def truncate(self, size: Optional[int] = None) -> int:
        pass

    @abstractmethod
    def writable(self) -> bool:
        pass

    @abstractmethod
    def write(self, s: AnyStr) -> int:
        pass

    @abstractmethod
    def writelines(self, lines: List[AnyStr]) -> None:
        pass

    @abstractmethod
    def __enter__(self) -> 'IO[AnyStr]':
        pass

    @abstractmethod
    def __exit__(self, type, value, traceback) -> None:
        pass
2380 | ||
2381 | ||
class BinaryIO(IO[bytes]):
    """Typed version of the return of open() in binary mode."""

    __slots__ = ()

    # write() is narrowed to accept any bytes-like argument.
    @abstractmethod
    def write(self, s: Union[bytes, bytearray]) -> int:
        pass

    @abstractmethod
    def __enter__(self) -> 'BinaryIO':
        pass
2394 | ||
2395 | ||
class TextIO(IO[str]):
    """Typed version of the return of open() in text mode."""

    __slots__ = ()

    # Extra properties mirroring the text-layer attributes of io
    # text streams.
    @abstractproperty
    def buffer(self) -> BinaryIO:
        pass

    @abstractproperty
    def encoding(self) -> str:
        pass

    @abstractproperty
    def errors(self) -> Optional[str]:
        pass

    @abstractproperty
    def line_buffering(self) -> bool:
        pass

    @abstractproperty
    def newlines(self) -> Any:
        pass

    @abstractmethod
    def __enter__(self) -> 'TextIO':
        pass
2424 | ||
2425 | ||
class io:
    """Wrapper namespace for IO generic classes."""

    __all__ = ['IO', 'TextIO', 'BinaryIO']
    IO = IO
    TextIO = TextIO
    BinaryIO = BinaryIO


# Expose the wrapper class as a pseudo-submodule via sys.modules.
io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io
2437 | ||
2438 | ||
# Type aliases for compiled-pattern and match objects, parameterized by
# AnyStr; the lambda extracts the pattern string used by _TypeAlias.
Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
                     lambda p: p.pattern)
Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
                   lambda m: m.re.pattern)
2443 | ||
2444 | ||
class re:
    """Wrapper namespace for re type aliases."""

    __all__ = ['Pattern', 'Match']
    Pattern = Pattern
    Match = Match


# Expose the wrapper class as a pseudo-submodule via sys.modules.
re.__name__ = __name__ + '.re'
sys.modules[re.__name__] = re