Linux lorencats.com 5.10.103-v7l+ #1529 SMP Tue Mar 8 12:24:00 GMT 2022 armv7l
Apache/2.4.59 (Raspbian)
: 10.0.0.29 | : 216.73.216.130
Can't Read [ /etc/named.conf ]
7.3.31-1~deb10u7
root
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
CREATE WP USER
README
+ Create Folder
+ Create File
/
usr /
lib /
python3 /
dist-packages /
mypy /
plugins /
[ HOME SHELL ]
Name
Size
Permission
Action
__pycache__
[ DIR ]
drwxr-xr-x
__init__.py
0
B
-rw-r--r--
attrs.py
24.08
KB
-rw-r--r--
common.py
4.46
KB
-rw-r--r--
ctypes.py
10.12
KB
-rw-r--r--
dataclasses.py
12.76
KB
-rw-r--r--
default.py
14.4
KB
-rw-r--r--
Delete
Unzip
Zip
${this.title}
Close
Code Editor : attrs.py
"""Plugin for supporting the attrs library (http://www.attrs.org)""" from collections import OrderedDict from typing import Optional, Dict, List, cast, Tuple, Iterable import mypy.plugin # To avoid circular imports. from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError from mypy.fixup import lookup_qualified_stnode from mypy.nodes import ( Context, Argument, Var, ARG_OPT, ARG_POS, TypeInfo, AssignmentStmt, TupleExpr, ListExpr, NameExpr, CallExpr, RefExpr, FuncBase, is_class_var, TempNode, Decorator, MemberExpr, Expression, SymbolTableNode, MDEF, JsonDict, OverloadedFuncDef, ARG_NAMED_OPT, ARG_NAMED ) from mypy.plugins.common import ( _get_argument, _get_bool_argument, _get_decorator_bool_argument, add_method ) from mypy.types import ( Type, AnyType, TypeOfAny, CallableType, NoneTyp, TypeVarDef, TypeVarType, Overloaded, UnionType, FunctionLike ) from mypy.typevars import fill_typevars from mypy.util import unmangle from mypy.server.trigger import make_wildcard_trigger MYPY = False if MYPY: from typing_extensions import Final KW_ONLY_PYTHON_2_UNSUPPORTED = "kw_only is not supported in Python 2" # The names of the different functions that create classes or arguments. 
attr_class_makers = { 'attr.s', 'attr.attrs', 'attr.attributes', } # type: Final attr_dataclass_makers = { 'attr.dataclass', } # type: Final attr_attrib_makers = { 'attr.ib', 'attr.attrib', 'attr.attr', } # type: Final class Converter: """Holds information about a `converter=` argument""" def __init__(self, name: Optional[str] = None, is_attr_converters_optional: bool = False) -> None: self.name = name self.is_attr_converters_optional = is_attr_converters_optional class Attribute: """The value of an attr.ib() call.""" def __init__(self, name: str, info: TypeInfo, has_default: bool, init: bool, kw_only: bool, converter: Converter, context: Context) -> None: self.name = name self.info = info self.has_default = has_default self.init = init self.kw_only = kw_only self.converter = converter self.context = context def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: """Return this attribute as an argument to __init__.""" assert self.init init_type = self.info[self.name].type if self.converter.name: # When a converter is set the init_type is overridden by the first argument # of the converter method. converter = lookup_qualified_stnode(ctx.api.modules, self.converter.name, True) if not converter: # The converter may be a local variable. Check there too. converter = ctx.api.lookup_qualified(self.converter.name, self.info, True) # Get the type of the converter. converter_type = None if converter and isinstance(converter.node, TypeInfo): from mypy.checkmember import type_object_type # To avoid import cycle. 
converter_type = type_object_type(converter.node, ctx.api.builtin_type) elif converter and isinstance(converter.node, OverloadedFuncDef): converter_type = converter.node.type elif converter and converter.type: converter_type = converter.type init_type = None if isinstance(converter_type, CallableType) and converter_type.arg_types: init_type = ctx.api.anal_type(converter_type.arg_types[0]) elif isinstance(converter_type, Overloaded): types = [] # type: List[Type] for item in converter_type.items(): # Walk the overloads looking for methods that can accept one argument. num_arg_types = len(item.arg_types) if not num_arg_types: continue if num_arg_types > 1 and any(kind == ARG_POS for kind in item.arg_kinds[1:]): continue types.append(item.arg_types[0]) # Make a union of all the valid types. if types: args = UnionType.make_simplified_union(types) init_type = ctx.api.anal_type(args) if self.converter.is_attr_converters_optional and init_type: # If the converter was attr.converter.optional(type) then add None to # the allowed init_type. init_type = UnionType.make_simplified_union([init_type, NoneTyp()]) if not init_type: ctx.api.fail("Cannot determine __init__ type from converter", self.context) init_type = AnyType(TypeOfAny.from_error) elif self.converter.name == '': # This means we had a converter but it's not of a type we can infer. # Error was shown in _get_converter_name init_type = AnyType(TypeOfAny.from_error) if init_type is None: if ctx.api.options.disallow_untyped_defs: # This is a compromise. If you don't have a type here then the # __init__ will be untyped. But since the __init__ is added it's # pointing at the decorator. So instead we also show the error in the # assignment, which is where you would fix the issue. node = self.info[self.name].node assert node is not None ctx.api.msg.need_annotation_for_var(node, self.context) # Convert type not set to Any. 
init_type = AnyType(TypeOfAny.unannotated) if self.kw_only: arg_kind = ARG_NAMED_OPT if self.has_default else ARG_NAMED else: arg_kind = ARG_OPT if self.has_default else ARG_POS # Attrs removes leading underscores when creating the __init__ arguments. return Argument(Var(self.name.lstrip("_"), init_type), init_type, None, arg_kind) def serialize(self) -> JsonDict: """Serialize this object so it can be saved and restored.""" return { 'name': self.name, 'has_default': self.has_default, 'init': self.init, 'kw_only': self.kw_only, 'converter_name': self.converter.name, 'converter_is_attr_converters_optional': self.converter.is_attr_converters_optional, 'context_line': self.context.line, 'context_column': self.context.column, } @classmethod def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'Attribute': """Return the Attribute that was serialized.""" return Attribute( data['name'], info, data['has_default'], data['init'], data['kw_only'], Converter(data['converter_name'], data['converter_is_attr_converters_optional']), Context(line=data['context_line'], column=data['context_column']) ) def attr_class_maker_callback(ctx: 'mypy.plugin.ClassDefContext', auto_attribs_default: bool = False) -> None: """Add necessary dunder methods to classes decorated with attr.s. attrs is a package that lets you define classes without writing dull boilerplate code. At a quick glance, the decorator searches the class body for assignments of `attr.ib`s (or annotated variables if auto_attribs=True), then depending on how the decorator is called, it will add an __init__ or all the __cmp__ methods. For frozen=True it will turn the attrs into properties. See http://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. 
""" info = ctx.cls.info init = _get_decorator_bool_argument(ctx, 'init', True) frozen = _get_frozen(ctx) cmp = _get_decorator_bool_argument(ctx, 'cmp', True) auto_attribs = _get_decorator_bool_argument(ctx, 'auto_attribs', auto_attribs_default) kw_only = _get_decorator_bool_argument(ctx, 'kw_only', False) if ctx.api.options.python_version[0] < 3: if auto_attribs: ctx.api.fail("auto_attribs is not supported in Python 2", ctx.reason) return if not info.defn.base_type_exprs: # Note: This will not catch subclassing old-style classes. ctx.api.fail("attrs only works with new-style classes", info.defn) return if kw_only: ctx.api.fail(KW_ONLY_PYTHON_2_UNSUPPORTED, ctx.reason) return attributes = _analyze_class(ctx, auto_attribs, kw_only) # Save the attributes so that subclasses can reuse them. ctx.cls.info.metadata['attrs'] = { 'attributes': [attr.serialize() for attr in attributes], 'frozen': frozen, } adder = MethodAdder(ctx) if init: _add_init(ctx, attributes, adder) if cmp: _add_cmp(ctx, adder) if frozen: _make_frozen(ctx, attributes) def _get_frozen(ctx: 'mypy.plugin.ClassDefContext') -> bool: """Return whether this class is frozen.""" if _get_decorator_bool_argument(ctx, 'frozen', False): return True # Subclasses of frozen classes are frozen so check that. for super_info in ctx.cls.info.mro[1:-1]: if 'attrs' in super_info.metadata and super_info.metadata['attrs']['frozen']: return True return False def _analyze_class(ctx: 'mypy.plugin.ClassDefContext', auto_attribs: bool, kw_only: bool) -> List[Attribute]: """Analyze the class body of an attr maker, its parents, and return the Attributes found. auto_attribs=True means we'll generate attributes from type annotations also. kw_only=True means that all attributes created here will be keyword only args in __init__. """ own_attrs = OrderedDict() # type: OrderedDict[str, Attribute] # Walk the body looking for assignments and decorators. 
for stmt in ctx.cls.defs.body: if isinstance(stmt, AssignmentStmt): for attr in _attributes_from_assignment(ctx, stmt, auto_attribs, kw_only): # When attrs are defined twice in the same body we want to use the 2nd definition # in the 2nd location. So remove it from the OrderedDict. # Unless it's auto_attribs in which case we want the 2nd definition in the # 1st location. if not auto_attribs and attr.name in own_attrs: del own_attrs[attr.name] own_attrs[attr.name] = attr elif isinstance(stmt, Decorator): _cleanup_decorator(stmt, own_attrs) for attribute in own_attrs.values(): # Even though these look like class level assignments we want them to look like # instance level assignments. if attribute.name in ctx.cls.info.names: node = ctx.cls.info.names[attribute.name].node assert isinstance(node, Var) node.is_initialized_in_class = False # Traverse the MRO and collect attributes from the parents. taken_attr_names = set(own_attrs) super_attrs = [] for super_info in ctx.cls.info.mro[1:-1]: if 'attrs' in super_info.metadata: # Each class depends on the set of attributes in its attrs ancestors. ctx.api.add_plugin_dependency(make_wildcard_trigger(super_info.fullname())) for data in super_info.metadata['attrs']['attributes']: # Only add an attribute if it hasn't been defined before. This # allows for overwriting attribute definitions by subclassing. if data['name'] not in taken_attr_names: a = Attribute.deserialize(super_info, data) super_attrs.append(a) taken_attr_names.add(a.name) attributes = super_attrs + list(own_attrs.values()) # Check the init args for correct default-ness. Note: This has to be done after all the # attributes for all classes have been read, because subclasses can override parents. last_default = False last_kw_only = False for attribute in attributes: if not attribute.init: continue if attribute.kw_only: # Keyword-only attributes don't care whether they are default or not. 
last_kw_only = True continue if not attribute.has_default and last_default: ctx.api.fail( "Non-default attributes not allowed after default attributes.", attribute.context) if last_kw_only: ctx.api.fail( "Non keyword-only attributes are not allowed after a keyword-only attribute.", attribute.context ) last_default |= attribute.has_default return attributes def _attributes_from_assignment(ctx: 'mypy.plugin.ClassDefContext', stmt: AssignmentStmt, auto_attribs: bool, kw_only: bool) -> Iterable[Attribute]: """Return Attribute objects that are created by this assignment. The assignments can look like this: x = attr.ib() x = y = attr.ib() x, y = attr.ib(), attr.ib() or if auto_attribs is enabled also like this: x: type x: type = default_value """ for lvalue in stmt.lvalues: lvalues, rvalues = _parse_assignments(lvalue, stmt) if len(lvalues) != len(rvalues): # This means we have some assignment that isn't 1 to 1. # It can't be an attrib. continue for lhs, rvalue in zip(lvalues, rvalues): # Check if the right hand side is a call to an attribute maker. if (isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) and rvalue.callee.fullname in attr_attrib_makers): attr = _attribute_from_attrib_maker(ctx, auto_attribs, kw_only, lhs, rvalue, stmt) if attr: yield attr elif auto_attribs and stmt.type and stmt.new_syntax and not is_class_var(lhs): yield _attribute_from_auto_attrib(ctx, kw_only, lhs, rvalue, stmt) def _cleanup_decorator(stmt: Decorator, attr_map: Dict[str, Attribute]) -> None: """Handle decorators in class bodies. `x.default` will set a default value on x `x.validator` and `x.default` will get removed to avoid throwing a type error. 
""" remove_me = [] for func_decorator in stmt.decorators: if (isinstance(func_decorator, MemberExpr) and isinstance(func_decorator.expr, NameExpr) and func_decorator.expr.name in attr_map): if func_decorator.name == 'default': attr_map[func_decorator.expr.name].has_default = True if func_decorator.name in ('default', 'validator'): # These are decorators on the attrib object that only exist during # class creation time. In order to not trigger a type error later we # just remove them. This might leave us with a Decorator with no # decorators (Emperor's new clothes?) # TODO: It would be nice to type-check these rather than remove them. # default should be Callable[[], T] # validator should be Callable[[Any, 'Attribute', T], Any] # where T is the type of the attribute. remove_me.append(func_decorator) for dec in remove_me: stmt.decorators.remove(dec) def _attribute_from_auto_attrib(ctx: 'mypy.plugin.ClassDefContext', kw_only: bool, lhs: NameExpr, rvalue: Expression, stmt: AssignmentStmt) -> Attribute: """Return an Attribute for a new type assignment.""" name = unmangle(lhs.name) # `x: int` (without equal sign) assigns rvalue to TempNode(AnyType()) has_rhs = not isinstance(rvalue, TempNode) return Attribute(name, ctx.cls.info, has_rhs, True, kw_only, Converter(), stmt) def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext', auto_attribs: bool, kw_only: bool, lhs: NameExpr, rvalue: CallExpr, stmt: AssignmentStmt) -> Optional[Attribute]: """Return an Attribute from the assignment or None if you can't make one.""" if auto_attribs and not stmt.new_syntax: # auto_attribs requires an annotation on *every* attr.ib. assert lhs.node is not None ctx.api.msg.need_annotation_for_var(lhs.node, stmt) return None if len(stmt.lvalues) > 1: ctx.api.fail("Too many names for one attribute", stmt) return None # This is the type that belongs in the __init__ method for this attrib. init_type = stmt.type # Read all the arguments from the call. 
init = _get_bool_argument(ctx, rvalue, 'init', True) # Note: If the class decorator says kw_only=True the attribute is ignored. # See https://github.com/python-attrs/attrs/issues/481 for explanation. kw_only |= _get_bool_argument(ctx, rvalue, 'kw_only', False) if kw_only and ctx.api.options.python_version[0] < 3: ctx.api.fail(KW_ONLY_PYTHON_2_UNSUPPORTED, stmt) return None # TODO: Check for attr.NOTHING attr_has_default = bool(_get_argument(rvalue, 'default')) attr_has_factory = bool(_get_argument(rvalue, 'factory')) if attr_has_default and attr_has_factory: ctx.api.fail("Can't pass both `default` and `factory`.", rvalue) elif attr_has_factory: attr_has_default = True # If the type isn't set through annotation but is passed through `type=` use that. type_arg = _get_argument(rvalue, 'type') if type_arg and not init_type: try: un_type = expr_to_unanalyzed_type(type_arg) except TypeTranslationError: ctx.api.fail('Invalid argument to type', type_arg) else: init_type = ctx.api.anal_type(un_type) if init_type and isinstance(lhs.node, Var) and not lhs.node.type: # If there is no annotation, add one. lhs.node.type = init_type lhs.is_inferred_def = False # Note: convert is deprecated but works the same as converter. converter = _get_argument(rvalue, 'converter') convert = _get_argument(rvalue, 'convert') if convert and converter: ctx.api.fail("Can't pass both `convert` and `converter`.", rvalue) elif convert: ctx.api.fail("convert is deprecated, use converter", rvalue) converter = convert converter_info = _parse_converter(ctx, converter) name = unmangle(lhs.name) return Attribute(name, ctx.cls.info, attr_has_default, init, kw_only, converter_info, stmt) def _parse_converter(ctx: 'mypy.plugin.ClassDefContext', converter: Optional[Expression]) -> Converter: """Return the Converter object from an Expression.""" # TODO: Support complex converters, e.g. lambdas, calls, etc. 
if converter: if isinstance(converter, RefExpr) and converter.node: if (isinstance(converter.node, FuncBase) and converter.node.type and isinstance(converter.node.type, FunctionLike)): return Converter(converter.node.fullname()) elif isinstance(converter.node, TypeInfo): return Converter(converter.node.fullname()) if (isinstance(converter, CallExpr) and isinstance(converter.callee, RefExpr) and converter.callee.fullname == "attr.converters.optional" and converter.args and converter.args[0]): # Special handling for attr.converters.optional(type) # We extract the type and add make the init_args Optional in Attribute.argument argument = _parse_converter(ctx, converter.args[0]) argument.is_attr_converters_optional = True return argument # Signal that we have an unsupported converter. ctx.api.fail( "Unsupported converter, only named functions and types are currently supported", converter ) return Converter('') return Converter(None) def _parse_assignments( lvalue: Expression, stmt: AssignmentStmt) -> Tuple[List[NameExpr], List[Expression]]: """Convert a possibly complex assignment expression into lists of lvalues and rvalues.""" lvalues = [] # type: List[NameExpr] rvalues = [] # type: List[Expression] if isinstance(lvalue, (TupleExpr, ListExpr)): if all(isinstance(item, NameExpr) for item in lvalue.items): lvalues = cast(List[NameExpr], lvalue.items) if isinstance(stmt.rvalue, (TupleExpr, ListExpr)): rvalues = stmt.rvalue.items elif isinstance(lvalue, NameExpr): lvalues = [lvalue] rvalues = [stmt.rvalue] return lvalues, rvalues def _add_cmp(ctx: 'mypy.plugin.ClassDefContext', adder: 'MethodAdder') -> None: """Generate all the cmp methods for this class.""" # For __ne__ and __eq__ the type is: # def __ne__(self, other: object) -> bool bool_type = ctx.api.named_type('__builtins__.bool') object_type = ctx.api.named_type('__builtins__.object') args = [Argument(Var('other', object_type), object_type, None, ARG_POS)] for method in ['__ne__', '__eq__']: 
adder.add_method(method, args, bool_type) # For the rest we use: # AT = TypeVar('AT') # def __lt__(self: AT, other: AT) -> bool # This way comparisons with subclasses will work correctly. tvd = TypeVarDef('AT', 'AT', -1, [], object_type) tvd_type = TypeVarType(tvd) args = [Argument(Var('other', tvd_type), tvd_type, None, ARG_POS)] for method in ['__lt__', '__le__', '__gt__', '__ge__']: adder.add_method(method, args, bool_type, self_type=tvd_type, tvd=tvd) def _make_frozen(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute]) -> None: """Turn all the attributes into properties to simulate frozen classes.""" for attribute in attributes: if attribute.name in ctx.cls.info.names: # This variable belongs to this class so we can modify it. node = ctx.cls.info.names[attribute.name].node assert isinstance(node, Var) node.is_property = True else: # This variable belongs to a super class so create new Var so we # can modify it. var = Var(attribute.name, ctx.cls.info[attribute.name].type) var.info = ctx.cls.info var._fullname = '%s.%s' % (ctx.cls.info.fullname(), var.name()) ctx.cls.info.names[var.name()] = SymbolTableNode(MDEF, var) var.is_property = True def _add_init(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute], adder: 'MethodAdder') -> None: """Generate an __init__ method for the attributes and add it to the class.""" adder.add_method( '__init__', [attribute.argument(ctx) for attribute in attributes if attribute.init], NoneTyp() ) class MethodAdder: """Helper to add methods to a TypeInfo. ctx: The ClassDefCtx we are using on which we will add methods. """ # TODO: Combine this with the code build_namedtuple_typeinfo to support both. 
def __init__(self, ctx: 'mypy.plugin.ClassDefContext') -> None: self.ctx = ctx self.self_type = fill_typevars(ctx.cls.info) def add_method(self, method_name: str, args: List[Argument], ret_type: Type, self_type: Optional[Type] = None, tvd: Optional[TypeVarDef] = None) -> None: """Add a method: def <method_name>(self, <args>) -> <ret_type>): ... to info. self_type: The type to use for the self argument or None to use the inferred self type. tvd: If the method is generic these should be the type variables. """ self_type = self_type if self_type is not None else self.self_type add_method(self.ctx, method_name, args, ret_type, self_type, tvd)
Close