diff --git a/newsfragments/2515.misc.rst b/newsfragments/2515.misc.rst
new file mode 100644
index 0000000000..a2d7498813
--- /dev/null
+++ b/newsfragments/2515.misc.rst
@@ -0,0 +1 @@
+Add black to ``web3/_utils``, excluding ``web3/_utils/module_testing``
diff --git a/tox.ini b/tox.ini
index c83659e771..764fe09aaa 100644
--- a/tox.ini
+++ b/tox.ini
@@ -24,7 +24,7 @@ use_parentheses=True
 [flake8]
 max-line-length= 100
 exclude= venv*,.tox,docs,build
-ignore=W503
+ignore=E203,W503
 
 [testenv]
 whitelist_externals=/usr/bin/make
@@ -65,6 +65,7 @@ extras=linter
 commands=
     flake8 {toxinidir}/web3 {toxinidir}/ens {toxinidir}/ethpm {toxinidir}/tests --exclude {toxinidir}/ethpm/ethpm-spec
     black {toxinidir}/ethpm {toxinidir}/web3/auto {toxinidir}/web3/utils --exclude {toxinidir}/ethpm/ethpm-spec --check
+    black {toxinidir}/web3/_utils --exclude {toxinidir}/web3/_utils/module_testing --check
     isort --recursive --check-only --diff {toxinidir}/web3/ {toxinidir}/ens/ {toxinidir}/ethpm/ {toxinidir}/tests/
     mypy -p web3 -p ethpm -p ens --config-file {toxinidir}/mypy.ini
 
diff --git a/web3/_utils/abi.py b/web3/_utils/abi.py
index 791f7bdfd5..2abaf4dbf7 100644
--- a/web3/_utils/abi.py
+++ b/web3/_utils/abi.py
@@ -86,44 +86,43 @@
 
 
 def filter_by_type(_type: str, contract_abi: ABI) -> List[Union[ABIFunction, ABIEvent]]:
-    return [abi for abi in contract_abi if abi['type'] == _type]
+    return [abi for abi in contract_abi if abi["type"] == _type]
 
 
 def filter_by_name(name: str, contract_abi: ABI) -> List[Union[ABIFunction, ABIEvent]]:
     return [
         abi
-        for abi
-        in contract_abi
+        for abi in contract_abi
         if (
-            abi['type'] not in ('fallback', 'constructor', 'receive')
-            and abi['name'] == name
+            abi["type"] not in ("fallback", "constructor", "receive")
+            and abi["name"] == name
         )
     ]
 
 
 def get_abi_input_types(abi: ABIFunction) -> List[str]:
-    if 'inputs' not in abi and (abi['type'] == 'fallback' or abi['type'] == 'receive'):
+    if "inputs" not in abi and (abi["type"] == "fallback" or abi["type"] == "receive"):
         return []
     else:
-        return [collapse_if_tuple(cast(Dict[str, Any], arg)) for arg in abi['inputs']]
+        return [collapse_if_tuple(cast(Dict[str, Any], arg)) for arg in abi["inputs"]]
 
 
 def get_abi_output_types(abi: ABIFunction) -> List[str]:
-    if abi['type'] == 'fallback':
+    if abi["type"] == "fallback":
         return []
     else:
-        return [collapse_if_tuple(cast(Dict[str, Any], arg)) for arg in abi['outputs']]
+        return [collapse_if_tuple(cast(Dict[str, Any], arg)) for arg in abi["outputs"]]
 
 
 def get_abi_input_names(abi: Union[ABIFunction, ABIEvent]) -> List[str]:
-    if 'inputs' not in abi and abi['type'] == 'fallback':
+    if "inputs" not in abi and abi["type"] == "fallback":
         return []
     else:
-        return [arg['name'] for arg in abi['inputs']]
+        return [arg["name"] for arg in abi["inputs"]]
 
 
 def get_receive_func_abi(contract_abi: ABI) -> ABIFunction:
-    receive_abis = filter_by_type('receive', contract_abi)
+    receive_abis = filter_by_type("receive", contract_abi)
     if receive_abis:
         return cast(ABIFunction, receive_abis[0])
     else:
@@ -131,7 +130,7 @@ def get_receive_func_abi(contract_abi: ABI) -> ABIFunction:
 
 
 def get_fallback_func_abi(contract_abi: ABI) -> ABIFunction:
-    fallback_abis = filter_by_type('fallback', contract_abi)
+    fallback_abis = filter_by_type("fallback", contract_abi)
     if fallback_abis:
         return cast(ABIFunction, fallback_abis[0])
     else:
@@ -139,19 +138,19 @@ def get_fallback_func_abi(contract_abi: ABI) -> ABIFunction:
 
 
 def fallback_func_abi_exists(contract_abi: ABI) -> List[Union[ABIFunction, ABIEvent]]:
-    return filter_by_type('fallback', 
contract_abi) + return filter_by_type("fallback", contract_abi) def receive_func_abi_exists(contract_abi: ABI) -> List[Union[ABIFunction, ABIEvent]]: - return filter_by_type('receive', contract_abi) + return filter_by_type("receive", contract_abi) def get_indexed_event_inputs(event_abi: ABIEvent) -> List[ABIEventParams]: - return [arg for arg in event_abi['inputs'] if arg['indexed'] is True] + return [arg for arg in event_abi["inputs"] if arg["indexed"] is True] def exclude_indexed_event_inputs(event_abi: ABIEvent) -> List[ABIEventParams]: - return [arg for arg in event_abi['inputs'] if arg['indexed'] is False] + return [arg for arg in event_abi["inputs"] if arg["indexed"] is False] def get_normalized_abi_arg_type(abi_arg: ABIEventParams) -> str: @@ -166,12 +165,7 @@ def get_normalized_abi_arg_type(abi_arg: ABIEventParams) -> str: def filter_by_argument_count( num_arguments: int, contract_abi: ABI ) -> List[Union[ABIFunction, ABIEvent]]: - return [ - abi - for abi - in contract_abi - if len(abi['inputs']) == num_arguments - ] + return [abi for abi in contract_abi if len(abi["inputs"]) == num_arguments] def filter_by_argument_name( @@ -180,9 +174,8 @@ def filter_by_argument_name( return [ abi for abi in contract_abi - if set(argument_names).intersection( - get_abi_input_names(abi) - ) == set(argument_names) + if set(argument_names).intersection(get_abi_input_names(abi)) + == set(argument_names) ] @@ -208,16 +201,20 @@ def is_dynamic(self) -> bool: # type: ignore return self.subencoder.is_dynamic @classmethod - def from_type_str(cls, abi_type: TypeStr, registry: ABIRegistry) -> "AcceptsHexStrEncoder": + def from_type_str( + cls, abi_type: TypeStr, registry: ABIRegistry + ) -> "AcceptsHexStrEncoder": subencoder_cls = cls.get_subencoder_class() # cast b/c expects BaseCoder but `from_type_string` restricted to BaseEncoder subclasses - subencoder = cast(encoding.BaseEncoder, subencoder_cls.from_type_str(abi_type, registry)) + subencoder = cast( + encoding.BaseEncoder, subencoder_cls.from_type_str(abi_type, registry) + ) return cls(subencoder) @classmethod def get_subencoder_class(cls) -> Type[encoding.BaseEncoder]: if cls.subencoder_cls is None: - raise AttributeError(f'No subencoder class is set. {cls.__name__}') + raise AttributeError(f"No subencoder class is set. 
{cls.__name__}") return cls.subencoder_cls # type ignored b/c combomethod makes signature conflict w/ defined BaseEncoder.validate_value() @@ -238,19 +235,18 @@ def validate_and_normalize(self, value: Any) -> HexStr: except binascii.Error: self.invalidate_value( value, - msg=f'{value} is an invalid hex string', + msg=f"{value} is an invalid hex string", ) else: - if raw_value[:2] != '0x': + if raw_value[:2] != "0x": if self.is_strict: self.invalidate_value( - raw_value, - msg='hex string must be prefixed with 0x' + raw_value, msg="hex string must be prefixed with 0x" ) - elif raw_value[:2] != '0x': + elif raw_value[:2] != "0x": warnings.warn( 'in v6 it will be invalid to pass a hex string without the "0x" prefix', - category=DeprecationWarning + category=DeprecationWarning, ) return value @@ -312,13 +308,12 @@ def validate_value(self, value: Any) -> bytes: # type: ignore except binascii.Error: self.invalidate_value( value, - msg=f'{value} is not a valid hex string', + msg=f"{value} is not a valid hex string", ) else: - if raw_value[:2] != '0x': + if raw_value[:2] != "0x": self.invalidate_value( - raw_value, - msg='hex string must be prefixed with 0x' + raw_value, msg="hex string must be prefixed with 0x" ) byte_size = self.value_bit_size // 8 @@ -340,7 +335,7 @@ def validate_value(self, value: Any) -> bytes: # type: ignore def encode_fn(value: Any) -> bytes: return value - @parse_type_str('bytes') + @parse_type_str("bytes") def from_type_str(cls, abi_type: BasicType, registry: ABIRegistry) -> bytes: # type ignored b/c kwargs are set in superclass init # Unexpected keyword argument "value_bit_size" for "__call__" of "BaseEncoder" @@ -359,7 +354,7 @@ class BytesDecoder(decoding.FixedByteSizeDecoder): def decoder_fn(data: bytes) -> bytes: # type: ignore return data - @parse_type_str('bytes') + @parse_type_str("bytes") def from_type_str(cls, abi_type: BasicType, registry: ABIRegistry) -> bytes: # type ignored b/c kwargs are set in superclass init # Unexpected keyword argument "value_bit_size" for "__call__" of "BaseDecoder" @@ -378,19 +373,21 @@ def validate_value(cls, value: Any) -> None: except UnicodeDecodeError: cls.invalidate_value( value, - msg='not decodable as unicode string', + msg="not decodable as unicode string", ) super().validate_value(value) def filter_by_encodability( - abi_codec: codec.ABIEncoder, args: Sequence[Any], kwargs: Dict[str, Any], contract_abi: ABI + abi_codec: codec.ABIEncoder, + args: Sequence[Any], + kwargs: Dict[str, Any], + contract_abi: ABI, ) -> List[ABIFunction]: return [ cast(ABIFunction, function_abi) - for function_abi - in contract_abi + for function_abi in contract_abi if check_if_arguments_can_be_encoded( cast(ABIFunction, function_abi), abi_codec, args, kwargs ) @@ -408,7 +405,7 @@ def check_if_arguments_can_be_encoded( except TypeError: return False - if len(function_abi.get('inputs', [])) != len(arguments): + if len(function_abi.get("inputs", [])) != len(arguments): return False try: @@ -417,8 +414,7 @@ def check_if_arguments_can_be_encoded( return False return all( - abi_codec.is_encodable(_type, arg) - for _type, arg in zip(types, aligned_args) + abi_codec.is_encodable(_type, arg) for _type, arg in zip(types, aligned_args) ) @@ -434,7 +430,7 @@ def merge_args_and_kwargs( defined in ``function_abi``. 
""" # Ensure the function is being applied to the correct number of args - if len(args) + len(kwargs) != len(function_abi.get('inputs', [])): + if len(args) + len(kwargs) != len(function_abi.get("inputs", [])): raise TypeError( f"Incorrect argument count. Expected '{len(function_abi['inputs'])}" f". Got '{len(args) + len(kwargs)}'" @@ -445,7 +441,7 @@ def merge_args_and_kwargs( return cast(Tuple[Any, ...], args) kwarg_names = set(kwargs.keys()) - sorted_arg_names = tuple(arg_abi['name'] for arg_abi in function_abi['inputs']) + sorted_arg_names = tuple(arg_abi["name"] for arg_abi in function_abi["inputs"]) args_as_kwargs = dict(zip(sorted_arg_names, args)) # Check for duplicate args @@ -459,7 +455,7 @@ def merge_args_and_kwargs( # Check for unknown args unknown_args = kwarg_names.difference(sorted_arg_names) if unknown_args: - if function_abi.get('name'): + if function_abi.get("name"): raise TypeError( f"{function_abi.get('name')}() got unexpected keyword argument(s)" f" '{', '.join(unknown_args)}'" @@ -471,12 +467,14 @@ def merge_args_and_kwargs( # Sort args according to their position in the ABI and unzip them from their # names - sorted_args = tuple(zip( - *sorted( - itertools.chain(kwargs.items(), args_as_kwargs.items()), - key=lambda kv: sorted_arg_names.index(kv[0]), + sorted_args = tuple( + zip( + *sorted( + itertools.chain(kwargs.items(), args_as_kwargs.items()), + key=lambda kv: sorted_arg_names.index(kv[0]), + ) ) - )) + ) if sorted_args: return sorted_args[1] @@ -484,7 +482,7 @@ def merge_args_and_kwargs( return tuple() -TUPLE_TYPE_STR_RE = re.compile(r'^(tuple)(\[([1-9][0-9]*)?\])?$') +TUPLE_TYPE_STR_RE = re.compile(r"^(tuple)(\[([1-9][0-9]*)?\])?$") def get_tuple_type_str_parts(s: str) -> Optional[Tuple[str, Optional[str]]]: @@ -508,7 +506,7 @@ def _align_abi_input(arg_abi: ABIFunctionParams, arg: Any) -> Tuple[Any, ...]: Aligns the values of any mapping at any level of nesting in ``arg`` according to the layout of the corresponding abi spec. """ - tuple_parts = get_tuple_type_str_parts(arg_abi['type']) + tuple_parts = get_tuple_type_str_parts(arg_abi["type"]) if tuple_parts is None: # Arg is non-tuple. Just return value. @@ -518,25 +516,25 @@ def _align_abi_input(arg_abi: ABIFunctionParams, arg: Any) -> Tuple[Any, ...]: if tuple_dims is None: # Arg is non-list tuple. Each sub arg in `arg` will be aligned # according to its corresponding abi. - sub_abis = arg_abi['components'] + sub_abis = arg_abi["components"] else: # Arg is list tuple. A non-list version of its abi will be used to # align each element in `arg`. new_abi = copy.copy(arg_abi) - new_abi['type'] = tuple_prefix + new_abi["type"] = tuple_prefix sub_abis = itertools.repeat(new_abi) # type: ignore if isinstance(arg, abc.Mapping): # Arg is mapping. Align values according to abi order. - aligned_arg = tuple(arg[abi['name']] for abi in sub_abis) + aligned_arg = tuple(arg[abi["name"]] for abi in sub_abis) else: aligned_arg = arg if not is_list_like(aligned_arg): raise TypeError( f'Expected non-string sequence for "{arg_abi.get("type")}" ' - f'component type: got {aligned_arg}' + f"component type: got {aligned_arg}" ) # convert NamedTuple to regular tuple @@ -558,27 +556,22 @@ def get_aligned_abi_inputs( contained in ``args`` may contain nested mappings or sequences corresponding to tuple-encoded values in ``abi``. """ - input_abis = abi.get('inputs', []) + input_abis = abi.get("inputs", []) if isinstance(args, abc.Mapping): # `args` is mapping. Align values according to abi order. 
- args = tuple(args[abi['name']] for abi in input_abis) + args = tuple(args[abi["name"]] for abi in input_abis) return ( # typed dict cannot be used w/ a normal Dict # https://github.com/python/mypy/issues/4976 tuple(collapse_if_tuple(abi) for abi in input_abis), # type: ignore - type(args)( - _align_abi_input(abi, arg) - for abi, arg in zip(input_abis, args) - ), + type(args)(_align_abi_input(abi, arg) for abi, arg in zip(input_abis, args)), ) def get_constructor_abi(contract_abi: ABI) -> ABIFunction: - candidates = [ - abi for abi in contract_abi if abi['type'] == 'constructor' - ] + candidates = [abi for abi in contract_abi if abi["type"] == "constructor"] if len(candidates) == 1: return candidates[0] elif len(candidates) == 0: @@ -588,39 +581,30 @@ def get_constructor_abi(contract_abi: ABI) -> ABIFunction: return None -DYNAMIC_TYPES = ['bytes', 'string'] +DYNAMIC_TYPES = ["bytes", "string"] INT_SIZES = range(8, 257, 8) BYTES_SIZES = range(1, 33) -UINT_TYPES = [f'uint{i}' for i in INT_SIZES] -INT_TYPES = [f'int{i}' for i in INT_SIZES] -BYTES_TYPES = [f'bytes{i}' for i in BYTES_SIZES] + ['bytes32.byte'] - -STATIC_TYPES = list(itertools.chain( - ['address', 'bool'], - UINT_TYPES, - INT_TYPES, - BYTES_TYPES, -)) - -BASE_TYPE_REGEX = '|'.join(( - _type + '(?![a-z0-9])' - for _type - in itertools.chain(STATIC_TYPES, DYNAMIC_TYPES) -)) - -SUB_TYPE_REGEX = ( - r'\[' - '[0-9]*' - r'\]' +UINT_TYPES = [f"uint{i}" for i in INT_SIZES] +INT_TYPES = [f"int{i}" for i in INT_SIZES] +BYTES_TYPES = [f"bytes{i}" for i in BYTES_SIZES] + ["bytes32.byte"] + +STATIC_TYPES = list( + itertools.chain( + ["address", "bool"], + UINT_TYPES, + INT_TYPES, + BYTES_TYPES, + ) ) -TYPE_REGEX = ( - '^' - '(?:{base_type})' - '(?:(?:{sub_type})*)?' - '$' -).format( +BASE_TYPE_REGEX = "|".join( + (_type + "(?![a-z0-9])" for _type in itertools.chain(STATIC_TYPES, DYNAMIC_TYPES)) +) + +SUB_TYPE_REGEX = r"\[" "[0-9]*" r"\]" + +TYPE_REGEX = ("^" "(?:{base_type})" "(?:(?:{sub_type})*)?" 
"$").format( base_type=BASE_TYPE_REGEX, sub_type=SUB_TYPE_REGEX, ) @@ -631,7 +615,7 @@ def is_recognized_type(abi_type: TypeStr) -> bool: def is_bool_type(abi_type: TypeStr) -> bool: - return abi_type == 'bool' + return abi_type == "bool" def is_uint_type(abi_type: TypeStr) -> bool: @@ -643,15 +627,15 @@ def is_int_type(abi_type: TypeStr) -> bool: def is_address_type(abi_type: TypeStr) -> bool: - return abi_type == 'address' + return abi_type == "address" def is_bytes_type(abi_type: TypeStr) -> bool: - return abi_type in BYTES_TYPES + ['bytes'] + return abi_type in BYTES_TYPES + ["bytes"] def is_string_type(abi_type: TypeStr) -> bool: - return abi_type == 'string' + return abi_type == "string" @curry @@ -663,15 +647,15 @@ def size_of_type(abi_type: TypeStr) -> int: """ Returns size in bits of abi_type """ - if 'string' in abi_type: + if "string" in abi_type: return None - if 'byte' in abi_type: + if "byte" in abi_type: return None - if '[' in abi_type: + if "[" in abi_type: return None - if abi_type == 'bool': + if abi_type == "bool": return 8 - if abi_type == 'address': + if abi_type == "address": return 160 return int(re.sub(r"\D", "", abi_type)) @@ -681,51 +665,37 @@ def size_of_type(abi_type: TypeStr) -> int: def sub_type_of_array_type(abi_type: TypeStr) -> str: if not is_array_type(abi_type): - raise ValueError( - f"Cannot parse subtype of nonarray abi-type: {abi_type}" - ) + raise ValueError(f"Cannot parse subtype of nonarray abi-type: {abi_type}") - return re.sub(END_BRACKETS_OF_ARRAY_TYPE_REGEX, '', abi_type, 1) + return re.sub(END_BRACKETS_OF_ARRAY_TYPE_REGEX, "", abi_type, 1) def length_of_array_type(abi_type: TypeStr) -> int: if not is_array_type(abi_type): - raise ValueError( - f"Cannot parse length of nonarray abi-type: {abi_type}" - ) + raise ValueError(f"Cannot parse length of nonarray abi-type: {abi_type}") - inner_brackets = re.search(END_BRACKETS_OF_ARRAY_TYPE_REGEX, abi_type).group(0).strip("[]") + inner_brackets = ( + re.search(END_BRACKETS_OF_ARRAY_TYPE_REGEX, abi_type).group(0).strip("[]") + ) if not inner_brackets: return None else: return int(inner_brackets) -ARRAY_REGEX = ( - "^" - "[a-zA-Z0-9_]+" - "({sub_type})+" - "$" -).format(sub_type=SUB_TYPE_REGEX) +ARRAY_REGEX = ("^" "[a-zA-Z0-9_]+" "({sub_type})+" "$").format(sub_type=SUB_TYPE_REGEX) def is_array_type(abi_type: TypeStr) -> bool: return bool(re.match(ARRAY_REGEX, abi_type)) -NAME_REGEX = ( - '[a-zA-Z_]' - '[a-zA-Z0-9_]*' -) +NAME_REGEX = "[a-zA-Z_]" "[a-zA-Z0-9_]*" -ENUM_REGEX = ( - '^' - '{lib_name}' - r'\.' - '{enum_name}' - '$' -).format(lib_name=NAME_REGEX, enum_name=NAME_REGEX) +ENUM_REGEX = ("^" "{lib_name}" r"\." 
"{enum_name}" "$").format( + lib_name=NAME_REGEX, enum_name=NAME_REGEX +) def is_probably_enum(abi_type: TypeStr) -> bool: @@ -737,20 +707,20 @@ def normalize_event_input_types( abi_args: Collection[Union[ABIFunction, ABIEvent]] ) -> Iterable[Union[ABIFunction, ABIEvent, Dict[TypeStr, Any]]]: for arg in abi_args: - if is_recognized_type(arg['type']): + if is_recognized_type(arg["type"]): yield arg - elif is_probably_enum(arg['type']): - yield {k: 'uint8' if k == 'type' else v for k, v in arg.items()} + elif is_probably_enum(arg["type"]): + yield {k: "uint8" if k == "type" else v for k, v in arg.items()} else: yield arg def abi_to_signature(abi: Union[ABIFunction, ABIEvent]) -> str: function_signature = "{fn_name}({fn_input_types})".format( - fn_name=abi['name'], - fn_input_types=','.join([ - arg['type'] for arg in normalize_event_input_types(abi.get('inputs', [])) - ]), + fn_name=abi["name"], + fn_input_types=",".join( + [arg["type"] for arg in normalize_event_input_types(abi.get("inputs", []))] + ), ) return function_signature @@ -812,8 +782,7 @@ def abi_data_tree(types: Sequence[TypeStr], data: Sequence[Any]) -> List[Any]: """ return [ abi_sub_tree(data_type, data_value) - for data_type, data_value - in zip(types, data) + for data_type, data_value in zip(types, data) ] @@ -825,15 +794,17 @@ def data_tree_map( Map func to every ABITypedData element in the tree. func will receive two args: abi_type, and data """ + def map_to_typed_data(elements: Any) -> "ABITypedData": if isinstance(elements, ABITypedData) and elements.abi_type is not None: return ABITypedData(func(*elements)) else: return elements + return recursive_map(map_to_typed_data, data_tree) -class ABITypedData(namedtuple('ABITypedData', 'abi_type, data')): +class ABITypedData(namedtuple("ABITypedData", "abi_type, data")): """ This class marks data as having a certain ABI-type. @@ -851,6 +822,7 @@ class ABITypedData(namedtuple('ABITypedData', 'abi_type, data')): positional argument that is iterable, to match the init interface of all other relevant collections. 
""" + def __new__(cls, iterable: Iterable[Any]) -> "ABITypedData": return super().__new__(cls, *iterable) @@ -873,8 +845,7 @@ def abi_sub_tree( # items in iterable with that type item_type_str = abi_type.item_type.to_type_str() value_to_annotate = [ - abi_sub_tree(item_type_str, item_value) - for item_value in data_value + abi_sub_tree(item_type_str, item_value) for item_value in data_value ] elif isinstance(abi_type, TupleType): # Otherwise, if type is tuple, determine component types and annotate @@ -886,10 +857,12 @@ def abi_sub_tree( else: value_to_annotate = data_value - return ABITypedData([ - abi_type.to_type_str(), - value_to_annotate, - ]) + return ABITypedData( + [ + abi_type.to_type_str(), + value_to_annotate, + ] + ) def strip_abi_type(elements: Any) -> Any: @@ -904,30 +877,34 @@ def build_default_registry() -> ABIRegistry: # affected by our custom encoder subclasses registry = default_registry.copy() - registry.unregister('address') - registry.unregister('bytes') - registry.unregister('bytes') - registry.unregister('string') + registry.unregister("address") + registry.unregister("bytes") + registry.unregister("bytes") + registry.unregister("string") registry.register( - BaseEquals('address'), - AddressEncoder, decoding.AddressDecoder, - label='address', + BaseEquals("address"), + AddressEncoder, + decoding.AddressDecoder, + label="address", ) registry.register( - BaseEquals('bytes', with_sub=True), - BytesEncoder, decoding.BytesDecoder, - label='bytes', + BaseEquals("bytes", with_sub=True), + BytesEncoder, + decoding.BytesDecoder, + label="bytes", ) registry.register( - BaseEquals('bytes', with_sub=False), - ByteStringEncoder, decoding.ByteStringDecoder, - label='bytes', + BaseEquals("bytes", with_sub=False), + ByteStringEncoder, + decoding.ByteStringDecoder, + label="bytes", ) registry.register( - BaseEquals('string'), - TextStringEncoder, decoding.StringDecoder, - label='string', + BaseEquals("string"), + TextStringEncoder, + decoding.StringDecoder, + label="string", ) return registry @@ -935,29 +912,33 @@ def build_default_registry() -> ABIRegistry: def build_strict_registry() -> ABIRegistry: registry = default_registry.copy() - registry.unregister('address') - registry.unregister('bytes') - registry.unregister('bytes') - registry.unregister('string') + registry.unregister("address") + registry.unregister("bytes") + registry.unregister("bytes") + registry.unregister("string") registry.register( - BaseEquals('address'), - AddressEncoder, decoding.AddressDecoder, - label='address', + BaseEquals("address"), + AddressEncoder, + decoding.AddressDecoder, + label="address", ) registry.register( - BaseEquals('bytes', with_sub=True), - ExactLengthBytesEncoder, BytesDecoder, - label='bytes', + BaseEquals("bytes", with_sub=True), + ExactLengthBytesEncoder, + BytesDecoder, + label="bytes", ) registry.register( - BaseEquals('bytes', with_sub=False), - StrictByteStringEncoder, decoding.ByteStringDecoder, - label='bytes', + BaseEquals("bytes", with_sub=False), + StrictByteStringEncoder, + decoding.ByteStringDecoder, + label="bytes", ) registry.register( - BaseEquals('string'), - TextStringEncoder, decoding.StringDecoder, - label='string', + BaseEquals("string"), + TextStringEncoder, + decoding.StringDecoder, + label="string", ) return registry diff --git a/web3/_utils/admin.py b/web3/_utils/admin.py index dbe841c913..a494eb95f1 100644 --- a/web3/_utils/admin.py +++ b/web3/_utils/admin.py @@ -26,8 +26,11 @@ def admin_start_params_munger( - module: Module, host: str = 'localhost', port: int 
= 8546, cors: str = '', - apis: str = 'eth,net,web3' + module: Module, + host: str = "localhost", + port: int = 8546, + cors: str = "", + apis: str = "eth,net,web3", ) -> Tuple[str, int, str, str]: return (host, port, cors, apis) @@ -58,7 +61,11 @@ def admin_start_params_munger( class ServerConnection(Protocol): def __call__( - self, host: str = "localhost", port: int = 8546, cors: str = "", apis: str = "eth,net,web3" + self, + host: str = "localhost", + port: int = 8546, + cors: str = "", + apis: str = "eth,net,web3", ) -> bool: pass @@ -89,5 +96,5 @@ def __call__( # # Deprecated Methods # -start_rpc = DeprecatedMethod(start_http, 'start_rpc', 'start_http') -stop_rpc = DeprecatedMethod(stop_http, 'stop_rpc', 'stop_http') +start_rpc = DeprecatedMethod(start_http, "start_rpc", "start_http") +stop_rpc = DeprecatedMethod(stop_http, "stop_rpc", "stop_http") diff --git a/web3/_utils/async_transactions.py b/web3/_utils/async_transactions.py index 3779ba3594..a0190b72eb 100644 --- a/web3/_utils/async_transactions.py +++ b/web3/_utils/async_transactions.py @@ -47,34 +47,35 @@ from web3.eth import AsyncEth # noqa: F401 -async def _estimate_gas(w3: 'Web3', tx: TxParams) -> Awaitable[int]: +async def _estimate_gas(w3: "Web3", tx: TxParams) -> Awaitable[int]: return await w3.eth.estimate_gas(tx) # type: ignore -async def _gas_price(w3: 'Web3', tx: TxParams) -> Awaitable[Optional[Wei]]: +async def _gas_price(w3: "Web3", tx: TxParams) -> Awaitable[Optional[Wei]]: return await w3.eth.generate_gas_price(tx) or w3.eth.gas_price # type: ignore -async def _max_fee_per_gas(w3: 'Web3', tx: TxParams) -> Awaitable[Wei]: - block = await w3.eth.get_block('latest') # type: ignore - return await w3.eth.max_priority_fee + (2 * block['baseFeePerGas']) # type: ignore +async def _max_fee_per_gas(w3: "Web3", tx: TxParams) -> Awaitable[Wei]: + block = await w3.eth.get_block("latest") # type: ignore + return await w3.eth.max_priority_fee + (2 * block["baseFeePerGas"]) # type: ignore -async def _max_priority_fee_gas(w3: 'Web3', tx: TxParams) -> Awaitable[Wei]: +async def _max_priority_fee_gas(w3: "Web3", tx: TxParams) -> Awaitable[Wei]: return await w3.eth.max_priority_fee # type: ignore -async def _chain_id(w3: 'Web3', tx: TxParams) -> Awaitable[int]: +async def _chain_id(w3: "Web3", tx: TxParams) -> Awaitable[int]: return await w3.eth.chain_id # type: ignore + TRANSACTION_DEFAULTS = { - 'value': 0, - 'data': b'', - 'gas': _estimate_gas, - 'gasPrice': _gas_price, - 'maxFeePerGas': _max_fee_per_gas, - 'maxPriorityFeePerGas': _max_priority_fee_gas, - 'chainId': _chain_id, + "value": 0, + "data": b"", + "gas": _estimate_gas, + "gasPrice": _gas_price, + "maxFeePerGas": _max_fee_per_gas, + "maxPriorityFeePerGas": _max_priority_fee_gas, + "chainId": _chain_id, } @@ -84,7 +85,7 @@ async def get_block_gas_limit( if block_identifier is None: block_identifier = await web3_eth.block_number block = await web3_eth.get_block(block_identifier) - return block['gasLimit'] + return block["gasLimit"] async def get_buffered_gas_estimate( @@ -112,27 +113,28 @@ async def fill_transaction_defaults(w3: "Web3", transaction: TxParams) -> TxPara if w3 is None, fill as much as possible while offline """ strategy_based_gas_price = await w3.eth.generate_gas_price(transaction) # type: ignore - is_dynamic_fee_transaction = ( - not strategy_based_gas_price - and ( - 'gasPrice' not in transaction # default to dynamic fee transaction - or any_in_dict(DYNAMIC_FEE_TXN_PARAMS, transaction) - ) + is_dynamic_fee_transaction = not strategy_based_gas_price and ( + 
"gasPrice" not in transaction # default to dynamic fee transaction + or any_in_dict(DYNAMIC_FEE_TXN_PARAMS, transaction) ) defaults = {} for key, default_getter in TRANSACTION_DEFAULTS.items(): if key not in transaction: if ( - is_dynamic_fee_transaction and key == 'gasPrice' - or not is_dynamic_fee_transaction and key in DYNAMIC_FEE_TXN_PARAMS + is_dynamic_fee_transaction + and key == "gasPrice" + or not is_dynamic_fee_transaction + and key in DYNAMIC_FEE_TXN_PARAMS ): # do not set default max fees if legacy txn or gas price if dynamic fee txn continue if callable(default_getter): if w3 is None: - raise ValueError(f"You must specify a '{key}' value in the transaction") + raise ValueError( + f"You must specify a '{key}' value in the transaction" + ) default_val = await default_getter(w3, transaction) else: default_val = default_getter @@ -145,61 +147,64 @@ async def async_handle_offchain_lookup( offchain_lookup_payload: Dict[str, Any], transaction: TxParams, ) -> bytes: - formatted_sender = to_hex_if_bytes(offchain_lookup_payload['sender']).lower() - formatted_data = to_hex_if_bytes(offchain_lookup_payload['callData']).lower() + formatted_sender = to_hex_if_bytes(offchain_lookup_payload["sender"]).lower() + formatted_data = to_hex_if_bytes(offchain_lookup_payload["callData"]).lower() - if formatted_sender != to_hex_if_bytes(transaction['to']).lower(): + if formatted_sender != to_hex_if_bytes(transaction["to"]).lower(): raise ValidationError( - 'Cannot handle OffchainLookup raised inside nested call. Returned `sender` ' - 'value does not equal `to` address in transaction.' + "Cannot handle OffchainLookup raised inside nested call. Returned `sender` " + "value does not equal `to` address in transaction." ) - for url in offchain_lookup_payload['urls']: + for url in offchain_lookup_payload["urls"]: formatted_url = URI( str(url) - .replace('{sender}', str(formatted_sender)) - .replace('{data}', str(formatted_data)) + .replace("{sender}", str(formatted_sender)) + .replace("{data}", str(formatted_data)) ) try: - if '{data}' in url and '{sender}' in url: + if "{data}" in url and "{sender}" in url: response = await async_get_response_from_get_request(formatted_url) - elif '{sender}' in url: - response = await async_get_response_from_post_request(formatted_url, data={ - "data": formatted_data, - "sender": formatted_sender - }) + elif "{sender}" in url: + response = await async_get_response_from_post_request( + formatted_url, + data={"data": formatted_data, "sender": formatted_sender}, + ) else: - raise ValidationError('url not formatted properly.') + raise ValidationError("url not formatted properly.") except Exception: continue # try next url if timeout or issues making the request - if 400 <= response.status <= 499: # if request returns 400 error, raise exception + if ( + 400 <= response.status <= 499 + ): # if request returns 400 error, raise exception response.raise_for_status() if not 200 <= response.status <= 299: # if not 400 error, try next url continue result = await async_get_json_from_client_response(response) - if 'data' not in result.keys(): + if "data" not in result.keys(): raise ValidationError( "Improperly formatted response for offchain lookup HTTP request - missing 'data' " "field." 
) - encoded_data_with_function_selector = b''.join([ - # 4-byte callback function selector - to_bytes_if_hex(offchain_lookup_payload['callbackFunction']), - - # encode the `data` from the result and the `extraData` as bytes - encode_abi( - ['bytes', 'bytes'], - [ - to_bytes_if_hex(result['data']), - to_bytes_if_hex(offchain_lookup_payload['extraData']), - ] - ) - ]) + encoded_data_with_function_selector = b"".join( + [ + # 4-byte callback function selector + to_bytes_if_hex(offchain_lookup_payload["callbackFunction"]), + # encode the `data` from the result and the `extraData` as bytes + encode_abi( + ["bytes", "bytes"], + [ + to_bytes_if_hex(result["data"]), + to_bytes_if_hex(offchain_lookup_payload["extraData"]), + ], + ), + ] + ) return encoded_data_with_function_selector raise Exception("Offchain lookup failed for supplied urls.") diff --git a/web3/_utils/blocks.py b/web3/_utils/blocks.py index dd6c237c76..68ef468de3 100644 --- a/web3/_utils/blocks.py +++ b/web3/_utils/blocks.py @@ -27,7 +27,7 @@ def is_predefined_block_number(value: Any) -> bool: # one of the words in: {"latest", "pending", "earliest"} # We cannot decode the bytes as utf8, because random bytes likely won't be valid. # So we speculatively decode as 'latin-1', which cannot fail. - value_text = value.decode('latin-1') + value_text = value.decode("latin-1") elif is_integer(value): return False else: diff --git a/web3/_utils/caching.py b/web3/_utils/caching.py index ee78b48c16..bb3f005113 100644 --- a/web3/_utils/caching.py +++ b/web3/_utils/caching.py @@ -27,17 +27,9 @@ def generate_cache_key(value: Any) -> str: elif is_boolean(value) or is_null(value) or is_number(value): return generate_cache_key(repr(value)) elif is_dict(value): - return generate_cache_key(( - (key, value[key]) - for key - in sorted(value.keys()) - )) + return generate_cache_key(((key, value[key]) for key in sorted(value.keys()))) elif is_list_like(value) or isinstance(value, collections.abc.Generator): - return generate_cache_key("".join(( - generate_cache_key(item) - for item - in value - ))) + return generate_cache_key("".join((generate_cache_key(item) for item in value))) else: raise TypeError( f"Cannot generate cache key for value {value} of type {type(value)}" diff --git a/web3/_utils/compat/__init__.py b/web3/_utils/compat/__init__.py index e272998a51..89857ae20e 100644 --- a/web3/_utils/compat/__init__.py +++ b/web3/_utils/compat/__init__.py @@ -1,4 +1,5 @@ import sys + # remove once web3 supports python>=3.8 # Types was added to typing in 3.8 if sys.version_info >= (3, 8): diff --git a/web3/_utils/contracts.py b/web3/_utils/contracts.py index dcf66f6304..de096326ab 100644 --- a/web3/_utils/contracts.py +++ b/web3/_utils/contracts.py @@ -74,21 +74,20 @@ def find_matching_event_abi( - abi: ABI, event_name: Optional[str] = None, - argument_names: Optional[Sequence[str]] = None + abi: ABI, + event_name: Optional[str] = None, + argument_names: Optional[Sequence[str]] = None, ) -> ABIEvent: filters = [ - functools.partial(filter_by_type, 'event'), + functools.partial(filter_by_type, "event"), ] if event_name is not None: filters.append(functools.partial(filter_by_name, event_name)) if argument_names is not None: - filters.append( - functools.partial(filter_by_argument_name, argument_names) - ) + filters.append(functools.partial(filter_by_argument_name, argument_names)) event_abi_candidates = pipe(abi, *filters) @@ -130,15 +129,21 @@ def find_matching_fn_abi( return function_candidates[0] else: matching_identifiers = name_filter(abi) - 
matching_function_signatures = [abi_to_signature(func) for func in matching_identifiers] + matching_function_signatures = [ + abi_to_signature(func) for func in matching_identifiers + ] arg_count_matches = len(arg_count_filter(matching_identifiers)) encoding_matches = len(encoding_filter(matching_identifiers)) if arg_count_matches == 0: - diagnosis = "\nFunction invocation failed due to improper number of arguments." + diagnosis = ( + "\nFunction invocation failed due to improper number of arguments." + ) elif encoding_matches == 0: - diagnosis = "\nFunction invocation failed due to no matching argument types." + diagnosis = ( + "\nFunction invocation failed due to no matching argument types." + ) elif encoding_matches > 1: diagnosis = ( "\nAmbiguous argument encoding. " @@ -155,7 +160,10 @@ def find_matching_fn_abi( def encode_abi( - w3: "Web3", abi: ABIFunction, arguments: Sequence[Any], data: Optional[HexStr] = None + w3: "Web3", + abi: ABIFunction, + arguments: Sequence[Any], + data: Optional[HexStr] = None, ) -> HexStr: argument_types = get_abi_input_types(abi) @@ -204,7 +212,9 @@ def prepare_transaction( TODO: add new prepare_deploy_transaction API """ if fn_abi is None: - fn_abi = find_matching_fn_abi(contract_abi, w3.codec, fn_identifier, fn_args, fn_kwargs) + fn_abi = find_matching_fn_abi( + contract_abi, w3.codec, fn_identifier, fn_args, fn_kwargs + ) validate_payable(transaction, fn_abi) @@ -213,13 +223,13 @@ def prepare_transaction( else: prepared_transaction = cast(TxParams, dict(**transaction)) - if 'data' in prepared_transaction: + if "data" in prepared_transaction: raise ValueError("Transaction parameter may not contain a 'data' key") if address: - prepared_transaction.setdefault('to', address) + prepared_transaction.setdefault("to", address) - prepared_transaction['data'] = encode_transaction_data( + prepared_transaction["data"] = encode_transaction_data( w3, fn_identifier, contract_abi, @@ -236,16 +246,25 @@ def encode_transaction_data( contract_abi: Optional[ABI] = None, fn_abi: Optional[ABIFunction] = None, args: Optional[Sequence[Any]] = None, - kwargs: Optional[Any] = None + kwargs: Optional[Any] = None, ) -> HexStr: if fn_identifier is FallbackFn: - fn_abi, fn_selector, fn_arguments = get_fallback_function_info(contract_abi, fn_abi) + fn_abi, fn_selector, fn_arguments = get_fallback_function_info( + contract_abi, fn_abi + ) elif fn_identifier is ReceiveFn: - fn_abi, fn_selector, fn_arguments = get_receive_function_info(contract_abi, fn_abi) + fn_abi, fn_selector, fn_arguments = get_receive_function_info( + contract_abi, fn_abi + ) elif is_text(fn_identifier): fn_abi, fn_selector, fn_arguments = get_function_info( # type ignored b/c fn_id here is always str b/c FallbackFn is handled above - fn_identifier, w3.codec, contract_abi, fn_abi, args, kwargs, # type: ignore + fn_identifier, # type: ignore + w3.codec, + contract_abi, + fn_abi, + args, + kwargs, ) else: raise TypeError("Unsupported function identifier") @@ -258,7 +277,7 @@ def get_fallback_function_info( ) -> Tuple[ABIFunction, HexStr, Tuple[Any, ...]]: if fn_abi is None: fn_abi = get_fallback_func_abi(contract_abi) - fn_selector = encode_hex(b'') + fn_selector = encode_hex(b"") fn_arguments: Tuple[Any, ...] = tuple() return fn_abi, fn_selector, fn_arguments @@ -268,7 +287,7 @@ def get_receive_function_info( ) -> Tuple[ABIFunction, HexStr, Tuple[Any, ...]]: if fn_abi is None: fn_abi = get_receive_func_abi(contract_abi) - fn_selector = encode_hex(b'') + fn_selector = encode_hex(b"") fn_arguments: Tuple[Any, ...] 
= tuple() return fn_abi, fn_selector, fn_arguments @@ -304,8 +323,8 @@ def validate_payable(transaction: TxParams, abi: ABIFunction) -> None: """Raise ValidationError if non-zero ether is sent to a non payable function. """ - if 'value' in transaction: - if transaction['value'] != 0: + if "value" in transaction: + if transaction["value"] != 0: if "payable" in abi and not abi["payable"]: raise ValidationError( "Sending non-zero ether to a contract function " diff --git a/web3/_utils/datatypes.py b/web3/_utils/datatypes.py index 110449fd75..5b4e324478 100644 --- a/web3/_utils/datatypes.py +++ b/web3/_utils/datatypes.py @@ -42,8 +42,8 @@ def __new__( name: str, bases: Tuple[type], namespace: Dict[str, Any], - normalizers: Optional[Dict[str, Any]] = None - ) -> 'PropertyCheckingFactory': + normalizers: Optional[Dict[str, Any]] = None, + ) -> "PropertyCheckingFactory": all_bases = set(concat(base.__mro__ for base in bases)) all_keys = set(concat(base.__dict__.keys() for base in all_bases)) diff --git a/web3/_utils/decorators.py b/web3/_utils/decorators.py index d93ff99277..2d1a9d3f1f 100644 --- a/web3/_utils/decorators.py +++ b/web3/_utils/decorators.py @@ -24,13 +24,14 @@ def wrapped(*args: Any) -> Any: thread_id = threading.get_ident() thread_local_args = (thread_id,) + arg_instances if thread_local_args in to_wrap.__already_called: # type: ignore - raise ValueError(f'Recursively called {to_wrap} with {args!r}') + raise ValueError(f"Recursively called {to_wrap} with {args!r}") to_wrap.__already_called[thread_local_args] = True # type: ignore try: wrapped_val = to_wrap(*args) finally: del to_wrap.__already_called[thread_local_args] # type: ignore return wrapped_val + return wrapped @@ -42,12 +43,16 @@ def deprecated_for(replace_message: str) -> Callable[..., Any]: def toAscii(arg): ... 
""" + def decorator(to_wrap: TFunc) -> TFunc: @functools.wraps(to_wrap) def wrapper(*args: Any, **kwargs: Any) -> Callable[..., Any]: warnings.warn( f"{to_wrap.__name__} is deprecated in favor of {replace_message}", - category=DeprecationWarning) + category=DeprecationWarning, + ) return to_wrap(*args, **kwargs) + return cast(TFunc, wrapper) + return decorator diff --git a/web3/_utils/encoding.py b/web3/_utils/encoding.py index b8adfd88d1..f69f520a19 100644 --- a/web3/_utils/encoding.py +++ b/web3/_utils/encoding.py @@ -57,8 +57,9 @@ ) -def hex_encode_abi_type(abi_type: TypeStr, value: Any, - force_size: Optional[int] = None) -> HexStr: +def hex_encode_abi_type( + abi_type: TypeStr, value: Any, force_size: Optional[int] = None +) -> HexStr: """ Encodes value into a hex string in format of abi_type """ @@ -69,7 +70,9 @@ def hex_encode_abi_type(abi_type: TypeStr, value: Any, if is_array_type(abi_type): sub_type = sub_type_of_array_type(abi_type) return HexStr( - "".join([remove_0x_prefix(hex_encode_abi_type(sub_type, v, 256)) for v in value]) + "".join( + [remove_0x_prefix(hex_encode_abi_type(sub_type, v, 256)) for v in value] + ) ) elif is_bool_type(abi_type): return to_hex_with_size(value, data_size) @@ -87,9 +90,7 @@ def hex_encode_abi_type(abi_type: TypeStr, value: Any, elif is_string_type(abi_type): return to_hex(text=value) else: - raise ValueError( - f"Unsupported ABI type: {abi_type}" - ) + raise ValueError(f"Unsupported ABI type: {abi_type}") def to_hex_twos_compliment(value: Any, bit_size: int) -> HexStr: @@ -121,10 +122,10 @@ def pad_hex(value: Any, bit_size: int) -> HexStr: def trim_hex(hexstr: HexStr) -> HexStr: - if hexstr.startswith('0x0'): - hexstr = HexStr(re.sub('^0x0+', '0x', hexstr)) - if hexstr == '0x': - hexstr = HexStr('0x0') + if hexstr.startswith("0x0"): + hexstr = HexStr(re.sub("^0x0+", "0x", hexstr)) + if hexstr == "0x": + hexstr = HexStr("0x0") return hexstr @@ -133,7 +134,7 @@ def pad_bytes(fill_with: bytes, num_bytes: int, unpadded: bytes) -> bytes: return unpadded.rjust(num_bytes, fill_with) -zpad_bytes = pad_bytes(b'\0') +zpad_bytes = pad_bytes(b"\0") @curry @@ -184,6 +185,7 @@ class FriendlyJsonSerde: information on which fields failed, to show more helpful information in the raised error messages. """ + def _json_mapping_errors(self, mapping: Dict[Any, Any]) -> Iterable[str]: for key, val in mapping.items(): try: @@ -198,19 +200,20 @@ def _json_list_errors(self, iterable: Iterable[Any]) -> Iterable[str]: except TypeError as exc: yield f"{index}: because ({exc})" - def _friendly_json_encode(self, obj: Dict[Any, Any], - cls: Optional[Type[json.JSONEncoder]] = None) -> str: + def _friendly_json_encode( + self, obj: Dict[Any, Any], cls: Optional[Type[json.JSONEncoder]] = None + ) -> str: try: encoded = json.dumps(obj, cls=cls) return encoded except TypeError as full_exception: - if hasattr(obj, 'items'): - item_errors = '; '.join(self._json_mapping_errors(obj)) + if hasattr(obj, "items"): + item_errors = "; ".join(self._json_mapping_errors(obj)) raise TypeError( f"dict had unencodable value at keys: {{{item_errors}}}" ) elif is_list_like(obj): - element_errors = '; '.join(self._json_list_errors(obj)) + element_errors = "; ".join(self._json_list_errors(obj)) raise TypeError( f"list had unencodable value at index: [{element_errors}]" ) @@ -222,13 +225,14 @@ def json_decode(self, json_str: str) -> Dict[Any, Any]: decoded = json.loads(json_str) return decoded except json.decoder.JSONDecodeError as exc: - err_msg = f'Could not decode {json_str!r} because of {exc}.' 
+ err_msg = f"Could not decode {json_str!r} because of {exc}." # Calling code may rely on catching JSONDecodeError to recognize bad json # so we have to re-raise the same type. raise json.decoder.JSONDecodeError(err_msg, exc.doc, exc.pos) - def json_encode(self, obj: Dict[Any, Any], - cls: Optional[Type[json.JSONEncoder]] = None) -> str: + def json_encode( + self, obj: Dict[Any, Any], cls: Optional[Type[json.JSONEncoder]] = None + ) -> str: try: return self._friendly_json_encode(obj, cls=cls) except TypeError as exc: @@ -239,9 +243,7 @@ def to_4byte_hex(hex_or_str_or_bytes: Union[HexStr, str, bytes, int]) -> HexStr: size_of_4bytes = 4 * 8 byte_str = hexstr_if_str(to_bytes, hex_or_str_or_bytes) if len(byte_str) > 4: - raise ValueError( - f'expected value of size 4 bytes. Got: {len(byte_str)} bytes' - ) + raise ValueError(f"expected value of size 4 bytes. Got: {len(byte_str)} bytes") hex_str = encode_hex(byte_str) return pad_hex(hex_str, size_of_4bytes) @@ -263,6 +265,7 @@ def encode_single_packed(_type: TypeStr, value: Any) -> bytes: grammar as abi_type_parser, ) from eth_abi.registry import has_arrlist, registry + abi_type = abi_type_parser.parse(_type) if has_arrlist(_type): item_encoder = registry.get_encoder(abi_type.item_type.to_type_str()) @@ -270,9 +273,10 @@ def encode_single_packed(_type: TypeStr, value: Any) -> bytes: return DynamicArrayPackedEncoder(item_encoder=item_encoder).encode(value) else: raise NotImplementedError( - "Fixed arrays are not implemented in this packed encoder prototype") + "Fixed arrays are not implemented in this packed encoder prototype" + ) elif abi_type.base == "string": - return codecs.encode(value, 'utf8') + return codecs.encode(value, "utf8") elif abi_type.base == "bytes": return value return None @@ -288,7 +292,7 @@ def default(self, obj: Any) -> Union[Dict[Any, Any], HexStr]: def to_json(obj: Dict[Any, Any]) -> str: - ''' + """ Convert a complex object (like a transaction object) to a JSON string - ''' + """ return FriendlyJsonSerde().json_encode(obj, cls=Web3JsonEncoder) diff --git a/web3/_utils/ens.py b/web3/_utils/ens.py index c5ba63deb5..3fd250c579 100644 --- a/web3/_utils/ens.py +++ b/web3/_utils/ens.py @@ -58,7 +58,9 @@ def address(self, name: str) -> ChecksumAddress: @contextmanager -def ens_addresses(w3: "Web3", name_addr_pairs: Dict[str, ChecksumAddress]) -> Iterator[None]: +def ens_addresses( + w3: "Web3", name_addr_pairs: Dict[str, ChecksumAddress] +) -> Iterator[None]: original_ens = w3.ens w3.ens = cast(ENS, StaticENS(name_addr_pairs)) yield diff --git a/web3/_utils/events.py b/web3/_utils/events.py index dbb1bcbf27..70d306bd54 100644 --- a/web3/_utils/events.py +++ b/web3/_utils/events.py @@ -91,21 +91,21 @@ def construct_event_topic_set( - event_abi: ABIEvent, abi_codec: ABICodec, - arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None + event_abi: ABIEvent, + abi_codec: ABICodec, + arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None, ) -> List[HexStr]: if arguments is None: arguments = {} if isinstance(arguments, (list, tuple)): - if len(arguments) != len(event_abi['inputs']): + if len(arguments) != len(event_abi["inputs"]): raise ValueError( "When passing an argument list, the number of arguments must " "match the event constructor." 
) arguments = { - arg['name']: [arg_value] - for arg, arg_value - in zip(event_abi['inputs'], arguments) + arg["name"]: [arg_value] + for arg, arg_value in zip(event_abi["inputs"], arguments) } normalized_args = { @@ -119,13 +119,15 @@ def construct_event_topic_set( event_topic = encode_hex(event_abi_to_log_topic(event_abi)) # type: ignore indexed_args = get_indexed_event_inputs(event_abi) zipped_abi_and_args = [ - (arg, normalized_args.get(arg['name'], [None])) - for arg in indexed_args + (arg, normalized_args.get(arg["name"], [None])) for arg in indexed_args ] encoded_args = [ [ - None if option is None else encode_hex(abi_codec.encode_single(arg['type'], option)) - for option in arg_options] + None + if option is None + else encode_hex(abi_codec.encode_single(arg["type"], option)) + for option in arg_options + ] for arg, arg_options in zipped_abi_and_args ] @@ -134,21 +136,21 @@ def construct_event_topic_set( def construct_event_data_set( - event_abi: ABIEvent, abi_codec: ABICodec, - arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None + event_abi: ABIEvent, + abi_codec: ABICodec, + arguments: Optional[Union[Sequence[Any], Dict[str, Any]]] = None, ) -> List[List[Optional[HexStr]]]: if arguments is None: arguments = {} if isinstance(arguments, (list, tuple)): - if len(arguments) != len(event_abi['inputs']): + if len(arguments) != len(event_abi["inputs"]): raise ValueError( "When passing an argument list, the number of arguments must " "match the event constructor." ) arguments = { - arg['name']: [arg_value] - for arg, arg_value - in zip(event_abi['inputs'], arguments) + arg["name"]: [arg_value] + for arg, arg_value in zip(event_abi["inputs"], arguments) } normalized_args = { @@ -159,20 +161,20 @@ def construct_event_data_set( non_indexed_args = exclude_indexed_event_inputs(event_abi) zipped_abi_and_args = [ - (arg, normalized_args.get(arg['name'], [None])) - for arg in non_indexed_args + (arg, normalized_args.get(arg["name"], [None])) for arg in non_indexed_args ] encoded_args = [ [ - None if option is None else encode_hex(abi_codec.encode_single(arg['type'], option)) - for option in arg_options] + None + if option is None + else encode_hex(abi_codec.encode_single(arg["type"], option)) + for option in arg_options + ] for arg, arg_options in zipped_abi_and_args ] data = [ - list(permutation) - if any(value is not None for value in permutation) - else [] + list(permutation) if any(value is not None for value in permutation) else [] for permutation in itertools.product(*encoded_args) ] return data @@ -184,50 +186,54 @@ def is_dynamic_sized_type(type_str: TypeStr) -> bool: @to_tuple -def get_event_abi_types_for_decoding(event_inputs: Sequence[ABIEventParams]) -> Iterable[TypeStr]: +def get_event_abi_types_for_decoding( + event_inputs: Sequence[ABIEventParams], +) -> Iterable[TypeStr]: """ Event logs use the `keccak(value)` for indexed inputs of type `bytes` or `string`. Because of this we need to modify the types so that we can decode the log entries using the correct types. 
""" for input_abi in event_inputs: - if input_abi['indexed'] and is_dynamic_sized_type(input_abi['type']): - yield 'bytes32' + if input_abi["indexed"] and is_dynamic_sized_type(input_abi["type"]): + yield "bytes32" else: yield get_normalized_abi_arg_type(input_abi) @curry -def get_event_data(abi_codec: ABICodec, event_abi: ABIEvent, log_entry: LogReceipt) -> EventData: +def get_event_data( + abi_codec: ABICodec, event_abi: ABIEvent, log_entry: LogReceipt +) -> EventData: """ Given an event ABI and a log entry for that event, return the decoded event data """ - if event_abi['anonymous']: - log_topics = log_entry['topics'] - elif not log_entry['topics']: + if event_abi["anonymous"]: + log_topics = log_entry["topics"] + elif not log_entry["topics"]: raise MismatchedABI("Expected non-anonymous event to have 1 or more topics") # type ignored b/c event_abi_to_log_topic(event_abi: Dict[str, Any]) - elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]: # type: ignore + elif event_abi_to_log_topic(event_abi) != log_entry["topics"][0]: # type: ignore raise MismatchedABI("The event signature did not match the provided ABI") else: - log_topics = log_entry['topics'][1:] + log_topics = log_entry["topics"][1:] log_topics_abi = get_indexed_event_inputs(event_abi) log_topic_normalized_inputs = normalize_event_input_types(log_topics_abi) log_topic_types = get_event_abi_types_for_decoding(log_topic_normalized_inputs) - log_topic_names = get_abi_input_names(ABIEvent({'inputs': log_topics_abi})) + log_topic_names = get_abi_input_names(ABIEvent({"inputs": log_topics_abi})) if len(log_topics) != len(log_topic_types): raise LogTopicError( f"Expected {len(log_topic_types)} log topics. Got {len(log_topics)}" ) - log_data = hexstr_if_str(to_bytes, log_entry['data']) + log_data = hexstr_if_str(to_bytes, log_entry["data"]) log_data_abi = exclude_indexed_event_inputs(event_abi) log_data_normalized_inputs = normalize_event_input_types(log_data_abi) log_data_types = get_event_abi_types_for_decoding(log_data_normalized_inputs) - log_data_names = get_abi_input_names(ABIEvent({'inputs': log_data_abi})) + log_data_names = get_abi_input_names(ABIEvent({"inputs": log_data_abi})) # sanity check that there are not name intersections between the topic # names and the data argument names. 
@@ -240,36 +246,33 @@ def get_event_data(abi_codec: ABICodec, event_abi: ABIEvent, log_entry: LogRecei decoded_log_data = abi_codec.decode_abi(log_data_types, log_data) normalized_log_data = map_abi_data( - BASE_RETURN_NORMALIZERS, - log_data_types, - decoded_log_data + BASE_RETURN_NORMALIZERS, log_data_types, decoded_log_data ) decoded_topic_data = [ abi_codec.decode_single(topic_type, topic_data) - for topic_type, topic_data - in zip(log_topic_types, log_topics) + for topic_type, topic_data in zip(log_topic_types, log_topics) ] normalized_topic_data = map_abi_data( - BASE_RETURN_NORMALIZERS, - log_topic_types, - decoded_topic_data + BASE_RETURN_NORMALIZERS, log_topic_types, decoded_topic_data ) - event_args = dict(itertools.chain( - zip(log_topic_names, normalized_topic_data), - zip(log_data_names, normalized_log_data), - )) + event_args = dict( + itertools.chain( + zip(log_topic_names, normalized_topic_data), + zip(log_data_names, normalized_log_data), + ) + ) event_data = { - 'args': event_args, - 'event': event_abi['name'], - 'logIndex': log_entry['logIndex'], - 'transactionIndex': log_entry['transactionIndex'], - 'transactionHash': log_entry['transactionHash'], - 'address': log_entry['address'], - 'blockHash': log_entry['blockHash'], - 'blockNumber': log_entry['blockNumber'], + "args": event_args, + "event": event_abi["name"], + "logIndex": log_entry["logIndex"], + "transactionIndex": log_entry["transactionIndex"], + "transactionHash": log_entry["transactionHash"], + "address": log_entry["address"], + "blockHash": log_entry["blockHash"], + "blockNumber": log_entry["blockNumber"], } return cast(EventData, AttributeDict.recursive(event_data)) @@ -281,8 +284,9 @@ def pop_singlets(seq: Sequence[Any]) -> Iterable[Any]: @curry -def remove_trailing_from_seq(seq: Sequence[Any], - remove_value: Optional[Any] = None) -> Sequence[Any]: +def remove_trailing_from_seq( + seq: Sequence[Any], remove_value: Optional[Any] = None +) -> Sequence[Any]: index = len(seq) while index > 0 and seq[index - 1] == remove_value: index -= 1 @@ -291,7 +295,8 @@ def remove_trailing_from_seq(seq: Sequence[Any], normalize_topic_list = compose( remove_trailing_from_seq(remove_value=None), - pop_singlets,) + pop_singlets, +) def is_indexed(arg: Any) -> bool: @@ -311,16 +316,19 @@ class EventFilterBuilder: _immutable = False def __init__( - self, event_abi: ABIEvent, abi_codec: ABICodec, - formatter: Optional[EventData] = None + self, + event_abi: ABIEvent, + abi_codec: ABICodec, + formatter: Optional[EventData] = None, ) -> None: self.event_abi = event_abi self.abi_codec = abi_codec self.formatter = formatter self.event_topic = initialize_event_topics(self.event_abi) self.args = AttributeDict( - _build_argument_filters_from_event_abi(event_abi, abi_codec)) - self._ordered_arg_names = tuple(arg['name'] for arg in event_abi['inputs']) + _build_argument_filters_from_event_abi(event_abi, abi_codec) + ) + self._ordered_arg_names = tuple(arg["name"] for arg in event_abi["inputs"]) @property def fromBlock(self) -> BlockIdentifier: @@ -333,7 +341,8 @@ def fromBlock(self, value: BlockIdentifier) -> None: else: raise ValueError( f"fromBlock is already set to {self._fromBlock!r}. " - "Resetting filter parameters is not permitted") + "Resetting filter parameters is not permitted" + ) @property def toBlock(self) -> BlockIdentifier: @@ -346,7 +355,8 @@ def toBlock(self, value: BlockIdentifier) -> None: else: raise ValueError( f"toBlock is already set to {self._toBlock!r}. 
" - "Resetting filter parameters is not permitted") + "Resetting filter parameters is not permitted" + ) @property def address(self) -> ChecksumAddress: @@ -359,7 +369,8 @@ def address(self, value: ChecksumAddress) -> None: else: raise ValueError( f"address is already set to {self.address!r}. " - "Resetting filter parameters is not permitted") + "Resetting filter parameters is not permitted" + ) @property def ordered_args(self) -> Tuple[Any, ...]: @@ -393,7 +404,7 @@ def filter_params(self) -> FilterParams: "topics": self.topics, "fromBlock": self.fromBlock, "toBlock": self.toBlock, - "address": self.address + "address": self.address, } return valfilter(lambda x: x is not None, params) @@ -415,7 +426,7 @@ def deploy(self, w3: "Web3") -> "LogFilter": def initialize_event_topics(event_abi: ABIEvent) -> Union[bytes, List[Any]]: - if event_abi['anonymous'] is False: + if event_abi["anonymous"] is False: # https://github.com/python/mypy/issues/4976 return event_abi_to_log_topic(event_abi) # type: ignore else: @@ -425,14 +436,13 @@ def initialize_event_topics(event_abi: ABIEvent) -> Union[bytes, List[Any]]: @to_dict def _build_argument_filters_from_event_abi( event_abi: ABIEvent, abi_codec: ABICodec -) -> Iterable[Tuple[str, 'BaseArgumentFilter']]: - for item in event_abi['inputs']: - key = item['name'] - value: 'BaseArgumentFilter' - if item['indexed'] is True: +) -> Iterable[Tuple[str, "BaseArgumentFilter"]]: + for item in event_abi["inputs"]: + key = item["name"] + value: "BaseArgumentFilter" + if item["indexed"] is True: value = TopicArgumentFilter( - abi_codec=abi_codec, - arg_type=get_normalized_abi_arg_type(item) + abi_codec=abi_codec, arg_type=get_normalized_abi_arg_type(item) ) else: value = DataArgumentFilter(arg_type=get_normalized_abi_arg_type(item)) @@ -509,10 +519,10 @@ def _encode(self, value: Any) -> HexStr: class EventLogErrorFlags(Enum): - Discard = 'discard' - Ignore = 'ignore' - Strict = 'strict' - Warn = 'warn' + Discard = "discard" + Ignore = "ignore" + Strict = "strict" + Warn = "warn" @classmethod def flag_options(self) -> List[str]: diff --git a/web3/_utils/fee_utils.py b/web3/_utils/fee_utils.py index 8ba6758f23..cf3b1c95a9 100644 --- a/web3/_utils/fee_utils.py +++ b/web3/_utils/fee_utils.py @@ -17,12 +17,12 @@ PRIORITY_FEE_MIN = Wei(1000000000) # 1 gwei # 5th percentile fee history from the last 10 blocks -PRIORITY_FEE_HISTORY_PARAMS = (10, 'pending', [5.0]) +PRIORITY_FEE_HISTORY_PARAMS = (10, "pending", [5.0]) def _fee_history_priority_fee_estimate(fee_history: FeeHistory) -> Wei: # grab only non-zero fees and average against only that list - non_empty_block_fees = [fee[0] for fee in fee_history['reward'] if fee[0] != 0] + non_empty_block_fees = [fee[0] for fee in fee_history["reward"] if fee[0] != 0] # prevent division by zero in the extremely unlikely case that all fees within the polled fee # history range for the specified percentile are 0 @@ -33,9 +33,11 @@ def _fee_history_priority_fee_estimate(fee_history: FeeHistory) -> Wei: ) return ( # keep estimated priority fee within a max / min range - PRIORITY_FEE_MAX if priority_fee_average_for_percentile > PRIORITY_FEE_MAX else - PRIORITY_FEE_MIN if priority_fee_average_for_percentile < PRIORITY_FEE_MIN else - priority_fee_average_for_percentile + PRIORITY_FEE_MAX + if priority_fee_average_for_percentile > PRIORITY_FEE_MAX + else PRIORITY_FEE_MIN + if priority_fee_average_for_percentile < PRIORITY_FEE_MIN + else priority_fee_average_for_percentile ) diff --git a/web3/_utils/filters.py b/web3/_utils/filters.py 
index 79d3ae734f..bc60bb0c4b 100644 --- a/web3/_utils/filters.py +++ b/web3/_utils/filters.py @@ -72,51 +72,54 @@ def construct_event_filter_params( topics: Optional[Sequence[HexStr]] = None, fromBlock: Optional[BlockIdentifier] = None, toBlock: Optional[BlockIdentifier] = None, - address: Optional[ChecksumAddress] = None + address: Optional[ChecksumAddress] = None, ) -> Tuple[List[List[Optional[HexStr]]], FilterParams]: filter_params: FilterParams = {} - topic_set: Sequence[HexStr] = construct_event_topic_set(event_abi, abi_codec, argument_filters) + topic_set: Sequence[HexStr] = construct_event_topic_set( + event_abi, abi_codec, argument_filters + ) if topics is not None: if len(topic_set) > 1: raise TypeError( "Merging the topics argument with topics generated " - "from argument_filters is not supported.") + "from argument_filters is not supported." + ) topic_set = topics if len(topic_set) == 1 and is_list_like(topic_set[0]): # type ignored b/c list-like check on line 88 - filter_params['topics'] = topic_set[0] # type: ignore + filter_params["topics"] = topic_set[0] # type: ignore else: - filter_params['topics'] = topic_set + filter_params["topics"] = topic_set if address and contract_address: if is_list_like(address): - filter_params['address'] = [address] + [contract_address] + filter_params["address"] = [address] + [contract_address] elif is_string(address): - filter_params['address'] = [address, contract_address] + filter_params["address"] = [address, contract_address] else: raise ValueError( f"Unsupported type for `address` parameter: {type(address)}" ) elif address: - filter_params['address'] = address + filter_params["address"] = address elif contract_address: - filter_params['address'] = contract_address + filter_params["address"] = contract_address - if 'address' not in filter_params: + if "address" not in filter_params: pass - elif is_list_like(filter_params['address']): - for addr in filter_params['address']: + elif is_list_like(filter_params["address"]): + for addr in filter_params["address"]: validate_address(addr) else: - validate_address(filter_params['address']) + validate_address(filter_params["address"]) if fromBlock is not None: - filter_params['fromBlock'] = fromBlock + filter_params["fromBlock"] = fromBlock if toBlock is not None: - filter_params['toBlock'] = toBlock + filter_params["toBlock"] = toBlock data_filters_set = construct_event_data_set(event_abi, abi_codec, argument_filters) @@ -129,9 +132,7 @@ class Filter: poll_interval = None filter_id = None - def __init__(self, - filter_id: HexStr, - eth_module: "Eth") -> None: + def __init__(self, filter_id: HexStr, eth_module: "Eth") -> None: self.eth_module = eth_module self.filter_id = filter_id self.callbacks = [] @@ -153,19 +154,26 @@ def is_valid_entry(self, entry: LogReceipt) -> bool: """ return True - def _filter_valid_entries(self, entries: Collection[LogReceipt]) -> Iterator[LogReceipt]: + def _filter_valid_entries( + self, entries: Collection[LogReceipt] + ) -> Iterator[LogReceipt]: return filter(self.is_valid_entry, entries) def get_new_entries(self) -> List[LogReceipt]: - log_entries = self._filter_valid_entries(self.eth_module.get_filter_changes(self.filter_id)) + log_entries = self._filter_valid_entries( + self.eth_module.get_filter_changes(self.filter_id) + ) return self._format_log_entries(log_entries) def get_all_entries(self) -> List[LogReceipt]: - log_entries = self._filter_valid_entries(self.eth_module.get_filter_logs(self.filter_id)) + log_entries = self._filter_valid_entries( + 
self.eth_module.get_filter_logs(self.filter_id) + ) return self._format_log_entries(log_entries) - def _format_log_entries(self, - log_entries: Optional[Iterator[LogReceipt]] = None) -> List[LogReceipt]: + def _format_log_entries( + self, log_entries: Optional[Iterator[LogReceipt]] = None + ) -> List[LogReceipt]: if log_entries is None: return [] @@ -193,11 +201,11 @@ class LogFilter(Filter): def __init__(self, *args: Any, **kwargs: Any) -> None: self.log_entry_formatter = kwargs.pop( - 'log_entry_formatter', + "log_entry_formatter", self.log_entry_formatter, ) - if 'data_filter_set' in kwargs: - self.set_data_filters(kwargs.pop('data_filter_set')) + if "data_filter_set" in kwargs: + self.set_data_filters(kwargs.pop("data_filter_set")) super().__init__(*args, **kwargs) def format_entry(self, entry: LogReceipt) -> LogReceipt: @@ -205,7 +213,9 @@ def format_entry(self, entry: LogReceipt) -> LogReceipt: return self.log_entry_formatter(entry) return entry - def set_data_filters(self, data_filter_set: Collection[Tuple[TypeStr, Any]]) -> None: + def set_data_filters( + self, data_filter_set: Collection[Tuple[TypeStr, Any]] + ) -> None: """Sets the data filters (non indexed argument filters) Expects a set of tuples with the type and value, e.g.: @@ -213,12 +223,14 @@ def set_data_filters(self, data_filter_set: Collection[Tuple[TypeStr, Any]]) -> """ self.data_filter_set = data_filter_set if any(data_filter_set): - self.data_filter_set_function = match_fn(self.eth_module.codec, data_filter_set) + self.data_filter_set_function = match_fn( + self.eth_module.codec, data_filter_set + ) def is_valid_entry(self, entry: LogReceipt) -> bool: if not self.data_filter_set: return True - return bool(self.data_filter_set_function(entry['data'])) + return bool(self.data_filter_set_function(entry["data"])) def decode_utf8_bytes(value: bytes) -> str: @@ -245,7 +257,9 @@ def normalize_data_values(type_string: TypeStr, data_value: Any) -> Any: @curry -def match_fn(codec: ABICodec, match_values_and_abi: Collection[Tuple[str, Any]], data: Any) -> bool: +def match_fn( + codec: ABICodec, match_values_and_abi: Collection[Tuple[str, Any]], data: Any +) -> bool: """Match function used for filtering non-indexed event arguments. Values provided through the match_values_and_abi parameter are @@ -254,7 +268,9 @@ def match_fn(codec: ABICodec, match_values_and_abi: Collection[Tuple[str, Any]], abi_types, all_match_values = zip(*match_values_and_abi) decoded_values = codec.decode_abi(abi_types, HexBytes(data)) - for data_value, match_values, abi_type in zip(decoded_values, all_match_values, abi_types): + for data_value, match_values, abi_type in zip( + decoded_values, all_match_values, abi_types + ): if match_values is None: continue normalized_data = normalize_data_values(abi_type, data_value) @@ -276,10 +292,8 @@ class _UseExistingFilter(Exception): """ Internal exception, raised when a filter_id is passed into w3.eth.filter() """ - def __init__( - self, - filter_id: Union[str, FilterParams, HexStr] - ) -> None: + + def __init__(self, filter_id: Union[str, FilterParams, HexStr]) -> None: self.filter_id = filter_id @@ -299,12 +313,16 @@ def select_filter_method( elif is_hex(value): raise _UseExistingFilter(value) else: - raise ValidationError("Filter argument needs to be either 'latest'," - " 'pending', or a hex-encoded filter_id. Filter argument" - f" is: {value}") + raise ValidationError( + "Filter argument needs to be either 'latest'," + " 'pending', or a hex-encoded filter_id. 
Filter argument" + f" is: {value}" + ) elif isinstance(value, dict): return if_new_filter else: - raise ValidationError("Filter argument needs to be either the string " - "'pending' or 'latest', a filter_id, " - f"or a filter params dictionary. Filter argument is: {value}") + raise ValidationError( + "Filter argument needs to be either the string " + "'pending' or 'latest', a filter_id, " + f"or a filter params dictionary. Filter argument is: {value}" + ) diff --git a/web3/_utils/formatters.py b/web3/_utils/formatters.py index 99cc5ac2c3..bd20624747 100644 --- a/web3/_utils/formatters.py +++ b/web3/_utils/formatters.py @@ -45,12 +45,15 @@ def hex_to_integer(value: HexStr) -> int: integer_to_hex = hex -def apply_formatters_to_args(*formatters: Callable[[TValue], TReturn]) -> Callable[..., TReturn]: - return compose(*( - apply_formatter_at_index(formatter, index) - for index, formatter - in enumerate(formatters) - )) +def apply_formatters_to_args( + *formatters: Callable[[TValue], TReturn] +) -> Callable[..., TReturn]: + return compose( + *( + apply_formatter_at_index(formatter, index) + for index, formatter in enumerate(formatters) + ) + ) @curry @@ -84,8 +87,10 @@ def recursive_map(func: Callable[..., TReturn], data: Any) -> TReturn: Apply func to data, and any collection items inside data (using map_collection). Define func so that it only applies to the type of value that you want it to apply to. """ + def recurse(item: Any) -> TReturn: return recursive_map(func, item) + items_mapped = map_collection(recurse, data) return func(items_mapped) @@ -93,18 +98,22 @@ def recurse(item: Any) -> TReturn: def static_return(value: TValue) -> Callable[..., TValue]: def inner(*args: Any, **kwargs: Any) -> TValue: return value + return inner def static_result(value: TValue) -> Callable[..., Dict[str, TValue]]: def inner(*args: Any, **kwargs: Any) -> Dict[str, TValue]: - return {'result': value} + return {"result": value} + return inner @curry @to_dict -def apply_key_map(key_mappings: Dict[Any, Any], value: Dict[Any, Any]) -> Iterable[Tuple[Any, Any]]: +def apply_key_map( + key_mappings: Dict[Any, Any], value: Dict[Any, Any] +) -> Iterable[Tuple[Any, Any]]: for key, item in value.items(): if key in key_mappings: yield key_mappings[key], item diff --git a/web3/_utils/http.py b/web3/_utils/http.py index e244183877..404c828a6f 100644 --- a/web3/_utils/http.py +++ b/web3/_utils/http.py @@ -1,5 +1,5 @@ def construct_user_agent(class_name: str) -> str: from web3 import __version__ as web3_version - user_agent = f'Web3.py/{web3_version}/{class_name}' + user_agent = f"Web3.py/{web3_version}/{class_name}" return user_agent diff --git a/web3/_utils/hypothesis.py b/web3/_utils/hypothesis.py index fa8e22c051..8f996a3075 100644 --- a/web3/_utils/hypothesis.py +++ b/web3/_utils/hypothesis.py @@ -7,4 +7,4 @@ def hexstr_strategy() -> SearchStrategy[str]: - return st.from_regex(r'\A(0[xX])?[0-9a-fA-F]*\Z') + return st.from_regex(r"\A(0[xX])?[0-9a-fA-F]*\Z") diff --git a/web3/_utils/math.py b/web3/_utils/math.py index 96052bb00d..7cc7389046 100644 --- a/web3/_utils/math.py +++ b/web3/_utils/math.py @@ -8,10 +8,10 @@ ) -def percentile(values: Optional[Sequence[int]] = None, - percentile: Optional[float] = None) -> float: - """Calculates a simplified weighted average percentile - """ +def percentile( + values: Optional[Sequence[int]] = None, percentile: Optional[float] = None +) -> float: + """Calculates a simplified weighted average percentile""" if values in [None, tuple(), []] or len(values) < 1: raise InsufficientData( 
f"Expected a sequence of at least 1 integers, got {values!r}" diff --git a/web3/_utils/method_formatters.py b/web3/_utils/method_formatters.py index d0a382055f..976e9d7e88 100644 --- a/web3/_utils/method_formatters.py +++ b/web3/_utils/method_formatters.py @@ -109,7 +109,7 @@ def bytes_to_ascii(value: bytes) -> str: - return codecs.decode(value, 'ascii') + return codecs.decode(value, "ascii") to_ascii_if_bytes = apply_formatter_if(is_bytes, bytes_to_ascii) @@ -151,26 +151,26 @@ def is_attrdict(val: Any) -> bool: TRANSACTION_RESULT_FORMATTERS = { - 'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex), - 'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex), - 'nonce': to_integer_if_hex, - 'gas': to_integer_if_hex, - 'gasPrice': to_integer_if_hex, - 'maxFeePerGas': to_integer_if_hex, - 'maxPriorityFeePerGas': to_integer_if_hex, - 'value': to_integer_if_hex, - 'from': to_checksum_address, - 'publicKey': apply_formatter_if(is_not_null, to_hexbytes(64)), - 'r': apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), - 'raw': HexBytes, - 's': apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), - 'to': apply_formatter_if(is_address, to_checksum_address), - 'hash': to_hexbytes(32), - 'v': apply_formatter_if(is_not_null, to_integer_if_hex), - 'standardV': apply_formatter_if(is_not_null, to_integer_if_hex), - 'type': apply_formatter_if(is_not_null, to_integer_if_hex), - 'chainId': apply_formatter_if(is_not_null, to_integer_if_hex), + "blockHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "blockNumber": apply_formatter_if(is_not_null, to_integer_if_hex), + "transactionIndex": apply_formatter_if(is_not_null, to_integer_if_hex), + "nonce": to_integer_if_hex, + "gas": to_integer_if_hex, + "gasPrice": to_integer_if_hex, + "maxFeePerGas": to_integer_if_hex, + "maxPriorityFeePerGas": to_integer_if_hex, + "value": to_integer_if_hex, + "from": to_checksum_address, + "publicKey": apply_formatter_if(is_not_null, to_hexbytes(64)), + "r": apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), + "raw": HexBytes, + "s": apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), + "to": apply_formatter_if(is_address, to_checksum_address), + "hash": to_hexbytes(32), + "v": apply_formatter_if(is_not_null, to_integer_if_hex), + "standardV": apply_formatter_if(is_not_null, to_integer_if_hex), + "type": apply_formatter_if(is_not_null, to_integer_if_hex), + "chainId": apply_formatter_if(is_not_null, to_integer_if_hex), } @@ -182,14 +182,14 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: LOG_ENTRY_FORMATTERS = { - 'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex), - 'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex), - 'transactionHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'logIndex': to_integer_if_hex, - 'address': to_checksum_address, - 'topics': apply_list_to_array_formatter(to_hexbytes(32)), - 'data': HexBytes, + "blockHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "blockNumber": apply_formatter_if(is_not_null, to_integer_if_hex), + "transactionIndex": apply_formatter_if(is_not_null, to_integer_if_hex), + "transactionHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "logIndex": to_integer_if_hex, + "address": to_checksum_address, + "topics": 
apply_list_to_array_formatter(to_hexbytes(32)), + "data": HexBytes, } @@ -197,50 +197,55 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: RECEIPT_FORMATTERS = { - 'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex), - 'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex), - 'transactionHash': to_hexbytes(32), - 'cumulativeGasUsed': to_integer_if_hex, - 'status': to_integer_if_hex, - 'gasUsed': to_integer_if_hex, - 'contractAddress': apply_formatter_if(is_not_null, to_checksum_address), - 'logs': apply_list_to_array_formatter(log_entry_formatter), - 'logsBloom': to_hexbytes(256), - 'from': apply_formatter_if(is_not_null, to_checksum_address), - 'to': apply_formatter_if(is_address, to_checksum_address), - 'effectiveGasPrice': to_integer_if_hex, - 'type': to_integer_if_hex, + "blockHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "blockNumber": apply_formatter_if(is_not_null, to_integer_if_hex), + "transactionIndex": apply_formatter_if(is_not_null, to_integer_if_hex), + "transactionHash": to_hexbytes(32), + "cumulativeGasUsed": to_integer_if_hex, + "status": to_integer_if_hex, + "gasUsed": to_integer_if_hex, + "contractAddress": apply_formatter_if(is_not_null, to_checksum_address), + "logs": apply_list_to_array_formatter(log_entry_formatter), + "logsBloom": to_hexbytes(256), + "from": apply_formatter_if(is_not_null, to_checksum_address), + "to": apply_formatter_if(is_address, to_checksum_address), + "effectiveGasPrice": to_integer_if_hex, + "type": to_integer_if_hex, } receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS) BLOCK_FORMATTERS = { - 'baseFeePerGas': to_integer_if_hex, - 'extraData': apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), - 'gasLimit': to_integer_if_hex, - 'gasUsed': to_integer_if_hex, - 'size': to_integer_if_hex, - 'timestamp': to_integer_if_hex, - 'hash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'logsBloom': apply_formatter_if(is_not_null, to_hexbytes(256)), - 'miner': apply_formatter_if(is_not_null, to_checksum_address), - 'mixHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'nonce': apply_formatter_if(is_not_null, to_hexbytes(8, variable_length=True)), - 'number': apply_formatter_if(is_not_null, to_integer_if_hex), - 'parentHash': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'sha3Uncles': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'uncles': apply_list_to_array_formatter(to_hexbytes(32)), - 'difficulty': to_integer_if_hex, - 'receiptsRoot': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'stateRoot': apply_formatter_if(is_not_null, to_hexbytes(32)), - 'totalDifficulty': to_integer_if_hex, - 'transactions': apply_one_of_formatters(( - (is_array_of_dicts, apply_list_to_array_formatter(transaction_result_formatter)), - (is_array_of_strings, apply_list_to_array_formatter(to_hexbytes(32))), - )), - 'transactionsRoot': apply_formatter_if(is_not_null, to_hexbytes(32)), + "baseFeePerGas": to_integer_if_hex, + "extraData": apply_formatter_if(is_not_null, to_hexbytes(32, variable_length=True)), + "gasLimit": to_integer_if_hex, + "gasUsed": to_integer_if_hex, + "size": to_integer_if_hex, + "timestamp": to_integer_if_hex, + "hash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "logsBloom": apply_formatter_if(is_not_null, to_hexbytes(256)), + "miner": apply_formatter_if(is_not_null, to_checksum_address), + "mixHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + 
"nonce": apply_formatter_if(is_not_null, to_hexbytes(8, variable_length=True)), + "number": apply_formatter_if(is_not_null, to_integer_if_hex), + "parentHash": apply_formatter_if(is_not_null, to_hexbytes(32)), + "sha3Uncles": apply_formatter_if(is_not_null, to_hexbytes(32)), + "uncles": apply_list_to_array_formatter(to_hexbytes(32)), + "difficulty": to_integer_if_hex, + "receiptsRoot": apply_formatter_if(is_not_null, to_hexbytes(32)), + "stateRoot": apply_formatter_if(is_not_null, to_hexbytes(32)), + "totalDifficulty": to_integer_if_hex, + "transactions": apply_one_of_formatters( + ( + ( + is_array_of_dicts, + apply_list_to_array_formatter(transaction_result_formatter), + ), + (is_array_of_strings, apply_list_to_array_formatter(to_hexbytes(32))), + ) + ), + "transactionsRoot": apply_formatter_if(is_not_null, to_hexbytes(32)), } @@ -248,11 +253,11 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: SYNCING_FORMATTERS = { - 'startingBlock': to_integer_if_hex, - 'currentBlock': to_integer_if_hex, - 'highestBlock': to_integer_if_hex, - 'knownStates': to_integer_if_hex, - 'pulledStates': to_integer_if_hex, + "startingBlock": to_integer_if_hex, + "currentBlock": to_integer_if_hex, + "highestBlock": to_integer_if_hex, + "knownStates": to_integer_if_hex, + "pulledStates": to_integer_if_hex, } @@ -260,11 +265,11 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: TRANSACTION_POOL_CONTENT_FORMATTERS = { - 'pending': compose( + "pending": compose( curried.keymap(to_ascii_if_bytes), curried.valmap(transaction_result_formatter), ), - 'queued': compose( + "queued": compose( curried.keymap(to_ascii_if_bytes), curried.valmap(transaction_result_formatter), ), @@ -277,8 +282,8 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: TRANSACTION_POOL_INSPECT_FORMATTERS = { - 'pending': curried.keymap(to_ascii_if_bytes), - 'queued': curried.keymap(to_ascii_if_bytes), + "pending": curried.keymap(to_ascii_if_bytes), + "queued": curried.keymap(to_ascii_if_bytes), } @@ -287,112 +292,118 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: ) FEE_HISTORY_FORMATTERS = { - 'baseFeePerGas': apply_formatter_to_array(to_integer_if_hex), - 'gasUsedRatio': apply_formatter_if(is_not_null, apply_formatter_to_array(float)), - 'oldestBlock': to_integer_if_hex, - 'reward': apply_formatter_if(is_not_null, apply_formatter_to_array( - apply_formatter_to_array(to_integer_if_hex))), + "baseFeePerGas": apply_formatter_to_array(to_integer_if_hex), + "gasUsedRatio": apply_formatter_if(is_not_null, apply_formatter_to_array(float)), + "oldestBlock": to_integer_if_hex, + "reward": apply_formatter_if( + is_not_null, + apply_formatter_to_array(apply_formatter_to_array(to_integer_if_hex)), + ), } fee_history_formatter = apply_formatters_to_dict(FEE_HISTORY_FORMATTERS) STORAGE_PROOF_FORMATTERS = { - 'key': HexBytes, - 'value': HexBytes, - 'proof': apply_list_to_array_formatter(HexBytes), + "key": HexBytes, + "value": HexBytes, + "proof": apply_list_to_array_formatter(HexBytes), } ACCOUNT_PROOF_FORMATTERS = { - 'address': to_checksum_address, - 'accountProof': apply_list_to_array_formatter(HexBytes), - 'balance': to_integer_if_hex, - 'codeHash': to_hexbytes(32), - 'nonce': to_integer_if_hex, - 'storageHash': to_hexbytes(32), - 'storageProof': apply_list_to_array_formatter( + "address": to_checksum_address, + "accountProof": apply_list_to_array_formatter(HexBytes), + "balance": to_integer_if_hex, + "codeHash": to_hexbytes(32), + "nonce": 
to_integer_if_hex, + "storageHash": to_hexbytes(32), + "storageProof": apply_list_to_array_formatter( apply_formatters_to_dict(STORAGE_PROOF_FORMATTERS) - ) + ), } proof_formatter = apply_formatters_to_dict(ACCOUNT_PROOF_FORMATTERS) FILTER_PARAMS_FORMATTERS = { - 'fromBlock': apply_formatter_if(is_integer, integer_to_hex), - 'toBlock': apply_formatter_if(is_integer, integer_to_hex), + "fromBlock": apply_formatter_if(is_integer, integer_to_hex), + "toBlock": apply_formatter_if(is_integer, integer_to_hex), } filter_params_formatter = apply_formatters_to_dict(FILTER_PARAMS_FORMATTERS) -filter_result_formatter = apply_one_of_formatters(( - (is_array_of_dicts, apply_list_to_array_formatter(log_entry_formatter)), - (is_array_of_strings, apply_list_to_array_formatter(to_hexbytes(32))), -)) +filter_result_formatter = apply_one_of_formatters( + ( + (is_array_of_dicts, apply_list_to_array_formatter(log_entry_formatter)), + (is_array_of_strings, apply_list_to_array_formatter(to_hexbytes(32))), + ) +) TRANSACTION_REQUEST_FORMATTERS = { - 'maxFeePerGas': to_hex_if_integer, - 'maxPriorityFeePerGas': to_hex_if_integer, + "maxFeePerGas": to_hex_if_integer, + "maxPriorityFeePerGas": to_hex_if_integer, } transaction_request_formatter = apply_formatters_to_dict(TRANSACTION_REQUEST_FORMATTERS) transaction_param_formatter = compose( - remove_key_if('to', lambda txn: txn['to'] in {'', b'', None}), - remove_key_if('gasPrice', lambda txn: txn['gasPrice'] in {'', b'', None}), + remove_key_if("to", lambda txn: txn["to"] in {"", b"", None}), + remove_key_if("gasPrice", lambda txn: txn["gasPrice"] in {"", b"", None}), transaction_request_formatter, ) call_without_override: Callable[ - [Tuple[TxParams, BlockIdentifier]], - Tuple[Dict[str, Any], int] + [Tuple[TxParams, BlockIdentifier]], Tuple[Dict[str, Any], int] ] -call_without_override = apply_formatters_to_sequence([ - transaction_param_formatter, - to_hex_if_integer, -]) +call_without_override = apply_formatters_to_sequence( + [ + transaction_param_formatter, + to_hex_if_integer, + ] +) call_with_override: Callable[ [Tuple[TxParams, BlockIdentifier, CallOverrideParams]], Tuple[Dict[str, Any], int, Dict[str, Any]], ] -call_with_override = apply_formatters_to_sequence([ - transaction_param_formatter, - to_hex_if_integer, - lambda x: x, -]) +call_with_override = apply_formatters_to_sequence( + [ + transaction_param_formatter, + to_hex_if_integer, + lambda x: x, + ] +) estimate_gas_without_block_id: Callable[[Dict[str, Any]], Dict[str, Any]] estimate_gas_without_block_id = apply_formatter_at_index(transaction_param_formatter, 0) estimate_gas_with_block_id: Callable[ - [Tuple[Dict[str, Any], Union[str, int]]], - Tuple[Dict[str, Any], int] + [Tuple[Dict[str, Any], Union[str, int]]], Tuple[Dict[str, Any], int] ] -estimate_gas_with_block_id = apply_formatters_to_sequence([ - transaction_param_formatter, - to_hex_if_integer, -]) +estimate_gas_with_block_id = apply_formatters_to_sequence( + [ + transaction_param_formatter, + to_hex_if_integer, + ] +) SIGNED_TX_FORMATTER = { - 'raw': HexBytes, - 'tx': transaction_result_formatter, + "raw": HexBytes, + "tx": transaction_result_formatter, } signed_tx_formatter = apply_formatters_to_dict(SIGNED_TX_FORMATTER) -FILTER_PARAM_NORMALIZERS = apply_formatters_to_dict({ - 'address': apply_formatter_if(is_string, lambda x: [x]) -}) +FILTER_PARAM_NORMALIZERS = apply_formatters_to_dict( + {"address": apply_formatter_if(is_string, lambda x: [x])} +) -GETH_WALLET_FORMATTER = { - 'address': to_checksum_address -} +GETH_WALLET_FORMATTER = 
{"address": to_checksum_address} geth_wallet_formatter = apply_formatters_to_dict(GETH_WALLET_FORMATTER) GETH_WALLETS_FORMATTER = { - 'accounts': apply_list_to_array_formatter(geth_wallet_formatter), + "accounts": apply_list_to_array_formatter(geth_wallet_formatter), } geth_wallets_formatter = apply_formatters_to_dict(GETH_WALLETS_FORMATTER) @@ -402,7 +413,7 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: # Eth RPC.eth_feeHistory: compose( apply_formatter_at_index(to_hex_if_integer, 0), - apply_formatter_at_index(to_hex_if_integer, 1) + apply_formatter_at_index(to_hex_if_integer, 1), ), RPC.eth_getBalance: apply_formatter_at_index(to_hex_if_integer, 1), RPC.eth_getBlockByNumber: apply_formatter_at_index(to_hex_if_integer, 0), @@ -421,7 +432,9 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: apply_formatter_at_index(to_hex_if_integer, 0), apply_formatter_at_index(to_hex_if_integer, 1), ), - RPC.eth_getRawTransactionByBlockHashAndIndex: apply_formatter_at_index(to_hex_if_integer, 1), + RPC.eth_getRawTransactionByBlockHashAndIndex: apply_formatter_at_index( + to_hex_if_integer, 1 + ), RPC.eth_getUncleCountByBlockNumber: apply_formatter_at_index(to_hex_if_integer, 0), RPC.eth_getUncleByBlockNumberAndIndex: compose( apply_formatter_at_index(to_hex_if_integer, 0), @@ -430,14 +443,18 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: RPC.eth_getUncleByBlockHashAndIndex: apply_formatter_at_index(to_hex_if_integer, 1), RPC.eth_newFilter: apply_formatter_at_index(filter_params_formatter, 0), RPC.eth_getLogs: apply_formatter_at_index(filter_params_formatter, 0), - RPC.eth_call: apply_one_of_formatters(( - (is_length(2), call_without_override), - (is_length(3), call_with_override), - )), - RPC.eth_estimateGas: apply_one_of_formatters(( - (is_length(1), estimate_gas_without_block_id), - (is_length(2), estimate_gas_with_block_id), - )), + RPC.eth_call: apply_one_of_formatters( + ( + (is_length(2), call_without_override), + (is_length(3), call_with_override), + ) + ), + RPC.eth_estimateGas: apply_one_of_formatters( + ( + (is_length(1), estimate_gas_without_block_id), + (is_length(2), estimate_gas_with_block_id), + ) + ), RPC.eth_sendTransaction: apply_formatter_at_index(transaction_param_formatter, 0), RPC.eth_signTransaction: apply_formatter_at_index(transaction_param_formatter, 0), RPC.eth_getProof: apply_formatter_at_index(to_hex_if_integer, 2), @@ -448,14 +465,16 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: ), RPC.personal_sign: apply_formatter_at_index(text_if_str(to_hex), 0), RPC.personal_ecRecover: apply_formatter_at_index(text_if_str(to_hex), 0), - RPC.personal_sendTransaction: apply_formatter_at_index(transaction_param_formatter, 0), + RPC.personal_sendTransaction: apply_formatter_at_index( + transaction_param_formatter, 0 + ), # Snapshot and Revert RPC.evm_revert: apply_formatter_at_index(integer_to_hex, 0), RPC.trace_replayBlockTransactions: apply_formatter_at_index(to_hex_if_integer, 0), RPC.trace_block: apply_formatter_at_index(to_hex_if_integer, 0), RPC.trace_call: compose( apply_formatter_at_index(transaction_param_formatter, 0), - apply_formatter_at_index(to_hex_if_integer, 2) + apply_formatter_at_index(to_hex_if_integer, 2), ), } @@ -493,7 +512,9 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: is_not_null, transaction_result_formatter, ), - RPC.eth_getTransactionByHash: apply_formatter_if(is_not_null, transaction_result_formatter), + 
RPC.eth_getTransactionByHash: apply_formatter_if( + is_not_null, transaction_result_formatter + ), RPC.eth_getTransactionCount: to_integer_if_hex, RPC.eth_getTransactionReceipt: apply_formatter_if( is_not_null, @@ -529,12 +550,12 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: } ATTRDICT_FORMATTER = { - '*': apply_formatter_if(is_dict and not_attrdict, AttributeDict.recursive) + "*": apply_formatter_if(is_dict and not_attrdict, AttributeDict.recursive) } METHOD_NORMALIZERS: Dict[RPCEndpoint, Callable[..., Any]] = { RPC.eth_getLogs: apply_formatter_at_index(FILTER_PARAM_NORMALIZERS, 0), - RPC.eth_newFilter: apply_formatter_at_index(FILTER_PARAM_NORMALIZERS, 0) + RPC.eth_newFilter: apply_formatter_at_index(FILTER_PARAM_NORMALIZERS, 0), } STANDARD_NORMALIZERS = [ @@ -548,13 +569,13 @@ def apply_list_to_array_formatter(formatter: Any) -> Callable[..., Any]: ABI_REQUEST_FORMATTERS = abi_request_formatters(STANDARD_NORMALIZERS, RPC_ABIS) # the first 4 bytes of keccak hash for: "OffchainLookup(address,string[],bytes,bytes4,bytes)" -OFFCHAIN_LOOKUP_FUNC_SELECTOR = '0x556f1830' +OFFCHAIN_LOOKUP_FUNC_SELECTOR = "0x556f1830" OFFCHAIN_LOOKUP_FIELDS = { - 'sender': 'address', - 'urls': 'string[]', - 'callData': 'bytes', - 'callbackFunction': 'bytes4', - 'extraData': 'bytes', + "sender": "address", + "urls": "string[]", + "callData": "bytes", + "callbackFunction": "bytes4", + "extraData": "bytes", } @@ -569,48 +590,54 @@ def raise_solidity_error_on_revert(response: RPCResponse) -> RPCResponse: See also https://solidity.readthedocs.io/en/v0.6.3/control-structures.html#revert """ - if not isinstance(response['error'], dict): - raise ValueError('Error expected to be a dict') + if not isinstance(response["error"], dict): + raise ValueError("Error expected to be a dict") - data = response['error'].get('data', '') + data = response["error"].get("data", "") # Ganache case: - if isinstance(data, dict) and response['error'].get('message'): + if isinstance(data, dict) and response["error"].get("message"): raise ContractLogicError(f'execution reverted: {response["error"]["message"]}') # Parity/OpenEthereum case: - if data.startswith('Reverted '): + if data.startswith("Reverted "): # "Reverted", function selector and offset are always the same for revert errors - prefix = 'Reverted 0x08c379a00000000000000000000000000000000000000000000000000000000000000020' # noqa: 501 + prefix = "Reverted 0x08c379a00000000000000000000000000000000000000000000000000000000000000020" # noqa: 501 if not data.startswith(prefix): - raise ContractLogicError('execution reverted') + raise ContractLogicError("execution reverted") - reason_length = int(data[len(prefix):len(prefix) + 64], 16) - reason = data[len(prefix) + 64:len(prefix) + 64 + reason_length * 2] - raise ContractLogicError(f'execution reverted: {bytes.fromhex(reason).decode("utf8")}') + reason_length = int(data[len(prefix) : len(prefix) + 64], 16) + reason = data[len(prefix) + 64 : len(prefix) + 64 + reason_length * 2] + raise ContractLogicError( + f'execution reverted: {bytes.fromhex(reason).decode("utf8")}' + ) # --- EIP-3668 | CCIP Read --- # # 0x556f1830 is the function selector for OffchainLookup(address,string[],bytes,bytes4,bytes) - if data[:10] == '0x556f1830': + if data[:10] == "0x556f1830": parsed_data_as_bytes = to_bytes(hexstr=data[10:]) - abi_decoded_data = decode_abi(OFFCHAIN_LOOKUP_FIELDS.values(), parsed_data_as_bytes) - offchain_lookup_payload = dict(zip(OFFCHAIN_LOOKUP_FIELDS.keys(), abi_decoded_data)) + abi_decoded_data = 
decode_abi( + OFFCHAIN_LOOKUP_FIELDS.values(), parsed_data_as_bytes + ) + offchain_lookup_payload = dict( + zip(OFFCHAIN_LOOKUP_FIELDS.keys(), abi_decoded_data) + ) raise OffchainLookup(offchain_lookup_payload) # Geth case: - if 'message' in response['error'] and response['error'].get('code', '') == 3: - raise ContractLogicError(response['error']['message']) + if "message" in response["error"] and response["error"].get("code", "") == 3: + raise ContractLogicError(response["error"]["message"]) # Geth Revert without error message case: - if 'execution reverted' in response['error'].get('message'): - raise ContractLogicError('execution reverted') + if "execution reverted" in response["error"].get("message"): + raise ContractLogicError("execution reverted") return response def raise_invalid_parity_mode(response: RPCResponse) -> NoReturn: # eth-tester sends back an invalid RPCError, which makes mypy complain - error_message = response['error'].get('message') # type: ignore + error_message = response["error"].get("message") # type: ignore raise InvalidParityMode(error_message) @@ -623,7 +650,8 @@ def raise_invalid_parity_mode(response: RPCResponse) -> NoReturn: @to_tuple def combine_formatters( - formatter_maps: Collection[Dict[RPCEndpoint, Callable[..., TReturn]]], method_name: RPCEndpoint + formatter_maps: Collection[Dict[RPCEndpoint, Callable[..., TReturn]]], + method_name: RPCEndpoint, ) -> Iterable[Callable[..., TReturn]]: for formatter_map in formatter_maps: if method_name in formatter_map: @@ -682,7 +710,9 @@ def raise_transaction_not_found(params: Tuple[_Hash32]) -> NoReturn: raise TransactionNotFound(message) -def raise_transaction_not_found_with_index(params: Tuple[BlockIdentifier, int]) -> NoReturn: +def raise_transaction_not_found_with_index( + params: Tuple[BlockIdentifier, int] +) -> NoReturn: try: block_identifier = params[0] transaction_index = to_integer_if_hex(params[1]) @@ -727,9 +757,11 @@ def filter_wrapper( elif method == RPC.eth_newFilter: return LogFilter(filter_id, eth_module=module) else: - raise NotImplementedError('Filter wrapper needs to be used with either ' - f'{RPC.eth_newBlockFilter}, {RPC.eth_newPendingTransactionFilter}' - f' or {RPC.eth_newFilter}') + raise NotImplementedError( + "Filter wrapper needs to be used with either " + f"{RPC.eth_newBlockFilter}, {RPC.eth_newPendingTransactionFilter}" + f" or {RPC.eth_newFilter}" + ) FILTER_RESULT_FORMATTERS: Dict[RPCEndpoint, Callable[..., Any]] = { @@ -741,9 +773,9 @@ def filter_wrapper( @to_tuple def apply_module_to_formatters( - formatters: Tuple[Callable[..., TReturn]], - module: "Module", - method_name: Union[RPCEndpoint, Callable[..., RPCEndpoint]], + formatters: Tuple[Callable[..., TReturn]], + module: "Module", + method_name: Union[RPCEndpoint, Callable[..., RPCEndpoint]], ) -> Iterable[Callable[..., TReturn]]: for f in formatters: yield partial(f, module, method_name) @@ -753,21 +785,17 @@ def get_result_formatters( method_name: Union[RPCEndpoint, Callable[..., RPCEndpoint]], module: "Module", ) -> Dict[str, Callable[..., Any]]: - formatters = combine_formatters( - (PYTHONIC_RESULT_FORMATTERS,), - method_name - ) + formatters = combine_formatters((PYTHONIC_RESULT_FORMATTERS,), method_name) formatters_requiring_module = combine_formatters( - (FILTER_RESULT_FORMATTERS,), - method_name + (FILTER_RESULT_FORMATTERS,), method_name ) partial_formatters = apply_module_to_formatters( - formatters_requiring_module, - module, - method_name + formatters_requiring_module, module, method_name + ) + attrdict_formatter = 
apply_formatter_if( + is_dict and not_attrdict, AttributeDict.recursive ) - attrdict_formatter = apply_formatter_if(is_dict and not_attrdict, AttributeDict.recursive) return compose(*partial_formatters, attrdict_formatter, *formatters) diff --git a/web3/_utils/module.py b/web3/_utils/module.py index 7bba1a86db..5e4fac9f3a 100644 --- a/web3/_utils/module.py +++ b/web3/_utils/module.py @@ -26,7 +26,7 @@ def _validate_init_params_and_return_if_found(module_class: Any) -> List[str]: init_params_raw = list(inspect.signature(module_class.__init__).parameters) module_init_params = [ - param for param in init_params_raw if param not in ['self', 'args', 'kwargs'] + param for param in init_params_raw if param not in ["self", "args", "kwargs"] ] if len(module_init_params) > 1: @@ -42,7 +42,7 @@ def _validate_init_params_and_return_if_found(module_class: Any) -> List[str]: def attach_modules( parent_module: Union["Web3", "Module"], module_definitions: Dict[str, Any], - w3: Optional[Union["Web3", "Module"]] = None + w3: Optional[Union["Web3", "Module"]] = None, ) -> None: for module_name, module_info in module_definitions.items(): module_info_is_list_like = isinstance(module_info, Sequence) @@ -56,7 +56,7 @@ def attach_modules( ) # The parent module is the ``Web3`` instance on first run of the loop - if type(parent_module).__name__ == 'Web3': + if type(parent_module).__name__ == "Web3": w3 = parent_module module_init_params = _validate_init_params_and_return_if_found(module_class) @@ -76,4 +76,6 @@ def attach_modules( module = getattr(parent_module, module_name) attach_modules(module, submodule_definitions, w3) elif len(module_info) != 1: - raise ValidationError("Module definitions can only have 1 or 2 elements.") + raise ValidationError( + "Module definitions can only have 1 or 2 elements." + ) diff --git a/web3/_utils/normalizers.py b/web3/_utils/normalizers.py index 13fda343b3..993890ac2d 100644 --- a/web3/_utils/normalizers.py +++ b/web3/_utils/normalizers.py @@ -80,6 +80,7 @@ def wrapper(type_str: TypeStr, data: Any) -> Tuple[TypeStr, Any]: return type_str, data else: return modified + return wrapper @@ -89,16 +90,18 @@ def wrapper(type_str: TypeStr, data: Any) -> Tuple[TypeStr, Any]: @implicitly_identity -def addresses_checksummed(type_str: TypeStr, data: Any) -> Tuple[TypeStr, ChecksumAddress]: - if type_str == 'address': +def addresses_checksummed( + type_str: TypeStr, data: Any +) -> Tuple[TypeStr, ChecksumAddress]: + if type_str == "address": return type_str, to_checksum_address(data) return None @implicitly_identity def decode_abi_strings(type_str: TypeStr, data: Any) -> Tuple[TypeStr, str]: - if type_str == 'string': - return type_str, codecs.decode(data, 'utf8', 'backslashreplace') + if type_str == "string": + return type_str, codecs.decode(data, "utf8", "backslashreplace") return None @@ -115,6 +118,7 @@ def parse_basic_type_str( that type string does not represent a basic type (i.e. non-tuple type) or is not parsable, the normalizer does nothing. 
""" + @functools.wraps(old_normalizer) def new_normalizer(type_str: TypeStr, data: Any) -> Tuple[TypeStr, Any]: try: @@ -136,7 +140,7 @@ def new_normalizer(type_str: TypeStr, data: Any) -> Tuple[TypeStr, Any]: def abi_bytes_to_hex( abi_type: BasicType, type_str: TypeStr, data: Any ) -> Optional[Tuple[TypeStr, HexStr]]: - if abi_type.base != 'bytes' or abi_type.is_array: + if abi_type.base != "bytes" or abi_type.is_array: return None bytes_data = hexstr_if_str(to_bytes, data) @@ -150,7 +154,7 @@ def abi_bytes_to_hex( f"but instead was {len(bytes_data)}: {data!r}" ) - padded = bytes_data.ljust(num_bytes, b'\0') + padded = bytes_data.ljust(num_bytes, b"\0") return type_str, to_hex(padded) @@ -159,7 +163,7 @@ def abi_bytes_to_hex( def abi_int_to_hex( abi_type: BasicType, type_str: TypeStr, data: Any ) -> Optional[Tuple[TypeStr, HexStr]]: - if abi_type.base == 'uint' and not abi_type.is_array: + if abi_type.base == "uint" and not abi_type.is_array: # double check? return type_str, hexstr_if_str(to_hex, data) return None @@ -167,14 +171,14 @@ def abi_int_to_hex( @implicitly_identity def abi_string_to_hex(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr, str]]: - if type_str == 'string': + if type_str == "string": return type_str, text_if_str(to_hex, data) return None @implicitly_identity def abi_string_to_text(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr, str]]: - if type_str == 'string': + if type_str == "string": return type_str, text_if_str(to_text, data) return None @@ -184,14 +188,16 @@ def abi_string_to_text(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr, def abi_bytes_to_bytes( abi_type: BasicType, type_str: TypeStr, data: Any ) -> Optional[Tuple[TypeStr, HexStr]]: - if abi_type.base == 'bytes' and not abi_type.is_array: + if abi_type.base == "bytes" and not abi_type.is_array: return type_str, hexstr_if_str(to_bytes, data) return None @implicitly_identity -def abi_address_to_hex(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr, ChecksumAddress]]: - if type_str == 'address': +def abi_address_to_hex( + type_str: TypeStr, data: Any +) -> Optional[Tuple[TypeStr, ChecksumAddress]]: + if type_str == "address": validate_address(data) if is_binary_address(data): return type_str, to_checksum_address(data) @@ -200,7 +206,7 @@ def abi_address_to_hex(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr, @curry def abi_ens_resolver(w3: "Web3", type_str: TypeStr, val: Any) -> Tuple[TypeStr, Any]: - if type_str == 'address' and is_ens_name(val): + if type_str == "address" and is_ens_name(val): if w3 is None: raise InvalidAddress( f"Could not look up name {val!r} because no web3" @@ -208,8 +214,7 @@ def abi_ens_resolver(w3: "Web3", type_str: TypeStr, val: Any) -> Tuple[TypeStr, ) elif w3.ens is None: raise InvalidAddress( - f"Could not look up name {val!r} because ENS is" - " set to None" + f"Could not look up name {val!r} because ENS is" " set to None" ) elif int(w3.net.version) != 1 and not isinstance(w3.ens, StaticENS): raise InvalidAddress( diff --git a/web3/_utils/personal.py b/web3/_utils/personal.py index b874eb70b7..d9e505519a 100644 --- a/web3/_utils/personal.py +++ b/web3/_utils/personal.py @@ -66,8 +66,9 @@ class UnlockAccountWrapper(Protocol): - def __call__(self, account: ChecksumAddress, passphrase: str, - duration: Optional[int] = None) -> bool: + def __call__( + self, account: ChecksumAddress, passphrase: str, duration: Optional[int] = None + ) -> bool: pass @@ -83,7 +84,9 @@ def __call__(self, account: ChecksumAddress, passphrase: str, ) 
-sign_typed_data: Method[Callable[[Dict[str, Any], ChecksumAddress, str], HexStr]] = Method( +sign_typed_data: Method[ + Callable[[Dict[str, Any], ChecksumAddress, str], HexStr] +] = Method( RPC.personal_signTypedData, mungers=[default_root_munger], ) diff --git a/web3/_utils/request.py b/web3/_utils/request.py index caab1c9f37..242e20cb4b 100644 --- a/web3/_utils/request.py +++ b/web3/_utils/request.py @@ -26,7 +26,6 @@ class SessionCache: - def __init__(self, size: int): self._size = size self._data: OrderedDict[str, Any] = OrderedDict() @@ -57,7 +56,7 @@ def __len__(self) -> int: def get_default_http_endpoint() -> URI: - return URI(os.environ.get('WEB3_HTTP_PROVIDER_URI', 'http://localhost:8545')) + return URI(os.environ.get("WEB3_HTTP_PROVIDER_URI", "http://localhost:8545")) def cache_session(endpoint_uri: URI, session: requests.Session) -> None: @@ -82,7 +81,7 @@ def get_session(endpoint_uri: URI) -> requests.Session: def get_response_from_get_request( endpoint_uri: URI, *args: Any, **kwargs: Any ) -> requests.Response: - kwargs.setdefault('timeout', 10) + kwargs.setdefault("timeout", 10) session = get_session(endpoint_uri) response = session.get(endpoint_uri, *args, **kwargs) return response @@ -91,7 +90,7 @@ def get_response_from_get_request( def get_response_from_post_request( endpoint_uri: URI, *args: Any, **kwargs: Any ) -> requests.Response: - kwargs.setdefault('timeout', 10) + kwargs.setdefault("timeout", 10) session = get_session(endpoint_uri) response = session.post(endpoint_uri, *args, **kwargs) return response @@ -130,7 +129,7 @@ async def get_async_session(endpoint_uri: URI) -> ClientSession: async def async_get_response_from_get_request( endpoint_uri: URI, *args: Any, **kwargs: Any ) -> ClientResponse: - kwargs.setdefault('timeout', ClientTimeout(10)) + kwargs.setdefault("timeout", ClientTimeout(10)) session = await get_async_session(endpoint_uri) response = await session.get(endpoint_uri, *args, **kwargs) return response @@ -139,7 +138,7 @@ async def async_get_response_from_get_request( async def async_get_response_from_post_request( endpoint_uri: URI, *args: Any, **kwargs: Any ) -> ClientResponse: - kwargs.setdefault('timeout', ClientTimeout(10)) + kwargs.setdefault("timeout", ClientTimeout(10)) session = await get_async_session(endpoint_uri) response = await session.post(endpoint_uri, *args, **kwargs) return response @@ -148,9 +147,13 @@ async def async_get_response_from_post_request( async def async_make_post_request( endpoint_uri: URI, data: Union[bytes, Dict[str, Any]], *args: Any, **kwargs: Any ) -> bytes: - response = await async_get_response_from_post_request(endpoint_uri, data=data, *args, **kwargs) + response = await async_get_response_from_post_request( + endpoint_uri, data=data, *args, **kwargs + ) return await response.read() -async def async_get_json_from_client_response(response: ClientResponse) -> Dict[str, Any]: +async def async_get_json_from_client_response( + response: ClientResponse, +) -> Dict[str, Any]: return await response.json() diff --git a/web3/_utils/rpc_abi.py b/web3/_utils/rpc_abi.py index 44dcac809e..08baa90bad 100644 --- a/web3/_utils/rpc_abi.py +++ b/web3/_utils/rpc_abi.py @@ -55,8 +55,12 @@ class RPC: eth_getBalance = RPCEndpoint("eth_getBalance") eth_getBlockByHash = RPCEndpoint("eth_getBlockByHash") eth_getBlockByNumber = RPCEndpoint("eth_getBlockByNumber") - eth_getBlockTransactionCountByHash = RPCEndpoint("eth_getBlockTransactionCountByHash") - eth_getBlockTransactionCountByNumber = RPCEndpoint("eth_getBlockTransactionCountByNumber") 
+ eth_getBlockTransactionCountByHash = RPCEndpoint( + "eth_getBlockTransactionCountByHash" + ) + eth_getBlockTransactionCountByNumber = RPCEndpoint( + "eth_getBlockTransactionCountByNumber" + ) eth_getCode = RPCEndpoint("eth_getCode") eth_getFilterChanges = RPCEndpoint("eth_getFilterChanges") eth_getFilterLogs = RPCEndpoint("eth_getFilterLogs") @@ -64,8 +68,12 @@ class RPC: eth_getProof = RPCEndpoint("eth_getProof") eth_getRawTransactionByHash = RPCEndpoint("eth_getRawTransactionByHash") eth_getStorageAt = RPCEndpoint("eth_getStorageAt") - eth_getTransactionByBlockHashAndIndex = RPCEndpoint("eth_getTransactionByBlockHashAndIndex") - eth_getTransactionByBlockNumberAndIndex = RPCEndpoint("eth_getTransactionByBlockNumberAndIndex") + eth_getTransactionByBlockHashAndIndex = RPCEndpoint( + "eth_getTransactionByBlockHashAndIndex" + ) + eth_getTransactionByBlockNumberAndIndex = RPCEndpoint( + "eth_getTransactionByBlockNumberAndIndex" + ) eth_getRawTransactionByBlockHashAndIndex = RPCEndpoint( "eth_getRawTransactionByBlockHashAndIndex" ) @@ -159,61 +167,61 @@ class RPC: TRANSACTION_PARAMS_ABIS = { - 'data': 'bytes', - 'from': 'address', - 'gas': 'uint', - 'gasPrice': 'uint', - 'nonce': 'uint', - 'to': 'address', - 'value': 'uint', - 'chainId': 'uint', + "data": "bytes", + "from": "address", + "gas": "uint", + "gasPrice": "uint", + "nonce": "uint", + "to": "address", + "value": "uint", + "chainId": "uint", } FILTER_PARAMS_ABIS = { - 'to': 'address', - 'address': 'address[]', + "to": "address", + "address": "address[]", } TRACE_PARAMS_ABIS = { - 'to': 'address', - 'from': 'address', + "to": "address", + "from": "address", } RPC_ABIS = { # eth - 'eth_call': TRANSACTION_PARAMS_ABIS, - 'eth_estimateGas': TRANSACTION_PARAMS_ABIS, - 'eth_getBalance': ['address', None], - 'eth_getBlockByHash': ['bytes32', 'bool'], - 'eth_getBlockTransactionCountByHash': ['bytes32'], - 'eth_getCode': ['address', None], - 'eth_getLogs': FILTER_PARAMS_ABIS, - 'eth_getRawTransactionByHash': ['bytes32'], - 'eth_getStorageAt': ['address', 'uint', None], - 'eth_getProof': ['address', 'uint[]', None], - 'eth_getTransactionByBlockHashAndIndex': ['bytes32', 'uint'], - 'eth_getTransactionByHash': ['bytes32'], - 'eth_getTransactionCount': ['address', None], - 'eth_getTransactionReceipt': ['bytes32'], - 'eth_getRawTransactionByBlockHashAndIndex': ['bytes32', 'uint'], - 'eth_getUncleCountByBlockHash': ['bytes32'], - 'eth_newFilter': FILTER_PARAMS_ABIS, - 'eth_sendRawTransaction': ['bytes'], - 'eth_sendTransaction': TRANSACTION_PARAMS_ABIS, - 'eth_signTransaction': TRANSACTION_PARAMS_ABIS, - 'eth_sign': ['address', 'bytes'], - 'eth_signTypedData': ['address', None], - 'eth_submitHashrate': ['uint', 'bytes32'], - 'eth_submitWork': ['bytes8', 'bytes32', 'bytes32'], + "eth_call": TRANSACTION_PARAMS_ABIS, + "eth_estimateGas": TRANSACTION_PARAMS_ABIS, + "eth_getBalance": ["address", None], + "eth_getBlockByHash": ["bytes32", "bool"], + "eth_getBlockTransactionCountByHash": ["bytes32"], + "eth_getCode": ["address", None], + "eth_getLogs": FILTER_PARAMS_ABIS, + "eth_getRawTransactionByHash": ["bytes32"], + "eth_getStorageAt": ["address", "uint", None], + "eth_getProof": ["address", "uint[]", None], + "eth_getTransactionByBlockHashAndIndex": ["bytes32", "uint"], + "eth_getTransactionByHash": ["bytes32"], + "eth_getTransactionCount": ["address", None], + "eth_getTransactionReceipt": ["bytes32"], + "eth_getRawTransactionByBlockHashAndIndex": ["bytes32", "uint"], + "eth_getUncleCountByBlockHash": ["bytes32"], + "eth_newFilter": FILTER_PARAMS_ABIS, 
+ "eth_sendRawTransaction": ["bytes"], + "eth_sendTransaction": TRANSACTION_PARAMS_ABIS, + "eth_signTransaction": TRANSACTION_PARAMS_ABIS, + "eth_sign": ["address", "bytes"], + "eth_signTypedData": ["address", None], + "eth_submitHashrate": ["uint", "bytes32"], + "eth_submitWork": ["bytes8", "bytes32", "bytes32"], # personal - 'personal_sendTransaction': TRANSACTION_PARAMS_ABIS, - 'personal_lockAccount': ['address'], - 'personal_unlockAccount': ['address', None, None], - 'personal_sign': [None, 'address', None], - 'personal_signTypedData': [None, 'address', None], - 'trace_call': TRACE_PARAMS_ABIS, + "personal_sendTransaction": TRANSACTION_PARAMS_ABIS, + "personal_lockAccount": ["address"], + "personal_unlockAccount": ["address", None, None], + "personal_sign": [None, "address", None], + "personal_signTypedData": [None, "address", None], + "trace_call": TRACE_PARAMS_ABIS, # parity - 'parity_listStorageKeys': ['address', None, None, None], + "parity_listStorageKeys": ["address", None, None, None], } @@ -221,7 +229,7 @@ class RPC: def apply_abi_formatters_to_dict( normalizers: Sequence[Callable[[TypeStr, Any], Tuple[TypeStr, Any]]], abi_dict: Dict[str, Any], - data: Dict[Any, Any] + data: Dict[Any, Any], ) -> Dict[Any, Any]: fields = list(abi_dict.keys() & data.keys()) formatted_values = map_abi_data( @@ -245,4 +253,6 @@ def abi_request_formatters( single_dict_formatter = apply_abi_formatters_to_dict(normalizers, abi_types) yield method, apply_formatter_at_index(single_dict_formatter, 0) else: - raise TypeError(f"ABI definitions must be a list or dictionary, got {abi_types!r}") + raise TypeError( + f"ABI definitions must be a list or dictionary, got {abi_types!r}" + ) diff --git a/web3/_utils/threads.py b/web3/_utils/threads.py index 435807dbbe..c69485604d 100644 --- a/web3/_utils/threads.py +++ b/web3/_utils/threads.py @@ -25,36 +25,45 @@ class Timeout(Exception): """ A limited subset of the `gevent.Timeout` context manager. 
""" + seconds = None exception = None begun_at = None is_running = None def __init__( - self, seconds: float = None, exception: Type[BaseException] = None, *args: Any, - **kwargs: Any + self, + seconds: float = None, + exception: Type[BaseException] = None, + *args: Any, + **kwargs: Any, ) -> None: self.seconds = seconds self.exception = exception - def __enter__(self) -> 'Timeout': + def __enter__(self) -> "Timeout": self.start() return self def __exit__( - self, exc_type: Type[BaseException], exc_val: BaseException, exc_tb: TracebackType + self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, ) -> Literal[False]: return False def __str__(self) -> str: if self.seconds is None: - return '' + return "" return f"{self.seconds} seconds" @property def expire_at(self) -> int: if self.seconds is None: - raise ValueError("Timeouts with `seconds == None` do not have an expiration time") + raise ValueError( + "Timeouts with `seconds == None` do not have an expiration time" + ) elif self.begun_at is None: raise ValueError("Timeout has not been started") return self.begun_at + self.seconds @@ -91,7 +100,10 @@ def sleep(self, seconds: float) -> None: class ThreadWithReturn(threading.Thread, Generic[TReturn]): def __init__( - self, target: Callable[..., TReturn] = None, args: Any = None, kwargs: Any = None + self, + target: Callable[..., TReturn] = None, + args: Any = None, + kwargs: Any = None, ) -> None: super().__init__( target=target, diff --git a/web3/_utils/transactions.py b/web3/_utils/transactions.py index 40acdf0780..52206d2a36 100644 --- a/web3/_utils/transactions.py +++ b/web3/_utils/transactions.py @@ -35,35 +35,47 @@ _Hash32, ) -TX_PARAM_LITERALS = Literal['type', 'from', 'to', 'gas', 'maxFeePerGas', 'maxPriorityFeePerGas', - 'gasPrice', 'value', 'data', 'nonce', 'chainId', 'accessList'] +TX_PARAM_LITERALS = Literal[ + "type", + "from", + "to", + "gas", + "maxFeePerGas", + "maxPriorityFeePerGas", + "gasPrice", + "value", + "data", + "nonce", + "chainId", + "accessList", +] VALID_TRANSACTION_PARAMS: List[TX_PARAM_LITERALS] = [ - 'type', - 'from', - 'to', - 'gas', - 'accessList', - 'maxFeePerGas', - 'maxPriorityFeePerGas', - 'gasPrice', - 'value', - 'data', - 'nonce', - 'chainId', + "type", + "from", + "to", + "gas", + "accessList", + "maxFeePerGas", + "maxPriorityFeePerGas", + "gasPrice", + "value", + "data", + "nonce", + "chainId", ] TRANSACTION_DEFAULTS = { - 'value': 0, - 'data': b'', - 'gas': lambda w3, tx: w3.eth.estimate_gas(tx), - 'gasPrice': lambda w3, tx: w3.eth.generate_gas_price(tx) or w3.eth.gas_price, - 'maxFeePerGas': ( - lambda w3, tx: - w3.eth.max_priority_fee + (2 * w3.eth.get_block('latest')['baseFeePerGas']) + "value": 0, + "data": b"", + "gas": lambda w3, tx: w3.eth.estimate_gas(tx), + "gasPrice": lambda w3, tx: w3.eth.generate_gas_price(tx) or w3.eth.gas_price, + "maxFeePerGas": ( + lambda w3, tx: w3.eth.max_priority_fee + + (2 * w3.eth.get_block("latest")["baseFeePerGas"]) ), - 'maxPriorityFeePerGas': lambda w3, tx: w3.eth.max_priority_fee, - 'chainId': lambda w3, tx: w3.eth.chain_id, + "maxPriorityFeePerGas": lambda w3, tx: w3.eth.max_priority_fee, + "chainId": lambda w3, tx: w3.eth.chain_id, } if TYPE_CHECKING: @@ -72,13 +84,14 @@ @curry def fill_nonce(w3: "Web3", transaction: TxParams) -> TxParams: - if 'from' in transaction and 'nonce' not in transaction: + if "from" in transaction and "nonce" not in transaction: return assoc( transaction, - 'nonce', + "nonce", w3.eth.get_transaction_count( - cast(ChecksumAddress, 
transaction['from']), - block_identifier='pending')) + cast(ChecksumAddress, transaction["from"]), block_identifier="pending" + ), + ) else: return transaction @@ -89,27 +102,28 @@ def fill_transaction_defaults(w3: "Web3", transaction: TxParams) -> TxParams: if w3 is None, fill as much as possible while offline """ strategy_based_gas_price = w3.eth.generate_gas_price(transaction) - is_dynamic_fee_transaction = ( - not strategy_based_gas_price - and ( - 'gasPrice' not in transaction # default to dynamic fee transaction - or any_in_dict(DYNAMIC_FEE_TXN_PARAMS, transaction) - ) + is_dynamic_fee_transaction = not strategy_based_gas_price and ( + "gasPrice" not in transaction # default to dynamic fee transaction + or any_in_dict(DYNAMIC_FEE_TXN_PARAMS, transaction) ) defaults = {} for key, default_getter in TRANSACTION_DEFAULTS.items(): if key not in transaction: if ( - is_dynamic_fee_transaction and key == 'gasPrice' - or not is_dynamic_fee_transaction and key in DYNAMIC_FEE_TXN_PARAMS + is_dynamic_fee_transaction + and key == "gasPrice" + or not is_dynamic_fee_transaction + and key in DYNAMIC_FEE_TXN_PARAMS ): # do not set default max fees if legacy txn or gas price if dynamic fee txn continue if callable(default_getter): if w3 is None: - raise ValueError(f"You must specify a '{key}' value in the transaction") + raise ValueError( + f"You must specify a '{key}' value in the transaction" + ) default_val = default_getter(w3, transaction) else: default_val = default_getter @@ -118,11 +132,13 @@ def fill_transaction_defaults(w3: "Web3", transaction: TxParams) -> TxParams: return merge(defaults, transaction) -def get_block_gas_limit(w3: "Web3", block_identifier: Optional[BlockIdentifier] = None) -> int: +def get_block_gas_limit( + w3: "Web3", block_identifier: Optional[BlockIdentifier] = None +) -> int: if block_identifier is None: block_identifier = w3.eth.block_number block = w3.eth.get_block(block_identifier) - return block['gasLimit'] + return block["gasLimit"] def get_buffered_gas_estimate( @@ -147,87 +163,107 @@ def get_buffered_gas_estimate( def get_required_transaction(w3: "Web3", transaction_hash: _Hash32) -> TxData: current_transaction = w3.eth.get_transaction(transaction_hash) if not current_transaction: - raise ValueError(f'Supplied transaction with hash {transaction_hash!r} does not exist') + raise ValueError( + f"Supplied transaction with hash {transaction_hash!r} does not exist" + ) return current_transaction def extract_valid_transaction_params(transaction_params: TxData) -> TxParams: - extracted_params = cast(TxParams, { - key: transaction_params[key] - for key in VALID_TRANSACTION_PARAMS - if key in transaction_params - }) + extracted_params = cast( + TxParams, + { + key: transaction_params[key] + for key in VALID_TRANSACTION_PARAMS + if key in transaction_params + }, + ) # There is always a gasPrice now on eth_getTransaction call for pending transactions, including # dynamic fee transactions. For dynamic fee transactions, we need to pull the gasPrice value # back out of the extracted params if it is equal to the expected value (maxFeePerGas). If # we don't, the modified transaction will include a gasPrice as well as dynamic fee values in # the eth_sendTransaction call and cause a conflict. 
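The comment above spells out why a pending dynamic-fee transaction can come back from the node with a ``gasPrice`` that simply mirrors ``maxFeePerGas``, and why that field has to be dropped before the transaction is re-sent. A toy restatement of that rule with made-up values (plain dicts rather than web3.py's ``TxParams``; the patched function itself performs this check with ``all_in_dict`` just below):

    # Illustrative only -- the dict and the numbers are invented for this sketch.
    pending = {"maxFeePerGas": 100, "maxPriorityFeePerGas": 2, "gasPrice": 100, "nonce": 7}

    if {"maxFeePerGas", "maxPriorityFeePerGas"} <= pending.keys():
        # dynamic-fee transaction: a gasPrice equal to maxFeePerGas is redundant
        if pending.get("gasPrice") == pending["maxFeePerGas"]:
            pending.pop("gasPrice")

    print(pending)  # {'maxFeePerGas': 100, 'maxPriorityFeePerGas': 2, 'nonce': 7}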
if all_in_dict(DYNAMIC_FEE_TXN_PARAMS, extracted_params): - if extracted_params['gasPrice'] == extracted_params['maxFeePerGas']: - extracted_params.pop('gasPrice') + if extracted_params["gasPrice"] == extracted_params["maxFeePerGas"]: + extracted_params.pop("gasPrice") - if extracted_params.get('data') is not None: - if transaction_params.get('input') is not None: - if extracted_params['data'] != transaction_params['input']: + if extracted_params.get("data") is not None: + if transaction_params.get("input") is not None: + if extracted_params["data"] != transaction_params["input"]: msg = 'failure to handle this transaction due to both "input: {}" and' msg += ' "data: {}" are populated. You need to resolve this conflict.' - err_vals = (transaction_params['input'], extracted_params['data']) + err_vals = (transaction_params["input"], extracted_params["data"]) raise AttributeError(msg.format(*err_vals)) else: return extracted_params else: return extracted_params - elif extracted_params.get('data') is None: - if transaction_params.get('input') is not None: - return assoc(extracted_params, 'data', transaction_params['input']) + elif extracted_params.get("data") is None: + if transaction_params.get("input") is not None: + return assoc(extracted_params, "data", transaction_params["input"]) else: return extracted_params else: - raise Exception("Unreachable path: transaction's 'data' is either set or not set") + raise Exception( + "Unreachable path: transaction's 'data' is either set or not set" + ) def assert_valid_transaction_params(transaction_params: TxParams) -> None: for param in transaction_params: if param not in VALID_TRANSACTION_PARAMS: - raise ValueError(f'{param} is not a valid transaction parameter') + raise ValueError(f"{param} is not a valid transaction parameter") def prepare_replacement_transaction( w3: "Web3", original_transaction: TxData, replacement_transaction: TxParams, - gas_multiplier: float = 1.125 + gas_multiplier: float = 1.125, ) -> TxParams: - if original_transaction['blockHash'] is not None: - raise ValueError(f'Supplied transaction with hash {original_transaction["hash"]!r} ' - 'has already been mined') - if 'nonce' in replacement_transaction and ( - replacement_transaction['nonce'] != original_transaction['nonce'] + if original_transaction["blockHash"] is not None: + raise ValueError( + f'Supplied transaction with hash {original_transaction["hash"]!r} ' + "has already been mined" + ) + if "nonce" in replacement_transaction and ( + replacement_transaction["nonce"] != original_transaction["nonce"] ): - raise ValueError('Supplied nonce in new_transaction must match the pending transaction') + raise ValueError( + "Supplied nonce in new_transaction must match the pending transaction" + ) - if 'nonce' not in replacement_transaction: + if "nonce" not in replacement_transaction: replacement_transaction = assoc( - replacement_transaction, 'nonce', original_transaction['nonce'] + replacement_transaction, "nonce", original_transaction["nonce"] ) if any_in_dict(DYNAMIC_FEE_TXN_PARAMS, replacement_transaction): # for now, the client decides if a dynamic fee txn can replace the existing txn or not pass - elif 'gasPrice' in replacement_transaction and original_transaction['gasPrice'] is not None: - if replacement_transaction['gasPrice'] <= original_transaction['gasPrice']: - raise ValueError('Supplied gas price must exceed existing transaction gas price') + elif ( + "gasPrice" in replacement_transaction + and original_transaction["gasPrice"] is not None + ): + if 
replacement_transaction["gasPrice"] <= original_transaction["gasPrice"]: + raise ValueError( + "Supplied gas price must exceed existing transaction gas price" + ) else: generated_gas_price = w3.eth.generate_gas_price(replacement_transaction) - minimum_gas_price = int(math.ceil(original_transaction['gasPrice'] * gas_multiplier)) + minimum_gas_price = int( + math.ceil(original_transaction["gasPrice"] * gas_multiplier) + ) if generated_gas_price and generated_gas_price > minimum_gas_price: replacement_transaction = assoc( - replacement_transaction, 'gasPrice', generated_gas_price + replacement_transaction, "gasPrice", generated_gas_price ) else: - replacement_transaction = assoc(replacement_transaction, 'gasPrice', minimum_gas_price) + replacement_transaction = assoc( + replacement_transaction, "gasPrice", minimum_gas_price + ) return replacement_transaction diff --git a/web3/_utils/utility_methods.py b/web3/_utils/utility_methods.py index 2e2718a835..468eea39b1 100644 --- a/web3/_utils/utility_methods.py +++ b/web3/_utils/utility_methods.py @@ -12,8 +12,7 @@ def all_in_dict( - values: Iterable[Any], - d: Union[Dict[Any, Any], TxData, TxParams] + values: Iterable[Any], d: Union[Dict[Any, Any], TxData, TxParams] ) -> bool: """ Returns a bool based on whether ALL of the provided values exist among the keys of the provided @@ -27,8 +26,7 @@ def all_in_dict( def any_in_dict( - values: Iterable[Any], - d: Union[Dict[Any, Any], TxData, TxParams] + values: Iterable[Any], d: Union[Dict[Any, Any], TxData, TxParams] ) -> bool: """ Returns a bool based on whether ANY of the provided values exist among the keys of the provided @@ -42,8 +40,7 @@ def any_in_dict( def none_in_dict( - values: Iterable[Any], - d: Union[Dict[Any, Any], TxData, TxParams] + values: Iterable[Any], d: Union[Dict[Any, Any], TxData, TxParams] ) -> bool: """ Returns a bool based on whether NONE of the provided values exist among the keys of the diff --git a/web3/_utils/validation.py b/web3/_utils/validation.py index d6080e340c..13f94e9985 100644 --- a/web3/_utils/validation.py +++ b/web3/_utils/validation.py @@ -63,9 +63,11 @@ def _prepare_selector_collision_msg(duplicates: Dict[HexStr, ABIFunction]) -> str: dup_sel = valmap(apply_formatter_to_array(abi_to_signature), duplicates) - joined_funcs = valmap(lambda funcs: ', '.join(funcs), dup_sel) - func_sel_msg_list = [funcs + ' have selector ' + sel for sel, funcs in joined_funcs.items()] - return ' and\n'.join(func_sel_msg_list) + joined_funcs = valmap(lambda funcs: ", ".join(funcs), dup_sel) + func_sel_msg_list = [ + funcs + " have selector " + sel for sel, funcs in joined_funcs.items() + ] + return " and\n".join(func_sel_msg_list) def validate_abi(abi: ABI) -> None: @@ -78,16 +80,13 @@ def validate_abi(abi: ABI) -> None: if not all(is_dict(e) for e in abi): raise ValueError("'abi' is not a list of dictionaries") - functions = filter_by_type('function', abi) - selectors = groupby( - compose(encode_hex, function_abi_to_4byte_selector), - functions - ) + functions = filter_by_type("function", abi) + selectors = groupby(compose(encode_hex, function_abi_to_4byte_selector), functions) duplicates = valfilter(lambda funcs: len(funcs) > 1, selectors) if duplicates: raise ValueError( - 'Abi contains functions with colliding selectors. ' - f'Functions {_prepare_selector_collision_msg(duplicates)}' + "Abi contains functions with colliding selectors. 
" + f"Functions {_prepare_selector_collision_msg(duplicates)}" ) @@ -147,14 +146,16 @@ def validate_abi_value(abi_type: TypeStr, value: Any) -> None: elif is_string_type(abi_type) and is_string(value): return - raise TypeError( - f"The following abi value is not a '{abi_type}': {value}" - ) + raise TypeError(f"The following abi value is not a '{abi_type}': {value}") def is_not_address_string(value: Any) -> bool: - return (is_string(value) and not is_bytes(value) and not - is_checksum_address(value) and not is_hex_address(value)) + return ( + is_string(value) + and not is_bytes(value) + and not is_checksum_address(value) + and not is_hex_address(value) + ) def validate_address(value: Any) -> None: @@ -167,13 +168,17 @@ def validate_address(value: Any) -> None: return if is_bytes(value): if not is_binary_address(value): - raise InvalidAddress("Address must be 20 bytes when input type is bytes", value) + raise InvalidAddress( + "Address must be 20 bytes when input type is bytes", value + ) return if not isinstance(value, str): - raise TypeError(f'Address {value} must be provided as a string') + raise TypeError(f"Address {value} must be provided as a string") if not is_hex_address(value): - raise InvalidAddress("Address must be 20 bytes, as a hex string with a 0x prefix", value) + raise InvalidAddress( + "Address must be 20 bytes, as a hex string with a 0x prefix", value + ) if not is_checksum_address(value): if value == value.lower(): raise InvalidAddress( diff --git a/web3/_utils/windows.py b/web3/_utils/windows.py index d46a71bdb2..f7ad9b0ac4 100644 --- a/web3/_utils/windows.py +++ b/web3/_utils/windows.py @@ -6,7 +6,7 @@ import pywintypes # noqa: E402 import win32file # noqa: E402 -if sys.platform != 'win32': +if sys.platform != "win32": raise ImportError("This module should not be imported on non `win32` platforms") @@ -14,8 +14,14 @@ class NamedPipe: def __init__(self, ipc_path: str) -> None: try: self.handle = win32file.CreateFile( - ipc_path, win32file.GENERIC_READ | win32file.GENERIC_WRITE, - 0, None, win32file.OPEN_EXISTING, 0, None) + ipc_path, + win32file.GENERIC_READ | win32file.GENERIC_WRITE, + 0, + None, + win32file.OPEN_EXISTING, + 0, + None, + ) except pywintypes.error as err: raise OSError(err)