This commit is contained in:
lz_db
2025-11-16 12:31:03 +08:00
commit 0fab423a18
1451 changed files with 743213 additions and 0 deletions

View File

@@ -0,0 +1,115 @@
from importlib.metadata import (
version as __version,
)
# from .abi import (
# event_abi_to_log_topic,
# event_signature_to_log_topic,
# function_abi_to_4byte_selector,
# function_signature_to_4byte_selector,
# )
from .address import (
is_address,
is_binary_address,
is_canonical_address,
is_checksum_address,
is_checksum_formatted_address,
is_hex_address,
is_normalized_address,
is_same_address,
to_canonical_address,
to_checksum_address,
to_normalized_address,
)
from .applicators import (
apply_formatter_at_index,
apply_formatter_if,
apply_formatter_to_array,
apply_formatters_to_dict,
apply_formatters_to_sequence,
apply_key_map,
apply_one_of_formatters,
combine_argument_formatters,
)
from .conversions import (
hexstr_if_str,
text_if_str,
to_bytes,
to_hex,
to_int,
to_text,
)
from .currency import (
denoms,
from_wei,
to_wei,
)
from .decorators import (
combomethod,
replace_exceptions,
)
from .encoding import (
big_endian_to_int,
int_to_big_endian,
)
from .exceptions import (
ValidationError,
)
from .functional import (
apply_to_return_value,
flatten_return,
reversed_return,
sort_return,
to_dict,
to_list,
to_ordered_dict,
to_set,
to_tuple,
)
from .hexadecimal import (
add_0x_prefix,
decode_hex,
encode_hex,
is_0x_prefixed,
is_hex,
is_hexstr,
remove_0x_prefix,
)
from .humanize import (
humanize_bytes,
humanize_hash,
humanize_integer_sequence,
humanize_ipfs_uri,
humanize_seconds,
humanize_wei,
)
from .logging import (
DEBUG2_LEVEL_NUM,
ExtendedDebugLogger,
HasExtendedDebugLogger,
HasExtendedDebugLoggerMeta,
HasLogger,
HasLoggerMeta,
get_extended_debug_logger,
get_logger,
setup_DEBUG2_logging,
)
from .module_loading import (
import_string,
)
from .numeric import (
clamp,
)
from .types import (
is_boolean,
is_bytes,
is_dict,
is_integer,
is_list,
is_list_like,
is_null,
is_number,
is_string,
is_text,
is_tuple,
)

View File

@@ -0,0 +1,72 @@
from typing import (
Any,
Dict,
)
from .conversions import (
to_bytes
)
from ...keccak import (
SHA3 as keccak,
)
def collapse_if_tuple(abi: Dict[str, Any]) -> str:
"""
Converts a tuple from a dict to a parenthesized list of its types.
>>> from eth_utils.abi import collapse_if_tuple
>>> collapse_if_tuple(
... {
... 'components': [
... {'name': 'anAddress', 'type': 'address'},
... {'name': 'anInt', 'type': 'uint256'},
... {'name': 'someBytes', 'type': 'bytes'},
... ],
... 'type': 'tuple',
... }
... )
'(address,uint256,bytes)'
"""
typ = abi["type"]
if not isinstance(typ, str):
raise TypeError(
f"The 'type' must be a string, but got {repr(typ)} of type {type(typ)}"
)
elif not typ.startswith("tuple"):
return typ
delimited = ",".join(collapse_if_tuple(c) for c in abi["components"])
# Whatever comes after "tuple" is the array dims. The ABI spec states that
# this will have the form "", "[]", or "[k]".
array_dim = typ[5:]
collapsed = f"({delimited}){array_dim}"
return collapsed
def _abi_to_signature(abi: Dict[str, Any]) -> str:
fn_input_types = ",".join(
[collapse_if_tuple(abi_input) for abi_input in abi.get("inputs", [])]
)
function_signature = f"{abi['name']}({fn_input_types})"
return function_signature
def function_signature_to_4byte_selector(function_signature: str) -> bytes:
    return keccak(to_bytes(text=function_signature.replace(" ", "")))[:4]
def function_abi_to_4byte_selector(function_abi: Dict[str, Any]) -> bytes:
function_signature = _abi_to_signature(function_abi)
return function_signature_to_4byte_selector(function_signature)
def event_signature_to_log_topic(event_signature: str) -> bytes:
return keccak(to_bytes(text=event_signature.replace(" ", "")))
def event_abi_to_log_topic(event_abi: Dict[str, Any]) -> bytes:
event_signature = _abi_to_signature(event_abi)
return event_signature_to_log_topic(event_signature)
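A brief usage sketch for the selector helpers above; the expected values assume the vendored SHA3 alias behaves like standard Keccak-256, and the import path depends on where this copy is vendored:

# Illustrative only; adjust imports to the vendored package location.
selector = function_signature_to_4byte_selector("transfer(address,uint256)")
# Under standard Keccak-256 this is the familiar ERC-20 transfer selector.
assert selector.hex() == "a9059cbb"
topic = event_signature_to_log_topic("Transfer(address,address,uint256)")
assert len(topic) == 32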

View File

@@ -0,0 +1,171 @@
import re
from typing import (
Any,
Union,
cast,
)
from ..typing import (
Address,
AnyAddress,
ChecksumAddress,
HexAddress,
HexStr,
)
from .conversions import (
hexstr_if_str,
to_hex,
to_bytes,
)
from ...keccak import (
SHA3 as keccak,
)
from .hexadecimal import (
add_0x_prefix,
decode_hex,
encode_hex,
remove_0x_prefix,
)
from .types import (
is_bytes,
is_text,
)
_HEX_ADDRESS_REGEXP = re.compile("(0x)?[0-9a-f]{40}", re.IGNORECASE | re.ASCII)
def is_hex_address(value: Any) -> bool:
"""
Checks if the given string of text type is an address in hexadecimal encoded form.
"""
if not is_text(value):
return False
return _HEX_ADDRESS_REGEXP.fullmatch(value) is not None
def is_binary_address(value: Any) -> bool:
"""
Checks if the given string is an address in raw bytes form.
"""
if not is_bytes(value):
return False
elif len(value) != 20:
return False
else:
return True
def is_address(value: Any) -> bool:
"""
Is the given string an address in any of the known formats?
"""
if is_hex_address(value):
if _is_checksum_formatted(value):
return is_checksum_address(value)
return True
if is_binary_address(value):
return True
return False
def to_normalized_address(value: Union[AnyAddress, str, bytes]) -> HexAddress:
"""
Converts an address to its normalized hexadecimal representation.
"""
try:
hex_address = hexstr_if_str(to_hex, value).lower()
except AttributeError:
raise TypeError(f"Value must be any string, instead got type {type(value)}")
if is_address(hex_address):
return HexAddress(HexStr(hex_address))
else:
raise ValueError(
f"Unknown format {repr(value)}, attempted to normalize to "
f"{repr(hex_address)}"
)
def is_normalized_address(value: Any) -> bool:
"""
Returns whether the provided value is an address in its normalized form.
"""
if not is_address(value):
return False
else:
is_equal = value == to_normalized_address(value)
return cast(bool, is_equal)
def to_canonical_address(address: Union[AnyAddress, str, bytes]) -> Address:
"""
Convert a valid address to its canonical form (20-length bytes).
"""
return Address(decode_hex(to_normalized_address(address)))
def is_canonical_address(address: Any) -> bool:
"""
    Returns `True` if the `address` is an address in its canonical form.
"""
if not is_bytes(address) or len(address) != 20:
return False
is_equal = address == to_canonical_address(address)
return cast(bool, is_equal)
def is_same_address(left: AnyAddress, right: AnyAddress) -> bool:
"""
    Checks whether both addresses are the same.
"""
if not is_address(left) or not is_address(right):
raise ValueError("Both values must be valid addresses")
else:
return bool(to_normalized_address(left) == to_normalized_address(right))
def to_checksum_address(value: Union[AnyAddress, str, bytes]) -> ChecksumAddress:
"""
Makes a checksum address given a supported format.
"""
norm_address = to_normalized_address(value)
address_hash = encode_hex(keccak(to_bytes(text=remove_0x_prefix(HexStr(norm_address)))))
checksum_address = add_0x_prefix(
HexStr(
"".join(
(
norm_address[i].upper()
if int(address_hash[i], 16) > 7
else norm_address[i]
)
for i in range(2, 42)
)
)
)
return ChecksumAddress(HexAddress(checksum_address))
def is_checksum_address(value: Any) -> bool:
if not is_text(value):
return False
if not is_hex_address(value):
return False
is_equal = value == to_checksum_address(value)
return cast(bool, is_equal)
def _is_checksum_formatted(value: Any) -> bool:
unprefixed_value = remove_0x_prefix(value)
return (
not unprefixed_value.islower()
and not unprefixed_value.isupper()
and not unprefixed_value.isnumeric()
)
def is_checksum_formatted_address(value: Any) -> bool:
return is_hex_address(value) and _is_checksum_formatted(value)
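A minimal sketch of the address helpers; the checksum output is the EIP-55 test vector and only holds if the vendored keccak matches standard Keccak-256:

# Hedged example: the checksummed value assumes standard Keccak-256.
addr = "0x5aaeb6053f3e94c9b9a09f33669435e7ef1beaed"
assert is_address(addr) and is_hex_address(addr)
assert to_checksum_address(addr) == "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed"
assert to_canonical_address(addr) == bytes.fromhex(addr[2:])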

View File

@@ -0,0 +1,151 @@
from typing import (
Any,
Callable,
Dict,
Generator,
List,
Tuple,
)
import warnings
from .decorators import (
return_arg_type,
)
from .functional import (
to_dict,
)
from .toolz import (
compose,
curry,
)
Formatters = Callable[[List[Any]], List[Any]]
@return_arg_type(2)
def apply_formatter_at_index(
formatter: Callable[..., Any], at_index: int, value: List[Any]
) -> Generator[List[Any], None, None]:
if at_index + 1 > len(value):
raise IndexError(
f"Not enough values in iterable to apply formatter. Got: {len(value)}. "
f"Need: {at_index + 1}"
)
for index, item in enumerate(value):
if index == at_index:
yield formatter(item)
else:
yield item
def combine_argument_formatters(*formatters: List[Callable[..., Any]]) -> Formatters:
warnings.warn(
DeprecationWarning(
"combine_argument_formatters(formatter1, formatter2)([item1, item2])"
"has been deprecated and will be removed in a subsequent major version "
"release of the eth-utils library. Update your calls to use "
"apply_formatters_to_sequence([formatter1, formatter2], [item1, item2]) "
"instead."
),
stacklevel=2,
)
_formatter_at_index = curry(apply_formatter_at_index)
return compose( # type: ignore
*(
_formatter_at_index(formatter, index)
for index, formatter in enumerate(formatters)
)
)
@return_arg_type(1)
def apply_formatters_to_sequence(
formatters: List[Any], sequence: List[Any]
) -> Generator[List[Any], None, None]:
if len(formatters) > len(sequence):
raise IndexError(
f"Too many formatters for sequence: {len(formatters)} formatters for "
f"{repr(sequence)}"
)
elif len(formatters) < len(sequence):
raise IndexError(
f"Too few formatters for sequence: {len(formatters)} formatters for "
f"{repr(sequence)}"
)
else:
for formatter, item in zip(formatters, sequence):
yield formatter(item)
def apply_formatter_if(
condition: Callable[..., bool], formatter: Callable[..., Any], value: Any
) -> Any:
if condition(value):
return formatter(value)
else:
return value
@to_dict
def apply_formatters_to_dict(
formatters: Dict[Any, Any], value: Dict[Any, Any]
) -> Generator[Tuple[Any, Any], None, None]:
for key, item in value.items():
if key in formatters:
try:
yield key, formatters[key](item)
except ValueError as exc:
new_error_message = (
f"Could not format invalid value {repr(item)} as field {repr(key)}"
)
raise ValueError(new_error_message) from exc
except TypeError as exc:
new_error_message = (
f"Could not format invalid type {repr(item)} as field {repr(key)}"
)
raise TypeError(new_error_message) from exc
else:
yield key, item
@return_arg_type(1)
def apply_formatter_to_array(
formatter: Callable[..., Any], value: List[Any]
) -> Generator[List[Any], None, None]:
for item in value:
yield formatter(item)
def apply_one_of_formatters(
    formatter_condition_pairs: Tuple[Tuple[Callable[..., Any], Callable[..., Any]], ...],
value: Any,
) -> Any:
for condition, formatter in formatter_condition_pairs:
if condition(value):
return formatter(value)
else:
raise ValueError(
"The provided value did not satisfy any of the formatter conditions"
)
@to_dict
def apply_key_map(
key_mappings: Dict[Any, Any], value: Dict[Any, Any]
) -> Generator[Tuple[Any, Any], None, None]:
key_conflicts = (
set(value.keys())
.difference(key_mappings.keys())
.intersection(v for k, v in key_mappings.items() if v in value)
)
if key_conflicts:
raise KeyError(
f"Could not apply key map due to conflicting key(s): {key_conflicts}"
)
for key, item in value.items():
if key in key_mappings:
yield key_mappings[key], item
else:
yield key, item
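A quick sketch of the non-curried applicators defined above (the field names are hypothetical, chosen only for illustration):

# Apply per-key formatters, remap a key, and format one index of a list.
raw = {"balance": "1000", "nonce": "7", "note": "unchanged"}
assert apply_formatters_to_dict({"balance": int, "nonce": int}, raw) == {
    "balance": 1000, "nonce": 7, "note": "unchanged"
}
assert apply_key_map({"note": "memo"}, raw)["memo"] == "unchanged"
assert apply_formatter_at_index(str, 1, [1, 2, 3]) == [1, "2", 3]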

View File

@@ -0,0 +1,190 @@
from typing import (
Callable,
Optional,
TypeVar,
Union,
cast,
)
from ..typing import (
HexStr,
Primitives,
)
from .decorators import (
validate_conversion_arguments,
)
from .encoding import (
big_endian_to_int,
int_to_big_endian,
)
from .hexadecimal import (
add_0x_prefix,
decode_hex,
encode_hex,
is_hexstr,
remove_0x_prefix,
)
from .types import (
is_boolean,
is_integer,
is_string,
)
T = TypeVar("T")
@validate_conversion_arguments
def to_hex(
primitive: Optional[Primitives] = None,
hexstr: Optional[HexStr] = None,
text: Optional[str] = None,
) -> HexStr:
"""
Auto converts any supported value into its hex representation.
Trims leading zeros, as defined in:
https://github.com/ethereum/wiki/wiki/JSON-RPC#hex-value-encoding
"""
if hexstr is not None:
return add_0x_prefix(HexStr(hexstr.lower()))
if text is not None:
return encode_hex(text.encode("utf-8"))
if is_boolean(primitive):
return HexStr("0x1") if primitive else HexStr("0x0")
if isinstance(primitive, (bytes, bytearray)):
return encode_hex(primitive)
elif is_string(primitive):
raise TypeError(
"Unsupported type: The primitive argument must be one of: bytes,"
"bytearray, int or bool and not str"
)
if is_integer(primitive):
return HexStr(hex(cast(int, primitive)))
raise TypeError(
f"Unsupported type: '{repr(type(primitive))}'. Must be one of: bool, str, "
"bytes, bytearray or int."
)
@validate_conversion_arguments
def to_int(
primitive: Optional[Primitives] = None,
hexstr: Optional[HexStr] = None,
text: Optional[str] = None,
) -> int:
"""
Converts value to its integer representation.
Values are converted this way:
* primitive:
* bytes, bytearrays: big-endian integer
* bool: True => 1, False => 0
* hexstr: interpret hex as integer
* text: interpret as string of digits, like '12' => 12
"""
if hexstr is not None:
return int(hexstr, 16)
elif text is not None:
return int(text)
elif isinstance(primitive, (bytes, bytearray)):
return big_endian_to_int(primitive)
elif isinstance(primitive, str):
raise TypeError("Pass in strings with keyword hexstr or text")
elif isinstance(primitive, (int, bool)):
return int(primitive)
else:
raise TypeError(
"Invalid type. Expected one of int/bool/str/bytes/bytearray. Got "
f"{type(primitive)}"
)
@validate_conversion_arguments
def to_bytes(
primitive: Optional[Primitives] = None,
hexstr: Optional[HexStr] = None,
text: Optional[str] = None,
) -> bytes:
if is_boolean(primitive):
return b"\x01" if primitive else b"\x00"
elif isinstance(primitive, bytearray):
return bytes(primitive)
elif isinstance(primitive, bytes):
return primitive
elif is_integer(primitive):
return to_bytes(hexstr=to_hex(primitive))
elif hexstr is not None:
if len(hexstr) % 2:
hexstr = cast(HexStr, "0x0" + remove_0x_prefix(hexstr))
return decode_hex(hexstr)
elif text is not None:
return text.encode("utf-8")
raise TypeError(
"expected a bool, int, byte or bytearray in first arg, "
"or keyword of hexstr or text"
)
@validate_conversion_arguments
def to_text(
primitive: Optional[Primitives] = None,
hexstr: Optional[HexStr] = None,
text: Optional[str] = None,
) -> str:
if hexstr is not None:
return to_bytes(hexstr=hexstr).decode("utf-8")
elif text is not None:
return text
elif isinstance(primitive, str):
return to_text(hexstr=primitive)
elif isinstance(primitive, (bytes, bytearray)):
return primitive.decode("utf-8")
elif is_integer(primitive):
byte_encoding = int_to_big_endian(cast(int, primitive))
return to_text(byte_encoding)
raise TypeError("Expected an int, bytes, bytearray or hexstr.")
def text_if_str(
to_type: Callable[..., T], text_or_primitive: Union[bytes, int, str]
) -> T:
"""
Convert to a type, assuming that strings can be only unicode text (not a hexstr).
:param to_type function: takes the arguments (primitive, hexstr=hexstr, text=text),
        e.g. to_bytes, to_text, to_hex, to_int, etc.
:param text_or_primitive bytes, str, int: value to convert
"""
if isinstance(text_or_primitive, str):
return to_type(text=text_or_primitive)
else:
return to_type(text_or_primitive)
def hexstr_if_str(
to_type: Callable[..., T], hexstr_or_primitive: Union[bytes, int, str]
) -> T:
"""
Convert to a type, assuming that strings can be only hexstr (not unicode text).
:param to_type function: takes the arguments (primitive, hexstr=hexstr, text=text),
        e.g. to_bytes, to_text, to_hex, to_int, etc.
:param hexstr_or_primitive bytes, str, int: value to convert
"""
if isinstance(hexstr_or_primitive, str):
if remove_0x_prefix(HexStr(hexstr_or_primitive)) and not is_hexstr(
hexstr_or_primitive
):
raise ValueError(
"when sending a str, it must be a hex string. "
f"Got: {repr(hexstr_or_primitive)}"
)
return to_type(hexstr=hexstr_or_primitive)
else:
return to_type(hexstr_or_primitive)
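A few round trips through the conversion helpers above, as a sketch:

assert to_hex(255) == "0xff"
assert to_int(hexstr="0xff") == 255
assert to_bytes(hexstr="0xff") == b"\xff"
assert to_text(hexstr="0x68656c6c6f") == "hello"
# The *_if_str helpers route plain strings to the appropriate keyword argument.
assert hexstr_if_str(to_int, "0xff") == 255
assert text_if_str(to_bytes, "abc") == b"abc"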

View File

@@ -0,0 +1,107 @@
import decimal
from decimal import (
localcontext,
)
from typing import (
Union,
)
from .types import (
is_integer,
is_string,
)
from .units import (
units,
)
class denoms:
wei = int(units["wei"])
kwei = int(units["kwei"])
babbage = int(units["babbage"])
femtoether = int(units["femtoether"])
mwei = int(units["mwei"])
lovelace = int(units["lovelace"])
picoether = int(units["picoether"])
gwei = int(units["gwei"])
shannon = int(units["shannon"])
nanoether = int(units["nanoether"])
nano = int(units["nano"])
szabo = int(units["szabo"])
microether = int(units["microether"])
micro = int(units["micro"])
finney = int(units["finney"])
milliether = int(units["milliether"])
milli = int(units["milli"])
ether = int(units["ether"])
kether = int(units["kether"])
grand = int(units["grand"])
mether = int(units["mether"])
gether = int(units["gether"])
tether = int(units["tether"])
MIN_WEI = 0
MAX_WEI = 2**256 - 1
def from_wei(number: int, unit: str) -> Union[int, decimal.Decimal]:
"""
Takes a number of wei and converts it to any other ether unit.
"""
if unit.lower() not in units:
raise ValueError(f"Unknown unit. Must be one of {'/'.join(units.keys())}")
if number == 0:
return 0
if number < MIN_WEI or number > MAX_WEI:
raise ValueError("value must be between 1 and 2**256 - 1")
unit_value = units[unit.lower()]
with localcontext() as ctx:
ctx.prec = 999
d_number = decimal.Decimal(value=number, context=ctx)
result_value = d_number / unit_value
return result_value
def to_wei(number: Union[int, float, str, decimal.Decimal], unit: str) -> int:
"""
Takes a number of a unit and converts it to wei.
"""
if unit.lower() not in units:
raise ValueError(f"Unknown unit. Must be one of {'/'.join(units.keys())}")
if is_integer(number) or is_string(number):
d_number = decimal.Decimal(value=number)
elif isinstance(number, float):
d_number = decimal.Decimal(value=str(number))
elif isinstance(number, decimal.Decimal):
d_number = number
else:
raise TypeError("Unsupported type. Must be one of integer, float, or string")
s_number = str(number)
unit_value = units[unit.lower()]
if d_number == decimal.Decimal(0):
return 0
if d_number < 1 and "." in s_number:
with localcontext() as ctx:
multiplier = len(s_number) - s_number.index(".") - 1
ctx.prec = multiplier
d_number = decimal.Decimal(value=number, context=ctx) * 10**multiplier
unit_value /= 10**multiplier
with localcontext() as ctx:
ctx.prec = 999
result_value = decimal.Decimal(value=d_number, context=ctx) * unit_value
if result_value < MIN_WEI or result_value > MAX_WEI:
raise ValueError("Resulting wei value must be between 1 and 2**256 - 1")
return int(result_value)
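A short sketch of wei conversions using the units table from this package:

assert to_wei(1, "gwei") == 10**9
assert to_wei("0.05", "ether") == 5 * 10**16
assert from_wei(10**18, "ether") == 1  # nonzero results come back as Decimal
assert from_wei(0, "ether") == 0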

View File

@@ -0,0 +1,269 @@
from typing import (
Any,
Callable,
Dict,
Generator,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
overload,
)
from .. import (
ExtendedDebugLogger,
HasExtendedDebugLogger,
HasExtendedDebugLoggerMeta,
HasLogger,
HasLoggerMeta,
ValidationError,
add_0x_prefix,
apply_formatter_at_index,
apply_formatter_if as non_curried_apply_formatter_if,
apply_formatter_to_array,
apply_formatters_to_dict as non_curried_apply_formatters_to_dict,
apply_formatters_to_sequence,
apply_key_map,
apply_one_of_formatters as non_curried_apply_one_of_formatters,
apply_to_return_value,
big_endian_to_int,
clamp,
combine_argument_formatters,
combomethod,
decode_hex,
denoms,
encode_hex,
# event_abi_to_log_topic,
# event_signature_to_log_topic,
flatten_return,
from_wei,
# function_abi_to_4byte_selector,
# function_signature_to_4byte_selector,
get_extended_debug_logger,
get_logger,
hexstr_if_str as non_curried_hexstr_if_str,
humanize_bytes,
humanize_hash,
humanize_integer_sequence,
humanize_ipfs_uri,
humanize_seconds,
humanize_wei,
import_string,
int_to_big_endian,
is_0x_prefixed,
is_address,
is_binary_address,
is_boolean,
is_bytes,
is_canonical_address,
is_checksum_address,
is_checksum_formatted_address,
is_dict,
is_hex,
is_hex_address,
is_hexstr,
is_integer,
is_list,
is_list_like,
is_normalized_address,
is_null,
is_number,
is_same_address,
is_string,
is_text,
is_tuple,
# keccak,
remove_0x_prefix,
replace_exceptions,
reversed_return,
setup_DEBUG2_logging,
sort_return,
text_if_str as non_curried_text_if_str,
to_bytes,
to_canonical_address,
to_checksum_address,
to_dict,
to_hex,
to_int,
to_list,
to_normalized_address,
to_ordered_dict,
to_set,
to_text,
to_tuple,
to_wei,
)
from ..toolz import (
curry,
)
TReturn = TypeVar("TReturn")
TValue = TypeVar("TValue")
@overload
def apply_formatter_if(
condition: Callable[..., bool]
) -> Callable[[Callable[..., TReturn]], Callable[[TValue], Union[TReturn, TValue]]]:
pass
@overload
def apply_formatter_if(
condition: Callable[..., bool], formatter: Callable[..., TReturn]
) -> Callable[[TValue], Union[TReturn, TValue]]:
pass
@overload
def apply_formatter_if(
condition: Callable[..., bool], formatter: Callable[..., TReturn], value: TValue
) -> Union[TReturn, TValue]:
pass
# This is just a stub to appease mypy, it gets overwritten later
def apply_formatter_if( # type: ignore
condition: Callable[..., bool],
formatter: Optional[Callable[..., TReturn]] = None,
value: Optional[TValue] = None,
) -> Union[
Callable[[Callable[..., TReturn]], Callable[[TValue], Union[TReturn, TValue]]],
Callable[[TValue], Union[TReturn, TValue]],
TReturn,
TValue,
]:
pass
@overload
def apply_one_of_formatters(
formatter_condition_pairs: Sequence[
Tuple[Callable[..., bool], Callable[..., TReturn]]
]
) -> Callable[[TValue], TReturn]:
...
@overload
def apply_one_of_formatters(
formatter_condition_pairs: Sequence[
Tuple[Callable[..., bool], Callable[..., TReturn]]
],
value: TValue,
) -> TReturn:
...
# This is just a stub to appease mypy, it gets overwritten later
def apply_one_of_formatters( # type: ignore
formatter_condition_pairs: Sequence[
Tuple[Callable[..., bool], Callable[..., TReturn]]
],
value: Optional[TValue] = None,
) -> TReturn:
...
@overload
def hexstr_if_str(
to_type: Callable[..., TReturn]
) -> Callable[[Union[bytes, int, str]], TReturn]:
...
@overload
def hexstr_if_str(
to_type: Callable[..., TReturn], to_format: Union[bytes, int, str]
) -> TReturn:
...
# This is just a stub to appease mypy, it gets overwritten later
def hexstr_if_str( # type: ignore
to_type: Callable[..., TReturn], to_format: Optional[Union[bytes, int, str]] = None
) -> TReturn:
...
@overload
def text_if_str(
to_type: Callable[..., TReturn]
) -> Callable[[Union[bytes, int, str]], TReturn]:
...
@overload
def text_if_str(
to_type: Callable[..., TReturn], text_or_primitive: Union[bytes, int, str]
) -> TReturn:
...
# This is just a stub to appease mypy, it gets overwritten later
def text_if_str( # type: ignore
to_type: Callable[..., TReturn],
text_or_primitive: Optional[Union[bytes, int, str]] = None,
) -> TReturn:
...
@overload
def apply_formatters_to_dict(
formatters: Dict[Any, Any]
) -> Callable[[Dict[Any, Any]], TReturn]:
...
@overload
def apply_formatters_to_dict(
formatters: Dict[Any, Any], value: Dict[Any, Any]
) -> Dict[Any, Any]:
...
# This is just a stub to appease mypy, it gets overwritten later
def apply_formatters_to_dict( # type: ignore
formatters: Dict[Any, Any], value: Optional[Dict[Any, Any]] = None
) -> Dict[Any, Any]:
...
apply_formatter_at_index = curry(apply_formatter_at_index)
apply_formatter_if = curry(non_curried_apply_formatter_if) # noqa: F811
apply_formatter_to_array = curry(apply_formatter_to_array)
apply_formatters_to_dict = curry(non_curried_apply_formatters_to_dict) # noqa: F811
apply_formatters_to_sequence = curry(apply_formatters_to_sequence)
apply_key_map = curry(apply_key_map)
apply_one_of_formatters = curry(non_curried_apply_one_of_formatters) # noqa: F811
from_wei = curry(from_wei)
get_logger = curry(get_logger)
hexstr_if_str = curry(non_curried_hexstr_if_str) # noqa: F811
is_same_address = curry(is_same_address)
text_if_str = curry(non_curried_text_if_str) # noqa: F811
to_wei = curry(to_wei)
clamp = curry(clamp)
# Delete any methods and classes that are not intended to be importable from
# `eth_utils.curried`. We do this approach instead of __all__ because this approach
# actually prevents importing the wrong thing, while __all__ only affects
# `from eth_utils.curried import *`
del Any
del Callable
del Dict
del Generator
del Optional
del Sequence
del TReturn
del TValue
del Tuple
del TypeVar
del Union
del curry
del non_curried_apply_formatter_if
del non_curried_apply_one_of_formatters
del non_curried_apply_formatters_to_dict
del non_curried_hexstr_if_str
del non_curried_text_if_str
del overload
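Because these names are curried, they can be partially applied; a small sketch (the field name is hypothetical):

format_account = apply_formatters_to_dict({"balance": int})
assert format_account({"balance": "42"}) == {"balance": 42}
gwei_amount = from_wei(unit="gwei")
assert gwei_amount(10**9) == 1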

View File

@@ -0,0 +1,20 @@
import platform
import subprocess
import sys
def pip_freeze() -> str:
result = subprocess.run("python -m pip freeze".split(), stdout=subprocess.PIPE)
return f"python -m pip freeze result:\n{result.stdout.decode()}"
def python_version() -> str:
return f"Python version:\n{sys.version}"
def platform_info() -> str:
return f"Operating System: {platform.platform()}"
def get_environment_summary() -> str:
return "\n\n".join([python_version(), platform_info(), pip_freeze()])

View File

@@ -0,0 +1,132 @@
import functools
import itertools
from typing import (
Any,
Callable,
Dict,
Optional,
Type,
TypeVar,
)
from .types import (
is_text,
)
T = TypeVar("T")
class combomethod:
def __init__(self, method: Callable[..., Any]) -> None:
self.method = method
def __get__(
self, obj: Optional[T] = None, objtype: Optional[Type[T]] = None
) -> Callable[..., Any]:
@functools.wraps(self.method)
def _wrapper(*args: Any, **kwargs: Any) -> Any:
if obj is not None:
return self.method(obj, *args, **kwargs)
else:
return self.method(objtype, *args, **kwargs)
return _wrapper
def _has_one_val(*args: T, **kwargs: T) -> bool:
vals = itertools.chain(args, kwargs.values())
not_nones = list(filter(lambda val: val is not None, vals))
return len(not_nones) == 1
def _assert_one_val(*args: T, **kwargs: T) -> None:
if not _has_one_val(*args, **kwargs):
raise TypeError(
"Exactly one of the passed values can be specified. "
f"Instead, values were: {repr(args)}, {repr(kwargs)}"
)
def _hexstr_or_text_kwarg_is_text_type(**kwargs: T) -> bool:
value = kwargs["hexstr"] if "hexstr" in kwargs else kwargs["text"]
return is_text(value)
def _assert_hexstr_or_text_kwarg_is_text_type(**kwargs: T) -> None:
if not _hexstr_or_text_kwarg_is_text_type(**kwargs):
raise TypeError(
"Arguments passed as hexstr or text must be of text type. "
f"Instead, value was: {(repr(next(iter(list(kwargs.values())))))}"
)
def _validate_supported_kwarg(kwargs: Any) -> None:
if next(iter(kwargs)) not in ["primitive", "hexstr", "text"]:
raise TypeError(
"Kwarg must be 'primitive', 'hexstr', or 'text'. "
f"Instead, kwarg was: {repr((next(iter(kwargs))))}"
)
def validate_conversion_arguments(to_wrap: Callable[..., T]) -> Callable[..., T]:
"""
Validates arguments for conversion functions.
- Only a single argument is present
    - Kwarg must be 'primitive', 'hexstr', or 'text'
    - If the kwarg is 'hexstr' or 'text', its value must be a text type
"""
@functools.wraps(to_wrap)
def wrapper(*args: Any, **kwargs: Any) -> T:
_assert_one_val(*args, **kwargs)
if kwargs:
_validate_supported_kwarg(kwargs)
if len(args) == 0 and "primitive" not in kwargs:
_assert_hexstr_or_text_kwarg_is_text_type(**kwargs)
return to_wrap(*args, **kwargs)
return wrapper
def return_arg_type(at_position: int) -> Callable[..., Callable[..., T]]:
"""
Wrap the return value with the result of `type(args[at_position])`.
"""
def decorator(to_wrap: Callable[..., Any]) -> Callable[..., T]:
@functools.wraps(to_wrap)
def wrapper(*args: Any, **kwargs: Any) -> T: # type: ignore
result = to_wrap(*args, **kwargs)
ReturnType = type(args[at_position])
return ReturnType(result) # type: ignore
return wrapper
return decorator
def replace_exceptions(
old_to_new_exceptions: Dict[Type[BaseException], Type[BaseException]]
) -> Callable[[Callable[..., T]], Callable[..., T]]:
"""
Replaces old exceptions with new exceptions to be raised in their place.
"""
old_exceptions = tuple(old_to_new_exceptions.keys())
def decorator(to_wrap: Callable[..., T]) -> Callable[..., T]:
@functools.wraps(to_wrap)
def wrapped(*args: Any, **kwargs: Any) -> T:
try:
return to_wrap(*args, **kwargs)
except old_exceptions as err:
try:
raise old_to_new_exceptions[type(err)](err) from err
except KeyError:
raise TypeError(
f"could not look up new exception to use for {repr(err)}"
) from err
return wrapped
return decorator
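A compact sketch of combomethod and replace_exceptions in use:

@replace_exceptions({KeyError: ValueError})
def pick(mapping, key):
    return mapping[key]

try:
    pick({}, "missing")
except ValueError:
    pass  # the KeyError was re-raised as a ValueError

class Registry:
    @combomethod
    def receiver(ref):
        # `ref` is the class when called on the class, the instance otherwise
        return ref

assert Registry.receiver() is Registry
assert isinstance(Registry().receiver(), Registry)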

View File

@@ -0,0 +1,6 @@
def int_to_big_endian(value: int) -> bytes:
return value.to_bytes((value.bit_length() + 7) // 8 or 1, "big")
def big_endian_to_int(value: bytes) -> int:
return int.from_bytes(value, "big")
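Round-trip sketch for the big-endian helpers:

assert int_to_big_endian(0) == b"\x00"
assert int_to_big_endian(256) == b"\x01\x00"
assert big_endian_to_int(b"\x01\x00") == 256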

View File

@@ -0,0 +1,4 @@
class ValidationError(Exception):
"""
Raised when something does not pass a validation check.
"""

View File

@@ -0,0 +1,75 @@
import collections
import functools
import itertools
from typing import ( # noqa: F401
Any,
Callable,
Dict,
Iterable,
List,
Mapping,
Set,
Tuple,
TypeVar,
Union,
)
from .toolz import (
compose as _compose,
)
T = TypeVar("T")
def identity(value: T) -> T:
return value
TGIn = TypeVar("TGIn")
TGOut = TypeVar("TGOut")
TFOut = TypeVar("TFOut")
def combine(
f: Callable[[TGOut], TFOut], g: Callable[[TGIn], TGOut]
) -> Callable[[TGIn], TFOut]:
return lambda x: f(g(x))
def apply_to_return_value(
callback: Callable[..., T]
) -> Callable[..., Callable[..., T]]:
def outer(fn: Callable[..., T]) -> Callable[..., T]:
# We would need to type annotate *args and **kwargs but doing so segfaults
# the PyPy builds. We ignore instead.
@functools.wraps(fn)
def inner(*args, **kwargs) -> T: # type: ignore
return callback(fn(*args, **kwargs))
return inner
return outer
TVal = TypeVar("TVal")
TKey = TypeVar("TKey")
to_tuple = apply_to_return_value(
tuple
) # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Tuple[TVal, ...]]] # noqa: E501
to_list = apply_to_return_value(
list
) # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., List[TVal]]] # noqa: E501
to_set = apply_to_return_value(
set
) # type: Callable[[Callable[..., Iterable[TVal]]], Callable[..., Set[TVal]]] # noqa: E501
to_dict = apply_to_return_value(
dict
) # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., Dict[TKey, TVal]]] # noqa: E501
to_ordered_dict = apply_to_return_value(
collections.OrderedDict
) # type: Callable[[Callable[..., Iterable[Union[Mapping[TKey, TVal], Tuple[TKey, TVal]]]]], Callable[..., collections.OrderedDict[TKey, TVal]]] # noqa: E501
sort_return = _compose(to_tuple, apply_to_return_value(sorted))
flatten_return = _compose(
to_tuple, apply_to_return_value(itertools.chain.from_iterable)
)
reversed_return = _compose(to_tuple, apply_to_return_value(reversed), to_tuple)
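A sketch of the return-value combinators: they turn generator functions into functions that return concrete containers.

@to_tuple
def evens(limit):
    for n in range(limit):
        if n % 2 == 0:
            yield n

assert evens(7) == (0, 2, 4, 6)

@sort_return
def shuffled():
    yield from (3, 1, 2)

assert shuffled() == (1, 2, 3)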

View File

@@ -0,0 +1,74 @@
# String encodings and numeric representations
import binascii
import re
from typing import (
Any,
AnyStr,
)
from ..typing import (
HexStr,
)
from .types import (
is_string,
is_text,
)
_HEX_REGEXP = re.compile("(0[xX])?[0-9a-fA-F]*")
def decode_hex(value: str) -> bytes:
if not is_text(value):
raise TypeError("Value must be an instance of str")
non_prefixed = remove_0x_prefix(HexStr(value))
    # encode to ASCII bytes up front; unhexlify may only accept bytes in the future
ascii_hex = non_prefixed.encode("ascii")
return binascii.unhexlify(ascii_hex)
def encode_hex(value: AnyStr) -> HexStr:
if not is_string(value):
raise TypeError("Value must be an instance of str or unicode")
elif isinstance(value, (bytes, bytearray)):
ascii_bytes = value
else:
ascii_bytes = value.encode("ascii")
binary_hex = binascii.hexlify(ascii_bytes)
return add_0x_prefix(HexStr(binary_hex.decode("ascii")))
def is_0x_prefixed(value: str) -> bool:
if not is_text(value):
raise TypeError(
f"is_0x_prefixed requires text typed arguments. Got: {repr(value)}"
)
return value.startswith(("0x", "0X"))
def remove_0x_prefix(value: HexStr) -> HexStr:
if is_0x_prefixed(value):
return HexStr(value[2:])
return value
def add_0x_prefix(value: HexStr) -> HexStr:
if is_0x_prefixed(value):
return value
return HexStr("0x" + value)
def is_hexstr(value: Any) -> bool:
if not is_text(value) or not value:
return False
return _HEX_REGEXP.fullmatch(value) is not None
def is_hex(value: Any) -> bool:
if not is_text(value):
raise TypeError(f"is_hex requires text typed arguments. Got: {repr(value)}")
if not value:
return False
return _HEX_REGEXP.fullmatch(value) is not None
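A quick sketch of the hex-string helpers:

assert add_0x_prefix("deadbeef") == "0xdeadbeef"
assert remove_0x_prefix("0xdeadbeef") == "deadbeef"
assert encode_hex(b"\x01\x02") == "0x0102"
assert decode_hex("0x0102") == b"\x01\x02"
assert is_hexstr("0x1234") and not is_hexstr("0xzz")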

View File

@@ -0,0 +1,188 @@
from typing import (
Any,
Iterable,
Iterator,
Tuple,
Union,
)
from urllib import (
parse,
)
from ..typing import (
URI,
Hash32,
)
from .currency import (
denoms,
from_wei,
)
from .toolz import (
sliding_window,
take,
)
def humanize_seconds(seconds: Union[float, int]) -> str:
if int(seconds) == 0:
return "0s"
unit_values = _consume_leading_zero_units(_humanize_seconds(int(seconds)))
return "".join((f"{amount}{unit}" for amount, unit in take(3, unit_values)))
SECOND = 1
MINUTE = 60
HOUR = 60 * 60
DAY = 24 * HOUR
YEAR = 365 * DAY
MONTH = YEAR // 12
WEEK = 7 * DAY
UNITS = (
(YEAR, "y"),
(MONTH, "m"),
(WEEK, "w"),
(DAY, "d"),
(HOUR, "h"),
(MINUTE, "m"),
(SECOND, "s"),
)
def _consume_leading_zero_units(
units_iter: Iterator[Tuple[int, str]]
) -> Iterator[Tuple[int, str]]:
for amount, unit in units_iter:
if amount == 0:
continue
else:
yield (amount, unit)
break
yield from units_iter
def _humanize_seconds(seconds: int) -> Iterator[Tuple[int, str]]:
remainder = seconds
for duration, unit in UNITS:
if not remainder:
break
num = remainder // duration
yield num, unit
remainder %= duration
DISPLAY_HASH_CHARS = 4
def humanize_bytes(value: bytes) -> str:
if len(value) <= DISPLAY_HASH_CHARS + 1:
return value.hex()
value_as_hex = value.hex()
head = value_as_hex[:DISPLAY_HASH_CHARS]
tail = value_as_hex[-1 * DISPLAY_HASH_CHARS :]
return f"{head}..{tail}"
def humanize_hash(value: Hash32) -> str:
return humanize_bytes(value)
def humanize_ipfs_uri(uri: URI) -> str:
if not is_ipfs_uri(uri):
raise TypeError(
f"{uri} does not look like a valid IPFS uri. Currently, "
"only CIDv0 hash schemes are supported."
)
parsed = parse.urlparse(uri)
ipfs_hash = parsed.netloc
head = ipfs_hash[:DISPLAY_HASH_CHARS]
tail = ipfs_hash[-1 * DISPLAY_HASH_CHARS :]
return f"ipfs://{head}..{tail}"
def is_ipfs_uri(value: Any) -> bool:
if not isinstance(value, str):
return False
parsed = parse.urlparse(value)
if parsed.scheme != "ipfs" or not parsed.netloc:
return False
return _is_CIDv0_ipfs_hash(parsed.netloc)
def _is_CIDv0_ipfs_hash(ipfs_hash: str) -> bool:
if ipfs_hash.startswith("Qm") and len(ipfs_hash) == 46:
return True
return False
def _find_breakpoints(*values: int) -> Iterator[int]:
yield 0
for index, (left, right) in enumerate(sliding_window(2, values), 1):
if left + 1 == right:
continue
else:
yield index
yield len(values)
def _extract_integer_ranges(*values: int) -> Iterator[Tuple[int, int]]:
"""
Return a tuple of consecutive ranges of integers.
:param values: a sequence of ordered integers
- fn(1, 2, 3) -> ((1, 3),)
- fn(1, 2, 3, 7, 8, 9) -> ((1, 3), (7, 9))
- fn(1, 7, 8, 9) -> ((1, 1), (7, 9))
"""
for left, right in sliding_window(2, _find_breakpoints(*values)):
chunk = values[left:right]
yield chunk[0], chunk[-1]
def _humanize_range(bounds: Tuple[int, int]) -> str:
left, right = bounds
if left == right:
return str(left)
else:
return f"{left}-{right}"
def humanize_integer_sequence(values_iter: Iterable[int]) -> str:
"""
Return a concise, human-readable string representing a sequence of integers.
- fn((1, 2, 3)) -> '1-3'
- fn((1, 2, 3, 7, 8, 9)) -> '1-3|7-9'
- fn((1, 2, 3, 5, 7, 8, 9)) -> '1-3|5|7-9'
- fn((1, 7, 8, 9)) -> '1|7-9'
"""
values = tuple(values_iter)
if not values:
return "(empty)"
else:
return "|".join(map(_humanize_range, _extract_integer_ranges(*values)))
def humanize_wei(number: int) -> str:
if number >= denoms.finney:
unit = "ether"
elif number >= denoms.mwei:
unit = "gwei"
else:
unit = "wei"
amount = from_wei(number, unit)
x = f"{str(amount)} {unit}"
return x
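A few example outputs from the humanize helpers, as a sketch:

assert humanize_seconds(0) == "0s"
assert humanize_seconds(3661) == "1h1m1s"
assert humanize_integer_sequence((1, 2, 3, 7, 8, 9)) == "1-3|7-9"
assert humanize_wei(10**18) == "1 ether"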

View File

@@ -0,0 +1,159 @@
import contextlib
from functools import (
cached_property,
)
import logging
from typing import (
Any,
Dict,
Iterator,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from .toolz import (
assoc,
)
DEBUG2_LEVEL_NUM = 8
TLogger = TypeVar("TLogger", bound=logging.Logger)
class ExtendedDebugLogger(logging.Logger):
"""
Logging class that can be used for lower level debug logging.
"""
@cached_property
def show_debug2(self) -> bool:
return self.isEnabledFor(DEBUG2_LEVEL_NUM)
def debug2(self, message: str, *args: Any, **kwargs: Any) -> None:
if self.show_debug2:
self.log(DEBUG2_LEVEL_NUM, message, *args, **kwargs)
else:
# When we find that `DEBUG2` isn't enabled we completely replace
# the `debug2` function in this instance of the logger with a noop
            # lambda to further speed up subsequent calls.
self.__dict__["debug2"] = lambda message, *args, **kwargs: None
def __reduce__(self) -> Tuple[Any, ...]:
# This is needed because our parent's implementation could
# cause us to become a regular Logger on unpickling.
return get_extended_debug_logger, (self.name,)
def setup_DEBUG2_logging() -> None:
"""
Installs the `DEBUG2` level logging levels to the main logging module.
"""
if not hasattr(logging, "DEBUG2"):
logging.addLevelName(DEBUG2_LEVEL_NUM, "DEBUG2")
logging.DEBUG2 = DEBUG2_LEVEL_NUM # type: ignore
@contextlib.contextmanager
def _use_logger_class(logger_class: Type[logging.Logger]) -> Iterator[None]:
original_logger_class = logging.getLoggerClass()
logging.setLoggerClass(logger_class)
try:
yield
finally:
logging.setLoggerClass(original_logger_class)
def get_logger(name: str, logger_class: Union[Type[TLogger], None] = None) -> TLogger:
if logger_class is None:
return cast(TLogger, logging.getLogger(name))
else:
with _use_logger_class(logger_class):
# The logging module caches logger instances. The following code
# ensures that if there is a cached instance that we don't
# accidentally return the incorrect logger type because the logging
# module does not *update* the cached instance in the event that
# the global logging class changes.
#
# types ignored b/c mypy doesn't identify presence of
# manager on logging.Logger
manager = logging.Logger.manager
if name in manager.loggerDict:
if type(manager.loggerDict[name]) is not logger_class:
del manager.loggerDict[name]
return cast(TLogger, logging.getLogger(name))
def get_extended_debug_logger(name: str) -> ExtendedDebugLogger:
return get_logger(name, ExtendedDebugLogger)
THasLoggerMeta = TypeVar("THasLoggerMeta", bound="HasLoggerMeta")
class HasLoggerMeta(type):
"""
Assigns a logger instance to a class, derived from the import path and name.
This metaclass uses `__qualname__` to identify a unique and meaningful name
to use when creating the associated logger for a given class.
"""
logger_class = logging.Logger
def __new__(
mcls: Type[THasLoggerMeta],
name: str,
bases: Tuple[Type[Any]],
namespace: Dict[str, Any],
) -> THasLoggerMeta:
if "logger" in namespace:
# If a logger was explicitly declared we shouldn't do anything to
# replace it.
return super().__new__(mcls, name, bases, namespace)
if "__qualname__" not in namespace:
raise AttributeError("Missing __qualname__")
with _use_logger_class(mcls.logger_class):
logger = logging.getLogger(namespace["__qualname__"])
return super().__new__(mcls, name, bases, assoc(namespace, "logger", logger))
@classmethod
def replace_logger_class(
mcls: Type[THasLoggerMeta], value: Type[logging.Logger]
) -> Type[THasLoggerMeta]:
return type(mcls.__name__, (mcls,), {"logger_class": value})
@classmethod
def meta_compat(
mcls: Type[THasLoggerMeta], other: Type[type]
) -> Type[THasLoggerMeta]:
return type(mcls.__name__, (mcls, other), {})
class _BaseHasLogger(metaclass=HasLoggerMeta):
    # This class exists to allow us to define the type of the logger. Once
# python3.5 is deprecated this can be removed in favor of a simple type
# annotation on the main class.
logger = logging.Logger("") # type: logging.Logger
class HasLogger(_BaseHasLogger):
pass
HasExtendedDebugLoggerMeta = HasLoggerMeta.replace_logger_class(ExtendedDebugLogger)
class _BaseHasExtendedDebugLogger(metaclass=HasExtendedDebugLoggerMeta): # type: ignore
    # This class exists to allow us to define the type of the logger. Once
# python3.5 is deprecated this can be removed in favor of a simple type
# annotation on the main class.
logger = ExtendedDebugLogger("") # type: ExtendedDebugLogger
class HasExtendedDebugLogger(_BaseHasExtendedDebugLogger):
pass
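A small sketch of the logging utilities above (the logger name below is made up for illustration):

setup_DEBUG2_logging()
log = get_extended_debug_logger("example.vendored")  # hypothetical name
assert isinstance(log, ExtendedDebugLogger)
log.debug2("emitted only when DEBUG2 (level %d) is enabled", DEBUG2_LEVEL_NUM)

class Worker(HasExtendedDebugLogger):
    pass

# The metaclass derives the logger name from the class __qualname__.
assert Worker.logger.name.endswith("Worker")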

View File

@@ -0,0 +1,31 @@
from importlib import (
import_module,
)
from typing import (
Any,
)
def import_string(dotted_path: str) -> Any:
"""
Import a variable using its path and name.
:param dotted_path: dotted module path and variable/class name
:return: the attribute/class designated by the last name in the path
:raise: ImportError, if the import failed
Source: django.utils.module_loading
"""
try:
module_path, class_name = dotted_path.rsplit(".", 1)
except ValueError:
msg = f"{dotted_path} doesn't look like a module path"
raise ImportError(msg)
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError:
msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
raise ImportError(msg)
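A one-line sketch of import_string:

import collections
assert import_string("collections.OrderedDict") is collections.OrderedDict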

View File

@@ -0,0 +1,43 @@
from abc import (
ABC,
abstractmethod,
)
import decimal
import numbers
from typing import (
Any,
TypeVar,
Union,
)
class Comparable(ABC):
@abstractmethod
def __lt__(self, other: Any) -> bool:
...
@abstractmethod
def __gt__(self, other: Any) -> bool:
...
TComparable = Union[Comparable, numbers.Real, int, float, decimal.Decimal]
TValue = TypeVar("TValue", bound=TComparable)
def clamp(lower_bound: TValue, upper_bound: TValue, value: TValue) -> TValue:
# The `mypy` ignore statements here are due to doing a comparison of
# `Union` types which isn't allowed. (per cburgdorf). This approach was
# chosen over using `typing.overload` to define multiple signatures for
# each comparison type here since the added value of "proper" typing
# doesn't seem to justify the complexity of having a bunch of different
# signatures defined. The external library perspective on this function
# should still be adequate under this approach
if value < lower_bound: # type: ignore
return lower_bound
elif value > upper_bound: # type: ignore
return upper_bound
else:
return value
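clamp simply restricts a value to the closed interval [lower_bound, upper_bound]; for example:

assert clamp(0, 10, -5) == 0
assert clamp(0, 10, 5) == 5
assert clamp(0, 10, 50) == 10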

View File

@@ -0,0 +1,76 @@
from ...toolz import ( # noqa: F401
accumulate,
assoc,
assoc_in,
comp,
complement,
compose,
concat,
concatv,
cons,
count,
countby,
curried,
curry,
dicttoolz,
diff,
dissoc,
do,
drop,
excepts,
filter,
first,
flip,
frequencies,
functoolz,
get,
get_in,
groupby,
identity,
interleave,
interpose,
isdistinct,
isiterable,
itemfilter,
itemmap,
iterate,
itertoolz,
join,
juxt,
keyfilter,
keymap,
last,
map,
mapcat,
memoize,
merge,
merge_sorted,
merge_with,
nth,
partial,
partition,
partition_all,
partitionby,
peek,
pipe,
pluck,
random_sample,
recipes,
reduce,
reduceby,
remove,
second,
sliding_window,
sorted,
tail,
take,
take_nth,
thread_first,
thread_last,
topk,
unique,
update_in,
utils,
valfilter,
valmap,
)

View File

@@ -0,0 +1,54 @@
import collections.abc
import numbers
from typing import (
Any,
)
bytes_types = (bytes, bytearray)
integer_types = (int,)
text_types = (str,)
string_types = (bytes, str, bytearray)
def is_integer(value: Any) -> bool:
return isinstance(value, integer_types) and not isinstance(value, bool)
def is_bytes(value: Any) -> bool:
return isinstance(value, bytes_types)
def is_text(value: Any) -> bool:
return isinstance(value, text_types)
def is_string(value: Any) -> bool:
return isinstance(value, string_types)
def is_boolean(value: Any) -> bool:
return isinstance(value, bool)
def is_dict(obj: Any) -> bool:
return isinstance(obj, collections.abc.Mapping)
def is_list_like(obj: Any) -> bool:
return not is_string(obj) and isinstance(obj, collections.abc.Sequence)
def is_list(obj: Any) -> bool:
return isinstance(obj, list)
def is_tuple(obj: Any) -> bool:
return isinstance(obj, tuple)
def is_null(obj: Any) -> bool:
return obj is None
def is_number(obj: Any) -> bool:
return isinstance(obj, numbers.Number)

View File

@@ -0,0 +1,18 @@
import warnings
from .misc import (
Address,
AnyAddress,
ChecksumAddress,
HexAddress,
HexStr,
Primitives,
T,
)
warnings.warn(
"The eth_utils.typing module will be deprecated in favor "
"of eth-typing in the next major version bump.",
category=DeprecationWarning,
stacklevel=2,
)

View File

@@ -0,0 +1,14 @@
from typing import (
TypeVar,
)
from ...typing import ( # noqa: F401
Address,
AnyAddress,
ChecksumAddress,
HexAddress,
HexStr,
Primitives,
)
T = TypeVar("T")

View File

@@ -0,0 +1,31 @@
import decimal
# Units are in their own module here, so that they can keep this
# formatting, as this module is excluded from black in pyproject.toml
# fmt: off
units = {
'wei': decimal.Decimal('1'), # noqa: E241
'kwei': decimal.Decimal('1000'), # noqa: E241
'babbage': decimal.Decimal('1000'), # noqa: E241
'femtoether': decimal.Decimal('1000'), # noqa: E241
'mwei': decimal.Decimal('1000000'), # noqa: E241
'lovelace': decimal.Decimal('1000000'), # noqa: E241
'picoether': decimal.Decimal('1000000'), # noqa: E241
'gwei': decimal.Decimal('1000000000'), # noqa: E241
'shannon': decimal.Decimal('1000000000'), # noqa: E241
'nanoether': decimal.Decimal('1000000000'), # noqa: E241
'nano': decimal.Decimal('1000000000'), # noqa: E241
'szabo': decimal.Decimal('1000000000000'), # noqa: E241
'microether': decimal.Decimal('1000000000000'), # noqa: E241
'micro': decimal.Decimal('1000000000000'), # noqa: E241
'finney': decimal.Decimal('1000000000000000'), # noqa: E241
'milliether': decimal.Decimal('1000000000000000'), # noqa: E241
'milli': decimal.Decimal('1000000000000000'), # noqa: E241
'ether': decimal.Decimal('1000000000000000000'), # noqa: E241
'kether': decimal.Decimal('1000000000000000000000'), # noqa: E241
'grand': decimal.Decimal('1000000000000000000000'), # noqa: E241
'mether': decimal.Decimal('1000000000000000000000000'), # noqa: E241
'gether': decimal.Decimal('1000000000000000000000000000'), # noqa: E241
'tether': decimal.Decimal('1000000000000000000000000000000'), # noqa: E241
}
# fmt: on