lz_db
2025-11-16 12:31:03 +08:00
commit 0fab423a18
1451 changed files with 743213 additions and 0 deletions


@@ -0,0 +1,26 @@
from .itertoolz import *
from .functoolz import *
from .dicttoolz import *
from .recipes import *
from functools import partial, reduce
sorted = sorted
map = map
filter = filter
# Aliases
comp = compose
from . import curried
# functoolz._sigs.create_signature_registry()
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
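# Illustrative note: ``comp`` is simply an alias for ``compose``, so
# ``comp(str, abs)(-3)`` and ``compose(str, abs)(-3)`` both return '3'
# (assuming the package imports cleanly).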


@@ -0,0 +1,784 @@
"""Internal module for better introspection of builtins.
The main functions are ``is_builtin_valid_args``, ``is_builtin_partial_args``,
and ``has_unknown_args``. Other functions in this module support these three.
Notably, we create a ``signatures`` registry to enable introspection of
builtin functions in any Python version. This includes builtins that
have more than one valid signature. Currently, the registry includes
builtins from ``builtins``, ``functools``, ``itertools``, and ``operator``
modules. More can be added as requested. We don't guarantee full coverage.
Everything in this module should be regarded as implementation details.
Users should not use this module directly.
"""
import functools
import inspect
import itertools
import operator
from importlib import import_module
from .functoolz import (is_partial_args, is_arity, has_varargs,
has_keywords, num_required_args)
import builtins
# We mock builtin callables using lists of tuples with lambda functions.
#
# The tuple spec is (num_position_args, lambda_func, keyword_only_args).
#
# num_position_args:
# - The number of positional-only arguments. If not specified,
# all positional arguments are considered positional-only.
#
# lambda_func:
# - lambda function that matches a signature of a builtin, but does
# not include keyword-only arguments.
#
# keyword_only_args: (optional)
# - Tuple of keyword-only arguments.
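# For example, the three accepted spec forms look like (illustrative only,
# taken from entries defined below):
#     all=[lambda iterable: None]                               # plain lambda
#     enumerate=[(0, lambda iterable, start=0: None)]           # no positional-only args
#     sorted=[(1, lambda iterable: None, ('key', 'reverse'))]   # 1 pos-only arg + kw-only names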
module_info = {}
module_info[builtins] = dict(
abs=[
lambda x: None],
all=[
lambda iterable: None],
anext=[
lambda aiterator: None,
lambda aiterator, default: None],
any=[
lambda iterable: None],
apply=[
lambda object: None,
lambda object, args: None,
lambda object, args, kwargs: None],
ascii=[
lambda obj: None],
bin=[
lambda number: None],
bool=[
lambda x=False: None],
buffer=[
lambda object: None,
lambda object, offset: None,
lambda object, offset, size: None],
bytearray=[
lambda: None,
lambda int: None,
lambda string, encoding='utf8', errors='strict': None],
callable=[
lambda obj: None],
chr=[
lambda i: None],
classmethod=[
lambda function: None],
cmp=[
lambda x, y: None],
coerce=[
lambda x, y: None],
complex=[
lambda real=0, imag=0: None],
delattr=[
lambda obj, name: None],
dict=[
lambda **kwargs: None,
lambda mapping, **kwargs: None],
dir=[
lambda: None,
lambda object: None],
divmod=[
lambda x, y: None],
enumerate=[
(0, lambda iterable, start=0: None)],
eval=[
lambda source: None,
lambda source, globals: None,
lambda source, globals, locals: None],
execfile=[
lambda filename: None,
lambda filename, globals: None,
lambda filename, globals, locals: None],
file=[
(0, lambda name, mode='r', buffering=-1: None)],
filter=[
lambda function, iterable: None],
float=[
lambda x=0.0: None],
format=[
lambda value: None,
lambda value, format_spec: None],
frozenset=[
lambda: None,
lambda iterable: None],
getattr=[
lambda object, name: None,
lambda object, name, default: None],
globals=[
lambda: None],
hasattr=[
lambda obj, name: None],
hash=[
lambda obj: None],
hex=[
lambda number: None],
id=[
lambda obj: None],
input=[
lambda: None,
lambda prompt: None],
int=[
lambda x=0: None,
(0, lambda x, base=10: None)],
intern=[
lambda string: None],
isinstance=[
lambda obj, class_or_tuple: None],
issubclass=[
lambda cls, class_or_tuple: None],
iter=[
lambda iterable: None,
lambda callable, sentinel: None],
len=[
lambda obj: None],
list=[
lambda: None,
lambda iterable: None],
locals=[
lambda: None],
long=[
lambda x=0: None,
(0, lambda x, base=10: None)],
map=[
lambda func, sequence, *iterables: None],
memoryview=[
(0, lambda object: None)],
next=[
lambda iterator: None,
lambda iterator, default: None],
object=[
lambda: None],
oct=[
lambda number: None],
ord=[
lambda c: None],
pow=[
lambda x, y: None,
lambda x, y, z: None],
property=[
lambda fget=None, fset=None, fdel=None, doc=None: None],
range=[
lambda stop: None,
lambda start, stop: None,
lambda start, stop, step: None],
raw_input=[
lambda: None,
lambda prompt: None],
reduce=[
lambda function, sequence: None,
lambda function, sequence, initial: None],
reload=[
lambda module: None],
repr=[
lambda obj: None],
reversed=[
lambda sequence: None],
round=[
(0, lambda number, ndigits=0: None)],
set=[
lambda: None,
lambda iterable: None],
setattr=[
lambda obj, name, value: None],
slice=[
lambda stop: None,
lambda start, stop: None,
lambda start, stop, step: None],
staticmethod=[
lambda function: None],
sum=[
lambda iterable: None,
lambda iterable, start: None],
super=[
lambda type: None,
lambda type, obj: None],
tuple=[
lambda: None,
lambda iterable: None],
type=[
lambda object: None,
lambda name, bases, dict: None],
unichr=[
lambda i: None],
unicode=[
lambda object: None,
lambda string='', encoding='utf8', errors='strict': None],
vars=[
lambda: None,
lambda object: None],
xrange=[
lambda stop: None,
lambda start, stop: None,
lambda start, stop, step: None],
zip=[
lambda *iterables: None],
__build_class__=[
(2, lambda func, name, *bases, **kwds: None, ('metaclass',))],
__import__=[
(0, lambda name, globals=None, locals=None, fromlist=None,
level=None: None)],
)
module_info[builtins]['exec'] = [
lambda source: None,
lambda source, globals: None,
lambda source, globals, locals: None]
module_info[builtins].update(
breakpoint=[
lambda *args, **kws: None],
bytes=[
lambda: None,
lambda int: None,
lambda string, encoding='utf8', errors='strict': None],
compile=[
(0, lambda source, filename, mode, flags=0,
dont_inherit=False, optimize=-1: None)],
max=[
(1, lambda iterable: None, ('default', 'key',)),
(1, lambda arg1, arg2, *args: None, ('key',))],
min=[
(1, lambda iterable: None, ('default', 'key',)),
(1, lambda arg1, arg2, *args: None, ('key',))],
open=[
(0, lambda file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None: None)],
sorted=[
(1, lambda iterable: None, ('key', 'reverse'))],
str=[
lambda object='', encoding='utf', errors='strict': None],
)
module_info[builtins]['print'] = [
(0, lambda *args: None, ('sep', 'end', 'file', 'flush',))]
module_info[functools] = dict(
cmp_to_key=[
(0, lambda mycmp: None)],
partial=[
lambda func, *args, **kwargs: None],
partialmethod=[
lambda func, *args, **kwargs: None],
reduce=[
lambda function, sequence: None,
lambda function, sequence, initial: None],
)
module_info[itertools] = dict(
accumulate=[
(0, lambda iterable, func=None: None)],
chain=[
lambda *iterables: None],
combinations=[
(0, lambda iterable, r: None)],
combinations_with_replacement=[
(0, lambda iterable, r: None)],
compress=[
(0, lambda data, selectors: None)],
count=[
lambda start=0, step=1: None],
cycle=[
lambda iterable: None],
dropwhile=[
lambda predicate, iterable: None],
filterfalse=[
lambda function, sequence: None],
groupby=[
(0, lambda iterable, key=None: None)],
ifilter=[
lambda function, sequence: None],
ifilterfalse=[
lambda function, sequence: None],
imap=[
lambda func, sequence, *iterables: None],
islice=[
lambda iterable, stop: None,
lambda iterable, start, stop: None,
lambda iterable, start, stop, step: None],
izip=[
lambda *iterables: None],
izip_longest=[
(0, lambda *iterables: None, ('fillvalue',))],
pairwise=[
lambda iterable: None],
permutations=[
(0, lambda iterable, r=0: None)],
product=[
(0, lambda *iterables: None, ('repeat',))],
repeat=[
(0, lambda object, times=0: None)],
starmap=[
lambda function, sequence: None],
takewhile=[
lambda predicate, iterable: None],
tee=[
lambda iterable: None,
lambda iterable, n: None],
zip_longest=[
(0, lambda *iterables: None, ('fillvalue',))],
)
module_info[operator] = dict(
__abs__=[
lambda a: None],
__add__=[
lambda a, b: None],
__and__=[
lambda a, b: None],
__concat__=[
lambda a, b: None],
__contains__=[
lambda a, b: None],
__delitem__=[
lambda a, b: None],
__delslice__=[
lambda a, b, c: None],
__div__=[
lambda a, b: None],
__eq__=[
lambda a, b: None],
__floordiv__=[
lambda a, b: None],
__ge__=[
lambda a, b: None],
__getitem__=[
lambda a, b: None],
__getslice__=[
lambda a, b, c: None],
__gt__=[
lambda a, b: None],
__iadd__=[
lambda a, b: None],
__iand__=[
lambda a, b: None],
__iconcat__=[
lambda a, b: None],
__idiv__=[
lambda a, b: None],
__ifloordiv__=[
lambda a, b: None],
__ilshift__=[
lambda a, b: None],
__imatmul__=[
lambda a, b: None],
__imod__=[
lambda a, b: None],
__imul__=[
lambda a, b: None],
__index__=[
lambda a: None],
__inv__=[
lambda a: None],
__invert__=[
lambda a: None],
__ior__=[
lambda a, b: None],
__ipow__=[
lambda a, b: None],
__irepeat__=[
lambda a, b: None],
__irshift__=[
lambda a, b: None],
__isub__=[
lambda a, b: None],
__itruediv__=[
lambda a, b: None],
__ixor__=[
lambda a, b: None],
__le__=[
lambda a, b: None],
__lshift__=[
lambda a, b: None],
__lt__=[
lambda a, b: None],
__matmul__=[
lambda a, b: None],
__mod__=[
lambda a, b: None],
__mul__=[
lambda a, b: None],
__ne__=[
lambda a, b: None],
__neg__=[
lambda a: None],
__not__=[
lambda a: None],
__or__=[
lambda a, b: None],
__pos__=[
lambda a: None],
__pow__=[
lambda a, b: None],
__repeat__=[
lambda a, b: None],
__rshift__=[
lambda a, b: None],
__setitem__=[
lambda a, b, c: None],
__setslice__=[
lambda a, b, c, d: None],
__sub__=[
lambda a, b: None],
__truediv__=[
lambda a, b: None],
__xor__=[
lambda a, b: None],
_abs=[
lambda x: None],
_compare_digest=[
lambda a, b: None],
abs=[
lambda a: None],
add=[
lambda a, b: None],
and_=[
lambda a, b: None],
attrgetter=[
lambda attr, *args: None],
concat=[
lambda a, b: None],
contains=[
lambda a, b: None],
countOf=[
lambda a, b: None],
delitem=[
lambda a, b: None],
delslice=[
lambda a, b, c: None],
div=[
lambda a, b: None],
eq=[
lambda a, b: None],
floordiv=[
lambda a, b: None],
ge=[
lambda a, b: None],
getitem=[
lambda a, b: None],
getslice=[
lambda a, b, c: None],
gt=[
lambda a, b: None],
iadd=[
lambda a, b: None],
iand=[
lambda a, b: None],
iconcat=[
lambda a, b: None],
idiv=[
lambda a, b: None],
ifloordiv=[
lambda a, b: None],
ilshift=[
lambda a, b: None],
imatmul=[
lambda a, b: None],
imod=[
lambda a, b: None],
imul=[
lambda a, b: None],
index=[
lambda a: None],
indexOf=[
lambda a, b: None],
inv=[
lambda a: None],
invert=[
lambda a: None],
ior=[
lambda a, b: None],
ipow=[
lambda a, b: None],
irepeat=[
lambda a, b: None],
irshift=[
lambda a, b: None],
is_=[
lambda a, b: None],
is_not=[
lambda a, b: None],
isCallable=[
lambda a: None],
isMappingType=[
lambda a: None],
isNumberType=[
lambda a: None],
isSequenceType=[
lambda a: None],
isub=[
lambda a, b: None],
itemgetter=[
lambda item, *args: None],
itruediv=[
lambda a, b: None],
ixor=[
lambda a, b: None],
le=[
lambda a, b: None],
length_hint=[
lambda obj: None,
lambda obj, default: None],
lshift=[
lambda a, b: None],
lt=[
lambda a, b: None],
matmul=[
lambda a, b: None],
methodcaller=[
lambda name, *args, **kwargs: None],
mod=[
lambda a, b: None],
mul=[
lambda a, b: None],
ne=[
lambda a, b: None],
neg=[
lambda a: None],
not_=[
lambda a: None],
or_=[
lambda a, b: None],
pos=[
lambda a: None],
pow=[
lambda a, b: None],
repeat=[
lambda a, b: None],
rshift=[
lambda a, b: None],
sequenceIncludes=[
lambda a, b: None],
setitem=[
lambda a, b, c: None],
setslice=[
lambda a, b, c, d: None],
sub=[
lambda a, b: None],
truediv=[
lambda a, b: None],
truth=[
lambda a: None],
xor=[
lambda a, b: None],
)
module_info['toolz'] = dict(
curry=[
(0, lambda *args, **kwargs: None)],
excepts=[
(0, lambda exc, func, handler=None: None)],
flip=[
(0, lambda func=None, a=None, b=None: None)],
juxt=[
(0, lambda *funcs: None)],
memoize=[
(0, lambda func=None, cache=None, key=None: None)],
)
module_info['toolz.functoolz'] = dict(
Compose=[
(0, lambda funcs: None)],
InstanceProperty=[
(0, lambda fget=None, fset=None, fdel=None, doc=None,
classval=None: None)],
)
def num_pos_args(sigspec):
""" Return the number of positional arguments. ``f(x, y=1)`` has 1"""
return sum(1 for x in sigspec.parameters.values()
if x.kind == x.POSITIONAL_OR_KEYWORD
and x.default is x.empty)
def get_exclude_keywords(num_pos_only, sigspec):
""" Return the names of position-only arguments if func has **kwargs"""
if num_pos_only == 0:
return ()
has_kwargs = any(x.kind == x.VAR_KEYWORD
for x in sigspec.parameters.values())
if not has_kwargs:
return ()
pos_args = list(sigspec.parameters.values())[:num_pos_only]
return tuple(x.name for x in pos_args)
def signature_or_spec(func):
try:
return inspect.signature(func)
except (ValueError, TypeError):
return None
def expand_sig(sig):
""" Convert the signature spec in ``module_info`` to add to ``signatures``
The input signature spec is one of:
- ``lambda_func``
- ``(num_position_args, lambda_func)``
- ``(num_position_args, lambda_func, keyword_only_args)``
The output signature spec is:
``(num_position_args, lambda_func, keyword_exclude, sigspec)``
where ``keyword_exclude`` includes keyword only arguments and, if variadic
keywords is present, the names of position-only argument. The latter is
included to support builtins such as ``partial(func, *args, **kwargs)``,
which allows ``func=`` to be used as a keyword even though it's the name
of a positional argument.
"""
if isinstance(sig, tuple):
if len(sig) == 3:
num_pos_only, func, keyword_only = sig
assert isinstance(sig[-1], tuple)
else:
num_pos_only, func = sig
keyword_only = ()
sigspec = signature_or_spec(func)
else:
func = sig
sigspec = signature_or_spec(func)
num_pos_only = num_pos_args(sigspec)
keyword_only = ()
keyword_exclude = get_exclude_keywords(num_pos_only, sigspec)
return num_pos_only, func, keyword_only + keyword_exclude, sigspec
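# Illustrative expansion (values sketched, not executed at import time):
#     expand_sig((1, lambda iterable: None, ('key', 'reverse')))
#     -> (1, <lambda>, ('key', 'reverse'), <Signature (iterable)>)
# i.e. the keyword-only names are carried along with the lambda's signature.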
signatures = {}
def create_signature_registry(module_info=module_info, signatures=signatures):
for module, info in module_info.items():
if isinstance(module, str):
module = import_module(module)
for name, sigs in info.items():
if hasattr(module, name):
new_sigs = tuple(expand_sig(sig) for sig in sigs)
signatures[getattr(module, name)] = new_sigs
def check_valid(sig, args, kwargs):
""" Like ``is_valid_args`` for the given signature spec"""
num_pos_only, func, keyword_exclude, sigspec = sig
if len(args) < num_pos_only:
return False
if keyword_exclude:
kwargs = dict(kwargs)
for item in keyword_exclude:
kwargs.pop(item, None)
try:
func(*args, **kwargs)
return True
except TypeError:
return False
def _is_valid_args(func, args, kwargs):
""" Like ``is_valid_args`` for builtins in our ``signatures`` registry"""
if func not in signatures:
return None
sigs = signatures[func]
return any(check_valid(sig, args, kwargs) for sig in sigs)
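# Illustrative behaviour, assuming the registry has been populated via
# create_signature_registry():
#     _is_valid_args(sorted, (), {})                   # False: ``iterable`` is required
#     _is_valid_args(sorted, ([3, 1],), {'key': abs})  # True
#     _is_valid_args(lambda x: x, (1,), {})            # None: not a registered builtin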
def check_partial(sig, args, kwargs):
""" Like ``is_partial_args`` for the given signature spec"""
num_pos_only, func, keyword_exclude, sigspec = sig
if len(args) < num_pos_only:
pad = (None,) * (num_pos_only - len(args))
args = args + pad
if keyword_exclude:
kwargs = dict(kwargs)
for item in keyword_exclude:
kwargs.pop(item, None)
return is_partial_args(func, args, kwargs, sigspec)
def _is_partial_args(func, args, kwargs):
""" Like ``is_partial_args`` for builtins in our ``signatures`` registry"""
if func not in signatures:
return None
sigs = signatures[func]
return any(check_partial(sig, args, kwargs) for sig in sigs)
def check_arity(n, sig):
num_pos_only, func, keyword_exclude, sigspec = sig
if keyword_exclude or num_pos_only > n:
return False
return is_arity(n, func, sigspec)
def _is_arity(n, func):
if func not in signatures:
return None
sigs = signatures[func]
checks = [check_arity(n, sig) for sig in sigs]
if all(checks):
return True
elif any(checks):
return None
return False
def check_varargs(sig):
num_pos_only, func, keyword_exclude, sigspec = sig
return has_varargs(func, sigspec)
def _has_varargs(func):
if func not in signatures:
return None
sigs = signatures[func]
checks = [check_varargs(sig) for sig in sigs]
if all(checks):
return True
elif any(checks):
return None
return False
def check_keywords(sig):
num_pos_only, func, keyword_exclude, sigspec = sig
if keyword_exclude:
return True
return has_keywords(func, sigspec)
def _has_keywords(func):
if func not in signatures:
return None
sigs = signatures[func]
checks = [check_keywords(sig) for sig in sigs]
if all(checks):
return True
elif any(checks):
return None
return False
def check_required_args(sig):
num_pos_only, func, keyword_exclude, sigspec = sig
return num_required_args(func, sigspec)
def _num_required_args(func):
if func not in signatures:
return None
sigs = signatures[func]
vals = [check_required_args(sig) for sig in sigs]
val = vals[0]
if all(x == val for x in vals):
return val
return None


@@ -0,0 +1,520 @@
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by GitHub's download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "toolz-"
cfg.versionfile_source = "toolz/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
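# Illustrative (hypothetical paths): for a tarball unpacked as ``toolz-0.10.0/``,
# versions_from_parentdir("toolz-", "/tmp/toolz-0.10.0", False) would return
# {"version": "0.10.0", "full-revisionid": None, "dirty": False, "error": None, "date": None}.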
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
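# Illustrative parse (hypothetical describe output): "0.10.0-3-g1a2b3c4-dirty"
# yields closest-tag "0.10.0", distance 3, short "1a2b3c4", dirty True,
# while a bare "1a2b3c4" (no tag) falls back to counting commits for distance.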
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
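# Illustrative rendering (hypothetical pieces): closest-tag "0.10.0",
# distance 3, short "1a2b3c4", dirty True -> "0.10.0+3.g1a2b3c4.dirty";
# the same pieces with no tag -> "0+untagged.3.g1a2b3c4.dirty".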
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
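# Illustrative rendering (same hypothetical pieces as above):
# "0.10.0.post3.dev0+g1a2b3c4" when dirty, "0.10.0.post3+g1a2b3c4" when clean
# with distance 3.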
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}


@@ -0,0 +1,30 @@
import warnings
warnings.warn("The toolz.compatibility module is no longer "
"needed in Python 3 and has been deprecated. Please "
"import these utilities directly from the standard library. "
"This module will be removed in a future release.",
category=DeprecationWarning, stacklevel=2)
import operator
import sys
PY3 = sys.version_info[0] > 2
PY34 = sys.version_info[0] == 3 and sys.version_info[1] == 4
PYPY = hasattr(sys, 'pypy_version_info') and PY3
__all__ = ('map', 'filter', 'range', 'zip', 'reduce', 'zip_longest',
'iteritems', 'iterkeys', 'itervalues', 'filterfalse',
'PY3', 'PY34', 'PYPY')
map = map
filter = filter
range = range
zip = zip
from functools import reduce
from itertools import zip_longest
from itertools import filterfalse
iteritems = operator.methodcaller('items')
iterkeys = operator.methodcaller('keys')
itervalues = operator.methodcaller('values')
from collections.abc import Sequence


@@ -0,0 +1,101 @@
"""
Alternate namespace for toolz such that all functions are curried
Currying provides implicit partial evaluation of all functions
Example:
Get usually requires two arguments, an index and a collection
    >>> from toolz.curried import get
>>> get(0, ('a', 'b'))
'a'
When we use it in higher order functions we often want to pass a partially
evaluated form
>>> data = [(1, 2), (11, 22), (111, 222)]
>>> list(map(lambda seq: get(0, seq), data))
[1, 11, 111]
The curried version allows simple expression of partial evaluation
>>> list(map(get(0), data))
[1, 11, 111]
See Also:
    toolz.functoolz.curry
"""
import toolz
from . import operator
from .. import (
apply,
comp,
complement,
compose,
compose_left,
concat,
concatv,
count,
curry,
diff,
first,
flip,
frequencies,
identity,
interleave,
isdistinct,
isiterable,
juxt,
last,
memoize,
merge_sorted,
peek,
pipe,
second,
thread_first,
thread_last,
)
from .exceptions import merge, merge_with
# Curried variants of the remaining toolz functions.
accumulate = toolz.curry(toolz.accumulate)
assoc = toolz.curry(toolz.assoc)
assoc_in = toolz.curry(toolz.assoc_in)
cons = toolz.curry(toolz.cons)
countby = toolz.curry(toolz.countby)
dissoc = toolz.curry(toolz.dissoc)
do = toolz.curry(toolz.do)
drop = toolz.curry(toolz.drop)
excepts = toolz.curry(toolz.excepts)
filter = toolz.curry(toolz.filter)
get = toolz.curry(toolz.get)
get_in = toolz.curry(toolz.get_in)
groupby = toolz.curry(toolz.groupby)
interpose = toolz.curry(toolz.interpose)
itemfilter = toolz.curry(toolz.itemfilter)
itemmap = toolz.curry(toolz.itemmap)
iterate = toolz.curry(toolz.iterate)
join = toolz.curry(toolz.join)
keyfilter = toolz.curry(toolz.keyfilter)
keymap = toolz.curry(toolz.keymap)
map = toolz.curry(toolz.map)
mapcat = toolz.curry(toolz.mapcat)
nth = toolz.curry(toolz.nth)
partial = toolz.curry(toolz.partial)
partition = toolz.curry(toolz.partition)
partition_all = toolz.curry(toolz.partition_all)
partitionby = toolz.curry(toolz.partitionby)
peekn = toolz.curry(toolz.peekn)
pluck = toolz.curry(toolz.pluck)
random_sample = toolz.curry(toolz.random_sample)
reduce = toolz.curry(toolz.reduce)
reduceby = toolz.curry(toolz.reduceby)
remove = toolz.curry(toolz.remove)
sliding_window = toolz.curry(toolz.sliding_window)
sorted = toolz.curry(toolz.sorted)
tail = toolz.curry(toolz.tail)
take = toolz.curry(toolz.take)
take_nth = toolz.curry(toolz.take_nth)
topk = toolz.curry(toolz.topk)
unique = toolz.curry(toolz.unique)
update_in = toolz.curry(toolz.update_in)
valfilter = toolz.curry(toolz.valfilter)
valmap = toolz.curry(toolz.valmap)
del exceptions
del toolz


@@ -0,0 +1,22 @@
# Curried versions of ``merge`` and ``merge_with`` that delegate to the
# top-level implementations (defining them under the same names here would
# otherwise shadow the originals and recurse).
import toolz
__all__ = ['merge_with', 'merge']
@toolz.curry
def merge_with(func, d, *dicts, **kwargs):
    return toolz.merge_with(func, d, *dicts, **kwargs)
@toolz.curry
def merge(d, *dicts, **kwargs):
    return toolz.merge(d, *dicts, **kwargs)
merge_with.__doc__ = toolz.merge_with.__doc__
merge.__doc__ = toolz.merge.__doc__
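# Illustrative use of the curried variant (assuming toolz is importable):
#     merge_with(sum)({1: 1, 2: 2}, {1: 10, 2: 20})   # -> {1: 11, 2: 22}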


@@ -0,0 +1,22 @@
from __future__ import absolute_import
import operator
from ..functoolz import curry
# Tests will catch if/when this needs updating
IGNORE = {
"__abs__", "__index__", "__inv__", "__invert__", "__neg__", "__not__",
"__pos__", "_abs", "abs", "attrgetter", "index", "inv", "invert",
"itemgetter", "neg", "not_", "pos", "truth"
}
locals().update(
{name: f if name in IGNORE else curry(f)
for name, f in vars(operator).items() if callable(f)}
)
# Clean up the namespace.
del IGNORE
del curry
del operator
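# Illustrative effect (assuming toolz is importable): binary operators become
# curried, e.g. ``add(1)(2)`` and ``add(1, 2)`` both return 3, while names in
# IGNORE (such as ``attrgetter``) are left uncurried.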


@@ -0,0 +1,339 @@
import operator
import collections
from functools import reduce
from collections.abc import Mapping
__all__ = ('merge', 'merge_with', 'valmap', 'keymap', 'itemmap',
'valfilter', 'keyfilter', 'itemfilter',
'assoc', 'dissoc', 'assoc_in', 'update_in', 'get_in')
def _get_factory(f, kwargs):
factory = kwargs.pop('factory', dict)
if kwargs:
raise TypeError("{}() got an unexpected keyword argument "
"'{}'".format(f.__name__, kwargs.popitem()[0]))
return factory
def merge(*dicts, **kwargs):
""" Merge a collection of dictionaries
>>> merge({1: 'one'}, {2: 'two'})
{1: 'one', 2: 'two'}
Later dictionaries have precedence
>>> merge({1: 2, 3: 4}, {3: 3, 4: 4})
{1: 2, 3: 3, 4: 4}
See Also:
merge_with
"""
if len(dicts) == 1 and not isinstance(dicts[0], Mapping):
dicts = dicts[0]
factory = _get_factory(merge, kwargs)
rv = factory()
for d in dicts:
rv.update(d)
return rv
def merge_with(func, *dicts, **kwargs):
""" Merge dictionaries and apply function to combined values
A key may occur in more than one dict, and all values mapped from the key
will be passed to the function as a list, such as func([val1, val2, ...]).
>>> merge_with(sum, {1: 1, 2: 2}, {1: 10, 2: 20})
{1: 11, 2: 22}
>>> merge_with(first, {1: 1, 2: 2}, {2: 20, 3: 30}) # doctest: +SKIP
{1: 1, 2: 2, 3: 30}
See Also:
merge
"""
if len(dicts) == 1 and not isinstance(dicts[0], Mapping):
dicts = dicts[0]
factory = _get_factory(merge_with, kwargs)
values = collections.defaultdict(lambda: [].append)
for d in dicts:
for k, v in d.items():
values[k](v)
result = factory()
for k, v in values.items():
result[k] = func(v.__self__)
return result
def valmap(func, d, factory=dict):
""" Apply function to values of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> valmap(sum, bills) # doctest: +SKIP
{'Alice': 65, 'Bob': 45}
See Also:
keymap
itemmap
"""
rv = factory()
rv.update(zip(d.keys(), map(func, d.values())))
return rv
def keymap(func, d, factory=dict):
""" Apply function to keys of dictionary
>>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]}
>>> keymap(str.lower, bills) # doctest: +SKIP
{'alice': [20, 15, 30], 'bob': [10, 35]}
See Also:
valmap
itemmap
"""
rv = factory()
rv.update(zip(map(func, d.keys()), d.values()))
return rv
def itemmap(func, d, factory=dict):
""" Apply function to items of dictionary
>>> accountids = {"Alice": 10, "Bob": 20}
>>> itemmap(reversed, accountids) # doctest: +SKIP
{10: "Alice", 20: "Bob"}
See Also:
keymap
valmap
"""
rv = factory()
rv.update(map(func, d.items()))
return rv
def valfilter(predicate, d, factory=dict):
""" Filter items in dictionary by value
>>> iseven = lambda x: x % 2 == 0
>>> d = {1: 2, 2: 3, 3: 4, 4: 5}
>>> valfilter(iseven, d)
{1: 2, 3: 4}
See Also:
keyfilter
itemfilter
valmap
"""
rv = factory()
for k, v in d.items():
if predicate(v):
rv[k] = v
return rv
def keyfilter(predicate, d, factory=dict):
""" Filter items in dictionary by key
>>> iseven = lambda x: x % 2 == 0
>>> d = {1: 2, 2: 3, 3: 4, 4: 5}
>>> keyfilter(iseven, d)
{2: 3, 4: 5}
See Also:
valfilter
itemfilter
keymap
"""
rv = factory()
for k, v in d.items():
if predicate(k):
rv[k] = v
return rv
def itemfilter(predicate, d, factory=dict):
""" Filter items in dictionary by item
>>> def isvalid(item):
... k, v = item
... return k % 2 == 0 and v < 4
>>> d = {1: 2, 2: 3, 3: 4, 4: 5}
>>> itemfilter(isvalid, d)
{2: 3}
See Also:
keyfilter
valfilter
itemmap
"""
rv = factory()
for item in d.items():
if predicate(item):
k, v = item
rv[k] = v
return rv
def assoc(d, key, value, factory=dict):
""" Return a new dict with new key value pair
New dict has d[key] set to value. Does not modify the initial dictionary.
>>> assoc({'x': 1}, 'x', 2)
{'x': 2}
>>> assoc({'x': 1}, 'y', 3) # doctest: +SKIP
{'x': 1, 'y': 3}
"""
d2 = factory()
d2.update(d)
d2[key] = value
return d2
def dissoc(d, *keys, **kwargs):
""" Return a new dict with the given key(s) removed.
New dict has d[key] deleted for each supplied key.
Does not modify the initial dictionary.
>>> dissoc({'x': 1, 'y': 2}, 'y')
{'x': 1}
>>> dissoc({'x': 1, 'y': 2}, 'y', 'x')
{}
>>> dissoc({'x': 1}, 'y') # Ignores missing keys
{'x': 1}
"""
factory = _get_factory(dissoc, kwargs)
d2 = factory()
if len(keys) < len(d) * .6:
d2.update(d)
for key in keys:
if key in d2:
del d2[key]
else:
remaining = set(d)
remaining.difference_update(keys)
for k in remaining:
d2[k] = d[k]
return d2
def assoc_in(d, keys, value, factory=dict):
""" Return a new dict with new, potentially nested, key value pair
>>> purchase = {'name': 'Alice',
... 'order': {'items': ['Apple', 'Orange'],
... 'costs': [0.50, 1.25]},
... 'credit card': '5555-1234-1234-1234'}
>>> assoc_in(purchase, ['order', 'costs'], [0.25, 1.00]) # doctest: +SKIP
{'credit card': '5555-1234-1234-1234',
'name': 'Alice',
'order': {'costs': [0.25, 1.00], 'items': ['Apple', 'Orange']}}
"""
return update_in(d, keys, lambda x: value, value, factory)
def update_in(d, keys, func, default=None, factory=dict):
""" Update value in a (potentially) nested dictionary
inputs:
d - dictionary on which to operate
keys - list or tuple giving the location of the value to be changed in d
func - function to operate on that value
If keys == [k0,..,kX] and d[k0]..[kX] == v, update_in returns a copy of the
original dictionary with v replaced by func(v), but does not mutate the
original dictionary.
If k0 is not a key in d, update_in creates nested dictionaries to the depth
specified by the keys, with the innermost value set to func(default).
>>> inc = lambda x: x + 1
>>> update_in({'a': 0}, ['a'], inc)
{'a': 1}
>>> transaction = {'name': 'Alice',
... 'purchase': {'items': ['Apple', 'Orange'],
... 'costs': [0.50, 1.25]},
... 'credit card': '5555-1234-1234-1234'}
>>> update_in(transaction, ['purchase', 'costs'], sum) # doctest: +SKIP
{'credit card': '5555-1234-1234-1234',
'name': 'Alice',
'purchase': {'costs': 1.75, 'items': ['Apple', 'Orange']}}
>>> # updating a value when k0 is not in d
>>> update_in({}, [1, 2, 3], str, default="bar")
{1: {2: {3: 'bar'}}}
>>> update_in({1: 'foo'}, [2, 3, 4], inc, 0)
{1: 'foo', 2: {3: {4: 1}}}
"""
ks = iter(keys)
k = next(ks)
rv = inner = factory()
rv.update(d)
for key in ks:
if k in d:
d = d[k]
dtemp = factory()
dtemp.update(d)
else:
d = dtemp = factory()
inner[k] = inner = dtemp
k = key
if k in d:
inner[k] = func(d[k])
else:
inner[k] = func(default)
return rv
def get_in(keys, coll, default=None, no_default=False):
""" Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys.
If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless
``no_default`` is specified, then it raises KeyError or IndexError.
``get_in`` is a generalization of ``operator.getitem`` for nested data
structures such as dictionaries and lists.
>>> transaction = {'name': 'Alice',
... 'purchase': {'items': ['Apple', 'Orange'],
... 'costs': [0.50, 1.25]},
... 'credit card': '5555-1234-1234-1234'}
>>> get_in(['purchase', 'items', 0], transaction)
'Apple'
>>> get_in(['name'], transaction)
'Alice'
>>> get_in(['purchase', 'total'], transaction)
>>> get_in(['purchase', 'items', 'apple'], transaction)
>>> get_in(['purchase', 'items', 10], transaction)
>>> get_in(['purchase', 'total'], transaction, 0)
0
>>> get_in(['y'], {}, no_default=True)
Traceback (most recent call last):
...
KeyError: 'y'
See Also:
itertoolz.get
operator.getitem
"""
try:
return reduce(operator.getitem, keys, coll)
except (KeyError, IndexError, TypeError):
if no_default:
raise
return default

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,46 @@
import itertools
from .itertoolz import frequencies, pluck, getter
__all__ = ('countby', 'partitionby')
def countby(key, seq):
""" Count elements of a collection by a key function
>>> countby(len, ['cat', 'mouse', 'dog'])
{3: 2, 5: 1}
>>> def iseven(x): return x % 2 == 0
>>> countby(iseven, [1, 2, 3]) # doctest:+SKIP
{True: 1, False: 2}
See Also:
groupby
"""
if not callable(key):
key = getter(key)
return frequencies(map(key, seq))
def partitionby(func, seq):
""" Partition a sequence according to a function
    Partition `seq` into a sequence of tuples such that, when traversing
    `seq`, every time the output of `func` changes a new group is started
    and that item and all subsequent items are collected into that group.
>>> is_space = lambda c: c == " "
>>> list(partitionby(is_space, "I have space"))
[('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')]
>>> is_large = lambda x: x > 10
>>> list(partitionby(is_large, [1, 2, 1, 99, 88, 33, 99, -1, 5]))
[(1, 2, 1), (99, 88, 33, 99), (-1, 5)]
See also:
partition
groupby
itertools.groupby
"""
return map(tuple, pluck(1, itertools.groupby(seq, key=func)))


@@ -0,0 +1,9 @@
def raises(err, lamda):
try:
lamda()
return False
except err:
return True
no_default = '__no__default__'
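# Illustrative use (the parameter is spelled ``lamda`` to avoid the keyword):
#     raises(ZeroDivisionError, lambda: 1 / 0)   # -> True
#     raises(ValueError, lambda: 1 / 0)          # ZeroDivisionError propagates
# ``no_default`` is the sentinel used elsewhere in toolz to mean "no default given".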