"""
Generic support for objects with full-featured Parameters and
messaging.
This file comes from the Param library (https://github.com/holoviz/param)
but can be taken out of the param module and used on its own if desired,
either alone (providing basic Parameter support) or with param's
__init__.py (providing specialized Parameter types).
"""
import asyncio
import copy
import datetime as dt
import html
import inspect
import logging
import numbers
import operator
import random
import re
import types
import typing
import warnings
# Allow this file to be used standalone if desired, albeit without JSON serialization
try:
from . import serializer
except ImportError:
serializer = None
from collections import defaultdict, namedtuple, OrderedDict
from functools import partial, wraps, reduce
from html import escape
from itertools import chain
from operator import itemgetter, attrgetter
from types import FunctionType, MethodType
from contextlib import contextmanager
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
from ._utils import (
DEFAULT_SIGNATURE,
ParamDeprecationWarning as _ParamDeprecationWarning,
ParamFutureWarning as _ParamFutureWarning,
Skip,
_deprecated,
_deprecate_positional_args,
_dict_update,
_in_ipython,
_is_auto_name,
_is_mutable_container,
_recursive_repr,
_to_async_gen,
_validate_error_prefix,
accept_arguments,
iscoroutinefunction,
descendents,
)
# Ideally setting param_pager would be in __init__.py but param_pager is
# needed on import to create the Parameterized class, so it'd need to precede
# importing parameterized.py in __init__.py which would be a little weird.
if _in_ipython():
# In case the optional ipython module is unavailable
try:
from .ipython import ParamPager, ipython_async_executor as async_executor
param_pager = ParamPager(metaclass=True) # Generates param description
except ImportError:
from ._utils import async_executor
else:
from ._utils import async_executor
param_pager = None
from inspect import getfullargspec
dt_types = (dt.datetime, dt.date)
_int_types = (int,)
try:
import numpy as np
dt_types = dt_types + (np.datetime64,)
_int_types = _int_types + (np.integer,)
except ImportError:
pass
VERBOSE = INFO - 1
logging.addLevelName(VERBOSE, "VERBOSE")
# Get the appropriate logging.Logger instance. If `logger` is None, a
# logger named `"param"` will be instantiated. If `name` is set, a descendant
# logger with the name ``"param.<name>"`` is returned (or
# ``logger.name + ".<name>"``)
logger = None
def get_logger(name=None):
if logger is None:
root_logger = logging.getLogger('param')
if not root_logger.handlers:
root_logger.setLevel(logging.INFO)
formatter = logging.Formatter(
fmt='%(levelname)s:%(name)s: %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
root_logger.addHandler(handler)
else:
root_logger = logger
if name is None:
return root_logger
else:
return logging.getLogger(root_logger.name + '.' + name)
# Indicates whether warnings should be raised as errors, stopping
# processing.
warnings_as_exceptions = False
docstring_signature = True # Add signature to class docstrings
docstring_describe_params = True # Add parameter description to class
# docstrings (requires ipython module)
object_count = 0
warning_count = 0
# Hook to apply to depends and bind arguments to turn them into valid parameters
_reference_transforms = []
def transform_reference(arg):
"""
Applies transforms to turn objects which should be treated like
a parameter reference into a valid reference that can be resolved
by Param. This is useful for adding support for depending on objects
that are not simple Parameters or functions with dependency
definitions.
"""
for transform in _reference_transforms:
if isinstance(arg, Parameter) or hasattr(arg, '_dinfo'):
break
arg = transform(arg)
return arg
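# Example (illustrative sketch, not part of the library): registering a
# transform that unwraps objects exposing a hypothetical `as_param()` method
# into the Parameter they wrap, by appending to the module-level list above.
#
#     def _unwrap_custom(obj):
#         return obj.as_param() if hasattr(obj, 'as_param') else obj
#
#     _reference_transforms.append(_unwrap_custom)
#
# transform_reference() keeps applying transforms until the object is a
# Parameter or already carries dependency info (`_dinfo`).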
def eval_function_with_deps(function):
"""Evaluates a function after resolving its dependencies.
Calls and returns a function after resolving any dependencies
stored on the _dinfo attribute and passing the resolved values
as arguments.
"""
args, kwargs = (), {}
if hasattr(function, '_dinfo'):
arg_deps = function._dinfo['dependencies']
kw_deps = function._dinfo.get('kw', {})
if kw_deps or any(isinstance(d, Parameter) for d in arg_deps):
args = (getattr(dep.owner, dep.name) for dep in arg_deps)
kwargs = {n: getattr(dep.owner, dep.name) for n, dep in kw_deps.items()}
return function(*args, **kwargs)
def resolve_value(value, recursive=True):
"""
Resolves the current value of a dynamic reference.
"""
if not recursive:
pass
elif isinstance(value, (list, tuple)):
return type(value)(resolve_value(v) for v in value)
elif isinstance(value, dict):
return type(value)((resolve_value(k), resolve_value(v)) for k, v in value.items())
elif isinstance(value, slice):
return slice(
resolve_value(value.start),
resolve_value(value.stop),
resolve_value(value.step)
)
value = transform_reference(value)
is_gen = inspect.isgeneratorfunction(value)
if hasattr(value, '_dinfo') or iscoroutinefunction(value) or is_gen:
value = eval_function_with_deps(value)
if is_gen:
value = _to_async_gen(value)
elif isinstance(value, Parameter):
value = getattr(value.owner, value.name)
return value
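# Example (illustrative sketch, assuming the full param package is imported):
# resolving a Parameter reference, including inside containers.
#
#     import param
#
#     class P(param.Parameterized):
#         x = param.Number(default=1.5)
#
#     p = P()
#     resolve_value(p.param.x)        # -> 1.5
#     resolve_value([p.param.x, 2])   # -> [1.5, 2]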
def resolve_ref(reference, recursive=False):
"""
Resolves all parameters a dynamic reference depends on.
"""
if recursive:
if isinstance(reference, (list, tuple, set)):
return [r for v in reference for r in resolve_ref(v, recursive)]
elif isinstance(reference, dict):
return [r for kv in reference.items() for o in kv for r in resolve_ref(o, recursive)]
elif isinstance(reference, slice):
return [r for v in (reference.start, reference.stop, reference.step) for r in resolve_ref(v, recursive)]
reference = transform_reference(reference)
if hasattr(reference, '_dinfo'):
dinfo = getattr(reference, '_dinfo', {})
args = list(dinfo.get('dependencies', []))
kwargs = list(dinfo.get('kw', {}).values())
refs = []
for arg in (args + kwargs):
if isinstance(arg, str):
owner = get_method_owner(reference)
if arg in owner.param:
arg = owner.param[arg]
elif '.' in arg:
path = arg.split('.')
arg = owner
for attr in path[:-1]:
arg = getattr(arg, attr)
arg = arg.param[path[-1]]
else:
arg = getattr(owner, arg)
refs.extend(resolve_ref(arg))
return refs
elif isinstance(reference, Parameter):
return [reference]
return []
def _identity_hook(obj, val):
"""To be removed when set_hook is removed"""
return val
class _Undefined:
"""
Dummy value to signal completely undefined values rather than
simple None values.
"""
def __bool__(self):
# Haven't defined whether Undefined is falsy or truthy,
# so to avoid subtle bugs raise an error when it
# is used in a comparison without `is`.
raise RuntimeError('Use `is` to compare Undefined')
def __repr__(self):
return '<Undefined>'
Undefined = _Undefined()
@contextmanager
def logging_level(level):
"""
Temporarily modify param's logging level.
"""
level = level.upper()
levels = [DEBUG, INFO, WARNING, ERROR, CRITICAL, VERBOSE]
level_names = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', 'VERBOSE']
if level not in level_names:
raise Exception(f"Level {level!r} not in {levels!r}")
param_logger = get_logger()
logging_level = param_logger.getEffectiveLevel()
param_logger.setLevel(levels[level_names.index(level)])
try:
yield None
finally:
param_logger.setLevel(logging_level)
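# Example (illustrative): temporarily raise the verbosity of param's logger,
# e.g. while debugging object construction.
#
#     with logging_level('DEBUG'):
#         ...  # param messages at DEBUG level and above are emitted here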
@contextmanager
def _batch_call_watchers(parameterized, enable=True, run=True):
"""
Internal version of batch_call_watchers, adding control over queueing and running.
Only actually batches events if enable=True; otherwise a no-op. Only actually
calls the accumulated watchers on exit if run=True; otherwise they remain queued.
"""
BATCH_WATCH = parameterized.param._BATCH_WATCH
parameterized.param._BATCH_WATCH = enable or parameterized.param._BATCH_WATCH
try:
yield
finally:
parameterized.param._BATCH_WATCH = BATCH_WATCH
if run and not BATCH_WATCH:
parameterized.param._batch_call_watchers()
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `batch_call_watchers`.")
@contextmanager
def batch_watch(parameterized, enable=True, run=True):
with _batch_call_watchers(parameterized, enable, run):
yield
@contextmanager
def batch_call_watchers(parameterized):
"""
Context manager to batch events to provide to Watchers on a
parameterized object. This context manager queues any events
triggered by setting a parameter on the supplied parameterized
object, saving them up to dispatch them all at once when the
context manager exits.
"""
BATCH_WATCH = parameterized.param._BATCH_WATCH
parameterized.param._BATCH_WATCH = True
try:
yield
finally:
parameterized.param._BATCH_WATCH = BATCH_WATCH
if not BATCH_WATCH:
parameterized.param._batch_call_watchers()
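# Example (illustrative sketch, assuming the full param package is imported):
# a watcher on `a` and `b` receives its events together when the context
# exits, instead of once per assignment.
#
#     import param
#
#     class P(param.Parameterized):
#         a = param.Number(default=0)
#         b = param.Number(default=0)
#
#     p = P()
#     p.param.watch(print, ['a', 'b'])
#     with batch_call_watchers(p):
#         p.a = 1
#         p.b = 2   # both events are dispatched together on exit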
@contextmanager
def _syncing(parameterized, parameters):
old = parameterized._param__private.syncing
parameterized._param__private.syncing = set(old) | set(parameters)
try:
yield
finally:
parameterized._param__private.syncing = old
@contextmanager
def edit_constant(parameterized):
"""
Temporarily set parameters on a Parameterized object to constant=False
to allow editing them.
"""
params = parameterized.param.objects('existing').values()
constants = [p.constant for p in params]
for p in params:
p.constant = False
try:
yield
except:
raise
finally:
for (p, const) in zip(params, constants):
p.constant = const
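# Example (illustrative sketch, assuming the full param package is imported):
# temporarily lifting constant=True to adjust a value.
#
#     import param
#
#     class Config(param.Parameterized):
#         version = param.String(default='1.0', constant=True)
#
#     cfg = Config()
#     with edit_constant(cfg):
#         cfg.version = '2.0'   # allowed here; raises TypeError outside the context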
@contextmanager
def discard_events(parameterized):
"""
Context manager that discards any events within its scope
triggered on the supplied parameterized object.
"""
batch_watch = parameterized.param._BATCH_WATCH
parameterized.param._BATCH_WATCH = True
watchers, events = (list(parameterized.param._state_watchers),
list(parameterized.param._events))
try:
yield
except:
raise
finally:
parameterized.param._BATCH_WATCH = batch_watch
parameterized.param._state_watchers = watchers
parameterized.param._events = events
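# Example (illustrative): given a Parameterized instance `p` with watchers
# registered on parameter 'a', assignments made inside the context are
# silently dropped rather than queued.
#
#     with discard_events(p):
#         p.a = 10   # no watcher is invoked for this change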
def classlist(class_):
"""
Return a list of the class hierarchy above (and including) the given class.
Same as `inspect.getmro(class_)[::-1]`
"""
return inspect.getmro(class_)[::-1]
def get_all_slots(class_):
"""
Return a list of slot names for slots defined in `class_` and its
superclasses.
"""
# A subclass's __slots__ attribute does not contain slots defined
# in its superclass (the superclass' __slots__ end up as
# attributes of the subclass).
all_slots = []
parent_param_classes = [c for c in classlist(class_)[1::]]
for c in parent_param_classes:
if hasattr(c,'__slots__'):
all_slots+=c.__slots__
return all_slots
def get_occupied_slots(instance):
"""
Return a list of slots for which values have been set.
(While a slot might be defined, if a value for that slot hasn't
been set, then it's an AttributeError to request the slot's
value.)
"""
return [slot for slot in get_all_slots(type(instance))
if hasattr(instance,slot)]
# PARAM3_DEPRECATION
@_deprecated()
def all_equal(arg1,arg2):
"""
Return a single boolean for arg1==arg2, even for numpy arrays
using element-wise comparison.
Uses all(arg1==arg2) for sequences, and arg1==arg2 otherwise.
If both objects have an '_infinitely_iterable' attribute, they are
not zipped together and are compared directly instead.
"""
if all(hasattr(el, '_infinitely_iterable') for el in [arg1,arg2]):
return arg1==arg2
try:
return all(a1 == a2 for a1, a2 in zip(arg1, arg2))
except TypeError:
return arg1==arg2
# PARAM3_DEPRECATION
# The syntax to use a metaclass changed incompatibly between 2 and
# 3. The add_metaclass() class decorator below creates a class using a
# specified metaclass in a way that works on both 2 and 3. For 3, can
# remove this decorator and specify metaclasses in a simpler way
# (https://docs.python.org/3/reference/datamodel.html#customizing-class-creation)
#
# Code from six (https://bitbucket.org/gutworth/six; version 1.4.1).
@_deprecated()
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass.
.. deprecated:: 2.0.0
"""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
for slots_var in orig_vars.get('__slots__', ()):
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
class bothmethod:
"""
'optional @classmethod'
A decorator that allows a method to receive either the class
object (if called on the class) or the instance object
(if called on the instance) as its first argument.
"""
def __init__(self, method):
self.method = method
def __get__(self, instance, owner):
if instance is None:
# Class call
return self.method.__get__(owner)
else:
# Instance call
return self.method.__get__(instance, owner)
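# Example (illustrative): the decorated method receives the class when called
# on the class and the instance when called on an instance.
#
#     class A:
#         @bothmethod
#         def who(self_or_cls):
#             return self_or_cls
#
#     A.who() is A        # True: called on the class
#     a = A()
#     a.who() is a        # True: called on the instance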
def _getattrr(obj, attr, *args):
def _getattr(obj, attr):
return getattr(obj, attr, *args)
return reduce(_getattr, [obj] + attr.split('.'))
def no_instance_params(cls):
"""
Disables instance parameters on the class
"""
cls._param__private.disable_instance_params = True
return cls
def _instantiate_param_obj(paramobj, owner=None):
"""Return a Parameter object suitable for instantiation given the class's Parameter object"""
# Shallow-copy Parameter object without the watchers
p = copy.copy(paramobj)
p.owner = owner
# Reset watchers since class parameter watcher should not execute
# on instance parameters
p.watchers = {}
# shallow-copy any mutable slot values other than the actual default
for s in p.__class__._all_slots_:
v = getattr(p, s)
if _is_mutable_container(v) and s != "default":
setattr(p, s, copy.copy(v))
return p
def _instantiated_parameter(parameterized, param):
"""
Given a Parameterized object and one of its class Parameter objects,
return the appropriate Parameter object for this instance, instantiating
it if need be.
"""
if (getattr(parameterized._param__private, 'initialized', False) and param.per_instance and
not getattr(type(parameterized)._param__private, 'disable_instance_params', False)):
key = param.name
if key not in parameterized._param__private.params:
parameterized._param__private.params[key] = _instantiate_param_obj(param, parameterized)
param = parameterized._param__private.params[key]
return param
def instance_descriptor(f):
# If parameter has an instance Parameter, delegate setting
def _f(self, obj, val):
# obj is None when the metaclass is setting
if obj is not None:
instance_param = obj._param__private.params.get(self.name)
if instance_param is None:
instance_param = _instantiated_parameter(obj, self)
if instance_param is not None and self is not instance_param:
instance_param.__set__(obj, val)
return
return f(self, obj, val)
return _f
def get_method_owner(method):
"""
Gets the instance that owns the supplied method
"""
if not inspect.ismethod(method):
return None
if isinstance(method, partial):
method = method.func
return method.__self__
# PARAM3_DEPRECATION
def recursive_repr(fillvalue='...'):
"""
Decorator to make a repr function return fillvalue for a recursive call
.. deprecated:: 1.12.0
"""
warnings.warn(
'recursive_repr has been deprecated and will be removed in a future version.',
category=_ParamDeprecationWarning,
stacklevel=2,
)
return _recursive_repr(fillvalue=fillvalue)
@accept_arguments
def output(func, *output, **kw):
"""
output allows annotating a method on a Parameterized class to
declare that it returns an output of a specific type. The outputs
of a Parameterized class can be queried using the
Parameterized.param.outputs method. By default the output will
inherit the method name but a custom name can be declared by
expressing the Parameter type using a keyword argument.
The simplest declaration simply declares the method returns an
object without any type guarantees, e.g.:
@output()
If a specific parameter type is specified this is a declaration
that the method will return a value of that type, e.g.:
@output(param.Number())
To override the default name of the output the type may be declared
as a keyword argument, e.g.:
@output(custom_name=param.Number())
Multiple outputs may be declared using keywords mapping from output name
to the type or using tuples of the same format, i.e. these two declarations
are equivalent:
@output(number=param.Number(), string=param.String())
@output(('number', param.Number()), ('string', param.String()))
output also accepts Python object types which will be upgraded to
a ClassSelector, e.g.:
@output(int)
"""
if output:
outputs = []
for i, out in enumerate(output):
i = i if len(output) > 1 else None
if isinstance(out, tuple) and len(out) == 2 and isinstance(out[0], str):
outputs.append(out+(i,))
elif isinstance(out, str):
outputs.append((out, Parameter(), i))
else:
outputs.append((None, out, i))
elif kw:
# (requires keywords to be kept ordered, which was not true in previous versions)
outputs = [(name, otype, i if len(kw) > 1 else None)
for i, (name, otype) in enumerate(kw.items())]
else:
outputs = [(None, Parameter(), None)]
names, processed = [], []
for name, otype, i in outputs:
if isinstance(otype, type):
if issubclass(otype, Parameter):
otype = otype()
else:
from . import ClassSelector
otype = ClassSelector(class_=otype)
elif isinstance(otype, tuple) and all(isinstance(t, type) for t in otype):
from . import ClassSelector
otype = ClassSelector(class_=otype)
if not isinstance(otype, Parameter):
raise ValueError('output type must be declared with a Parameter class, '
'instance or a Python object type.')
processed.append((name, otype, i))
names.append(name)
if len(set(names)) != len(names):
raise ValueError('When declaring multiple outputs each output name '
'must be unique.')
_dinfo = getattr(func, '_dinfo', {})
_dinfo.update({'outputs': processed})
@wraps(func)
def _output(*args,**kw):
return func(*args,**kw)
_output._dinfo = _dinfo
return _output
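# Example (illustrative sketch, assuming the full param package is imported):
# declaring a typed output on a method and querying it afterwards.
#
#     import param
#
#     class P(param.Parameterized):
#         @param.output(result=param.Number())
#         def compute(self):
#             return 4.2
#
#     P().param.outputs()  # maps 'result' to the declared output information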
def _parse_dependency_spec(spec):
"""
Parses param.depends specifications into three components:
1. The dotted path to the sub-object
2. The attribute being depended on, i.e. either a parameter or method
3. The parameter attribute being depended on
"""
assert spec.count(":")<=1
spec = spec.strip()
m = re.match("(?P<path>[^:]*):?(?P<what>.*)", spec)
what = m.group('what')
path = "."+m.group('path')
m = re.match(r"(?P<obj>.*)(\.)(?P<attr>.*)", path)
obj = m.group('obj')
attr = m.group("attr")
return obj or None, attr, what or 'value'
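# Example (illustrative): how dependency specification strings split into
# (sub-object path, attribute, what):
#
#     _parse_dependency_spec('value')           # -> (None, 'value', 'value')
#     _parse_dependency_spec('a.b.value')       # -> ('.a.b', 'value', 'value')
#     _parse_dependency_spec('value:constant')  # -> (None, 'value', 'constant')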
def _params_depended_on(minfo, dynamic=True, intermediate=True):
"""
Resolves dependencies declared on a Parameterized method.
Dynamic dependencies, i.e. dependencies on sub-objects which may
or may not yet be available, are only resolved if dynamic=True.
By default intermediate dependencies, i.e. dependencies on the
path to a sub-object are returned. For example for a dependency
on 'a.b.c' dependencies on 'a' and 'b' are returned as long as
intermediate=True.
Returns lists of concrete dependencies on available parameters
and dynamic dependency specifications which have to be resolved
if the referenced sub-objects are defined.
"""
deps, dynamic_deps = [], []
dinfo = getattr(minfo.method, "_dinfo", {})
for d in dinfo.get('dependencies', list(minfo.cls.param)):
ddeps, ddynamic_deps = (minfo.cls if minfo.inst is None else minfo.inst).param._spec_to_obj(d, dynamic, intermediate)
dynamic_deps += ddynamic_deps
for dep in ddeps:
if isinstance(dep, PInfo):
deps.append(dep)
else:
method_deps, method_dynamic_deps = _params_depended_on(dep, dynamic, intermediate)
deps += method_deps
dynamic_deps += method_dynamic_deps
return deps, dynamic_deps
def _resolve_mcs_deps(obj, resolved, dynamic, intermediate=True):
"""
Resolves constant and dynamic parameter dependencies previously
obtained using the _params_depended_on function. Existing resolved
dependencies are updated with a supplied parameter instance while
dynamic dependencies are resolved if possible.
"""
dependencies = []
for dep in resolved:
if not issubclass(type(obj), dep.cls):
dependencies.append(dep)
continue
inst = obj if dep.inst is None else dep.inst
dep = PInfo(inst=inst, cls=dep.cls, name=dep.name,
pobj=inst.param[dep.name], what=dep.what)
dependencies.append(dep)
for dep in dynamic:
subresolved, _ = obj.param._spec_to_obj(dep.spec, intermediate=intermediate)
for subdep in subresolved:
if isinstance(subdep, PInfo):
dependencies.append(subdep)
else:
dependencies += _params_depended_on(subdep, intermediate=intermediate)[0]
return dependencies
def _skip_event(*events, **kwargs):
"""
Checks whether a subobject event should be skipped.
Returns True if all the values on the new subobject
match the values on the previous subobject.
"""
what = kwargs.get('what', 'value')
changed = kwargs.get('changed')
if changed is None:
return False
for e in events:
for p in changed:
if what == 'value':
old = Undefined if e.old is None else _getattrr(e.old, p, None)
new = Undefined if e.new is None else _getattrr(e.new, p, None)
else:
old = Undefined if e.old is None else _getattrr(e.old.param[p], what, None)
new = Undefined if e.new is None else _getattrr(e.new.param[p], what, None)
if not Comparator.is_equal(old, new):
return False
return True
def extract_dependencies(function):
"""
Extract references from a method or function that declares the references.
"""
subparameters = list(function._dinfo['dependencies'])+list(function._dinfo['kw'].values())
params = []
for p in subparameters:
if isinstance(p, str):
owner = get_method_owner(function)
*subps, p = p.split('.')
for subp in subps:
owner = getattr(owner, subp, None)
if owner is None:
raise ValueError(f'Cannot depend on undefined sub-parameter {p!r}.')
if p in owner.param:
pobj = owner.param[p]
if pobj not in params:
params.append(pobj)
else:
for sp in extract_dependencies(getattr(owner, p)):
if sp not in params:
params.append(sp)
elif p not in params:
params.append(p)
return params
# Two callers at the module top level to support pickling.
async def _async_caller(*events, what='value', changed=None, callback=None, function=None):
if callback:
callback(*events)
if not _skip_event or not _skip_event(*events, what=what, changed=changed):
await function()
def _sync_caller(*events, what='value', changed=None, callback=None, function=None):
if callback:
callback(*events)
if not _skip_event(*events, what=what, changed=changed):
return function()
def _m_caller(self, method_name, what='value', changed=None, callback=None):
"""
Wraps a method call adding support for scheduling a callback
before it is executed and skipping events if a subobject has
changed but its values have not.
"""
function = getattr(self, method_name)
_caller = _async_caller if iscoroutinefunction(function) else _sync_caller
caller = partial(_caller, what=what, changed=changed, callback=callback, function=function)
caller._watcher_name = method_name
return caller
def _add_doc(obj, docstring):
"""Add a docstring to a namedtuple"""
obj.__doc__ = docstring
PInfo = namedtuple("PInfo", "inst cls name pobj what")
_add_doc(PInfo,
"""
Object describing something being watched about a Parameter.
`inst`: Parameterized instance owning the Parameter, or None
`cls`: Parameterized class owning the Parameter
`name`: Name of the Parameter being watched
`pobj`: Parameter object being watched
`what`: What is being watched on the Parameter (either 'value' or a slot name)
""")
MInfo = namedtuple("MInfo", "inst cls name method")
_add_doc(MInfo,
"""
Object describing a Parameterized method being watched.
`inst`: Parameterized instance owning the method, or None
`cls`: Parameterized class owning the method
`name`: Name of the method being watched
`method`: bound method of the object being watched
""")
DInfo = namedtuple("DInfo", "spec")
_add_doc(DInfo,
"""
Object describing dynamic dependencies.
`spec`: Dependency specification to resolve
""")
Event = namedtuple("Event", "what name obj cls old new type")
_add_doc(Event,
"""
Object representing an event that triggers a Watcher.
`what`: What is being watched on the Parameter (either value or a slot name)
`name`: Name of the Parameter that was set or triggered
`obj`: Parameterized instance owning the watched Parameter, or None
`cls`: Parameterized class owning the watched Parameter
`old`: Previous value of the item being watched
`new`: New value of the item being watched
`type`: `triggered` if this event was triggered explicitly, `changed` if
the item was set and watching for `onlychanged`, `set` if the item was set,
or None if type not yet known
""")
_Watcher = namedtuple("Watcher", "inst cls fn mode onlychanged parameter_names what queued precedence")
class Watcher(_Watcher):
"""
Object declaring a callback function to invoke when an Event is
triggered on a watched item.
`inst`: Parameterized instance owning the watched Parameter, or
None
`cls`: Parameterized class owning the watched Parameter
`fn`: Callback function to invoke when triggered by a watched
Parameter
`mode`: 'args' for param.watch (call `fn` with PInfo object
positional args), or 'kwargs' for param.watch_values (call `fn`
with <param_name>:<new_value> keywords)
`onlychanged`: If True, only trigger for actual changes, not
setting to the current value
`parameter_names`: List of Parameters to watch, by name
`what`: What to watch on the Parameters (either 'value' or a slot
name)
`queued`: Immediately invoke callbacks triggered during processing
of an Event (if False), or queue them up for processing
later, after this event has been handled (if True)
`precedence`: A numeric value which determines the precedence of
the watcher. Lower precedence values are executed
with higher priority.
"""
def __new__(cls_, *args, **kwargs):
"""
Allows creating Watcher without explicit precedence value.
"""
values = dict(zip(cls_._fields, args))
values.update(kwargs)
if 'precedence' not in values:
values['precedence'] = 0
return super().__new__(cls_, **values)
def __str__(self):
cls = type(self)
attrs = ', '.join([f'{f}={getattr(self, f)!r}' for f in cls._fields])
return f"{cls.__name__}({attrs})"
class ParameterMetaclass(type):
"""
Metaclass allowing control over creation of Parameter classes.
"""
def __new__(mcs, classname, bases, classdict):
# store the class's docstring in __classdoc
if '__doc__' in classdict:
classdict['__classdoc']=classdict['__doc__']
# when asking for help on Parameter *object*, return the doc slot
classdict['__doc__'] = property(attrgetter('doc'))
# Compute all slots in order, using a dict later turned into a list
# as it's the fastest way to get an ordered set in Python
all_slots = {}
for bcls in set(chain(*(base.__mro__[::-1] for base in bases))):
all_slots.update(dict.fromkeys(getattr(bcls, '__slots__', [])))
# To get the benefit of slots, subclasses must themselves define
# __slots__, whether or not they define attributes not present in
# the base Parameter class. That's because a subclass will have
# a __dict__ unless it also defines __slots__.
if '__slots__' not in classdict:
classdict['__slots__'] = []
else:
all_slots.update(dict.fromkeys(classdict['__slots__']))
classdict['_all_slots_'] = list(all_slots)
# No special handling for a __dict__ slot; should there be?
return type.__new__(mcs, classname, bases, classdict)
def __getattribute__(mcs,name):
if name=='__doc__':
# when asking for help on Parameter *class*, return the
# stored class docstring
return type.__getattribute__(mcs,'__classdoc')
else:
return type.__getattribute__(mcs,name)
class _ParameterBase(metaclass=ParameterMetaclass):
"""
Base Parameter class used to dynamically update the signature of all
the Parameters.
"""
@classmethod
def _modified_slots_defaults(cls):
defaults = cls._slot_defaults.copy()
defaults['label'] = defaults.pop('_label')
return defaults
@classmethod
def __init_subclass__(cls):
# _update_signature has been tested against the Parameters available
# in Param, we don't want to break the Parameters created elsewhere
# so wrapping this in a loose try/except.
try:
cls._update_signature()
except Exception:
# The super signature has been changed so we need to get the one
# from the class constructor directly.
cls.__signature__ = inspect.signature(cls.__init__)
@classmethod
def _update_signature(cls):
defaults = cls._modified_slots_defaults()
new_parameters = {}
for i, kls in enumerate(cls.mro()):
if kls.__name__.startswith('_'):
continue
sig = inspect.signature(kls.__init__)
for pname, parameter in sig.parameters.items():
if pname == 'self':
continue
if i >= 1 and parameter.default == inspect.Signature.empty:
continue
if parameter.kind in (inspect.Parameter.VAR_KEYWORD, inspect.Parameter.VAR_POSITIONAL):
continue
if getattr(parameter, 'default', None) is Undefined:
if pname not in defaults:
raise LookupError(
f'Argument {pname!r} of Parameter {cls.__name__!r} has no '
'entry in _slot_defaults.'
)
default = defaults[pname]
if callable(default) and hasattr(default, 'sig'):
default = default.sig
new_parameter = parameter.replace(default=default)
else:
new_parameter = parameter
if i >= 1:
new_parameter = new_parameter.replace(kind=inspect.Parameter.KEYWORD_ONLY)
new_parameters.setdefault(pname, new_parameter)
def _sorter(p):
if p.default == inspect.Signature.empty:
return 0
else:
return 1
new_parameters = sorted(new_parameters.values(), key=_sorter)
new_sig = sig.replace(parameters=new_parameters)
cls.__signature__ = new_sig
class Parameter(_ParameterBase):
"""
An attribute descriptor for declaring parameters.
Parameters are a special kind of class attribute. Setting a
Parameterized class attribute to be a Parameter instance causes
that attribute of the class (and the class's instances) to be
treated as a Parameter. This allows special behavior, including
dynamically generated parameter values, documentation strings,
constant and read-only parameters, and type or range checking at
assignment time.
For example, suppose someone wants to define two new kinds of
objects Foo and Bar, such that Bar has a parameter delta, Foo is a
subclass of Bar, and Foo has parameters alpha, sigma, and gamma
(and delta inherited from Bar). She would begin her class
definitions with something like this::
class Bar(Parameterized):
delta = Parameter(default=0.6, doc='The difference between steps.')
...
class Foo(Bar):
alpha = Parameter(default=0.1, doc='The starting value.')
sigma = Parameter(default=0.5, doc='The standard deviation.',
constant=True)
gamma = Parameter(default=1.0, doc='The ending value.')
...
Class Foo would then have four parameters, with delta defaulting
to 0.6.
Parameters have several advantages over plain attributes:
1. Parameters can be set automatically when an instance is
constructed: The default constructor for Foo (and Bar) will
accept arbitrary keyword arguments, each of which can be used
to specify the value of a Parameter of Foo (or any of Foo's
superclasses). E.g., if a script does this::
myfoo = Foo(alpha=0.5)
myfoo.alpha will return 0.5, without the Foo constructor
needing special code to set alpha.
If Foo implements its own constructor, keyword arguments will
still be accepted if the constructor accepts a dictionary of
keyword arguments (as in ``def __init__(self,**params):``), and
then each class calls its superclass (as in
``super(Foo,self).__init__(**params)``) so that the
Parameterized constructor will process the keywords.
2. A Parameterized class need specify only the attributes of a
Parameter whose values differ from those declared in
superclasses; the other values will be inherited. E.g. if Foo
declares::
delta = Parameter(default=0.2)
the default value of 0.2 will override the 0.6 inherited from
Bar, but the doc will be inherited from Bar.
3. The Parameter descriptor class can be subclassed to provide
more complex behavior, allowing special types of parameters
that, for example, require their values to be numbers in
certain ranges, generate their values dynamically from a random
distribution, or read their values from a file or other
external source.
4. The attributes associated with Parameters provide enough
information for automatically generating property sheets in
graphical user interfaces, allowing Parameterized instances to
be edited by users.
Note that Parameters can only be used when set as class attributes
of Parameterized classes. Parameters used as standalone objects,
or as class attributes of non-Parameterized classes, will not have
the behavior described here.
"""
# Because they implement __get__ and __set__, Parameters are known
# as 'descriptors' in Python; see "Implementing Descriptors" and
# "Invoking Descriptors" in the 'Customizing attribute access'
# section of the Python reference manual:
# http://docs.python.org/ref/attribute-access.html
#
# Overview of Parameters for programmers
# ======================================
#
# Consider the following code:
#
#
# class A(Parameterized):
# p = Parameter(default=1)
#
# a1 = A()
# a2 = A()
#
#
# * a1 and a2 share one Parameter object (A.__dict__['p']).
#
# * The default (class) value of p is stored in this Parameter
# object (A.__dict__['p'].default).
#
# * If the value of p is set on a1 (e.g. a1.p=2), a1's value of p
# is stored in a1 itself (a1._param__private.values['p'])
#
# * When a1.p is requested, a1._param__private.values['p'] is
# returned. When a2.p is requested, 'p' is not found in
# a1._param__private.values, so A.__dict__['p'].default (i.e. A.p) is
# returned instead.
#
#
# Be careful when referring to the 'name' of a Parameter:
#
# * A Parameterized class has a name for the attribute which is
# being represented by the Parameter ('p' in the example above);
# in the code, this is called the 'name'.
#
# * When a Parameterized instance has its own local value for a
# parameter, it is stored as 'p._param__private.values[X]' where X is the
# name of the Parameter
# So that the extra features of Parameters do not require a lot of
# overhead, Parameters are implemented using __slots__ (see
# http://www.python.org/doc/2.4/ref/slots.html). Instead of having
# a full Python dictionary associated with each Parameter instance,
# Parameter instances have an enumerated list (named __slots__) of
# attributes, and reserve just enough space to store these
# attributes. Using __slots__ requires special support for
# operations to copy and restore Parameters (e.g. for Python
# persistent storage pickling); see __getstate__ and __setstate__.
__slots__ = ['name', 'default', 'doc',
'precedence', 'instantiate', 'constant', 'readonly',
'pickle_default_value', 'allow_None', 'per_instance',
'watchers', 'owner', 'allow_refs', 'nested_refs', '_label']
# Note: When initially created, a Parameter does not know which
# Parameterized class owns it, nor does it know its names
# (attribute name, internal name). Once the owning Parameterized
# class is created, owner, and name are
# set.
_serializers = {'json': serializer.JSONSerialization}
_slot_defaults = dict(
default=None, precedence=None, doc=None, _label=None, instantiate=False,
constant=False, readonly=False, pickle_default_value=True, allow_None=False,
per_instance=True, allow_refs=False, nested_refs=False
)
# Parameters can be updated during Parameterized class creation when they
# are defined multiple times in a class hierarchy. We have to record which
# Parameter slots require the default value to be re-validated. Any slots
# in this list do not have to trigger such re-validation.
_non_validated_slots = ['_label', 'doc', 'name', 'precedence',
'constant', 'pickle_default_value',
'watchers', 'owner']
@typing.overload
def __init__(
self,
default=None, *,
doc=None, label=None, precedence=None, instantiate=False, constant=False,
readonly=False, pickle_default_value=True, allow_None=False, per_instance=True,
allow_refs=False, nested_refs=False
):
...
@_deprecate_positional_args
def __init__(self, default=Undefined, *, doc=Undefined, # pylint: disable-msg=R0913
label=Undefined, precedence=Undefined,
instantiate=Undefined, constant=Undefined, readonly=Undefined,
pickle_default_value=Undefined, allow_None=Undefined,
per_instance=Undefined, allow_refs=Undefined, nested_refs=Undefined):
"""Initialize a new Parameter object and store the supplied attributes:
default: the owning class's value for the attribute represented
by this Parameter, which can be overridden in an instance.
doc: docstring explaining what this parameter represents.
label: optional text label to be used when this Parameter is
shown in a listing. If no label is supplied, the attribute name
for this parameter in the owning Parameterized object is used.
precedence: a numeric value, usually in the range 0.0 to 1.0,
which allows the order of Parameters in a class to be defined in
a listing or e.g. in GUI menus. A negative precedence indicates
a parameter that should be hidden in such listings.
instantiate: controls whether the value of this Parameter will
be deepcopied when a Parameterized object is instantiated (if
True), or if the single default value will be shared by all
Parameterized instances (if False). For an immutable Parameter
value, it is best to leave instantiate at the default of
False, so that a user can choose to change the value at the
Parameterized instance level (affecting only that instance) or
at the Parameterized class or superclass level (affecting all
existing and future instances of that class or superclass). For
a mutable Parameter value, the default of False is also appropriate
if you want all instances to share the same value state, e.g. if
they are each simply referring to a single global object like
a singleton. If instead each Parameterized should have its own
independently mutable value, instantiate should be set to
True, but note that there is then no simple way to change the
value of this Parameter at the class or superclass level,
because each instance, once created, will then have an
independently instantiated value.
constant: if true, the Parameter value can be changed only at
the class level or in a Parameterized constructor call. The
value is otherwise constant on the Parameterized instance,
once it has been constructed.
readonly: if true, the Parameter value cannot ordinarily be
changed by setting the attribute at the class or instance
levels at all. The value can still be changed in code by
temporarily overriding the value of this slot and then
restoring it, which is useful for reporting values that the
_user_ should never change but which do change during code
execution.
pickle_default_value: whether the default value should be
pickled. Usually, you would want the default value to be pickled,
but there are rare cases where that would not be the case (e.g.
for file search paths that are specific to a certain system).
per_instance: whether a separate Parameter instance will be
created for every Parameterized instance. True by default.
If False, all instances of a Parameterized class will share
the same Parameter object, including all validation
attributes (bounds, etc.). See also instantiate, which is
conceptually similar but affects the Parameter value rather
than the Parameter object.
allow_None: if True, None is accepted as a valid value for
this Parameter, in addition to any other values that are
allowed. If the default value is defined as None, allow_None
is set to True automatically.
allow_refs: if True allows automatically linking parameter
references to this Parameter, i.e. the parameter value will
automatically reflect the current value of the reference that
is passed in.
nested_refs: if True and allow_refs=True then even nested objects
such as dictionaries, lists, slices, tuples and sets will be
inspected for references and will be automatically resolved.
default, doc, and precedence all default to None, which allows
inheritance of Parameter slots (attributes) from the owning-class'
class hierarchy (see ParameterizedMetaclass).
"""
self.name = None
self.owner = None
self.allow_refs = allow_refs
self.nested_refs = nested_refs
self.precedence = precedence
self.default = default
self.doc = doc
self.constant = constant is True or readonly is True # readonly => constant
self.readonly = readonly
self._label = label
self._set_instantiate(instantiate)
self.pickle_default_value = pickle_default_value
self._set_allow_None(allow_None)
self.watchers = {}
self.per_instance = per_instance
@classmethod
def serialize(cls, value):
"Given the parameter value, return a Python value suitable for serialization"
return value
@classmethod
def deserialize(cls, value):
"Given a serializable Python value, return a value that the parameter can be set to"
return value
def schema(self, safe=False, subset=None, mode='json'):
if serializer is None:
raise ImportError('Cannot import serializer.py needed to generate schema')
if mode not in self._serializers:
raise KeyError(f'Mode {mode!r} not in available serialization formats {list(self._serializers.keys())!r}')
return self._serializers[mode].param_schema(self.__class__.__name__, self,
safe=safe, subset=subset)
@property
def rx(self):
from .reactive import reactive_ops
return reactive_ops(self)
@property
def label(self):
if self.name and self._label is None:
return label_formatter(self.name)
else:
return self._label
@label.setter
def label(self, val):
self._label = val
def _set_allow_None(self, allow_None):
# allow_None is set following these rules (last takes precedence):
# 1. to False by default
# 2. to the value provided in the constructor, if any
# 3. to True if default is None
if self.default is None:
self.allow_None = True
elif allow_None is not Undefined:
self.allow_None = allow_None
else:
self.allow_None = self._slot_defaults['allow_None']
def _set_instantiate(self,instantiate):
"""Constant parameters must be instantiated."""
# instantiate doesn't actually matter for read-only
# parameters, since they can't be set even on a class. But
# having this code avoids needless instantiation.
if self.readonly:
self.instantiate = False
elif instantiate is not Undefined:
self.instantiate = instantiate
else:
# Default value
self.instantiate = self._slot_defaults['instantiate']
def __setattr__(self, attribute, value):
if attribute == 'name':
name = getattr(self, 'name', None)
if name is not None and value != name:
raise AttributeError("Parameter name cannot be modified after "
"it has been bound to a Parameterized.")
is_slot = attribute in self.__class__._all_slots_
has_watcher = attribute != "default" and attribute in getattr(self, 'watchers', [])
if not (is_slot or has_watcher):
# Return early if attribute is not a slot
return super().__setattr__(attribute, value)
# Otherwise get the old value so we can call watcher/on_set
old = getattr(self, attribute, NotImplemented)
if is_slot:
try:
self._on_set(attribute, old, value)
except AttributeError:
pass
super().__setattr__(attribute, value)
if has_watcher and old is not NotImplemented:
self._trigger_event(attribute, old, value)
def _trigger_event(self, attribute, old, new):
event = Event(what=attribute, name=self.name, obj=None, cls=self.owner,
old=old, new=new, type=None)
for watcher in self.watchers[attribute]:
self.owner.param._call_watcher(watcher, event)
if not self.owner.param._BATCH_WATCH:
self.owner.param._batch_call_watchers()
def __getattribute__(self, key):
"""
Allow slot values to be Undefined in an "unbound" parameter, i.e. one
that is not (yet) owned by a Parameterized object, in which case their
value will be retrieved from the _slot_defaults dictionary.
"""
v = object.__getattribute__(self, key)
# Safely checks for name (avoiding recursion) to decide if this object is unbound
if v is Undefined and key != "name" and getattr(self, "name", None) is None:
try:
v = self._slot_defaults[key]
except KeyError as e:
raise KeyError(
f'Slot {key!r} on unbound parameter {self.__class__.__name__!r} '
'has no default value defined in `_slot_defaults`'
) from e
if callable(v):
v = v(self)
return v
def _on_set(self, attribute, old, value):
"""
Can be overridden on subclasses to handle changes when parameter
attribute is set.
"""
def _update_state(self):
"""
Can be overridden on subclasses to update a Parameter state, i.e. slot
values, after the slot values have been set in the inheritance procedure.
"""
def __get__(self, obj, objtype): # pylint: disable-msg=W0613
"""
Return the value for this Parameter.
If called for a Parameterized class, produce that
class's value (i.e. this Parameter object's 'default'
attribute).
If called for a Parameterized instance, produce that
instance's value, if one has been set - otherwise produce the
class's value (default).
"""
if obj is None: # e.g. when __get__ called for a Parameterized class
result = self.default
else:
# Attribute error when .values does not exist (_ClassPrivate)
# and KeyError when there's no cached value for this parameter.
try:
result = obj._param__private.values[self.name]
except (AttributeError, KeyError):
result = self.default
return result
@instance_descriptor
def __set__(self, obj, val):
"""
Set the value for this Parameter.
If called for a Parameterized class, set that class's
value (i.e. set this Parameter object's 'default' attribute).
If called for a Parameterized instance, set the value of
this Parameter on that instance (i.e. in the instance's
`values` dictionary located in the private namespace `_param__private`,
under the parameter's name).
If the Parameter's constant attribute is True, only allows
the value to be set for a Parameterized class or on
uninitialized Parameterized instances.
If the Parameter's readonly attribute is True, only allows the
value to be specified in the Parameter declaration inside the
Parameterized source code. A read-only parameter also
cannot be set on a Parameterized class.
Note that until we support some form of read-only
object, it is still possible to change the attributes of the
object stored in a constant or read-only Parameter (e.g. one
item in a list).
"""
name = self.name
if obj is not None and self.allow_refs and obj._param__private.initialized:
syncing = name in obj._param__private.syncing
ref, deps, val, is_async = obj.param._resolve_ref(self, val)
refs = obj._param__private.refs
if ref is not None:
self.owner.param._update_ref(name, ref)
elif name in refs and not syncing:
del refs[name]
if name in obj._param__private.async_refs:
obj._param__private.async_refs.pop(name).cancel()
if is_async or val is Undefined:
return
# Deprecated Number set_hook called here to avoid duplicating setter
if hasattr(self, 'set_hook'):
val = self.set_hook(obj, val)
if self.set_hook is not _identity_hook:
# PARAM3_DEPRECATION
warnings.warn(
'Number.set_hook has been deprecated.',
category=_ParamDeprecationWarning,
stacklevel=6,
)
self._validate(val)
_old = NotImplemented
# obj can be None if __set__ is called for a Parameterized class
if self.constant or self.readonly:
if self.readonly:
raise TypeError("Read-only parameter '%s' cannot be modified" % name)
elif obj is None:
_old = self.default
self.default = val
elif not obj._param__private.initialized:
_old = obj._param__private.values.get(self.name, self.default)
obj._param__private.values[self.name] = val
else:
_old = obj._param__private.values.get(self.name, self.default)
if val is not _old:
raise TypeError("Constant parameter '%s' cannot be modified" % name)
else:
if obj is None:
_old = self.default
self.default = val
else:
# When setting a Parameter before calling super.
if not isinstance(obj._param__private, _InstancePrivate):
obj._param__private = _InstancePrivate(
explicit_no_refs=type(obj)._param__private.explicit_no_refs
)
_old = obj._param__private.values.get(name, self.default)
obj._param__private.values[name] = val
self._post_setter(obj, val)
if obj is not None:
if not hasattr(obj, '_param__private') or not getattr(obj._param__private, 'initialized', False):
return
obj.param._update_deps(name)
if obj is None:
watchers = self.watchers.get("value")
elif name in obj._param__private.watchers:
watchers = obj._param__private.watchers[name].get('value')
if watchers is None:
watchers = self.watchers.get("value")
else:
watchers = None
obj = self.owner if obj is None else obj
if obj is None or not watchers:
return
event = Event(what='value', name=name, obj=obj, cls=self.owner,
old=_old, new=val, type=None)
# Copy watchers here since they may be modified inplace during iteration
for watcher in sorted(watchers, key=lambda w: w.precedence):
obj.param._call_watcher(watcher, event)
if not obj.param._BATCH_WATCH:
obj.param._batch_call_watchers()
def _validate_value(self, value, allow_None):
"""Implements validation for parameter value"""
def _validate(self, val):
"""Implements validation for the parameter value and attributes"""
self._validate_value(val, self.allow_None)
def _post_setter(self, obj, val):
"""Called after the parameter value has been validated and set"""
def __delete__(self,obj):
raise TypeError("Cannot delete '%s': Parameters deletion not allowed." % self.name)
def _set_names(self, attrib_name):
if None not in (self.owner, self.name) and attrib_name != self.name:
raise AttributeError('The {} parameter {!r} has already been '
'assigned a name by the {} class, '
'could not assign new name {!r}. Parameters '
'may not be shared by multiple classes; '
'ensure that you create a new parameter '
'instance for each new class.'.format(type(self).__name__, self.name,
self.owner.name, attrib_name))
self.name = attrib_name
def __getstate__(self):
"""
All Parameters have slots, not a dict, so we have to support
pickle and deepcopy ourselves.
"""
return {slot: getattr(self, slot) for slot in self.__class__._all_slots_}
def __setstate__(self,state):
# set values of __slots__ (instead of in non-existent __dict__)
for k, v in state.items():
setattr(self, k, v)
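# Example (illustrative sketch): a minimal Parameter subclass adding its own
# value check by overriding _validate_value, following the same pattern as
# the String parameter defined below.
#
#     class Positive(Parameter):
#         """A Parameter that only accepts positive numbers (or None)."""
#
#         def _validate_value(self, val, allow_None):
#             if allow_None and val is None:
#                 return
#             if not isinstance(val, numbers.Number) or val <= 0:
#                 raise ValueError(
#                     f'{_validate_error_prefix(self)} only accepts positive '
#                     f'numbers, not {val!r}.'
#                 )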
# Define one particular type of Parameter that is used in this file
class String(Parameter):
r"""
A String Parameter, with a default value and optional regular expression (regex) matching.
Example of using a regex to implement IPv4 address matching::
class IPAddress(String):
'''IPv4 address as a string (dotted decimal notation)'''
def __init__(self, default="0.0.0.0", allow_None=False, **kwargs):
ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs)
"""
__slots__ = ['regex']
_slot_defaults = _dict_update(Parameter._slot_defaults, default="", regex=None)
@typing.overload
def __init__(
self,
default="", *, regex=None,
doc=None, label=None, precedence=None, instantiate=False, constant=False,
readonly=False, pickle_default_value=True, allow_None=False, per_instance=True,
allow_refs=False, nested_refs=False
):
...
@_deprecate_positional_args
def __init__(self, default=Undefined, *, regex=Undefined, **kwargs):
super().__init__(default=default, **kwargs)
self.regex = regex
self._validate(self.default)
def _validate_regex(self, val, regex):
if (val is None and self.allow_None):
return
if regex is not None and re.match(regex, val) is None:
raise ValueError(
f'{_validate_error_prefix(self)} value {val!r} does not '
f'match regex {regex!r}.'
)
def _validate_value(self, val, allow_None):
if allow_None and val is None:
return
if not isinstance(val, str):
raise ValueError(
f'{_validate_error_prefix(self)} only takes a string value, '
f'not value of {type(val)}.'
)
def _validate(self, val):
self._validate_value(val, self.allow_None)
self._validate_regex(val, self.regex)
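# Example (illustrative sketch, assuming the full param package is imported):
# regex validation happens on assignment.
#
#     import param
#
#     class Server(param.Parameterized):
#         host = param.String(default='localhost', regex=r'^[\w.-]+$')
#
#     s = Server()
#     s.host = 'example.com'   # accepted
#     s.host = 'bad host!'     # raises ValueError: does not match the regex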
class shared_parameters:
"""
Context manager to share parameter instances when creating
multiple Parameterized objects of the same type. Parameter default
values are instantiated once and cached to be reused when another
Parameterized object of the same type is instantiated.
Can be useful to easily modify large collections of Parameterized
objects at once and can provide a significant speedup.
"""
_share = False
_shared_cache = {}
def __enter__(self):
shared_parameters._share = True
def __exit__(self, exc_type, exc_val, exc_tb):
shared_parameters._share = False
shared_parameters._shared_cache = {}
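# Example (illustrative sketch, assuming the full param package is imported):
# sharing instantiated default values across many objects created at once.
#
#     import param
#
#     class Item(param.Parameterized):
#         tags = param.List(default=[])
#
#     with shared_parameters():
#         items = [Item() for _ in range(1000)]
#     # objects created inside the context reuse one cached copy of the
#     # instantiated default list instead of deep-copying it per instance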
def as_uninitialized(fn):
"""
Decorator: call fn with the parameterized_instance's
initialization flag set to False, then revert the flag.
(Used to decorate Parameterized methods that must alter
a constant Parameter.)
"""
@wraps(fn)
def override_initialization(self_,*args,**kw):
parameterized_instance = self_.self
original_initialized = parameterized_instance._param__private.initialized
parameterized_instance._param__private.initialized = False
ret = fn(self_, *args, **kw)
parameterized_instance._param__private.initialized = original_initialized
return ret
return override_initialization
class Comparator:
"""
Comparator defines methods for determining whether two objects
should be considered equal. It works by registering custom
comparison functions, which may either be registered by type or with
a predicate function. If no matching comparison can be found for
the two objects the comparison will return False.
If registered by type the Comparator will check whether both
objects are of that type and apply the comparison. If the equality
function is instead registered with a function it will call the
function with each object individually to check if the comparison
applies. This is useful for defining comparisons for objects
without explicitly importing them.
To use the Comparator simply call the is_equal function.
"""
equalities = {
numbers.Number: operator.eq,
str: operator.eq,
bytes: operator.eq,
type(None): operator.eq,
lambda o: hasattr(o, '_infinitely_iterable'): operator.eq, # Time
}
equalities.update({dtt: operator.eq for dtt in dt_types})
@classmethod
def is_equal(cls, obj1, obj2):
for eq_type, eq in cls.equalities.items():
try:
are_instances = isinstance(obj1, eq_type) and isinstance(obj2, eq_type)
except TypeError:
pass
else:
if are_instances:
return eq(obj1, obj2)
if isinstance(eq_type, FunctionType) and eq_type(obj1) and eq_type(obj2):
return eq(obj1, obj2)
if isinstance(obj2, (list, set, tuple)):
return cls.compare_iterator(obj1, obj2)
elif isinstance(obj2, dict):
return cls.compare_mapping(obj1, obj2)
return False
@classmethod
def compare_iterator(cls, obj1, obj2):
if type(obj1) != type(obj2) or len(obj1) != len(obj2):
return False
for o1, o2 in zip(obj1, obj2):
if not cls.is_equal(o1, o2):
return False
return True
@classmethod
def compare_mapping(cls, obj1, obj2):
if type(obj1) != type(obj2) or len(obj1) != len(obj2): return False
for k in obj1:
if k in obj2:
if not cls.is_equal(obj1[k], obj2[k]):
return False
else:
return False
return True
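# Example (illustrative): Comparator compares nested containers element-wise
# and can be extended with custom equality functions.
#
#     Comparator.is_equal({'a': [1, 2]}, {'a': [1, 2]})   # -> True
#     Comparator.is_equal((1, 2), [1, 2])                 # -> False (types differ)
#
#     # Registering an equality check for a hypothetical MyType by type:
#     # Comparator.equalities[MyType] = lambda a, b: a.key == b.key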
class _ParametersRestorer:
"""
Context-manager to handle the reset of parameter values after an update.
"""
def __init__(self, *, parameters, restore, refs=None):
self._parameters = parameters
self._restore = restore
self._refs = {} if refs is None else refs
def __enter__(self):
return self._restore
def __exit__(self, exc_type, exc_value, exc_tb):
try:
self._parameters._update(dict(self._restore, **self._refs))
finally:
self._restore = {}
class Parameters:
"""Object that holds the namespace and implementation of Parameterized
methods as well as any state that is not in __slots__ or the
Parameters themselves.
Exists at both the metaclass level (instantiated by the metaclass)
and at the instance level. Can contain state specific to either the
class or the instance as necessary.
"""
def __init__(self_, cls, self=None):
"""
cls is the Parameterized class which is always set.
self is the instance if set.
"""
self_.cls = cls
self_.self = self
@property
def _BATCH_WATCH(self_):
return self_.self_or_cls._param__private.parameters_state['BATCH_WATCH']
@_BATCH_WATCH.setter
def _BATCH_WATCH(self_, value):
self_.self_or_cls._param__private.parameters_state['BATCH_WATCH'] = value
@property
def _TRIGGER(self_):
return self_.self_or_cls._param__private.parameters_state['TRIGGER']
@_TRIGGER.setter
def _TRIGGER(self_, value):
self_.self_or_cls._param__private.parameters_state['TRIGGER'] = value
@property
def _events(self_):
return self_.self_or_cls._param__private.parameters_state['events']
@_events.setter
def _events(self_, value):
self_.self_or_cls._param__private.parameters_state['events'] = value
@property
def _state_watchers(self_):
return self_.self_or_cls._param__private.parameters_state['watchers']
@_state_watchers.setter
def _state_watchers(self_, value):
self_.self_or_cls._param__private.parameters_state['watchers'] = value
@property
def watchers(self_):
"""Dictionary of instance watchers."""
if self_.self is None:
raise TypeError('Accessing `.param.watchers` is only supported on a Parameterized instance, not class.')
return self_.self._param__private.watchers
@watchers.setter
def watchers(self_, value):
if self_.self is None:
raise TypeError('Setting `.param.watchers` is only supported on a Parameterized instance, not class.')
self_.self._param__private.watchers = value
@property
def self_or_cls(self_):
return self_.cls if self_.self is None else self_.self
def __setstate__(self, state):
# Set old parameters state on Parameterized.parameters_state
self_, cls = state.get('self'), state.get('cls')
self_or_cls = self_ if self_ is not None else cls
for k in self_or_cls._param__private.parameters_state:
key = '_'+k
if key in state:
self_or_cls._param__private.parameters_state[k] = state.pop(key)
for k, v in state.items():
setattr(self, k, v)
def __getitem__(self_, key):
"""
Returns the class or instance parameter
"""
inst = self_.self
if inst is None:
return self_._cls_parameters[key]
p = self_.objects(instance=False)[key]
return _instantiated_parameter(inst, p)
def __dir__(self_):
"""
Adds parameters to dir
"""
return super().__dir__() + list(self_._cls_parameters)
def __iter__(self_):
"""
Iterates over the parameters on this object.
"""
yield from self_._cls_parameters
def __contains__(self_, param):
return param in self_._cls_parameters
def __getattr__(self_, attr):
"""
Extends attribute access to parameter objects.
"""
cls = self_.__dict__.get('cls')
if cls is None: # Class not initialized
raise AttributeError
if attr in self_._cls_parameters:
return self_.__getitem__(attr)
elif self_.self is None:
raise AttributeError(f"type object '{self_.cls.__name__}.param' has no attribute {attr!r}")
else:
raise AttributeError(f"'{self_.cls.__name__}.param' object has no attribute {attr!r}")
@as_uninitialized
def _set_name(self_, name):
self_.self.name = name
@as_uninitialized
def _generate_name(self_):
self_._set_name('%s%05d' % (self_.cls.__name__, object_count))
@as_uninitialized
def _setup_params(self_, **params):
"""
Initialize default and keyword parameter values.
First, ensures that values for all Parameters with 'instantiate=True'
(typically used for mutable Parameters) are copied directly into each object,
to ensure that there is an independent copy of the value (to avoid surprising
aliasing errors). Second, ensures that Parameters with 'constant=True' are
referenced on the instance, to make sure that setting a constant
Parameter on the class doesn't affect already created instances. Then
sets each of the keyword arguments, raising when any of them are not
defined as parameters.
"""
self = self_.self
## Deepcopy all 'instantiate=True' parameters
params_to_deepcopy = {}
params_to_ref = {}
objects = self_._cls_parameters
for pname, p in objects.items():
if p.instantiate and pname != "name":
params_to_deepcopy[pname] = p
elif p.constant and pname != 'name':
params_to_ref[pname] = p
for p in params_to_deepcopy.values():
self_._instantiate_param(p)
for p in params_to_ref.values():
self_._instantiate_param(p, deepcopy=False)
## keyword arg setting
deps, refs = {}, {}
for name, val in params.items():
desc = self_.cls.get_param_descriptor(name)[0] # pylint: disable-msg=E1101
if not desc:
raise TypeError(
f"{self.__class__.__name__}.__init__() got an unexpected "
f"keyword argument {name!r}"
)
pobj = objects.get(name)
if pobj is None or not pobj.allow_refs:
# Until Parameter.allow_refs=True by default we have to
# speculatively evaluate the values to check whether they
# contain a reference and warn the user that the
# behavior may change in future.
if name not in self_.cls._param__private.explicit_no_refs:
try:
ref, _, resolved, _ = self_._resolve_ref(pobj, val)
except Exception:
ref = None
if ref:
warnings.warn(
f"Parameter {name!r} on {pobj.owner} is being given a valid parameter "
f"reference {val} but is implicitly allow_refs=False. "
"In future allow_refs will be enabled by default and "
f"the reference {val} will be resolved to its underlying "
f"value {resolved}. Please explicitly set allow_ref on the "
"Parameter definition to declare whether references "
"should be resolved or not.",
category=_ParamFutureWarning,
stacklevel=4,
)
setattr(self, name, val)
continue
# Resolve references
ref, ref_deps, resolved, is_async = self_._resolve_ref(pobj, val)
if ref is not None:
refs[name] = ref
deps[name] = ref_deps
if not is_async and not (resolved is Undefined or resolved is Skip):
setattr(self, name, resolved)
return refs, deps
def _setup_refs(self_, refs):
groups = defaultdict(list)
for pname, subrefs in refs.items():
for p in subrefs:
if isinstance(p, Parameter):
groups[p.owner].append((pname, p.name))
else:
for sp in extract_dependencies(p):
groups[sp.owner].append((pname, sp.name))
for owner, pnames in groups.items():
refnames, pnames = zip(*pnames)
self_.self._param__private.ref_watchers.append((
refnames,
owner.param._watch(self_._sync_refs, list(set(pnames)), precedence=-1)
))
def _update_ref(self_, name, ref):
param_private = self_.self._param__private
if name in param_private.async_refs:
param_private.async_refs.pop(name).cancel()
for _, watcher in param_private.ref_watchers:
dep_obj = watcher.cls if watcher.inst is None else watcher.inst
dep_obj.param.unwatch(watcher)
self_.self._param__private.ref_watchers = []
refs = dict(self_.self._param__private.refs, **{name: ref})
deps = {name: resolve_ref(ref) for name, ref in refs.items()}
self_._setup_refs(deps)
self_.self._param__private.refs = refs
def _sync_refs(self_, *events):
updates = {}
for pname, ref in self_.self._param__private.refs.items():
# Skip updating value if dependency has not changed
recursive = self_[pname].nested_refs
deps = resolve_ref(ref, recursive)
is_gen = inspect.isgeneratorfunction(ref)
is_async = iscoroutinefunction(ref) or is_gen
if not any((dep.owner is e.obj and dep.name == e.name) for dep in deps for e in events) and not is_async:
continue
try:
new_val = resolve_value(ref, recursive)
except Skip:
new_val = Undefined
if new_val is Skip or new_val is Undefined:
continue
elif is_async:
async_executor(partial(self_._async_ref, pname, new_val))
continue
updates[pname] = new_val
with edit_constant(self_.self):
with _syncing(self_.self, updates):
self_.update(updates)
def _resolve_ref(self_, pobj, value):
is_gen = inspect.isgeneratorfunction(value)
is_async = iscoroutinefunction(value) or is_gen
deps = resolve_ref(value, recursive=pobj.nested_refs)
if not (deps or is_async or is_gen):
return None, None, value, False
ref = value
try:
value = resolve_value(value, recursive=pobj.nested_refs)
except Skip:
value = Undefined
if is_async:
async_executor(partial(self_._async_ref, pobj.name, value))
value = None
return ref, deps, value, is_async
async def _async_ref(self_, pname, awaitable):
if not self_.self._param__private.initialized:
async_executor(partial(self_._async_ref, pname, awaitable))
return
current_task = asyncio.current_task()
running_task = self_.self._param__private.async_refs.get(pname)
if running_task is None:
self_.self._param__private.async_refs[pname] = current_task
elif current_task is not running_task:
self_.self._param__private.async_refs[pname].cancel()
try:
if isinstance(awaitable, types.AsyncGeneratorType):
async for new_obj in awaitable:
with _syncing(self_.self, (pname,)):
self_.update({pname: new_obj})
else:
with _syncing(self_.self, (pname,)):
try:
self_.update({pname: await awaitable})
except Skip:
pass
finally:
# Ensure we clean up but only if the task matches the current task
if self_.self._param__private.async_refs.get(pname) is current_task:
del self_.self._param__private.async_refs[pname]
@classmethod
def _changed(cls, event):
"""
Predicate that determines whether an Event object has actually
changed such that old != new.
"""
return not Comparator.is_equal(event.old, event.new)
def _instantiate_param(self_, param_obj, dict_=None, key=None, deepcopy=True):
# deepcopy or store a reference to reference param_obj.default into
# self._param__private.values (or dict_ if supplied) under the
# parameter's name (or key if supplied)
instantiator = copy.deepcopy if deepcopy else lambda o: o
self = self_.self
dict_ = dict_ or self._param__private.values
key = key or param_obj.name
if shared_parameters._share:
param_key = (str(type(self)), param_obj.name)
if param_key in shared_parameters._shared_cache:
new_object = shared_parameters._shared_cache[param_key]
else:
new_object = instantiator(param_obj.default)
shared_parameters._shared_cache[param_key] = new_object
else:
new_object = instantiator(param_obj.default)
dict_[key] = new_object
if isinstance(new_object, Parameterized) and deepcopy:
global object_count
object_count += 1
# Writes over name given to the original object;
# could instead have kept the same name
new_object.param._generate_name()
def _update_deps(self_, attribute=None, init=False):
obj = self_.self
init_methods = []
for method, queued, on_init, constant, dynamic in type(obj).param._depends['watch']:
# On initialization set up constant watchers; otherwise
# clean up previous dynamic watchers for the updated attribute
dynamic = [d for d in dynamic if attribute is None or d.spec.split(".")[0] == attribute]
if init:
constant_grouped = defaultdict(list)
for dep in _resolve_mcs_deps(obj, constant, []):
constant_grouped[(id(dep.inst), id(dep.cls), dep.what)].append((None, dep))
for group in constant_grouped.values():
self_._watch_group(obj, method, queued, group)
m = getattr(self_.self, method)
if on_init and m not in init_methods:
init_methods.append(m)
elif dynamic:
for w in obj._param__private.dynamic_watchers.pop(method, []):
(w.cls if w.inst is None else w.inst).param.unwatch(w)
else:
continue
# Resolve dynamic dependencies one-by-one to be able to trace their watchers
grouped = defaultdict(list)
for ddep in dynamic:
for dep in _resolve_mcs_deps(obj, [], [ddep]):
grouped[(id(dep.inst), id(dep.cls), dep.what)].append((ddep, dep))
for group in grouped.values():
watcher = self_._watch_group(obj, method, queued, group, attribute)
obj._param__private.dynamic_watchers[method].append(watcher)
for m in init_methods:
m()
def _resolve_dynamic_deps(self, obj, dynamic_dep, param_dep, attribute):
"""
If a subobject whose parameters are being depended on changes
we should only trigger events if the actual parameter values
of the new object differ from those on the old subobject,
therefore we accumulate parameters to compare on a subobject
change event.
Additionally we need to make sure to notify the parent object
if a subobject changes so the dependencies can be
reinitialized so we return a callback which updates the
dependencies.
"""
subobj = obj
subobjs = [obj]
for subpath in dynamic_dep.spec.split('.')[:-1]:
subobj = getattr(subobj, subpath.split(':')[0], None)
subobjs.append(subobj)
dep_obj = param_dep.cls if param_dep.inst is None else param_dep.inst
if dep_obj not in subobjs[:-1]:
return None, None, param_dep.what
depth = subobjs.index(dep_obj)
callback = None
if depth > 0:
def callback(*events):
"""
If a subobject changes, we need to notify the main
object to update the dependencies.
"""
obj.param._update_deps(attribute)
p = '.'.join(dynamic_dep.spec.split(':')[0].split('.')[depth+1:])
if p == 'param':
subparams = [sp for sp in list(subobjs[-1].param)]
else:
subparams = [p]
if ':' in dynamic_dep.spec:
what = dynamic_dep.spec.split(':')[-1]
else:
what = param_dep.what
return subparams, callback, what
def _watch_group(self_, obj, name, queued, group, attribute=None):
"""
Sets up a watcher for a group of dependencies. Ensures that
if the dependency was dynamically generated we check whether
a subobject change event actually causes a value change and
that we update the existing watchers, i.e. clean up watchers
on the old subobject and create watchers on the new subobject.
"""
dynamic_dep, param_dep = group[0]
dep_obj = param_dep.cls if param_dep.inst is None else param_dep.inst
params = []
for _, g in group:
if g.name not in params:
params.append(g.name)
if dynamic_dep is None:
subparams, callback, what = None, None, param_dep.what
else:
subparams, callback, what = self_._resolve_dynamic_deps(
obj, dynamic_dep, param_dep, attribute)
mcaller = _m_caller(obj, name, what, subparams, callback)
return dep_obj.param._watch(
mcaller, params, param_dep.what, queued=queued, precedence=-1)
@_recursive_repr()
def _repr_html_(self_, open=True):
return _parameterized_repr_html(self_.self_or_cls, open)
# Classmethods
# PARAM3_DEPRECATION
@_deprecated(extra_msg="""Use instead `for k,v in p.param.objects().items(): print(f"{p.__class__.name}.{k}={repr(v.default)}")`""")
def print_param_defaults(self_):
"""Print the default values of all cls's Parameters.
.. deprecated:: 1.12.0
Use instead `for k,v in p.param.objects().items(): print(f"{p.__class__.name}.{k}={repr(v.default)}")`
"""
cls = self_.cls
for key,val in cls.__dict__.items():
if isinstance(val,Parameter):
print(cls.__name__+'.'+key+ '='+ repr(val.default))
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `p.param.default =`")
def set_default(self_,param_name,value):
"""
Set the default value of param_name.
Equivalent to setting param_name on the class.
.. deprecated:: 1.12.0
Use instead `p.param.default =`
"""
cls = self_.cls
setattr(cls,param_name,value)
def add_parameter(self_, param_name, param_obj):
"""
Add a new Parameter object into this object's class.
Should result in a Parameter equivalent to one declared
in the class's source code.
"""
# Could have just done setattr(cls,param_name,param_obj),
# which is supported by the metaclass's __setattr__ , but
# would need to handle the params() cache as well
# (which is tricky but important for startup speed).
cls = self_.cls
type.__setattr__(cls, param_name, param_obj)
ParameterizedMetaclass._initialize_parameter(cls, param_name, param_obj)
# delete cached params()
cls._param__private.params.clear()
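# Illustrative sketch (not part of the library source): adding a Parameter to
# an existing Parameterized class at runtime. `ExampleP` is a hypothetical
# class and assumes the full param package (param.Number, param.String) is
# importable.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     >>> ExampleP.param.add_parameter('b', param.String(default='xyz'))
#     >>> ExampleP().b
#     'xyz'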
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.add_parameter`")
def _add_parameter(self_,param_name, param_obj):
"""Add a new Parameter object into this object's class.
.. deprecated:: 1.12.0
"""
return self_.add_parameter(param_name, param_obj)
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.values()` or `.param['param']`")
def params(self_, parameter_name=None):
"""
Return the Parameters of this class as the
dictionary {name: parameter_object}
Includes Parameters from this class and its
superclasses.
.. deprecated:: 1.12.0
Use instead `.param.values()` or `.param['param']`
"""
pdict = self_.objects(instance='existing')
if parameter_name is None:
return pdict
else:
return pdict[parameter_name]
# Bothmethods
def update(self_, arg=Undefined, /, **kwargs):
"""
For the given dictionary or iterable or set of param=value
keyword arguments, sets the corresponding parameter of this
object or class to the given value.
May also be used as a context manager to temporarily set and
then reset parameter values.
"""
refs = {}
if self_.self is not None:
private = self_.self._param__private
params = list(kwargs if arg is Undefined else dict(arg, **kwargs))
for pname in params:
if pname in refs:
continue
elif pname in private.refs:
refs[pname] = private.refs[pname]
elif pname in private.async_refs:
refs[pname] = private.async_refs[pname]
restore = dict(self_._update(arg, **kwargs))
return _ParametersRestorer(parameters=self_, restore=restore, refs=refs)
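# Illustrative sketch (not part of the library source): `.param.update()` sets
# several parameters in one batch and, because it returns a restorer, can also
# be used as a context manager to apply values temporarily. `ExampleP` is a
# hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=0)
#     ...     b = param.Number(default=0)
#     >>> p = ExampleP(a=1)
#     >>> with p.param.update(a=10, b=20):
#     ...     print(p.a, p.b)
#     10 20
#     >>> p.a, p.b   # previous values restored on exiting the context
#     (1, 0)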
def _update(self_, arg=Undefined, /, **kwargs):
BATCH_WATCH = self_._BATCH_WATCH
self_._BATCH_WATCH = True
self_or_cls = self_.self_or_cls
if arg is not Undefined:
kwargs = dict(arg, **kwargs)
trigger_params = [
k for k in kwargs
if k in self_ and hasattr(self_[k], '_autotrigger_value')
]
for tp in trigger_params:
self_[tp]._mode = 'set'
values = self_.values()
restore = {k: values[k] for k, v in kwargs.items() if k in values}
for (k, v) in kwargs.items():
if k not in self_:
self_._BATCH_WATCH = False
raise ValueError(f"{k!r} is not a parameter of {self_.cls.__name__}")
try:
setattr(self_or_cls, k, v)
except:
self_._BATCH_WATCH = False
raise
self_._BATCH_WATCH = BATCH_WATCH
if not BATCH_WATCH:
self_._batch_call_watchers()
for tp in trigger_params:
p = self_[tp]
p._mode = 'reset'
setattr(self_or_cls, tp, p._autotrigger_reset_value)
p._mode = 'set-reset'
return restore
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.update`")
def set_param(self_, *args,**kwargs):
"""
For each param=value keyword argument, sets the corresponding
parameter of this object or class to the given value.
For backwards compatibility, also accepts
set_param("param",value) for a single parameter value using
positional arguments, but the keyword interface is preferred
because it is more compact and can set multiple values.
.. deprecated:: 1.12.0
Use instead `.param.update`
"""
self_or_cls = self_.self_or_cls
if args:
if len(args) == 2 and not args[0] in kwargs and not kwargs:
kwargs[args[0]] = args[1]
else:
raise ValueError("Invalid positional arguments for %s.set_param" %
(self_or_cls.name))
return self_.update(kwargs)
@property
def _cls_parameters(self_):
"""
Class parameters are cached because they are accessed often,
and parameters are rarely added (and cannot be deleted)
"""
cls = self_.cls
pdict = cls._param__private.params
if pdict:
return pdict
paramdict = {}
for class_ in classlist(cls):
for name, val in class_.__dict__.items():
if isinstance(val, Parameter):
paramdict[name] = val
# We only want the cache to be visible to the cls on which
# params() is called, so we mangle the name ourselves at
# runtime (if we were to mangle it now, it would be
# _Parameterized.__params for all classes).
# cls._param__private.params[f'_{cls.__name__}__params'] = paramdict
cls._param__private.params = paramdict
return paramdict
def objects(self_, instance=True):
"""
Returns the Parameters of this instance or class.
If instance=True and called on a Parameterized instance it
will create instance parameters for all Parameters defined on
the class. To force class parameters to be returned use
instance=False. Since classes avoid creating instance
parameters unless necessary you may also request only existing
instance parameters to be returned by setting
instance='existing'.
"""
if self_.self is not None and not self_.self._param__private.initialized and instance is True:
warnings.warn(
'Looking up instance Parameter objects (`.param.objects()`) before '
'the Parameterized instance has been fully initialized is deprecated and will raise an error in a future version. '
'Ensure you have called `super().__init__(**params)` in your Parameterized '
'constructor before trying to access instance Parameter objects, or '
'looking up the class Parameter objects with `.param.objects(instance=False)` '
'may be enough for your use case.',
category=_ParamFutureWarning,
stacklevel=2,
)
pdict = self_._cls_parameters
if instance and self_.self is not None:
if instance == 'existing':
if getattr(self_.self._param__private, 'initialized', False) and self_.self._param__private.params:
return dict(pdict, **self_.self._param__private.params)
return pdict
else:
return {k: self_.self.param[k] for k in pdict}
return pdict
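# Illustrative sketch (not part of the library source): `.param.objects()`
# returns the Parameter objects themselves rather than their values.
# `ExampleP` is a hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     >>> sorted(ExampleP.param.objects(instance=False))
#     ['a', 'name']
#     >>> ExampleP.param.objects(instance=False)['a'].default
#     1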
def trigger(self_, *param_names):
"""
Trigger watchers for the given set of parameter names. Watchers
will be triggered whether or not the parameter values have
actually changed. As a special case, the value will actually be
changed for a Parameter of type Event, setting it to True so
that it is clear which Event parameter has been triggered.
"""
if self_.self is not None and not self_.self._param__private.initialized:
warnings.warn(
'Triggering watchers on a partially initialized Parameterized instance '
'is deprecated and will raise an error in a future version. '
'Ensure you have called super().__init__(**params) in '
'the Parameterized instance constructor before trying to set up a watcher.',
category=_ParamFutureWarning,
stacklevel=2,
)
trigger_params = [p for p in self_
if hasattr(self_[p], '_autotrigger_value')]
triggers = {p:self_[p]._autotrigger_value
for p in trigger_params if p in param_names}
events = self_._events
watchers = self_._state_watchers
self_._events = []
self_._state_watchers = []
param_values = self_.values()
params = {name: param_values[name] for name in param_names}
self_._TRIGGER = True
self_.update(dict(params, **triggers))
self_._TRIGGER = False
self_._events += events
self_._state_watchers += watchers
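# Illustrative sketch (not part of the library source): `.param.trigger()`
# re-runs watchers even though no value has changed. `ExampleP` is a
# hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     >>> p = ExampleP()
#     >>> w = p.param.watch(lambda *events: print('event for', events[0].name), ['a'])
#     >>> p.param.trigger('a')
#     event for a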
def _update_event_type(self_, watcher, event, triggered):
"""
Returns an updated Event object with the type field set appropriately.
"""
if triggered:
event_type = 'triggered'
else:
event_type = 'changed' if watcher.onlychanged else 'set'
return Event(what=event.what, name=event.name, obj=event.obj, cls=event.cls,
old=event.old, new=event.new, type=event_type)
def _execute_watcher(self, watcher, events):
if watcher.mode == 'args':
args, kwargs = events, {}
else:
args, kwargs = (), {event.name: event.new for event in events}
if iscoroutinefunction(watcher.fn):
if async_executor is None:
raise RuntimeError("Could not execute %s coroutine function. "
"Please register a asynchronous executor on "
"param.parameterized.async_executor, which "
"schedules the function on an event loop." %
watcher.fn)
async_executor(partial(watcher.fn, *args, **kwargs))
else:
try:
watcher.fn(*args, **kwargs)
except Skip:
pass
def _call_watcher(self_, watcher, event):
"""
Invoke the given watcher appropriately given an Event object.
"""
if self_._TRIGGER:
pass
elif watcher.onlychanged and (not self_._changed(event)):
return
if self_._BATCH_WATCH:
self_._events.append(event)
if not any(watcher is w for w in self_._state_watchers):
self_._state_watchers.append(watcher)
else:
event = self_._update_event_type(watcher, event, self_._TRIGGER)
with _batch_call_watchers(self_.self_or_cls, enable=watcher.queued, run=False):
self_._execute_watcher(watcher, (event,))
def _batch_call_watchers(self_):
"""
Batch call a set of watchers based on the parameter value
settings in kwargs using the queued Event and watcher objects.
"""
while self_._events:
event_dict = OrderedDict([((event.name, event.what), event)
for event in self_._events])
watchers = self_._state_watchers[:]
self_._events = []
self_._state_watchers = []
for watcher in sorted(watchers, key=lambda w: w.precedence):
events = [self_._update_event_type(watcher, event_dict[(name, watcher.what)],
self_._TRIGGER)
for name in watcher.parameter_names
if (name, watcher.what) in event_dict]
with _batch_call_watchers(self_.self_or_cls, enable=watcher.queued, run=False):
self_._execute_watcher(watcher, events)
def set_dynamic_time_fn(self_,time_fn,sublistattr=None):
"""
Set time_fn for all Dynamic Parameters of this class or
instance object that are currently being dynamically
generated.
Additionally, sets _Dynamic_time_fn=time_fn on this class or
instance object, so that any future changes to Dynamic
Parameters can inherit time_fn (e.g. if a Number is changed
from a float to a number generator, the number generator will
inherit time_fn).
If specified, sublistattr is the name of an attribute of this
class or instance that contains an iterable collection of
subobjects on which set_dynamic_time_fn should be called. If
the attribute sublistattr is present on any of the subobjects,
set_dynamic_time_fn() will be called for those, too.
"""
self_or_cls = self_.self_or_cls
self_or_cls._Dynamic_time_fn = time_fn
if isinstance(self_or_cls,type):
a = (None,self_or_cls)
else:
a = (self_or_cls,)
for n,p in self_or_cls.param.objects('existing').items():
if hasattr(p, '_value_is_dynamic'):
if p._value_is_dynamic(*a):
g = self_or_cls.param.get_value_generator(n)
g._Dynamic_time_fn = time_fn
if sublistattr:
try:
sublist = getattr(self_or_cls,sublistattr)
except AttributeError:
sublist = []
for obj in sublist:
obj.param.set_dynamic_time_fn(time_fn,sublistattr)
def serialize_parameters(self_, subset=None, mode='json'):
self_or_cls = self_.self_or_cls
if mode not in Parameter._serializers:
raise ValueError(f'Mode {mode!r} not in available serialization formats {list(Parameter._serializers.keys())!r}')
serializer = Parameter._serializers[mode]
return serializer.serialize_parameters(self_or_cls, subset=subset)
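# Illustrative sketch (not part of the library source): JSON serialization of
# the current parameter values (requires the optional serializer module).
# `ExampleP` is hypothetical and the exact JSON text shown is indicative only.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1.5)
#     >>> ExampleP().param.serialize_parameters(subset=['a'])
#     '{"a": 1.5}'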
def serialize_value(self_, pname, mode='json'):
self_or_cls = self_.self_or_cls
if mode not in Parameter._serializers:
raise ValueError(f'Mode {mode!r} not in available serialization formats {list(Parameter._serializers.keys())!r}')
serializer = Parameter._serializers[mode]
return serializer.serialize_parameter_value(self_or_cls, pname)
def deserialize_parameters(self_, serialization, subset=None, mode='json'):
self_or_cls = self_.self_or_cls
serializer = Parameter._serializers[mode]
return serializer.deserialize_parameters(self_or_cls, serialization, subset=subset)
def deserialize_value(self_, pname, value, mode='json'):
self_or_cls = self_.self_or_cls
if mode not in Parameter._serializers:
raise ValueError(f'Mode {mode!r} not in available serialization formats {list(Parameter._serializers.keys())!r}')
serializer = Parameter._serializers[mode]
return serializer.deserialize_parameter_value(self_or_cls, pname, value)
def schema(self_, safe=False, subset=None, mode='json'):
"""
Returns a schema for the parameters on this Parameterized object.
"""
self_or_cls = self_.self_or_cls
if mode not in Parameter._serializers:
raise ValueError(f'Mode {mode!r} not in available serialization formats {list(Parameter._serializers.keys())!r}')
serializer = Parameter._serializers[mode]
return serializer.schema(self_or_cls, safe=safe, subset=subset)
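# Illustrative sketch (not part of the library source): `.param.schema()`
# produces a JSON-schema-like description of the parameters. `ExampleP` is
# hypothetical and the keys shown are indicative only.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1.5, doc='A float')
#     >>> ExampleP.param.schema(subset=['a'])['a']['type']
#     'number'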
# PARAM3_DEPRECATION
# same as values() but returns list, not dict
@_deprecated(extra_msg="""
Use `.param.values().items()` instead (or `.param.values()` for the
common case of `dict(....param.get_param_values())`)
""")
def get_param_values(self_, onlychanged=False):
"""
Return a list of name,value pairs for all Parameters of this
object.
When called on an instance with onlychanged set to True, will
only return values that are not equal to the default value
(onlychanged has no effect when called on a class).
.. deprecated:: 1.12.0
Use `.param.values().items()` instead (or `.param.values()` for the
common case of `dict(....param.get_param_values())`)
"""
vals = self_.values(onlychanged)
return [(k, v) for k, v in vals.items()]
def values(self_, onlychanged=False):
"""
Return a dictionary of name,value pairs for the Parameters of this
object.
When called on an instance with onlychanged set to True, will
only return values that are not equal to the default value
(onlychanged has no effect when called on a class).
"""
self_or_cls = self_.self_or_cls
vals = []
for name, val in self_or_cls.param.objects('existing').items():
value = self_or_cls.param.get_value_generator(name)
if name == 'name' and onlychanged and _is_auto_name(self_.cls.__name__, value):
continue
if not onlychanged or not Comparator.is_equal(value, val.default):
vals.append((name, value))
vals.sort(key=itemgetter(0))
return dict(vals)
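# Illustrative sketch (not part of the library source): `.param.values()` and
# the effect of onlychanged. `ExampleP` is a hypothetical class assuming the
# full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     ...     b = param.Number(default=2)
#     >>> p = ExampleP(a=10)
#     >>> p.param.values(onlychanged=True)   # auto-generated 'name' is skipped
#     {'a': 10}
#     >>> sorted(p.param.values())
#     ['a', 'b', 'name']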
def force_new_dynamic_value(self_, name): # pylint: disable-msg=E0213
"""
Force a new value to be generated for the dynamic attribute
name, and return it.
If name is not dynamic, its current value is returned
(i.e. equivalent to getattr(name)).
"""
cls_or_slf = self_.self_or_cls
param_obj = cls_or_slf.param.objects('existing').get(name)
if not param_obj:
return getattr(cls_or_slf, name)
cls, slf = None, None
if isinstance(cls_or_slf,type):
cls = cls_or_slf
else:
slf = cls_or_slf
if not hasattr(param_obj,'_force'):
return param_obj.__get__(slf, cls)
else:
return param_obj._force(slf, cls)
def get_value_generator(self_,name): # pylint: disable-msg=E0213
"""
Return the value or value-generating object of the named
attribute.
For most parameters, this is simply the parameter's value
(i.e. the same as getattr()), but Dynamic parameters have
their value-generating object returned.
"""
cls_or_slf = self_.self_or_cls
param_obj = cls_or_slf.param.objects('existing').get(name)
if not param_obj:
value = getattr(cls_or_slf,name)
# CompositeParameter detected by being a Parameter and having 'attribs'
elif hasattr(param_obj,'attribs'):
value = [cls_or_slf.param.get_value_generator(a) for a in param_obj.attribs]
# not a Dynamic Parameter
elif not hasattr(param_obj,'_value_is_dynamic'):
value = getattr(cls_or_slf,name)
# Dynamic Parameter...
else:
# TODO: is this always an instance?
if isinstance(cls_or_slf, Parameterized) and name in cls_or_slf._param__private.values:
# dealing with object and it's been set on this object
value = cls_or_slf._param__private.values[name]
else:
# dealing with class or isn't set on the object
value = param_obj.default
return value
def inspect_value(self_,name): # pylint: disable-msg=E0213
"""
Return the current value of the named attribute without modifying it.
Same as getattr() except for Dynamic parameters, which have their
last generated value returned.
"""
cls_or_slf = self_.self_or_cls
param_obj = cls_or_slf.param.objects('existing').get(name)
if not param_obj:
value = getattr(cls_or_slf,name)
elif hasattr(param_obj,'attribs'):
value = [cls_or_slf.param.inspect_value(a) for a in param_obj.attribs]
elif not hasattr(param_obj,'_inspect'):
value = getattr(cls_or_slf,name)
else:
if isinstance(cls_or_slf,type):
value = param_obj._inspect(None,cls_or_slf)
else:
value = param_obj._inspect(cls_or_slf,None)
return value
def method_dependencies(self_, name, intermediate=False):
"""
Given the name of a method, returns a PInfo object for each dependency
of this method. See help(PInfo) for the contents of these objects.
By default intermediate dependencies on sub-objects are not
returned as these are primarily useful for internal use to
determine when a sub-object dependency has to be updated.
"""
method = getattr(self_.self_or_cls, name)
minfo = MInfo(cls=self_.cls, inst=self_.self, name=name,
method=method)
deps, dynamic = _params_depended_on(
minfo, dynamic=False, intermediate=intermediate)
if self_.self is None:
return deps
return _resolve_mcs_deps(
self_.self, deps, dynamic, intermediate=intermediate)
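# Illustrative sketch (not part of the library source): inspecting the
# dependencies declared with param.depends. `ExampleP` is a hypothetical class
# assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     ...     @param.depends('a')
#     ...     def view(self):
#     ...         return self.a
#     >>> [d.name for d in ExampleP().param.method_dependencies('view')]
#     ['a']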
# PARAM3_DEPRECATION
@_deprecated(extra_msg='Use instead `.param.method_dependencies`')
def params_depended_on(self_, *args, **kwargs):
"""
Given the name of a method, returns a PInfo object for each dependency
of this method. See help(PInfo) for the contents of these objects.
By default intermediate dependencies on sub-objects are not
returned as these are primarily useful for internal use to
determine when a sub-object dependency has to be updated.
.. deprecated:: 2.0.0
Use instead `.param.method_dependencies`
"""
return self_.method_dependencies(*args, **kwargs)
def outputs(self_):
"""
Returns a mapping between any declared outputs and a tuple
of the declared Parameter type, the output method, and the
index into the output if multiple outputs are returned.
"""
outputs = {}
for cls in classlist(self_.cls):
for name in dir(cls):
if name == '_param_watchers':
continue
method = getattr(self_.self_or_cls, name)
dinfo = getattr(method, '_dinfo', {})
if 'outputs' not in dinfo:
continue
for override, otype, idx in dinfo['outputs']:
if override is not None:
name = override
outputs[name] = (otype, method, idx)
return outputs
def _spec_to_obj(self_, spec, dynamic=True, intermediate=True):
"""
Resolves a dependency specification into lists of explicit
parameter dependencies and dynamic dependencies.
Dynamic dependencies are specifications to be resolved when
the sub-object whose parameters are being depended on is
defined.
During class creation dynamic=False which means sub-object
dependencies are not resolved. At instance creation and
whenever a sub-object is set on an object this method will be
invoked to determine whether the dependency is available.
For sub-object dependencies we also return dependencies for
every part of the path, e.g. for a dependency specification
like "a.b.c" we return dependencies for sub-object "a" and the
sub-sub-object "b" in addition to the dependency on the actual
parameter "c" on object "b". This is to ensure that if a
sub-object is swapped out we are notified and can update the
dynamic dependency to the new object. Even if a sub-object
dependency can only be partially resolved, e.g. if object "a"
does not yet have a sub-object "b" we must watch for changes
to "b" on sub-object "a" in case such a subobject is put in "b".
"""
if isinstance(spec, Parameter):
inst = spec.owner if isinstance(spec.owner, Parameterized) else None
cls = spec.owner if inst is None else type(inst)
info = PInfo(inst=inst, cls=cls, name=spec.name,
pobj=spec, what='value')
return [] if intermediate == 'only' else [info], []
obj, attr, what = _parse_dependency_spec(spec)
if obj is None:
src = self_.self_or_cls
elif not dynamic:
return [], [DInfo(spec=spec)]
else:
if not hasattr(self_.self_or_cls, obj.split('.')[1]):
raise AttributeError(
f'Dependency {obj[1:]!r} could not be resolved, {self_.self_or_cls} '
f'has no parameter or attribute {obj.split(".")[1]!r}. Ensure '
'the object being depended on is declared before calling the '
'Parameterized constructor.'
)
src = _getattrr(self_.self_or_cls, obj[1::], None)
if src is None:
path = obj[1:].split('.')
deps = []
# Attempt to partially resolve subobject path to ensure
# that if a subobject is later updated making the full
# subobject path available we have to be notified and
# set up watchers
if len(path) >= 1 and intermediate:
sub_src = None
subpath = path
while sub_src is None and subpath:
subpath = subpath[:-1]
sub_src = _getattrr(self_.self_or_cls, '.'.join(subpath), None)
if subpath:
subdeps, _ = self_._spec_to_obj(
'.'.join(path[:len(subpath)+1]), dynamic, intermediate)
deps += subdeps
return deps, [] if intermediate == 'only' else [DInfo(spec=spec)]
cls, inst = (src, None) if isinstance(src, type) else (type(src), src)
if attr == 'param':
deps, dynamic_deps = self_._spec_to_obj(obj[1:], dynamic, intermediate)
for p in src.param:
param_deps, param_dynamic_deps = src.param._spec_to_obj(p, dynamic, intermediate)
deps += param_deps
dynamic_deps += param_dynamic_deps
return deps, dynamic_deps
elif attr in src.param:
info = PInfo(inst=inst, cls=cls, name=attr,
pobj=src.param[attr], what=what)
elif hasattr(src, attr):
attr_obj = getattr(src, attr)
if isinstance(attr_obj, Parameterized):
return [], []
elif isinstance(attr_obj, (FunctionType, MethodType)):
info = MInfo(inst=inst, cls=cls, name=attr,
method=attr_obj)
else:
raise AttributeError(f"Attribute {attr!r} could not be resolved on {src}.")
elif getattr(src, "abstract", None):
return [], [] if intermediate == 'only' else [DInfo(spec=spec)]
else:
raise AttributeError(f"Attribute {attr!r} could not be resolved on {src}.")
if obj is None or not intermediate:
return [info], []
deps, dynamic_deps = self_._spec_to_obj(obj[1:], dynamic, intermediate)
if intermediate != 'only':
deps.append(info)
return deps, dynamic_deps
def _register_watcher(self_, action, watcher, what='value'):
if self_.self is not None and not self_.self._param__private.initialized:
warnings.warn(
'(Un)registering a watcher on a partially initialized Parameterized instance '
'is deprecated and will raise an error in a future version. Ensure '
'you have called super().__init__(**params) in the Parameterized instance '
'constructor before trying to set up a watcher.',
category=_ParamFutureWarning,
stacklevel=4,
)
parameter_names = watcher.parameter_names
for parameter_name in parameter_names:
if parameter_name not in self_.cls.param:
raise ValueError("{} parameter was not found in list of "
"parameters of class {}".format(parameter_name, self_.cls.__name__))
if self_.self is not None and what == "value":
watchers = self_.self._param__private.watchers
if parameter_name not in watchers:
watchers[parameter_name] = {}
if what not in watchers[parameter_name]:
watchers[parameter_name][what] = []
getattr(watchers[parameter_name][what], action)(watcher)
else:
watchers = self_[parameter_name].watchers
if what not in watchers:
watchers[what] = []
getattr(watchers[what], action)(watcher)
def watch(self_, fn, parameter_names, what='value', onlychanged=True, queued=False, precedence=0):
"""
Register the given callback function `fn` to be invoked for
events on the indicated parameters.
`what`: What to watch on each parameter; either the value (by
default) or else the indicated slot (e.g. 'constant').
`onlychanged`: By default, only invokes the function when the
watched item changes, but if `onlychanged=False` also invokes
it when the `what` item is set to its current value again.
`queued`: By default, additional watcher events generated
inside the callback fn are dispatched immediately, effectively
doing depth-first processing of Watcher events. However, in
certain scenarios, it is helpful to wait to dispatch such
downstream events until all events that triggered this watcher
have been processed. In such cases setting `queued=True` on
this Watcher will queue up new downstream events generated
during `fn` until `fn` completes and all other watchers
invoked by that same event have finished executing,
effectively doing breadth-first processing of Watcher events.
`precedence`: Declares a precedence level for the Watcher that
determines the priority with which the callback is executed.
Lower precedence levels are executed earlier. Negative
precedences are reserved for internal Watchers, i.e. those
set up by param.depends.
When the `fn` is called, it will be provided the relevant
Event objects as positional arguments, which allows it to
determine which of the possible triggering events occurred.
Returns a Watcher object.
See help(Watcher) and help(Event) for the contents of those objects.
"""
if precedence < 0:
raise ValueError("User-defined watch callbacks must declare "
"a positive precedence. Negative precedences "
"are reserved for internal Watchers.")
return self_._watch(fn, parameter_names, what, onlychanged, queued, precedence)
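# Illustrative sketch (not part of the library source): registering a watcher
# that receives Event objects, then removing it with unwatch. `ExampleP` is a
# hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=0)
#     >>> p = ExampleP()
#     >>> def cb(*events):
#     ...     for e in events:
#     ...         print(f'{e.name}: {e.old} -> {e.new}')
#     >>> watcher = p.param.watch(cb, ['a'])
#     >>> p.a = 3
#     a: 0 -> 3
#     >>> p.param.unwatch(watcher)   # stop receiving events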
def _watch(self_, fn, parameter_names, what='value', onlychanged=True, queued=False, precedence=-1):
parameter_names = tuple(parameter_names) if isinstance(parameter_names, list) else (parameter_names,)
watcher = Watcher(inst=self_.self, cls=self_.cls, fn=fn, mode='args',
onlychanged=onlychanged, parameter_names=parameter_names,
what=what, queued=queued, precedence=precedence)
self_._register_watcher('append', watcher, what)
return watcher
def unwatch(self_, watcher):
"""
Remove the given Watcher object (from `watch` or `watch_values`) from this object's list.
"""
try:
self_._register_watcher('remove', watcher, what=watcher.what)
except Exception:
self_.warning(f'No such watcher {str(watcher)} to remove.')
def watch_values(self_, fn, parameter_names, what='value', onlychanged=True, queued=False, precedence=0):
"""
Easier-to-use version of `watch` specific to watching for changes in parameter values.
Only allows `what` to be 'value', and invokes the callback `fn` using keyword
arguments <param_name>=<new_value> rather than with a list of Event objects.
"""
if precedence < 0:
raise ValueError("User-defined watch callbacks must declare "
"a positive precedence. Negative precedences "
"are reserved for internal Watchers.")
assert what == 'value'
if isinstance(parameter_names, list):
parameter_names = tuple(parameter_names)
else:
parameter_names = (parameter_names,)
watcher = Watcher(inst=self_.self, cls=self_.cls, fn=fn,
mode='kwargs', onlychanged=onlychanged,
parameter_names=parameter_names, what=what,
queued=queued, precedence=precedence)
self_._register_watcher('append', watcher, what)
return watcher
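# Illustrative sketch (not part of the library source): watch_values delivers
# new values as keyword arguments instead of Event objects. `ExampleP` is a
# hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=0)
#     >>> p = ExampleP()
#     >>> w = p.param.watch_values(lambda **kw: print(kw), ['a'])
#     >>> p.a = 3
#     {'a': 3}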
# Instance methods
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `{k:v.default for k,v in p.param.objects().items()}`")
def defaults(self_):
"""
Return {parameter_name:parameter.default} for all non-constant
Parameters.
Note that a Parameter for which instantiate==True has its default
instantiated.
.. deprecated:: 1.12.0
Use instead `{k:v.default for k,v in p.param.objects().items()}`
"""
self = self_.self
d = {}
for param_name, param in self.param.objects('existing').items():
if param.constant:
pass
if param.instantiate:
self.param._instantiate_param(param, dict_=d, key=param_name)
d[param_name] = param.default
return d
# Designed to avoid any processing unless the print
# level is high enough, though not all callers of message(),
# verbose(), debug(), etc are taking advantage of this.
def __db_print(self_,level,msg,*args,**kw):
"""
Calls the logger returned by the get_logger() function,
prepending the result of calling dbprint_prefix() (if any).
See python's logging module for details.
"""
self_or_cls = self_.self_or_cls
if get_logger(name=self_or_cls.name).isEnabledFor(level):
if dbprint_prefix and callable(dbprint_prefix):
msg = dbprint_prefix() + ": " + msg # pylint: disable-msg=E1102
get_logger(name=self_or_cls.name).log(level, msg, *args, **kw)
# PARAM3_DEPRECATION
@_deprecated(extra_msg="""Use instead `for k,v in p.param.objects().items(): print(f"{p.__class__.name}.{k}={repr(v.default)}")`""")
def print_param_values(self_):
"""Print the values of all this object's Parameters.
.. deprecated:: 1.12.0
Use instead `for k,v in p.param.objects().items(): print(f"{p.__class__.name}.{k}={repr(v.default)}")`
"""
self = self_.self
for name, val in self.param.values().items():
print(f'{self.name}.{name} = {val}')
def warning(self_, msg,*args,**kw):
"""
Print msg merged with args as a warning, unless the module variable
warnings_as_exceptions is True, in which case an Exception
containing the arguments is raised.
See Python's logging module for details of message formatting.
"""
self_.log(WARNING, msg, *args, **kw)
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.log(param.MESSAGE, ...)`")
def message(self_,msg,*args,**kw):
"""
Print msg merged with args as a message.
See Python's logging module for details of message formatting.
.. deprecated:: 1.12.0
Use instead `.param.log(param.MESSAGE, ...)`
"""
self_.__db_print(INFO,msg,*args,**kw)
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.log(param.VERBOSE, ...)`")
def verbose(self_,msg,*args,**kw):
"""
Print msg merged with args as a verbose message.
See Python's logging module for details of message formatting.
.. deprecated:: 1.12.0
Use instead `.param.log(param.VERBOSE, ...)`
"""
self_.__db_print(VERBOSE,msg,*args,**kw)
# PARAM3_DEPRECATION
@_deprecated(extra_msg="Use instead `.param.log(param.DEBUG, ...)`")
def debug(self_,msg,*args,**kw):
"""
Print msg merged with args as a debugging statement.
See Python's logging module for details of message formatting.
.. deprecated:: 1.12.0
Use instead `.param.log(param.DEBUG, ...)`
"""
self_.__db_print(DEBUG,msg,*args,**kw)
def log(self_, level, msg, *args, **kw):
"""
Print msg merged with args as a message at the indicated logging level.
Logging levels include those provided by the Python logging module
plus VERBOSE, either obtained directly from the logging module like
`logging.INFO`, or from parameterized like `param.parameterized.INFO`.
Supported logging levels include (in order of severity)
DEBUG, VERBOSE, INFO, WARNING, ERROR, CRITICAL
See Python's logging module for details of message formatting.
"""
if level is WARNING:
if warnings_as_exceptions:
raise Exception("Warning: " + msg % args)
else:
global warning_count
warning_count+=1
self_.__db_print(level, msg, *args, **kw)
# Note that there's no _state_push method on the class, so
# dynamic parameters set on a class can't have state saved. This
# is because, to do this, _state_push() would need to be a
# @bothmethod, but that complicates inheritance in cases where we
# already have a _state_push() method.
# (isinstance(g,Parameterized) below is used to exclude classes.)
def _state_push(self_):
"""
Save this instance's state.
For Parameterized instances, this includes the state of
dynamically generated values.
Subclasses that maintain short-term state should additionally
save and restore that state using _state_push() and
_state_pop().
Generally, this method is used by operations that need to test
something without permanently altering the objects' state.
"""
self = self_.self_or_cls
if not isinstance(self, Parameterized):
raise NotImplementedError('_state_push is not implemented at the class level')
for pname, p in self.param.objects('existing').items():
g = self.param.get_value_generator(pname)
if hasattr(g,'_Dynamic_last'):
g._saved_Dynamic_last.append(g._Dynamic_last)
g._saved_Dynamic_time.append(g._Dynamic_time)
# CB: not storing the time_fn: assuming that doesn't
# change.
elif hasattr(g,'_state_push') and isinstance(g,Parameterized):
g._state_push()
def _state_pop(self_):
"""
Restore the most recently saved state.
See _state_push() for more details.
"""
self = self_.self_or_cls
if not isinstance(self, Parameterized):
raise NotImplementedError('_state_pop is not implemented at the class level')
for pname, p in self.param.objects('existing').items():
g = self.param.get_value_generator(pname)
if hasattr(g,'_Dynamic_last'):
g._Dynamic_last = g._saved_Dynamic_last.pop()
g._Dynamic_time = g._saved_Dynamic_time.pop()
elif hasattr(g,'_state_pop') and isinstance(g,Parameterized):
g._state_pop()
def pprint(self_, imports=None, prefix=" ", unknown_value='<?>',
qualify=False, separator=""):
"""
(Experimental) Pretty printed representation that may be
evaluated with eval. See pprint() function for more details.
"""
self = self_.self_or_cls
if not isinstance(self, Parameterized):
raise NotImplementedError('pprint is not implemented at the class level')
# Wrapping the staticmethod _pprint with partial to pass `self` as the `_recursive_repr`
# decorator expects `self` to be the pprinted object (not `self_`).
return partial(self_._pprint, self, imports=imports, prefix=prefix,
unknown_value=unknown_value, qualify=qualify, separator=separator)()
@staticmethod
@_recursive_repr()
def _pprint(self, imports=None, prefix=" ", unknown_value='<?>',
qualify=False, separator=""):
if imports is None:
imports = [] # would have been simpler to use a set from the start
imports[:] = list(set(imports))
# Generate import statement
mod = self.__module__
bits = mod.split('.')
imports.append("import %s"%mod)
imports.append("import %s"%bits[0])
changed_params = self.param.values(onlychanged=script_repr_suppress_defaults)
values = self.param.values()
spec = getfullargspec(type(self).__init__)
if 'self' not in spec.args or spec.args[0] != 'self':
raise KeyError(f"'{type(self).__name__}.__init__.__signature__' must contain 'self' as its first Parameter.")
args = spec.args[1:]
if spec.defaults is not None:
posargs = spec.args[:-len(spec.defaults)]
kwargs = dict(zip(spec.args[-len(spec.defaults):], spec.defaults))
else:
posargs, kwargs = args, []
parameters = self.param.objects('existing')
ordering = sorted(
sorted(changed_params), # alphanumeric tie-breaker
key=lambda k: (- float('inf') # No precedence is lowest possible precedence
if parameters[k].precedence is None else
parameters[k].precedence))
arglist, keywords, processed = [], [], []
for k in args + ordering:
if k in processed: continue
# Suppresses automatically generated names.
if k == 'name' and (values[k] is not None
and re.match('^'+self.__class__.__name__+'[0-9]+$', values[k])):
continue
value = pprint(values[k], imports, prefix=prefix,settings=[],
unknown_value=unknown_value,
qualify=qualify) if k in values else None
if value is None:
if unknown_value is False:
raise Exception(f"{self.name}: unknown value of {k!r}")
elif unknown_value is None:
# i.e. suppress repr
continue
else:
value = unknown_value
# Explicit kwarg (unchanged, known value)
if (k in kwargs) and (k in values) and kwargs[k] == values[k]: continue
if k in posargs:
# value will be unknown_value unless k is a parameter
arglist.append(value)
elif (k in kwargs or
(hasattr(spec, 'varkw') and (spec.varkw is not None)) or
(hasattr(spec, 'keywords') and (spec.keywords is not None))):
# Explicit modified keywords or parameters in
# precedence order (if **kwargs present)
keywords.append(f'{k}={value}')
processed.append(k)
qualifier = mod + '.' if qualify else ''
arguments = arglist + keywords + (['**%s' % spec.varargs] if spec.varargs else [])
return qualifier + '{}({})'.format(self.__class__.__name__, (','+separator+prefix).join(arguments))
class ParameterizedMetaclass(type):
"""
The metaclass of Parameterized (and all its descendants).
The metaclass overrides type.__setattr__ to allow us to set
Parameter values on classes without overwriting the attribute
descriptor. That is, for a Parameterized class of type X with a
Parameter y, the user can type X.y=3, which sets the default value
of Parameter y to be 3, rather than overwriting y with the
constant value 3 (and thereby losing all other info about that
Parameter, such as the doc string, bounds, etc.).
The __init__ method is used when defining a Parameterized class,
usually when the module where that class is located is imported
for the first time. That is, the __init__ in this metaclass
initializes the *class* object, while the __init__ method defined
in each Parameterized class is called for each new instance of
that class.
Additionally, a class can declare itself abstract by having an
attribute __abstract set to True. The 'abstract' attribute can be
used to find out if a class is abstract or not.
"""
def __init__(mcs, name, bases, dict_):
"""
Initialize the class object (not an instance of the class, but
the class itself).
Initializes all the Parameters by looking up appropriate
default values (see __param_inheritance()) and setting
attrib_names (see _set_names()).
"""
type.__init__(mcs, name, bases, dict_)
# Compute which parameters explicitly do not support references
# This can be removed when Parameter.allow_refs=True by default.
explicit_no_refs = set()
for base in bases:
if issubclass(base, Parameterized):
explicit_no_refs |= set(base._param__private.explicit_no_refs)
_param__private = _ClassPrivate(explicit_no_refs=list(explicit_no_refs))
mcs._param__private = _param__private
mcs.__set_name(name, dict_)
mcs._param__parameters = Parameters(mcs)
# All objects (with their names) of type Parameter that are
# defined in this class
parameters = [(n, o) for (n, o) in dict_.items()
if isinstance(o, Parameter)]
for param_name,param in parameters:
mcs._initialize_parameter(param_name, param)
# retrieve depends info from methods and store more conveniently
dependers = [(n, m, m._dinfo) for (n, m) in dict_.items()
if hasattr(m, '_dinfo')]
# Resolve dependencies of current class
_watch = []
for name, method, dinfo in dependers:
watch = dinfo.get('watch', False)
on_init = dinfo.get('on_init', False)
minfo = MInfo(cls=mcs, inst=None, name=name,
method=method)
deps, dynamic_deps = _params_depended_on(minfo, dynamic=False)
if watch:
_watch.append((name, watch == 'queued', on_init, deps, dynamic_deps))
# Resolve dependencies in class hierarchy
_inherited = []
for cls in classlist(mcs)[:-1][::-1]:
if not hasattr(cls, '_param__parameters'):
continue
for dep in cls.param._depends['watch']:
method = getattr(mcs, dep[0], None)
dinfo = getattr(method, '_dinfo', {'watch': False})
if (not any(dep[0] == w[0] for w in _watch+_inherited)
and dinfo.get('watch')):
_inherited.append(dep)
mcs.param._depends = {'watch': _inherited+_watch}
if docstring_signature:
mcs.__class_docstring()
def __set_name(mcs, name, dict_):
"""
Give Parameterized classes a useful 'name' attribute that is by
default the class name, unless a class in the hierarchy has defined
a `name` String Parameter with a defined `default` value, in which case
that value is used to set the class name.
"""
name_param = dict_.get("name", None)
if name_param is not None:
if not type(name_param) is String:
raise TypeError(
f"Parameterized class {name!r} cannot override "
f"the 'name' Parameter with type {type(name_param)}. "
"Overriding 'name' is only allowed with a 'String' Parameter."
)
if name_param.default:
mcs.name = name_param.default
mcs._param__private.renamed = True
else:
mcs.name = name
else:
classes = classlist(mcs)[::-1]
found_renamed = False
for c in classes:
if hasattr(c, '_param__private') and c._param__private.renamed:
found_renamed = True
break
if not found_renamed:
mcs.name = name
def __class_docstring(mcs):
"""
Customize the class docstring with a Parameter table if
`docstring_describe_params` and the `param_pager` is available.
"""
if not docstring_describe_params or not param_pager:
return
class_docstr = mcs.__doc__ if mcs.__doc__ else ''
description = param_pager(mcs)
mcs.__doc__ = class_docstr + '\n' + description
def _initialize_parameter(mcs, param_name, param):
# A Parameter has no way to find out the name a
# Parameterized class has for it
param._set_names(param_name)
mcs.__param_inheritance(param_name, param)
# Should use the official Python 2.6+ abstract base classes; see
# https://github.com/holoviz/param/issues/84
def __is_abstract(mcs):
"""
Return True if the class has an attribute __abstract set to True.
Subclasses will return False unless they themselves have
__abstract set to True. This mechanism allows a class to
declare itself to be abstract (e.g. to avoid it being offered
as an option in a GUI), without the "abstract" property being
inherited by its subclasses (at least one of which is
presumably not abstract).
"""
# Can't just do ".__abstract", because that is mangled to
# _ParameterizedMetaclass__abstract before running, but
# the actual class object will have an attribute
# _ClassName__abstract. So, we have to mangle it ourselves at
# runtime. Mangling follows description in
# https://docs.python.org/2/tutorial/classes.html#private-variables-and-class-local-references
try:
return getattr(mcs,'_%s__abstract'%mcs.__name__.lstrip("_"))
except AttributeError:
return False
def __get_signature(mcs):
"""
For classes with a constructor signature that matches the default
Parameterized.__init__ signature (i.e. ``__init__(self, **params)``)
this method will generate a new signature that expands the
parameters. If the signature differs from the default the
custom signature is returned.
"""
if mcs._param__private.signature:
return mcs._param__private.signature
# allowed_signature must be the signature of Parameterized.__init__
# Inspecting `mcs.__init__` instead of `mcs` to avoid a recursion error
if inspect.signature(mcs.__init__) != DEFAULT_SIGNATURE:
return None
processed_kws, keyword_groups = set(), []
for cls in reversed(mcs.mro()):
keyword_group = []
for k, v in sorted(cls.__dict__.items()):
if isinstance(v, Parameter) and k not in processed_kws and not v.readonly:
keyword_group.append(k)
processed_kws.add(k)
keyword_groups.append(keyword_group)
keywords = [el for grp in reversed(keyword_groups) for el in grp]
mcs._param__private.signature = signature = inspect.Signature([
inspect.Parameter(k, inspect.Parameter.KEYWORD_ONLY)
for k in keywords
])
return signature
__signature__ = property(__get_signature)
abstract = property(__is_abstract)
def _get_param(mcs):
return mcs._param__parameters
param = property(_get_param)
def __setattr__(mcs, attribute_name, value):
"""
Implements 'self.attribute_name=value' in a way that also supports Parameters.
If there is already a descriptor named attribute_name, and
that descriptor is a Parameter, and the new value is *not* a
Parameter, then call that Parameter's __set__ method with the
specified value.
In all other cases set the attribute normally (i.e. overwrite
the descriptor). If the new value is a Parameter, once it has
been set we make sure that the value is inherited from
Parameterized superclasses as described in __param_inheritance().
"""
# Find out if there's a Parameter called attribute_name as a
# class attribute of this class - if not, parameter is None.
parameter,owning_class = mcs.get_param_descriptor(attribute_name)
if parameter and not isinstance(value,Parameter):
if owning_class != mcs:
parameter = copy.copy(parameter)
parameter.owner = mcs
type.__setattr__(mcs,attribute_name,parameter)
mcs.__dict__[attribute_name].__set__(None,value)
else:
type.__setattr__(mcs,attribute_name,value)
if isinstance(value,Parameter):
mcs.__param_inheritance(attribute_name,value)
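# Illustrative sketch (not part of the library source): assigning to a
# Parameter at the class level updates its default rather than replacing the
# descriptor, as implemented by __setattr__ above. `ExampleP` is a
# hypothetical class assuming the full param package.
#
#     >>> import param
#     >>> class ExampleP(param.Parameterized):
#     ...     a = param.Number(default=1)
#     >>> ExampleP.a = 5                  # handled by the metaclass __setattr__
#     >>> ExampleP.param['a'].default     # Parameter object is preserved
#     5
#     >>> ExampleP().a
#     5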
def __param_inheritance(mcs, param_name, param):
"""
Look for Parameter values in superclasses of this
Parameterized class.
Ordinarily, when a Python object is instantiated, attributes
not given values in the constructor will inherit the value
given in the object's class, or in its superclasses. For
Parameters owned by Parameterized classes, we have implemented
an additional level of default lookup, should this ordinary
lookup return only `Undefined`.
In such a case, i.e. when no non-`Undefined` value was found for a
Parameter by the usual inheritance mechanisms, we explicitly
look for Parameters with the same name in superclasses of this
Parameterized class, and use the first such value that we
find.
The goal is to be able to set the default value (or other
slots) of a Parameter within a Parameterized class, just as we
can set values for non-Parameter objects in Parameterized
classes, and have the values inherited through the
Parameterized hierarchy as usual.
Note that instantiate is handled differently: if there is a
parameter with the same name in one of the superclasses with
instantiate set to True, this parameter will inherit
instantiate=True.
"""
# get all relevant slots (i.e. slots defined in all
# superclasses of this parameter)
p_type = type(param)
slots = dict.fromkeys(p_type._all_slots_)
# note for some eventual future: python 3.6+ descriptors grew
# __set_name__, which could replace this and _set_names
setattr(param, 'owner', mcs)
del slots['owner']
# backwards compatibility (see Composite parameter)
if 'objtype' in slots:
setattr(param, 'objtype', mcs)
del slots['objtype']
supers = classlist(mcs)[::-1]
# Explicitly inherit instantiate from super class and
# check if type has changed to a more specific or different
# Parameter type, requiring extra validation
type_change = False
for superclass in supers:
super_param = superclass.__dict__.get(param_name)
if not isinstance(super_param, Parameter):
continue
if super_param.instantiate is True:
param.instantiate = True
super_type = type(super_param)
if not issubclass(super_type, p_type):
type_change = True
del slots['instantiate']
callables, slot_values = {}, {}
slot_overridden = False
for slot in slots.keys():
# Search up the hierarchy until param.slot (which has to
# be obtained using getattr(param,slot)) is not Undefined,
# is a new value (using identity) or we run out of classes
# to search.
for scls in supers:
# Class may not define parameter or slot might not be
# there because could be a more general type of Parameter
new_param = scls.__dict__.get(param_name)
if new_param is None or not hasattr(new_param, slot):
continue
new_value = getattr(new_param, slot)
old_value = slot_values.get(slot, Undefined)
if new_value is Undefined:
continue
elif new_value is old_value:
continue
elif old_value is Undefined:
slot_values[slot] = new_value
# If we already know we have to re-validate abort
# early to avoid costly lookups
if slot_overridden or type_change:
break
else:
if slot not in param._non_validated_slots:
slot_overridden = True
break
if slot_values.get(slot, Undefined) is Undefined:
try:
default_val = param._slot_defaults[slot]
except KeyError as e:
raise KeyError(
f'Slot {slot!r} of parameter {param_name!r} has no '
'default value defined in `_slot_defaults`'
) from e
if callable(default_val):
callables[slot] = default_val
else:
slot_values[slot] = default_val
elif slot == 'allow_refs':
# Track Parameters that explicitly declared no refs
explicit_no_refs = mcs._param__private.explicit_no_refs
if param.allow_refs is False:
explicit_no_refs.append(param.name)
elif param.allow_refs is True and param.name in explicit_no_refs:
explicit_no_refs.remove(param.name)
# Now set the actual slot values
for slot, value in slot_values.items():
setattr(param, slot, value)
# Avoid crosstalk between mutable slot values in different Parameter objects
if slot != "default":
v = getattr(param, slot)
if _is_mutable_container(v):
setattr(param, slot, copy.copy(v))
# Once all the static slots have been filled in, fill in the dynamic ones
# (which are only allowed to use static values or results are undefined)
for slot, fn in callables.items():
setattr(param, slot, fn(param))
# Once all the slot values have been set, call _update_state for Parameters
# that need updates to make sure they're set up correctly after inheritance.
param._update_state()
# If the type has changed to a more specific or different type
# or a slot value has been changed validate the default again.
# Hack: Had to disable re-validation of None values because the
# automatic appending of an unknown value on Selector opens a whole
# rabbit hole in regard to the validation.
if (type_change or slot_overridden) and param.default is not None:
try:
param._validate(param.default)
# Param has no base validation exception class. Param Parameters raise
# ValueError, TypeError, OSError exceptions but external Parameters
# might raise other types of error, so we catch them all.
except Exception as e:
msg = f'{_validate_error_prefix(param)} failed to validate its ' \
'default value on class creation, this is going to raise ' \
'an error in the future. '
parents = ', '.join(klass.__name__ for klass in mcs.__mro__[1:-2])
if not type_change and slot_overridden:
msg += (
f'The Parameter is defined with attributes which when '
'combined with attributes inherited from its parent '
f'classes ({parents}) make it invalid. '
'Please fix the Parameter attributes.'
)
elif type_change and not slot_overridden:
msg += (
f'The Parameter type changed between class {mcs.__name__!r} '
f'and one of its parent classes ({parents}) which '
f'made it invalid. Please fix the Parameter type.'
)
else:
# type_change and slot_overridden cannot both be True: when the
# type changes, checking the remaining slots is aborted early
# for performance reasons.
pass
msg += f'\nValidation failed with:\n{e}'
warnings.warn(
msg,
category=_ParamFutureWarning,
stacklevel=4,
)
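# Example (illustrative; ``A`` and ``B`` are hypothetical and the full
# ``param`` package is assumed). Slots left as Undefined on a redeclared
# Parameter are filled in from the nearest superclass declaration, so only
# the slots being overridden need to be respecified:
#
#     import param
#
#     class A(param.Parameterized):
#         x = param.Number(default=2, bounds=(0, 5), doc="An x value")
#
#     class B(A):
#         x = param.Number(default=4)  # bounds and doc inherited from A.x
#
#     assert B.param.x.bounds == (0, 5)
#     assert B.param.x.doc == "An x value"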
def get_param_descriptor(mcs,param_name):
"""
Goes up the class hierarchy (starting from the current class)
looking for a Parameter class attribute param_name. As soon as
one is found as a class attribute, that Parameter is returned
along with the class in which it is declared.
"""
classes = classlist(mcs)
for c in classes[::-1]:
attribute = c.__dict__.get(param_name)
if isinstance(attribute,Parameter):
return attribute,c
return None,None
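# Example (illustrative; ``A`` and ``B`` are hypothetical). Because this
# method lives on the metaclass, it can be called directly on a
# Parameterized class to find where a Parameter was declared:
#
#     class A(Parameterized):
#         x = Parameter(default=1)
#
#     class B(A):
#         pass
#
#     assert B.get_param_descriptor('x')[1] is A
#     assert B.get_param_descriptor('missing') == (None, None)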
# Whether script_repr should avoid reporting the values of parameters
# that are just inheriting their values from the class defaults.
# Because deepcopying creates a new object, script_repr cannot detect such
# inheritance when instantiate=True, so such values will be printed
# even if they are just being copied from the default.
script_repr_suppress_defaults=True
def script_repr(val, imports=None, prefix="\n ", settings=[],
qualify=True, unknown_value=None, separator="\n",
show_imports=True):
"""
Variant of pprint() designed for generating a (nearly) runnable script.
The output of script_repr(parameterized_obj) is meant to be a
string suitable for running using `python file.py`. Not every
object is guaranteed to have a runnable script_repr
representation, but it is meant to be a good starting point for
generating a Python script that (after minor edits) can be
evaluated to get a newly initialized object similar to the one
provided.
The new object will only have the same parameter state, not the
same internal (attribute) state; the script_repr captures only
the state of the Parameters of that object and not any other
attributes it may have.
If show_imports is True (default), includes import statements
for each of the modules required for the objects being
instantiated. This list may not be complete, as it typically
includes only the imports needed for the Parameterized object
itself, not for values that may have been supplied to Parameters.
Apart from show_imports, accepts the same arguments as pprint(),
so see pprint() for explanations of the arguments accepted. The
default values of each of these arguments differ from pprint() in
ways that are more suitable for saving as a separate script than
for e.g. pretty-printing at the Python prompt.
"""
if imports is None:
imports = []
rep = pprint(val, imports, prefix, settings, unknown_value,
qualify, separator)
imports = list(set(imports))
imports_str = ("\n".join(imports) + "\n\n") if show_imports else ""
return imports_str + rep
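# Example (illustrative; ``P`` is hypothetical and the full ``param``
# package is assumed to be importable):
#
#     import param
#
#     class P(param.Parameterized):
#         a = param.Number(default=1)
#         b = param.String(default='hello')
#
#     p = P(a=2)
#     print(param.script_repr(p))
#     # Expected to print an import line followed by a constructor call,
#     # along the lines of:
#     #     import __main__
#     #
#     #     __main__.P(a=2)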
# PARAM2_DEPRECATION: Remove entirely unused settings argument
def pprint(val,imports=None, prefix="\n ", settings=[],
unknown_value='<?>', qualify=False, separator=''):
"""
Pretty printed representation of a parameterized
object that may be evaluated with eval.
Similar to repr except introspection of the constructor (__init__)
ensures a valid and succinct representation is generated.
Only parameters are represented (whether specified as standard,
positional, or keyword arguments). Parameters specified as
positional arguments are always shown, followed by modified
parameters specified as keyword arguments, sorted by precedence.
unknown_value determines what to do where a representation cannot be
generated for something required to recreate the object. Such things
include non-parameter positional and keyword arguments, and certain
values of parameters (e.g. some random state objects).
Supplying an unknown_value of None causes unrepresentable things
to be silently ignored. If unknown_value is a string, that
string will appear in place of any unrepresentable things. If
unknown_value is False, an Exception will be raised if an
unrepresentable value is encountered.
If supplied, imports should be a list, and it will be populated
with the set of imports required for the object and all of its
parameter values.
If qualify is True, the class's path will be included (e.g. "a.b.C()"),
otherwise only the class will appear ("C()").
Parameters will be separated by a comma only by default, but the
separator parameter allows an additional separator to be supplied
(e.g. a newline could be supplied to have each Parameter appear on a
separate line).
Instances of types that require special handling can use the
script_repr_reg dictionary. Using the type as a key, add a
function that returns a suitable representation of instances of
that type, and adds the required import statement. The repr of a
parameter can be suppressed by returning None from the appropriate
hook in script_repr_reg.
"""
if imports is None:
imports = []
if isinstance(val,type):
rep = type_script_repr(val,imports,prefix,settings)
elif type(val) in script_repr_reg:
rep = script_repr_reg[type(val)](val,imports,prefix,settings)
elif isinstance(val, Parameterized) or (type(val) is type and issubclass(val, Parameterized)):
rep=val.param.pprint(imports=imports, prefix=prefix+" ",
qualify=qualify, unknown_value=unknown_value,
separator=separator)
else:
rep=repr(val)
return rep
# Registry for special handling for certain types in script_repr and pprint
script_repr_reg = {}
# currently only handles list and tuple
def container_script_repr(container,imports,prefix,settings):
result=[]
for i in container:
result.append(pprint(i,imports,prefix,settings))
## (hack to get container brackets)
if isinstance(container,list):
d1,d2='[',']'
elif isinstance(container,tuple):
d1,d2='(',')'
else:
raise NotImplementedError
rep=d1+','.join(result)+d2
# no imports to add for built-in types
return rep
def empty_script_repr(*args): # pyflakes:ignore (unused arguments)
return None
try:
# Suppress script_repr for objects not yet having a useful string representation
import numpy
script_repr_reg[random.Random] = empty_script_repr
script_repr_reg[numpy.random.RandomState] = empty_script_repr
except ImportError:
pass # Support added only if those libraries are available
def function_script_repr(fn,imports,prefix,settings):
name = fn.__name__
module = fn.__module__
imports.append('import %s'%module)
return module+'.'+name
def type_script_repr(type_,imports,prefix,settings):
module = type_.__module__
if module!='__builtin__':
imports.append('import %s'%module)
return module+'.'+type_.__name__
script_repr_reg[list] = container_script_repr
script_repr_reg[tuple] = container_script_repr
script_repr_reg[FunctionType] = function_script_repr
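# Example of registering a custom handler (illustrative; the handler below
# is hypothetical). A handler receives (value, imports, prefix, settings)
# and returns a string representation, or None to suppress the value:
#
#     import datetime
#
#     def datetime_script_repr(val, imports, prefix, settings):
#         imports.append('import datetime')
#         return repr(val)  # e.g. 'datetime.datetime(2024, 1, 1, 0, 0)'
#
#     script_repr_reg[datetime.datetime] = datetime_script_repr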
#: If not None, the value of this Parameter will be called (using '()')
#: before every call to __db_print, and is expected to evaluate to a
#: string that is suitable for prefixing messages and warnings (such
#: as some indicator of the global state).
dbprint_prefix=None
def truncate(str_, maxlen = 30):
"""Return HTML-safe truncated version of given string"""
rep = (str_[:(maxlen-2)] + '..') if (len(str_) > (maxlen-2)) else str_
return html.escape(rep)
def _get_param_repr(key, val, p, vallen=30, doclen=40):
"""HTML representation for a single Parameter object and its value"""
if isinstance(val, Parameterized) or (type(val) is type and issubclass(val, Parameterized)):
value = val.param._repr_html_(open=False)
elif hasattr(val, "_repr_html_"):
value = val._repr_html_()
else:
value = truncate(repr(val), vallen)
if hasattr(p, 'bounds'):
if p.bounds is None:
range_ = ''
elif hasattr(p,'inclusive_bounds'):
# Numeric bounds use ( and [ to indicate exclusive and inclusive
bl,bu = p.bounds
il,iu = p.inclusive_bounds
lb = '' if bl is None else ('>=' if il else '>') + str(bl)
ub = '' if bu is None else ('<=' if iu else '<') + str(bu)
range_ = lb + (', ' if lb and bu else '') + ub
else:
range_ = repr(p.bounds)
elif hasattr(p, 'objects') and p.objects:
range_ = ', '.join(list(map(repr, p.objects)))
elif hasattr(p, 'class_'):
if isinstance(p.class_, tuple):
range_ = ' | '.join(kls.__name__ for kls in p.class_)
else:
range_ = p.class_.__name__
elif hasattr(p, 'regex') and p.regex is not None:
range_ = f'regex({p.regex})'
else:
range_ = ''
if p.readonly:
range_ = ' '.join(s for s in ['<i>read-only</i>', range_] if s)
elif p.constant:
range_ = ' '.join(s for s in ['<i>constant</i>', range_] if s)
if getattr(p, 'allow_None', False):
range_ = ' '.join(s for s in ['<i>nullable</i>', range_] if s)
tooltip = f' class="param-doc-tooltip" data-tooltip="{escape(p.doc.strip())}"' if p.doc else ''
return (
f'<tr>'
f' <td><p style="margin-bottom: 0px;"{tooltip}>{key}</p></td>'
f' <td style="max-width: 200px; text-align:left;">{value}</td>'
f' <td style="text-align:left;">{p.__class__.__name__}</td>'
f' <td style="max-width: 300px;">{range_}</td>'
f'</tr>\n'
)
def _parameterized_repr_html(p, open):
"""HTML representation for a Parameterized object"""
if isinstance(p, Parameterized):
cls = p.__class__
title = cls.name + "()"
value_field = 'Value'
else:
cls = p
title = cls.name
value_field = 'Default'
tooltip_css = """
.param-doc-tooltip{
position: relative;
cursor: help;
}
.param-doc-tooltip:hover:after{
content: attr(data-tooltip);
background-color: black;
color: #fff;
border-radius: 3px;
padding: 10px;
position: absolute;
z-index: 1;
top: -5px;
left: 100%;
margin-left: 10px;
min-width: 250px;
}
.param-doc-tooltip:hover:before {
content: "";
position: absolute;
top: 50%;
left: 100%;
margin-top: -5px;
border-width: 5px;
border-style: solid;
border-color: transparent black transparent transparent;
}
"""
openstr = " open" if open else ""
param_values = p.param.values().items()
contents = "".join(_get_param_repr(key, val, p.param[key])
for key, val in param_values)
return (
f'<style>{tooltip_css}</style>\n'
f'<details {openstr}>\n'
' <summary style="display:list-item; outline:none;">\n'
f' <tt>{title}</tt>\n'
' </summary>\n'
' <div style="padding-left:10px; padding-bottom:5px;">\n'
' <table style="max-width:100%; border:1px solid #AAAAAA;">\n'
f' <tr><th style="text-align:left;">Name</th><th style="text-align:left;">{value_field}</th><th style="text-align:left;">Type</th><th>Range</th></tr>\n'
f'{contents}\n'
' </table>\n </div>\n</details>\n'
)
# _ClassPrivate and _InstancePrivate are the private namespaces of Parameterized
# classes and instance respectively, stored on the `_param__private` attribute.
# They are implemented with slots for performance reasons.
class _ClassPrivate:
"""
parameters_state: dict
Dict holding some transient states
disable_instance_params: bool
Whether to disable instance parameters
renamed: bool
Whether the class has been renamed by a super class
params: dict
Dict of parameter_name:parameter
initialized: bool
Whether the class has been fully initialized
signature: inspect.Signature or None
Cached constructor signature generated from the class Parameters
explicit_no_refs: list
Names of Parameters that explicitly declare allow_refs=False
"""
__slots__ = [
'parameters_state',
'disable_instance_params',
'renamed',
'params',
'initialized',
'signature',
'explicit_no_refs',
]
def __init__(
self,
parameters_state=None,
disable_instance_params=False,
explicit_no_refs=None,
renamed=False,
params=None,
):
if parameters_state is None:
parameters_state = {
"BATCH_WATCH": False, # If true, Event and watcher objects are queued.
"TRIGGER": False,
"events": [], # Queue of batched events
"watchers": [] # Queue of batched watchers
}
self.parameters_state = parameters_state
self.disable_instance_params = disable_instance_params
self.renamed = renamed
self.params = {} if params is None else params
self.initialized = False
self.signature = None
self.explicit_no_refs = [] if explicit_no_refs is None else explicit_no_refs
def __getstate__(self):
return {slot: getattr(self, slot) for slot in self.__slots__}
def __setstate__(self, state):
for k, v in state.items():
setattr(self, k, v)
class _InstancePrivate:
"""
initialized: bool
Flag that can be tested to see if e.g. constant Parameters can still be set
parameters_state: dict
Dict holding some transient states
dynamic_watchers: defaultdict
Dynamic watchers
ref_watchers: list[Watcher]
Watchers used for internal references
params: dict
Dict of parameter_name:parameter
refs: dict
Dict of parameter name:reference
watchers: dict
Dict of dict:
parameter_name:
parameter_attribute (e.g. 'value'): list of `Watcher`s
values: dict
Dict of parameter name: value
async_refs: dict
Dict of parameter name: active asynchronous reference
syncing: set
Names of parameters currently being synced from a reference
explicit_no_refs: list
Names of Parameters that explicitly declare allow_refs=False
"""
__slots__ = [
'initialized',
'parameters_state',
'dynamic_watchers',
'params',
'async_refs',
'refs',
'ref_watchers',
'syncing',
'watchers',
'values',
'explicit_no_refs',
]
def __init__(
self,
initialized=False,
parameters_state=None,
dynamic_watchers=None,
refs=None,
params=None,
watchers=None,
values=None,
explicit_no_refs=None
):
self.initialized = initialized
self.explicit_no_refs = [] if explicit_no_refs is None else explicit_no_refs
self.syncing = set()
if parameters_state is None:
parameters_state = {
"BATCH_WATCH": False, # If true, Event and watcher objects are queued.
"TRIGGER": False,
"events": [], # Queue of batched events
"watchers": [] # Queue of batched watchers
}
self.ref_watchers = []
self.async_refs = {}
self.parameters_state = parameters_state
self.dynamic_watchers = defaultdict(list) if dynamic_watchers is None else dynamic_watchers
self.params = {} if params is None else params
self.refs = {} if refs is None else refs
self.watchers = {} if watchers is None else watchers
self.values = {} if values is None else values
def __getstate__(self):
return {slot: getattr(self, slot) for slot in self.__slots__}
def __setstate__(self, state):
for k, v in state.items():
setattr(self, k, v)
class Parameterized(metaclass=ParameterizedMetaclass):
"""
Base class for named objects that support Parameters and message
formatting.
Automatic object naming: Every Parameterized instance has a name
parameter. If the user doesn't designate a name=<str> argument
when constructing the object, the object will be given a name
consisting of its class name followed by a unique 5-digit number.
Automatic parameter setting: The Parameterized __init__ method
will automatically read the list of keyword parameters. If any
keyword matches the name of a Parameter (see Parameter class)
defined in the object's class or any of its superclasses, that
parameter in the instance will get the value given as a keyword
argument. For example:
class Foo(Parameterized):
xx = Parameter(default=1)
foo = Foo(xx=20)
in this case foo.xx gets the value 20.
When initializing a Parameterized instance ('foo' in the example
above), the values of parameters can be supplied as keyword
arguments to the constructor (using parametername=parametervalue);
these values will override the class default values for this one
instance.
If no 'name' parameter is supplied, self.name defaults to the
object's class name with a unique number appended to it.
Message formatting: Each Parameterized instance has several
methods for optionally printing output. This functionality is
based on the standard Python 'logging' module; the methods
provided here wrap calls to the 'logging' module's root logger,
prepending each message with information about the instance
from which the call was made. For more information on how to set
the global logging level and change the default message prefix,
see documentation for the 'logging' module.
"""
name = String(default=None, constant=True, doc="""
String identifier for this object.""")
def __init__(self, **params):
global object_count
# Setting a Parameter value in an __init__ block before calling
# Parameterized.__init__ (via super() generally) already sets the
# _InstancePrivate namespace over the _ClassPrivate namespace
# (see Parameter.__set__) so we shouldn't override it here.
if not isinstance(self._param__private, _InstancePrivate):
self._param__private = _InstancePrivate(
explicit_no_refs=type(self)._param__private.explicit_no_refs
)
# Skip generating a custom instance name when a class in the hierarchy
# has overridden the default of the `name` Parameter.
if self.param.name.default == self.__class__.__name__:
self.param._generate_name()
refs, deps = self.param._setup_params(**params)
object_count += 1
self._param__private.initialized = True
self.param._setup_refs(deps)
self.param._update_deps(init=True)
self._param__private.refs = refs
@property
def param(self):
return Parameters(self.__class__, self=self)
#PARAM3_DEPRECATION
@property
@_deprecated(extra_msg="Use `inst.param.watchers` instead.", warning_cat=_ParamFutureWarning)
def _param_watchers(self):
return self._param__private.watchers
#PARAM3_DEPRECATION
@_param_watchers.setter
@_deprecated(extra_msg="Use `inst.param.watchers = ...` instead.", warning_cat=_ParamFutureWarning)
def _param_watchers(self, value):
self._param__private.watchers = value
# 'Special' methods
def __getstate__(self):
"""
Save the object's state: return a dictionary that is a shallow
copy of the object's __dict__ and that also includes the
object's __slots__ (if it has any).
"""
# Unclear why this is a copy and not simply state.update(self.__dict__)
state = self.__dict__.copy()
for slot in get_occupied_slots(self):
state[slot] = getattr(self,slot)
# Note that Parameterized object pickling assumes that
# attributes to be saved are only in __dict__ or __slots__
# (the standard Python places to store attributes, so that's a
# reasonable assumption). (Additionally, class attributes that
# are Parameters are also handled, even when they haven't been
# instantiated - see PickleableClassAttributes.)
return state
def __setstate__(self, state):
"""
Restore objects from the state dictionary to this object.
During this process the object is considered uninitialized.
"""
explicit_no_refs = type(self)._param__private.explicit_no_refs
self._param__private = _InstancePrivate(explicit_no_refs=explicit_no_refs)
self._param__private.initialized = False
_param__private = state.get('_param__private', None)
if _param__private is None:
_param__private = _InstancePrivate(explicit_no_refs=explicit_no_refs)
# When making a copy the internal watchers have to be
# recreated and point to the new instance
if _param__private.watchers:
param_watchers = _param__private.watchers
for p, attrs in param_watchers.items():
for attr, watchers in attrs.items():
new_watchers = []
for watcher in watchers:
watcher_args = list(watcher)
if watcher.inst is not None:
watcher_args[0] = self
fn = watcher.fn
if hasattr(fn, '_watcher_name'):
watcher_args[2] = _m_caller(self, fn._watcher_name)
elif get_method_owner(fn) is watcher.inst:
watcher_args[2] = getattr(self, fn.__name__)
new_watchers.append(Watcher(*watcher_args))
param_watchers[p][attr] = new_watchers
state.pop('param', None)
for name,value in state.items():
setattr(self,name,value)
self._param__private.initialized = True
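# Example (illustrative; ``P`` is hypothetical and must be importable by
# pickle, e.g. defined in a module or script rather than interactively).
# __getstate__/__setstate__ make instances picklable and deep-copyable:
#
#     import copy, pickle
#
#     class P(Parameterized):
#         a = Parameter(default=1)
#
#     p = P(a=3)
#     assert pickle.loads(pickle.dumps(p)).a == 3
#     assert copy.deepcopy(p).a == 3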
@_recursive_repr()
def __repr__(self):
"""
Provide a nearly valid Python representation that could be used to recreate
the item with its parameters, if executed in the appropriate environment.
Returns 'classname(parameter1=x,parameter2=y,...)', listing
all the parameters of this object.
"""
try:
settings = [f'{name}={val!r}'
for name, val in self.param.values().items()]
except RuntimeError: # Handle recursion in parameter depth
settings = []
return self.__class__.__name__ + "(" + ", ".join(settings) + ")"
def __str__(self):
"""Return a short representation of the name and class of this object."""
return f"<{self.__class__.__name__} {self.name}>"
def print_all_param_defaults():
"""Print the default values for all imported Parameters."""
print("_______________________________________________________________________________")
print("")
print(" Parameter Default Values")
print("")
classes = descendents(Parameterized)
classes.sort(key=lambda x:x.__name__)
for c in classes:
c.print_param_defaults()
print("_______________________________________________________________________________")
# As of Python 2.6+, a fn's **args no longer has to be a
# dictionary. This might allow us to use a decorator to simplify using
# ParamOverrides (if that does indeed make them simpler to use).
# http://docs.python.org/whatsnew/2.6.html
class ParamOverrides(dict):
"""
A dictionary that returns the attribute of a specified object if
that attribute is not present in itself.
Used to override the parameters of an object.
"""
# NOTE: Attribute names of this object block parameters of the
# same name, so all attributes of this object should have names
# starting with an underscore (_).
def __init__(self,overridden,dict_,allow_extra_keywords=False):
"""
If allow_extra_keywords is False, then all keys in the
supplied dict_ must match parameter names on the overridden
object (otherwise a warning will be printed).
If allow_extra_keywords is True, then any items in the
supplied dict_ that are not also parameters of the overridden
object will be available via the extra_keywords() method.
"""
# This method should be fast because it's going to be
# called a lot. This _might_ be faster (not tested):
# def __init__(self,overridden,**kw):
# ...
# dict.__init__(self,**kw)
self._overridden = overridden
dict.__init__(self,dict_)
if allow_extra_keywords:
self._extra_keywords=self._extract_extra_keywords(dict_)
else:
self._check_params(dict_)
def extra_keywords(self):
"""
Return a dictionary containing items from the originally
supplied `dict_` whose names are not parameters of the
overridden object.
"""
return self._extra_keywords
def param_keywords(self):
"""
Return a dictionary containing items from the originally
supplied `dict_` whose names are parameters of the
overridden object (i.e. not extra keywords/parameters).
"""
return {key: self[key] for key in self if key not in self.extra_keywords()}
def __missing__(self,name):
# Return 'name' from the overridden object
return getattr(self._overridden,name)
def __repr__(self):
# As dict.__repr__, but indicate the overridden object
return dict.__repr__(self)+" overriding params from %s"%repr(self._overridden)
def __getattr__(self,name):
# Provide 'dot' access to entries in the dictionary.
# (This __getattr__ method is called only if 'name' isn't an
# attribute of self.)
return self.__getitem__(name)
def __setattr__(self,name,val):
# Attributes whose name starts with _ are set on self (as
# normal), but all other attributes are inserted into the
# dictionary.
if not name.startswith('_'):
self.__setitem__(name,val)
else:
dict.__setattr__(self,name,val)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
return key in self.__dict__ or key in self._overridden.param
def _check_params(self,params):
"""
Print a warning if params contains something that is not a
Parameter of the overridden object.
"""
overridden_object_params = list(self._overridden.param)
for item in params:
if item not in overridden_object_params:
self.param.warning("'%s' will be ignored (not a Parameter).",item)
def _extract_extra_keywords(self,params):
"""
Return any items in params that are not also
parameters of the overridden object.
"""
extra_keywords = {}
overridden_object_params = list(self._overridden.param)
for name, val in params.items():
if name not in overridden_object_params:
extra_keywords[name]=val
# Could remove name from params (i.e. del params[name])
# so that it's only available via extra_keywords()
return extra_keywords
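# Example (illustrative; ``P`` is hypothetical). Keys supplied in the dict
# shadow the overridden object's parameter values; anything else falls back
# to the overridden object:
#
#     class P(Parameterized):
#         a = Parameter(default=1)
#         b = Parameter(default=2)
#
#     overrides = ParamOverrides(P(), {'a': 10})
#     overrides['a']   # 10, from the supplied dict
#     overrides['b']   # 2, falls back to the overridden instance
#     overrides.b      # 2, attribute-style access works too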
# Helper function required by ParameterizedFunction.__reduce__
def _new_parameterized(cls):
return Parameterized.__new__(cls)
class ParameterizedFunction(Parameterized):
"""
Acts like a Python function, but with arguments that are Parameters.
Implemented as a subclass of Parameterized that, when instantiated,
automatically invokes __call__ and returns the result, instead of
returning an instance of the class.
To obtain an instance of this class, call instance().
"""
__abstract = True
def __str__(self):
return self.__class__.__name__+"()"
@bothmethod
def instance(self_or_cls,**params):
"""
Return an instance of this class, copying parameters from any
existing instance provided.
"""
if isinstance (self_or_cls,ParameterizedMetaclass):
cls = self_or_cls
else:
p = params
params = self_or_cls.param.values()
params.update(p)
params.pop('name')
cls = self_or_cls.__class__
inst=Parameterized.__new__(cls)
Parameterized.__init__(inst,**params)
if 'name' in params: inst.__name__ = params['name']
else: inst.__name__ = self_or_cls.name
return inst
def __new__(class_,*args,**params):
# Create and __call__() an instance of this class.
inst = class_.instance()
inst.param._set_name(class_.__name__)
return inst.__call__(*args,**params)
def __call__(self,*args,**kw):
raise NotImplementedError("Subclasses must implement __call__.")
def __reduce__(self):
# Control reconstruction (during unpickling and copying):
# ensure that ParameterizedFunction.__new__ is skipped
state = ParameterizedFunction.__getstate__(self)
# Here it's necessary to use a function defined at the
# module level rather than Parameterized.__new__ directly
# because otherwise pickle will find .__new__'s module to be
# __main__. Pretty obscure aspect of pickle.py...
return (_new_parameterized,(self.__class__,),state)
def _pprint(self, imports=None, prefix="\n ",unknown_value='<?>',
qualify=False, separator=""):
"""
Same as self.param.pprint, except that 'X.classname(Y'
is replaced with 'X.classname.instance(Y'.
"""
r = self.param.pprint(imports,prefix,
unknown_value=unknown_value,
qualify=qualify,separator=separator)
classname=self.__class__.__name__
return r.replace(".%s("%classname,".%s.instance("%classname)
class default_label_formatter(ParameterizedFunction):
"Default formatter to turn parameter names into appropriate widget labels."
capitalize = Parameter(default=True, doc="""
Whether or not the label should be capitalized.""")
replace_underscores = Parameter(default=True, doc="""
Whether or not underscores should be replaced with spaces.""")
overrides = Parameter(default={}, doc="""
Allows custom labels to be specified for specific parameter
names using a dictionary where key is the parameter name and the
value is the desired label.""")
def __call__(self, pname):
if pname in self.overrides:
return self.overrides[pname]
if self.replace_underscores:
pname = pname.replace('_',' ')
if self.capitalize:
pname = pname[:1].upper() + pname[1:]
return pname
label_formatter = default_label_formatter
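# Example usage of the default label formatter (illustrative):
#
#     default_label_formatter('my_value')                              # 'My value'
#     default_label_formatter.instance(capitalize=False)('my_value')   # 'my value'
#     default_label_formatter.instance(overrides={'num': 'N'})('num')  # 'N'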
# PARAM3_DEPRECATION: Should be able to remove this; was originally
# adapted from OProperty from
# infinitesque.net/articles/2005/enhancing%20Python's%20property.xhtml
# but since python 2.6 the getter, setter, and deleter attributes of
# a property should provide similar functionality already.
class overridable_property:
"""
The same as Python's "property" attribute, but allows the accessor
methods to be overridden in subclasses.
.. deprecated:: 2.0.0
"""
# Delays looking up the accessors until they're needed, rather
# than finding them when the class is first created.
# Based on the emulation of PyProperty_Type() in Objects/descrobject.c
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
warnings.warn(
message="overridable_property has been deprecated.",
category=_ParamDeprecationWarning,
stacklevel=2,
)
self.fget = fget
self.fset = fset
self.fdel = fdel
self.__doc__ = doc
def __get__(self, obj, objtype=None):
if obj is None:
return self
if self.fget is None:
raise AttributeError("unreadable attribute")
if self.fget.__name__ == '<lambda>' or not self.fget.__name__:
return self.fget(obj)
else:
return getattr(obj, self.fget.__name__)()
def __set__(self, obj, value):
if self.fset is None:
raise AttributeError("can't set attribute")
if self.fset.__name__ == '<lambda>' or not self.fset.__name__:
self.fset(obj, value)
else:
getattr(obj, self.fset.__name__)(value)
def __delete__(self, obj):
if self.fdel is None:
raise AttributeError("can't delete attribute")
if self.fdel.__name__ == '<lambda>' or not self.fdel.__name__:
self.fdel(obj)
else:
getattr(obj, self.fdel.__name__)()