From 98e23b6831cae50ef032e3ca430b10bb4ba1fa3f Mon Sep 17 00:00:00 2001 From: Jinnapat Indrapiromkul Date: Fri, 27 Jan 2023 12:11:12 +0100 Subject: [PATCH 1/3] Attach default experiment to a measurement --- qcodes/dataset/measurements.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/qcodes/dataset/measurements.py b/qcodes/dataset/measurements.py index c073df94aaf..16741a0a31f 100644 --- a/qcodes/dataset/measurements.py +++ b/qcodes/dataset/measurements.py @@ -48,10 +48,11 @@ InterDependencies_, ) from qcodes.dataset.descriptions.param_spec import ParamSpec, ParamSpecBase -from qcodes.dataset.descriptions.rundescriber import RunDescriber from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes -from qcodes.dataset.experiment_container import Experiment +from qcodes.dataset.experiment_container import Experiment, load_experiment +from qcodes.dataset.experiment_settings import get_default_experiment_id from qcodes.dataset.export_config import get_data_export_automatic +from qcodes.dataset.sqlite.database import conn_from_dbpath_or_conn from qcodes.dataset.sqlite.query_helpers import VALUE from qcodes.parameters import ( ArrayParameter, @@ -709,6 +710,8 @@ def __init__( self.enteractions: list[ActionType] = [] self.subscribers: list[SubscriberType] = [] + if not exp: + exp = load_experiment(get_default_experiment_id(conn_from_dbpath_or_conn(None, None))) self.experiment = exp self.station = station self.name = name From ed578f3a840935e14b006fa08608e6315b5e2734 Mon Sep 17 00:00:00 2001 From: Jinnapat Indrapiromkul Date: Fri, 27 Jan 2023 13:09:59 +0100 Subject: [PATCH 2/3] formatted --- qcodes/dataset/measurements.py | 522 +++++++++++++++++++-------------- 1 file changed, 297 insertions(+), 225 deletions(-) diff --git a/qcodes/dataset/measurements.py b/qcodes/dataset/measurements.py index 16741a0a31f..b27374d4b21 100644 --- a/qcodes/dataset/measurements.py +++ b/qcodes/dataset/measurements.py @@ -73,9 +73,9 @@ ActionType = Tuple[Callable[..., Any], Sequence[Any]] -SubscriberType = Tuple[Callable[..., Any], - Union[MutableSequence[Any], - MutableMapping[Any, Any]]] +SubscriberType = Tuple[ + Callable[..., Any], Union[MutableSequence[Any], MutableMapping[Any, Any]] +] class ParameterTypeError(Exception): @@ -198,21 +198,22 @@ def add_result(self, *res_tuple: res_type) -> None: if not isinstance(data, np.ndarray): raise TypeError( f"Expected data for Parameter with Array validator " - f"to be a numpy array but got: {type(data)}") + f"to be a numpy array but got: {type(data)}" + ) - if (parameter.vals.shape is not None - and data.shape != parameter.vals.shape): + if ( + parameter.vals.shape is not None + and data.shape != parameter.vals.shape + ): raise TypeError( f"Expected data with shape {parameter.vals.shape}, " f"but got {data.shape} for parameter: {parameter.full_name}" ) if isinstance(parameter, ArrayParameter): - results_dict.update( - self._unpack_arrayparameter(partial_result)) + results_dict.update(self._unpack_arrayparameter(partial_result)) elif isinstance(parameter, MultiParameter): - results_dict.update( - self._unpack_multiparameter(partial_result)) + results_dict.update(self._unpack_multiparameter(partial_result)) elif isinstance(parameter, ParameterWithSetpoints): results_dict.update( self._conditionally_expand_parameter_with_setpoints( @@ -220,9 +221,7 @@ def add_result(self, *res_tuple: res_type) -> None: ) ) else: - results_dict.update( - self._unpack_partial_result(partial_result) - ) + 
results_dict.update(self._unpack_partial_result(partial_result)) self._validate_result_deps(results_dict) self._validate_result_shapes(results_dict) @@ -235,25 +234,29 @@ def add_result(self, *res_tuple: res_type) -> None: self._last_save_time = perf_counter() def _conditionally_expand_parameter_with_setpoints( - self, data: values_type, parameter: ParameterWithSetpoints, - parameter_names: Sequence[str], partial_result: res_type + self, + data: values_type, + parameter: ParameterWithSetpoints, + parameter_names: Sequence[str], + partial_result: res_type, ) -> dict[ParamSpecBase, np.ndarray]: local_results = {} setpoint_names = tuple(setpoint.full_name for setpoint in parameter.setpoints) - expanded = tuple(setpoint_name in parameter_names for setpoint_name in setpoint_names) + expanded = tuple( + setpoint_name in parameter_names for setpoint_name in setpoint_names + ) if all(expanded): - local_results.update( - self._unpack_partial_result(partial_result)) + local_results.update(self._unpack_partial_result(partial_result)) elif any(expanded): - raise ValueError(f"Some of the setpoints of {parameter.full_name} " - "were explicitly given but others were not. " - "Either supply all of them or none of them.") + raise ValueError( + f"Some of the setpoints of {parameter.full_name} " + "were explicitly given but others were not. " + "Either supply all of them or none of them." + ) else: expanded_partial_result = expand_setpoints_helper(parameter, data) for res in expanded_partial_result: - local_results.update( - self._unpack_partial_result(res) - ) + local_results.update(self._unpack_partial_result(res)) return local_results def _unpack_partial_result( @@ -268,9 +271,11 @@ def _unpack_partial_result( try: parameter = self._interdeps._id_to_paramspec[str(param)] except KeyError: - raise ValueError('Can not add result for parameter ' - f'{param}, no such parameter registered ' - 'with this measurement.') + raise ValueError( + "Can not add result for parameter " + f"{param}, no such parameter registered " + "with this measurement." + ) return {parameter: np.array(values)} def _unpack_arrayparameter( @@ -284,15 +289,19 @@ def _unpack_arrayparameter( array_param = cast(ArrayParameter, array_param) if array_param.setpoints is None: - raise RuntimeError(f"{array_param.full_name} is an " - f"{type(array_param)} " - f"without setpoints. Cannot handle this.") + raise RuntimeError( + f"{array_param.full_name} is an " + f"{type(array_param)} " + f"without setpoints. Cannot handle this." + ) try: main_parameter = self._interdeps._id_to_paramspec[str(array_param)] except KeyError: - raise ValueError('Can not add result for parameter ' - f'{array_param}, no such parameter registered ' - 'with this measurement.') + raise ValueError( + "Can not add result for parameter " + f"{array_param}, no such parameter registered " + "with this measurement." + ) res_dict = {main_parameter: np.array(values_array)} @@ -301,8 +310,9 @@ def _unpack_arrayparameter( res_dict.update( self._unpack_setpoints_from_parameter( - array_param, array_param.setpoints, - sp_names, fallback_sp_name)) + array_param, array_param.setpoints, sp_names, fallback_sp_name + ) + ) return res_dict @@ -321,9 +331,11 @@ def _unpack_multiparameter( result_dict = {} if parameter.setpoints is None: - raise RuntimeError(f"{parameter.full_name} is an " - f"{type(parameter)} " - f"without setpoints. Cannot handle this.") + raise RuntimeError( + f"{parameter.full_name} is an " + f"{type(parameter)} " + f"without setpoints. Cannot handle this." 
+ ) for i in range(len(parameter.shapes)): # if this loop runs, then 'data' is a Sequence data = cast(Sequence[Union[str, int, float, Any]], data) @@ -333,30 +345,33 @@ def _unpack_multiparameter( try: paramspec = self._interdeps._id_to_paramspec[parameter.full_names[i]] except KeyError: - raise ValueError('Can not add result for parameter ' - f'{parameter.names[i]}, ' - 'no such parameter registered ' - 'with this measurement.') + raise ValueError( + "Can not add result for parameter " + f"{parameter.names[i]}, " + "no such parameter registered " + "with this measurement." + ) result_dict.update({paramspec: np.array(data[i])}) if shape != (): # array parameter like part of the multiparameter # need to find setpoints too - fallback_sp_name = f'{parameter.full_names[i]}_setpoint' + fallback_sp_name = f"{parameter.full_names[i]}_setpoint" sp_names: Sequence[str] | None - if (parameter.setpoint_full_names is not None - and parameter.setpoint_full_names[i] is not None): + if ( + parameter.setpoint_full_names is not None + and parameter.setpoint_full_names[i] is not None + ): sp_names = parameter.setpoint_full_names[i] else: sp_names = None result_dict.update( self._unpack_setpoints_from_parameter( - parameter, - parameter.setpoints[i], - sp_names, - fallback_sp_name)) + parameter, parameter.setpoints[i], sp_names, fallback_sp_name + ) + ) return result_dict @@ -379,13 +394,15 @@ def _unpack_setpoints_from_parameter( if sp_names is not None: spname = sp_names[i] else: - spname = f'{fallback_sp_name}_{i}' + spname = f"{fallback_sp_name}_{i}" try: setpoint_parameter = self._interdeps[spname] except KeyError: - raise RuntimeError('No setpoints registered for ' - f'{type(parameter)} {parameter.full_name}!') + raise RuntimeError( + "No setpoints registered for " + f"{type(parameter)} {parameter.full_name}!" + ) sps = np.array(sps) while sps.ndim > 1: # The outermost setpoint axis or an nD param is nD @@ -396,7 +413,7 @@ def _unpack_setpoints_from_parameter( setpoint_parameters.append(setpoint_parameter) setpoint_axes.append(sps) - output_grids = np.meshgrid(*setpoint_axes, indexing='ij') + output_grids = np.meshgrid(*setpoint_axes, indexing="ij") result_dict = {} for grid, param in zip(output_grids, setpoint_parameters): result_dict.update({param: grid}) @@ -404,7 +421,8 @@ def _unpack_setpoints_from_parameter( return result_dict def _validate_result_deps( - self, results_dict: Mapping[ParamSpecBase, values_type]) -> None: + self, results_dict: Mapping[ParamSpecBase, values_type] + ) -> None: """ Validate that the dependencies of the ``results_dict`` are met, meaning that (some) values for all required setpoints and inferences @@ -413,46 +431,58 @@ def _validate_result_deps( try: self._interdeps.validate_subset(list(results_dict.keys())) except (DependencyError, InferenceError) as err: - raise ValueError('Can not add result, some required parameters ' - 'are missing.') from err + raise ValueError( + "Can not add result, some required parameters " "are missing." + ) from err def _validate_result_shapes( - self, results_dict: Mapping[ParamSpecBase, values_type]) -> None: + self, results_dict: Mapping[ParamSpecBase, values_type] + ) -> None: """ Validate that all sizes of the ``results_dict`` are consistent. This means that array-values of parameters and their setpoints are of the same size, whereas parameters with no setpoint relation to each other can have different sizes. 
""" - toplevel_params = (set(self._interdeps.dependencies) - .intersection(set(results_dict))) + toplevel_params = set(self._interdeps.dependencies).intersection( + set(results_dict) + ) for toplevel_param in toplevel_params: required_shape = np.shape(np.array(results_dict[toplevel_param])) for setpoint in self._interdeps.dependencies[toplevel_param]: # a setpoint is allowed to be a scalar; shape is then () setpoint_shape = np.shape(np.array(results_dict[setpoint])) if setpoint_shape not in [(), required_shape]: - raise ValueError(f'Incompatible shapes. Parameter ' - f"{toplevel_param.name} has shape " - f"{required_shape}, but its setpoint " - f"{setpoint.name} has shape " - f"{setpoint_shape}.") + raise ValueError( + f"Incompatible shapes. Parameter " + f"{toplevel_param.name} has shape " + f"{required_shape}, but its setpoint " + f"{setpoint.name} has shape " + f"{setpoint_shape}." + ) @staticmethod def _validate_result_types( - results_dict: Mapping[ParamSpecBase, np.ndarray]) -> None: + results_dict: Mapping[ParamSpecBase, np.ndarray] + ) -> None: """ Validate the type of the results """ - allowed_kinds = {'numeric': 'iuf', 'text': 'SU', 'array': 'iufcSUmM', - 'complex': 'c'} + allowed_kinds = { + "numeric": "iuf", + "text": "SU", + "array": "iufcSUmM", + "complex": "c", + } for ps, vals in results_dict.items(): - if vals.dtype.kind not in allowed_kinds[ps.type]: - raise ValueError(f'Parameter {ps.name} is of type ' - f'"{ps.type}", but got a result of ' - f'type {vals.dtype} ({vals}).') + if vals.dtype.kind not in allowed_kinds[ps.type]: + raise ValueError( + f"Parameter {ps.name} is of type " + f'"{ps.type}", but got a result of ' + f"type {vals.dtype} ({vals})." + ) def flush_data_to_database(self, block: bool = False) -> None: """ @@ -514,10 +544,10 @@ def __init__( in_memory_cache: bool = True, dataset_class: DataSetType = DataSetType.DataSet, ) -> None: - self._dataset_class = dataset_class - self.write_period = self._calculate_write_period(write_in_background, - write_period) + self.write_period = self._calculate_write_period( + write_in_background, write_period + ) self.enteractions = enteractions self.exitactions = exitactions @@ -530,7 +560,7 @@ def __init__( self.station = station self._interdependencies = interdeps self._shapes: Shapes | None = shapes - self.name = name if name else 'results' + self.name = name if name else "results" self._parent_datasets = parent_datasets self._extra_log_info = extra_log_info self._write_in_background = write_in_background @@ -542,12 +572,14 @@ def _calculate_write_period( write_in_background: bool, write_period: float | None ) -> float: write_period_changed_from_default = ( - write_period is not None and - write_period != qc.config.defaults.dataset.write_period + write_period is not None + and write_period != qc.config.defaults.dataset.write_period ) if write_in_background and write_period_changed_from_default: - warnings.warn(f"The specified write period of {write_period} s " - "will be ignored, since write_in_background==True") + warnings.warn( + f"The specified write period of {write_period} s " + "will be ignored, since write_in_background==True" + ) if write_in_background: return 0.0 if write_period is None: @@ -615,7 +647,7 @@ def __enter__(self) -> DataSaver: # register all subscribers if isinstance(self.ds, DataSet): - for (callble, state) in self.subscribers: + for callble, state in self.subscribers: # We register with minimal waiting time. 
# That should make all subscribers be called when data is flushed # to the database @@ -636,9 +668,10 @@ def __enter__(self) -> DataSaver: log.info(f"Using background writing: {self._write_in_background}") self.datasaver = DataSaver( - dataset=self.ds, - write_period=self.write_period, - interdeps=self._interdependencies) + dataset=self.ds, + write_period=self.write_period, + interdeps=self._interdependencies, + ) return self.datasaver @@ -659,13 +692,14 @@ def __exit__( # if an exception happened during the measurement, # log the exception stream = io.StringIO() - tb_module.print_exception(exception_type, - exception_value, - traceback, - file=stream) + tb_module.print_exception( + exception_type, exception_value, traceback, file=stream + ) exception_string = stream.getvalue() - log.warning('An exception occured in measurement with guid: ' - f'{self.ds.guid};\nTraceback:\n{exception_string}') + log.warning( + "An exception occured in measurement with guid: " + f"{self.ds.guid};\nTraceback:\n{exception_string}" + ) self.ds.add_metadata("measurement_exception", exception_string) # and finally mark the dataset as closed, thus @@ -675,13 +709,15 @@ def __exit__( self.ds.mark_completed() if get_data_export_automatic(): self.datasaver.export_data() - log.info(f'Finished measurement with guid: {self.ds.guid}. ' - f'{self._extra_log_info}') + log.info( + f"Finished measurement with guid: {self.ds.guid}. " + f"{self._extra_log_info}" + ) if isinstance(self.ds, DataSet): self.ds.unsubscribe_all() -T = TypeVar('T', bound='Measurement') +T = TypeVar("T", bound="Measurement") class Measurement: @@ -711,7 +747,9 @@ def __init__( self.subscribers: list[SubscriberType] = [] if not exp: - exp = load_experiment(get_default_experiment_id(conn_from_dbpath_or_conn(None, None))) + exp = load_experiment( + get_default_experiment_id(conn_from_dbpath_or_conn(None, None)) + ) self.experiment = exp self.station = station self.name = name @@ -719,7 +757,7 @@ def __init__( self._interdeps = InterDependencies_() self._shapes: Shapes | None = None self._parent_datasets: list[dict[str, str]] = [] - self._extra_log_info: str = '' + self._extra_log_info: str = "" @property def parameters(self) -> dict[str, ParamSpecBase]: @@ -732,10 +770,10 @@ def write_period(self) -> float: @write_period.setter def write_period(self, wp: float) -> None: if not isinstance(wp, Number): - raise ValueError('The write period must be a number (of seconds).') + raise ValueError("The write period must be a number (of seconds).") wp_float = float(wp) if wp_float < 1e-3: - raise ValueError('The write period must be at least 1 ms.') + raise ValueError("The write period must be at least 1 ms.") self._write_period = wp_float def _paramspecbase_from_strings( @@ -768,8 +806,10 @@ def _paramspecbase_from_strings( sp_psb = idps._id_to_paramspec[sp] depends_on.append(sp_psb) except KeyError: - raise ValueError(f'Unknown setpoint: {sp}.' - ' Please register that parameter first.') + raise ValueError( + f"Unknown setpoint: {sp}." + " Please register that parameter first." + ) # now handle inferred parameters inf_from = [] @@ -779,8 +819,10 @@ def _paramspecbase_from_strings( inff_psb = idps._id_to_paramspec[inff] inf_from.append(inff_psb) except KeyError: - raise ValueError(f'Unknown basis parameter: {inff}.' - ' Please register that parameter first.') + raise ValueError( + f"Unknown basis parameter: {inff}." + " Please register that parameter first." 
+ ) return tuple(depends_on), tuple(inf_from) @@ -799,8 +841,11 @@ def register_parent( # Link object we will eventually make out of this information. We # cannot create a Link object just yet, because the DataSet of this # Measurement has not been given a GUID yet - parent_dict = {'tail': parent.guid, 'edge_type': link_type, - 'description': description} + parent_dict = { + "tail": parent.guid, + "edge_type": link_type, + "description": description, + } self._parent_datasets.append(parent_dict) return self @@ -829,52 +874,60 @@ def register_parameter( and the validator of the supplied parameter. """ if not isinstance(parameter, ParameterBase): - raise ValueError('Can not register object of type {}. Can only ' - 'register a QCoDeS Parameter.' - ''.format(type(parameter))) + raise ValueError( + "Can not register object of type {}. Can only " + "register a QCoDeS Parameter." + "".format(type(parameter)) + ) paramtype = self._infer_paramtype(parameter, paramtype) # default to numeric if paramtype is None: - paramtype = 'numeric' + paramtype = "numeric" # now the parameter type must be valid if paramtype not in ParamSpec.allowed_types: - raise RuntimeError("Trying to register a parameter with type " - f"{paramtype}. However, only " - f"{ParamSpec.allowed_types} are supported.") + raise RuntimeError( + "Trying to register a parameter with type " + f"{paramtype}. However, only " + f"{ParamSpec.allowed_types} are supported." + ) if isinstance(parameter, ArrayParameter): - self._register_arrayparameter(parameter, - setpoints, - basis, - paramtype) + self._register_arrayparameter(parameter, setpoints, basis, paramtype) elif isinstance(parameter, ParameterWithSetpoints): - self._register_parameter_with_setpoints(parameter, - setpoints, - basis, - paramtype) + self._register_parameter_with_setpoints( + parameter, setpoints, basis, paramtype + ) elif isinstance(parameter, MultiParameter): - self._register_multiparameter(parameter, - setpoints, - basis, - paramtype, - ) + self._register_multiparameter( + parameter, + setpoints, + basis, + paramtype, + ) elif isinstance(parameter, Parameter): - self._register_parameter(parameter.full_name, - parameter.label, - parameter.unit, - setpoints, - basis, paramtype) + self._register_parameter( + parameter.full_name, + parameter.label, + parameter.unit, + setpoints, + basis, + paramtype, + ) elif isinstance(parameter, GroupedParameter): - self._register_parameter(parameter.full_name, - parameter.label, - parameter.unit, - setpoints, - basis, paramtype) + self._register_parameter( + parameter.full_name, + parameter.label, + parameter.unit, + setpoints, + basis, + paramtype, + ) else: - raise RuntimeError("Does not know how to register a parameter" - f"of type {type(parameter)}") + raise RuntimeError( + "Does not know how to register a parameter" f"of type {type(parameter)}" + ) return self @@ -896,13 +949,13 @@ def _infer_paramtype(parameter: ParameterBase, paramtype: str | None) -> str | N return paramtype if isinstance(parameter.vals, vals.Arrays): - paramtype = 'array' + paramtype = "array" elif isinstance(parameter, ArrayParameter): - paramtype = 'array' + paramtype = "array" elif isinstance(parameter.vals, vals.Strings): - paramtype = 'text' + paramtype = "text" elif isinstance(parameter.vals, vals.ComplexNumbers): - paramtype = 'complex' + paramtype = "complex" # TODO should we try to figure out if parts of a multiparameter are # arrays or something else? 
return paramtype @@ -927,10 +980,9 @@ def _register_parameter( except KeyError: parameter = None - paramspec = ParamSpecBase(name=name, - paramtype=paramtype, - label=label, - unit=unit) + paramspec = ParamSpecBase( + name=name, paramtype=paramtype, label=label, unit=unit + ) # We want to allow the registration of the exact same parameter twice, # the reason being that e.g. two ArrayParameters could share the same @@ -938,8 +990,7 @@ def _register_parameter( # dependent (array)parameter if parameter is not None and parameter != paramspec: - raise ValueError("Parameter already registered " - "in this Measurement.") + raise ValueError("Parameter already registered " "in this Measurement.") if setpoints is not None: sp_strings = [str(sp) for sp in setpoints] @@ -952,20 +1003,20 @@ def _register_parameter( bs_strings = [] # get the ParamSpecBases - depends_on, inf_from = self._paramspecbase_from_strings(name, - sp_strings, - bs_strings) + depends_on, inf_from = self._paramspecbase_from_strings( + name, sp_strings, bs_strings + ) if depends_on: self._interdeps = self._interdeps.extend( - dependencies={paramspec: depends_on}) + dependencies={paramspec: depends_on} + ) if inf_from: - self._interdeps = self._interdeps.extend( - inferences={paramspec: inf_from}) - if not(depends_on or inf_from): + self._interdeps = self._interdeps.extend(inferences={paramspec: inf_from}) + if not (depends_on or inf_from): self._interdeps = self._interdeps.extend(standalones=(paramspec,)) - log.info(f'Registered {name} in the Measurement.') + log.info(f"Registered {name} in the Measurement.") return self @@ -982,35 +1033,41 @@ def _register_arrayparameter( """ my_setpoints = list(setpoints) if setpoints else [] for i in range(len(parameter.shape)): - if parameter.setpoint_full_names is not None and \ - parameter.setpoint_full_names[i] is not None: + if ( + parameter.setpoint_full_names is not None + and parameter.setpoint_full_names[i] is not None + ): spname = parameter.setpoint_full_names[i] else: - spname = f'{parameter.full_name}_setpoint_{i}' + spname = f"{parameter.full_name}_setpoint_{i}" if parameter.setpoint_labels: splabel = parameter.setpoint_labels[i] else: - splabel = '' + splabel = "" if parameter.setpoint_units: spunit = parameter.setpoint_units[i] else: - spunit = '' - - self._register_parameter(name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None) + spunit = "" + + self._register_parameter( + name=spname, + paramtype=paramtype, + label=splabel, + unit=spunit, + setpoints=None, + basis=None, + ) my_setpoints += [spname] - self._register_parameter(parameter.full_name, - parameter.label, - parameter.unit, - my_setpoints, - basis, - paramtype) + self._register_parameter( + parameter.full_name, + parameter.label, + parameter.unit, + my_setpoints, + basis, + paramtype, + ) def _register_parameter_with_setpoints( self, @@ -1026,28 +1083,34 @@ def _register_parameter_with_setpoints( my_setpoints = list(setpoints) if setpoints else [] for sp in parameter.setpoints: if not isinstance(sp, Parameter): - raise RuntimeError("The setpoints of a " - "ParameterWithSetpoints " - "must be a Parameter") + raise RuntimeError( + "The setpoints of a " + "ParameterWithSetpoints " + "must be a Parameter" + ) spname = sp.full_name splabel = sp.label spunit = sp.unit - self._register_parameter(name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None) + self._register_parameter( + name=spname, + paramtype=paramtype, + label=splabel, + 
unit=spunit, + setpoints=None, + basis=None, + ) my_setpoints.append(spname) - self._register_parameter(parameter.full_name, - parameter.label, - parameter.unit, - my_setpoints, - basis, - paramtype) + self._register_parameter( + parameter.full_name, + parameter.label, + parameter.unit, + my_setpoints, + basis, + paramtype, + ) def _register_multiparameter( self, @@ -1069,40 +1132,50 @@ def _register_multiparameter( else: my_setpoints = list(setpoints) if setpoints else [] for j in range(len(shape)): - if multiparameter.setpoint_full_names is not None and \ - multiparameter.setpoint_full_names[i] is not None: + if ( + multiparameter.setpoint_full_names is not None + and multiparameter.setpoint_full_names[i] is not None + ): spname = multiparameter.setpoint_full_names[i][j] else: - spname = f'{name}_setpoint_{j}' - if multiparameter.setpoint_labels is not None and \ - multiparameter.setpoint_labels[i] is not None: + spname = f"{name}_setpoint_{j}" + if ( + multiparameter.setpoint_labels is not None + and multiparameter.setpoint_labels[i] is not None + ): splabel = multiparameter.setpoint_labels[i][j] else: - splabel = '' - if multiparameter.setpoint_units is not None and \ - multiparameter.setpoint_units[i] is not None: + splabel = "" + if ( + multiparameter.setpoint_units is not None + and multiparameter.setpoint_units[i] is not None + ): spunit = multiparameter.setpoint_units[i][j] else: - spunit = '' - - self._register_parameter(name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None) + spunit = "" + + self._register_parameter( + name=spname, + paramtype=paramtype, + label=splabel, + unit=spunit, + setpoints=None, + basis=None, + ) my_setpoints += [spname] setpoints_lists.append(my_setpoints) for i, setpoints in enumerate(setpoints_lists): - self._register_parameter(multiparameter.full_names[i], - multiparameter.labels[i], - multiparameter.units[i], - setpoints, - basis, - paramtype) + self._register_parameter( + multiparameter.full_names[i], + multiparameter.labels[i], + multiparameter.units[i], + setpoints, + basis, + paramtype, + ) def register_custom_parameter( self: T, @@ -1130,15 +1203,9 @@ def register_custom_parameter( are the setpoints of this parameter paramtype: Type of the parameter, i.e. the SQL storage class """ - return self._register_parameter(name, - label, - unit, - setpoints, - basis, - paramtype) - - def unregister_parameter(self, - parameter: setpoints_type) -> None: + return self._register_parameter(name, label, unit, setpoints, basis, paramtype) + + def unregister_parameter(self, parameter: setpoints_type) -> None: """ Remove a custom/QCoDeS parameter from the dataset produced by running this measurement @@ -1148,8 +1215,10 @@ def unregister_parameter(self, elif isinstance(parameter, str): param = parameter else: - raise ValueError('Wrong input type. Must be a QCoDeS parameter or' - ' the name (a string) of a parameter.') + raise ValueError( + "Wrong input type. Must be a QCoDeS parameter or" + " the name (a string) of a parameter." 
+ ) try: paramspec: ParamSpecBase = self._interdeps[param] @@ -1158,7 +1227,7 @@ def unregister_parameter(self, self._interdeps = self._interdeps.remove(paramspec) - log.info(f'Removed {param} from Measurement.') + log.info(f"Removed {param} from Measurement.") def add_before_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: """ @@ -1171,15 +1240,16 @@ def add_before_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: # some tentative cheap checking nargs = len(signature(func).parameters) if len(args) != nargs: - raise ValueError('Mismatch between function call signature and ' - 'the provided arguments.') + raise ValueError( + "Mismatch between function call signature and " + "the provided arguments." + ) self.enteractions.append((func, args)) return self - def add_after_run(self: T, - func: Callable[..., Any], args: Sequence[Any]) -> T: + def add_after_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: """ Add an action to be performed after the measurement. @@ -1190,8 +1260,10 @@ def add_after_run(self: T, # some tentative cheap checking nargs = len(signature(func).parameters) if len(args) != nargs: - raise ValueError('Mismatch between function call signature and ' - 'the provided arguments.') + raise ValueError( + "Mismatch between function call signature and " + "the provided arguments." + ) self.exitactions.append((func, args)) From 13ecdccdf0164f2ed054d395ff37fd6432396341 Mon Sep 17 00:00:00 2001 From: Jinnapat Indrapiromkul Date: Mon, 30 Jan 2023 13:41:08 +0100 Subject: [PATCH 3/3] Undo formatting on unchanged lines --- qcodes/dataset/measurements.py | 519 ++++++++++++++------------------- 1 file changed, 225 insertions(+), 294 deletions(-) diff --git a/qcodes/dataset/measurements.py b/qcodes/dataset/measurements.py index b27374d4b21..f070fbfbfcc 100644 --- a/qcodes/dataset/measurements.py +++ b/qcodes/dataset/measurements.py @@ -48,6 +48,7 @@ InterDependencies_, ) from qcodes.dataset.descriptions.param_spec import ParamSpec, ParamSpecBase +from qcodes.dataset.descriptions.rundescriber import RunDescriber from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes from qcodes.dataset.experiment_container import Experiment, load_experiment from qcodes.dataset.experiment_settings import get_default_experiment_id @@ -73,9 +74,9 @@ ActionType = Tuple[Callable[..., Any], Sequence[Any]] -SubscriberType = Tuple[ - Callable[..., Any], Union[MutableSequence[Any], MutableMapping[Any, Any]] -] +SubscriberType = Tuple[Callable[..., Any], + Union[MutableSequence[Any], + MutableMapping[Any, Any]]] class ParameterTypeError(Exception): @@ -198,22 +199,21 @@ def add_result(self, *res_tuple: res_type) -> None: if not isinstance(data, np.ndarray): raise TypeError( f"Expected data for Parameter with Array validator " - f"to be a numpy array but got: {type(data)}" - ) + f"to be a numpy array but got: {type(data)}") - if ( - parameter.vals.shape is not None - and data.shape != parameter.vals.shape - ): + if (parameter.vals.shape is not None + and data.shape != parameter.vals.shape): raise TypeError( f"Expected data with shape {parameter.vals.shape}, " f"but got {data.shape} for parameter: {parameter.full_name}" ) if isinstance(parameter, ArrayParameter): - results_dict.update(self._unpack_arrayparameter(partial_result)) + results_dict.update( + self._unpack_arrayparameter(partial_result)) elif isinstance(parameter, MultiParameter): - results_dict.update(self._unpack_multiparameter(partial_result)) + results_dict.update( + 
self._unpack_multiparameter(partial_result)) elif isinstance(parameter, ParameterWithSetpoints): results_dict.update( self._conditionally_expand_parameter_with_setpoints( @@ -221,7 +221,9 @@ def add_result(self, *res_tuple: res_type) -> None: ) ) else: - results_dict.update(self._unpack_partial_result(partial_result)) + results_dict.update( + self._unpack_partial_result(partial_result) + ) self._validate_result_deps(results_dict) self._validate_result_shapes(results_dict) @@ -234,29 +236,25 @@ def add_result(self, *res_tuple: res_type) -> None: self._last_save_time = perf_counter() def _conditionally_expand_parameter_with_setpoints( - self, - data: values_type, - parameter: ParameterWithSetpoints, - parameter_names: Sequence[str], - partial_result: res_type, + self, data: values_type, parameter: ParameterWithSetpoints, + parameter_names: Sequence[str], partial_result: res_type ) -> dict[ParamSpecBase, np.ndarray]: local_results = {} setpoint_names = tuple(setpoint.full_name for setpoint in parameter.setpoints) - expanded = tuple( - setpoint_name in parameter_names for setpoint_name in setpoint_names - ) + expanded = tuple(setpoint_name in parameter_names for setpoint_name in setpoint_names) if all(expanded): - local_results.update(self._unpack_partial_result(partial_result)) + local_results.update( + self._unpack_partial_result(partial_result)) elif any(expanded): - raise ValueError( - f"Some of the setpoints of {parameter.full_name} " - "were explicitly given but others were not. " - "Either supply all of them or none of them." - ) + raise ValueError(f"Some of the setpoints of {parameter.full_name} " + "were explicitly given but others were not. " + "Either supply all of them or none of them.") else: expanded_partial_result = expand_setpoints_helper(parameter, data) for res in expanded_partial_result: - local_results.update(self._unpack_partial_result(res)) + local_results.update( + self._unpack_partial_result(res) + ) return local_results def _unpack_partial_result( @@ -271,11 +269,9 @@ def _unpack_partial_result( try: parameter = self._interdeps._id_to_paramspec[str(param)] except KeyError: - raise ValueError( - "Can not add result for parameter " - f"{param}, no such parameter registered " - "with this measurement." - ) + raise ValueError('Can not add result for parameter ' + f'{param}, no such parameter registered ' + 'with this measurement.') return {parameter: np.array(values)} def _unpack_arrayparameter( @@ -289,19 +285,15 @@ def _unpack_arrayparameter( array_param = cast(ArrayParameter, array_param) if array_param.setpoints is None: - raise RuntimeError( - f"{array_param.full_name} is an " - f"{type(array_param)} " - f"without setpoints. Cannot handle this." - ) + raise RuntimeError(f"{array_param.full_name} is an " + f"{type(array_param)} " + f"without setpoints. Cannot handle this.") try: main_parameter = self._interdeps._id_to_paramspec[str(array_param)] except KeyError: - raise ValueError( - "Can not add result for parameter " - f"{array_param}, no such parameter registered " - "with this measurement." 
- ) + raise ValueError('Can not add result for parameter ' + f'{array_param}, no such parameter registered ' + 'with this measurement.') res_dict = {main_parameter: np.array(values_array)} @@ -310,9 +302,8 @@ def _unpack_arrayparameter( res_dict.update( self._unpack_setpoints_from_parameter( - array_param, array_param.setpoints, sp_names, fallback_sp_name - ) - ) + array_param, array_param.setpoints, + sp_names, fallback_sp_name)) return res_dict @@ -331,11 +322,9 @@ def _unpack_multiparameter( result_dict = {} if parameter.setpoints is None: - raise RuntimeError( - f"{parameter.full_name} is an " - f"{type(parameter)} " - f"without setpoints. Cannot handle this." - ) + raise RuntimeError(f"{parameter.full_name} is an " + f"{type(parameter)} " + f"without setpoints. Cannot handle this.") for i in range(len(parameter.shapes)): # if this loop runs, then 'data' is a Sequence data = cast(Sequence[Union[str, int, float, Any]], data) @@ -345,33 +334,30 @@ def _unpack_multiparameter( try: paramspec = self._interdeps._id_to_paramspec[parameter.full_names[i]] except KeyError: - raise ValueError( - "Can not add result for parameter " - f"{parameter.names[i]}, " - "no such parameter registered " - "with this measurement." - ) + raise ValueError('Can not add result for parameter ' + f'{parameter.names[i]}, ' + 'no such parameter registered ' + 'with this measurement.') result_dict.update({paramspec: np.array(data[i])}) if shape != (): # array parameter like part of the multiparameter # need to find setpoints too - fallback_sp_name = f"{parameter.full_names[i]}_setpoint" + fallback_sp_name = f'{parameter.full_names[i]}_setpoint' sp_names: Sequence[str] | None - if ( - parameter.setpoint_full_names is not None - and parameter.setpoint_full_names[i] is not None - ): + if (parameter.setpoint_full_names is not None + and parameter.setpoint_full_names[i] is not None): sp_names = parameter.setpoint_full_names[i] else: sp_names = None result_dict.update( self._unpack_setpoints_from_parameter( - parameter, parameter.setpoints[i], sp_names, fallback_sp_name - ) - ) + parameter, + parameter.setpoints[i], + sp_names, + fallback_sp_name)) return result_dict @@ -394,15 +380,13 @@ def _unpack_setpoints_from_parameter( if sp_names is not None: spname = sp_names[i] else: - spname = f"{fallback_sp_name}_{i}" + spname = f'{fallback_sp_name}_{i}' try: setpoint_parameter = self._interdeps[spname] except KeyError: - raise RuntimeError( - "No setpoints registered for " - f"{type(parameter)} {parameter.full_name}!" 
- ) + raise RuntimeError('No setpoints registered for ' + f'{type(parameter)} {parameter.full_name}!') sps = np.array(sps) while sps.ndim > 1: # The outermost setpoint axis or an nD param is nD @@ -413,7 +397,7 @@ def _unpack_setpoints_from_parameter( setpoint_parameters.append(setpoint_parameter) setpoint_axes.append(sps) - output_grids = np.meshgrid(*setpoint_axes, indexing="ij") + output_grids = np.meshgrid(*setpoint_axes, indexing='ij') result_dict = {} for grid, param in zip(output_grids, setpoint_parameters): result_dict.update({param: grid}) @@ -421,8 +405,7 @@ def _unpack_setpoints_from_parameter( return result_dict def _validate_result_deps( - self, results_dict: Mapping[ParamSpecBase, values_type] - ) -> None: + self, results_dict: Mapping[ParamSpecBase, values_type]) -> None: """ Validate that the dependencies of the ``results_dict`` are met, meaning that (some) values for all required setpoints and inferences @@ -431,58 +414,46 @@ def _validate_result_deps( try: self._interdeps.validate_subset(list(results_dict.keys())) except (DependencyError, InferenceError) as err: - raise ValueError( - "Can not add result, some required parameters " "are missing." - ) from err + raise ValueError('Can not add result, some required parameters ' + 'are missing.') from err def _validate_result_shapes( - self, results_dict: Mapping[ParamSpecBase, values_type] - ) -> None: + self, results_dict: Mapping[ParamSpecBase, values_type]) -> None: """ Validate that all sizes of the ``results_dict`` are consistent. This means that array-values of parameters and their setpoints are of the same size, whereas parameters with no setpoint relation to each other can have different sizes. """ - toplevel_params = set(self._interdeps.dependencies).intersection( - set(results_dict) - ) + toplevel_params = (set(self._interdeps.dependencies) + .intersection(set(results_dict))) for toplevel_param in toplevel_params: required_shape = np.shape(np.array(results_dict[toplevel_param])) for setpoint in self._interdeps.dependencies[toplevel_param]: # a setpoint is allowed to be a scalar; shape is then () setpoint_shape = np.shape(np.array(results_dict[setpoint])) if setpoint_shape not in [(), required_shape]: - raise ValueError( - f"Incompatible shapes. Parameter " - f"{toplevel_param.name} has shape " - f"{required_shape}, but its setpoint " - f"{setpoint.name} has shape " - f"{setpoint_shape}." - ) + raise ValueError(f'Incompatible shapes. Parameter ' + f"{toplevel_param.name} has shape " + f"{required_shape}, but its setpoint " + f"{setpoint.name} has shape " + f"{setpoint_shape}.") @staticmethod def _validate_result_types( - results_dict: Mapping[ParamSpecBase, np.ndarray] - ) -> None: + results_dict: Mapping[ParamSpecBase, np.ndarray]) -> None: """ Validate the type of the results """ - allowed_kinds = { - "numeric": "iuf", - "text": "SU", - "array": "iufcSUmM", - "complex": "c", - } + allowed_kinds = {'numeric': 'iuf', 'text': 'SU', 'array': 'iufcSUmM', + 'complex': 'c'} for ps, vals in results_dict.items(): - if vals.dtype.kind not in allowed_kinds[ps.type]: - raise ValueError( - f"Parameter {ps.name} is of type " - f'"{ps.type}", but got a result of ' - f"type {vals.dtype} ({vals})." 
- ) + if vals.dtype.kind not in allowed_kinds[ps.type]: + raise ValueError(f'Parameter {ps.name} is of type ' + f'"{ps.type}", but got a result of ' + f'type {vals.dtype} ({vals}).') def flush_data_to_database(self, block: bool = False) -> None: """ @@ -544,10 +515,10 @@ def __init__( in_memory_cache: bool = True, dataset_class: DataSetType = DataSetType.DataSet, ) -> None: + self._dataset_class = dataset_class - self.write_period = self._calculate_write_period( - write_in_background, write_period - ) + self.write_period = self._calculate_write_period(write_in_background, + write_period) self.enteractions = enteractions self.exitactions = exitactions @@ -560,7 +531,7 @@ def __init__( self.station = station self._interdependencies = interdeps self._shapes: Shapes | None = shapes - self.name = name if name else "results" + self.name = name if name else 'results' self._parent_datasets = parent_datasets self._extra_log_info = extra_log_info self._write_in_background = write_in_background @@ -572,14 +543,12 @@ def _calculate_write_period( write_in_background: bool, write_period: float | None ) -> float: write_period_changed_from_default = ( - write_period is not None - and write_period != qc.config.defaults.dataset.write_period + write_period is not None and + write_period != qc.config.defaults.dataset.write_period ) if write_in_background and write_period_changed_from_default: - warnings.warn( - f"The specified write period of {write_period} s " - "will be ignored, since write_in_background==True" - ) + warnings.warn(f"The specified write period of {write_period} s " + "will be ignored, since write_in_background==True") if write_in_background: return 0.0 if write_period is None: @@ -647,7 +616,7 @@ def __enter__(self) -> DataSaver: # register all subscribers if isinstance(self.ds, DataSet): - for callble, state in self.subscribers: + for (callble, state) in self.subscribers: # We register with minimal waiting time. # That should make all subscribers be called when data is flushed # to the database @@ -668,10 +637,9 @@ def __enter__(self) -> DataSaver: log.info(f"Using background writing: {self._write_in_background}") self.datasaver = DataSaver( - dataset=self.ds, - write_period=self.write_period, - interdeps=self._interdependencies, - ) + dataset=self.ds, + write_period=self.write_period, + interdeps=self._interdependencies) return self.datasaver @@ -692,14 +660,13 @@ def __exit__( # if an exception happened during the measurement, # log the exception stream = io.StringIO() - tb_module.print_exception( - exception_type, exception_value, traceback, file=stream - ) + tb_module.print_exception(exception_type, + exception_value, + traceback, + file=stream) exception_string = stream.getvalue() - log.warning( - "An exception occured in measurement with guid: " - f"{self.ds.guid};\nTraceback:\n{exception_string}" - ) + log.warning('An exception occured in measurement with guid: ' + f'{self.ds.guid};\nTraceback:\n{exception_string}') self.ds.add_metadata("measurement_exception", exception_string) # and finally mark the dataset as closed, thus @@ -709,15 +676,13 @@ def __exit__( self.ds.mark_completed() if get_data_export_automatic(): self.datasaver.export_data() - log.info( - f"Finished measurement with guid: {self.ds.guid}. " - f"{self._extra_log_info}" - ) + log.info(f'Finished measurement with guid: {self.ds.guid}. 
' + f'{self._extra_log_info}') if isinstance(self.ds, DataSet): self.ds.unsubscribe_all() -T = TypeVar("T", bound="Measurement") +T = TypeVar('T', bound='Measurement') class Measurement: @@ -757,7 +722,7 @@ def __init__( self._interdeps = InterDependencies_() self._shapes: Shapes | None = None self._parent_datasets: list[dict[str, str]] = [] - self._extra_log_info: str = "" + self._extra_log_info: str = '' @property def parameters(self) -> dict[str, ParamSpecBase]: @@ -770,10 +735,10 @@ def write_period(self) -> float: @write_period.setter def write_period(self, wp: float) -> None: if not isinstance(wp, Number): - raise ValueError("The write period must be a number (of seconds).") + raise ValueError('The write period must be a number (of seconds).') wp_float = float(wp) if wp_float < 1e-3: - raise ValueError("The write period must be at least 1 ms.") + raise ValueError('The write period must be at least 1 ms.') self._write_period = wp_float def _paramspecbase_from_strings( @@ -806,10 +771,8 @@ def _paramspecbase_from_strings( sp_psb = idps._id_to_paramspec[sp] depends_on.append(sp_psb) except KeyError: - raise ValueError( - f"Unknown setpoint: {sp}." - " Please register that parameter first." - ) + raise ValueError(f'Unknown setpoint: {sp}.' + ' Please register that parameter first.') # now handle inferred parameters inf_from = [] @@ -819,10 +782,8 @@ def _paramspecbase_from_strings( inff_psb = idps._id_to_paramspec[inff] inf_from.append(inff_psb) except KeyError: - raise ValueError( - f"Unknown basis parameter: {inff}." - " Please register that parameter first." - ) + raise ValueError(f'Unknown basis parameter: {inff}.' + ' Please register that parameter first.') return tuple(depends_on), tuple(inf_from) @@ -841,11 +802,8 @@ def register_parent( # Link object we will eventually make out of this information. We # cannot create a Link object just yet, because the DataSet of this # Measurement has not been given a GUID yet - parent_dict = { - "tail": parent.guid, - "edge_type": link_type, - "description": description, - } + parent_dict = {'tail': parent.guid, 'edge_type': link_type, + 'description': description} self._parent_datasets.append(parent_dict) return self @@ -874,60 +832,52 @@ def register_parameter( and the validator of the supplied parameter. """ if not isinstance(parameter, ParameterBase): - raise ValueError( - "Can not register object of type {}. Can only " - "register a QCoDeS Parameter." - "".format(type(parameter)) - ) + raise ValueError('Can not register object of type {}. Can only ' + 'register a QCoDeS Parameter.' + ''.format(type(parameter))) paramtype = self._infer_paramtype(parameter, paramtype) # default to numeric if paramtype is None: - paramtype = "numeric" + paramtype = 'numeric' # now the parameter type must be valid if paramtype not in ParamSpec.allowed_types: - raise RuntimeError( - "Trying to register a parameter with type " - f"{paramtype}. However, only " - f"{ParamSpec.allowed_types} are supported." - ) + raise RuntimeError("Trying to register a parameter with type " + f"{paramtype}. 
However, only " + f"{ParamSpec.allowed_types} are supported.") if isinstance(parameter, ArrayParameter): - self._register_arrayparameter(parameter, setpoints, basis, paramtype) + self._register_arrayparameter(parameter, + setpoints, + basis, + paramtype) elif isinstance(parameter, ParameterWithSetpoints): - self._register_parameter_with_setpoints( - parameter, setpoints, basis, paramtype - ) + self._register_parameter_with_setpoints(parameter, + setpoints, + basis, + paramtype) elif isinstance(parameter, MultiParameter): - self._register_multiparameter( - parameter, - setpoints, - basis, - paramtype, - ) + self._register_multiparameter(parameter, + setpoints, + basis, + paramtype, + ) elif isinstance(parameter, Parameter): - self._register_parameter( - parameter.full_name, - parameter.label, - parameter.unit, - setpoints, - basis, - paramtype, - ) + self._register_parameter(parameter.full_name, + parameter.label, + parameter.unit, + setpoints, + basis, paramtype) elif isinstance(parameter, GroupedParameter): - self._register_parameter( - parameter.full_name, - parameter.label, - parameter.unit, - setpoints, - basis, - paramtype, - ) + self._register_parameter(parameter.full_name, + parameter.label, + parameter.unit, + setpoints, + basis, paramtype) else: - raise RuntimeError( - "Does not know how to register a parameter" f"of type {type(parameter)}" - ) + raise RuntimeError("Does not know how to register a parameter" + f"of type {type(parameter)}") return self @@ -949,13 +899,13 @@ def _infer_paramtype(parameter: ParameterBase, paramtype: str | None) -> str | N return paramtype if isinstance(parameter.vals, vals.Arrays): - paramtype = "array" + paramtype = 'array' elif isinstance(parameter, ArrayParameter): - paramtype = "array" + paramtype = 'array' elif isinstance(parameter.vals, vals.Strings): - paramtype = "text" + paramtype = 'text' elif isinstance(parameter.vals, vals.ComplexNumbers): - paramtype = "complex" + paramtype = 'complex' # TODO should we try to figure out if parts of a multiparameter are # arrays or something else? return paramtype @@ -980,9 +930,10 @@ def _register_parameter( except KeyError: parameter = None - paramspec = ParamSpecBase( - name=name, paramtype=paramtype, label=label, unit=unit - ) + paramspec = ParamSpecBase(name=name, + paramtype=paramtype, + label=label, + unit=unit) # We want to allow the registration of the exact same parameter twice, # the reason being that e.g. 
two ArrayParameters could share the same @@ -990,7 +941,8 @@ def _register_parameter( # dependent (array)parameter if parameter is not None and parameter != paramspec: - raise ValueError("Parameter already registered " "in this Measurement.") + raise ValueError("Parameter already registered " + "in this Measurement.") if setpoints is not None: sp_strings = [str(sp) for sp in setpoints] @@ -1003,20 +955,20 @@ def _register_parameter( bs_strings = [] # get the ParamSpecBases - depends_on, inf_from = self._paramspecbase_from_strings( - name, sp_strings, bs_strings - ) + depends_on, inf_from = self._paramspecbase_from_strings(name, + sp_strings, + bs_strings) if depends_on: self._interdeps = self._interdeps.extend( - dependencies={paramspec: depends_on} - ) + dependencies={paramspec: depends_on}) if inf_from: - self._interdeps = self._interdeps.extend(inferences={paramspec: inf_from}) - if not (depends_on or inf_from): + self._interdeps = self._interdeps.extend( + inferences={paramspec: inf_from}) + if not(depends_on or inf_from): self._interdeps = self._interdeps.extend(standalones=(paramspec,)) - log.info(f"Registered {name} in the Measurement.") + log.info(f'Registered {name} in the Measurement.') return self @@ -1033,41 +985,35 @@ def _register_arrayparameter( """ my_setpoints = list(setpoints) if setpoints else [] for i in range(len(parameter.shape)): - if ( - parameter.setpoint_full_names is not None - and parameter.setpoint_full_names[i] is not None - ): + if parameter.setpoint_full_names is not None and \ + parameter.setpoint_full_names[i] is not None: spname = parameter.setpoint_full_names[i] else: - spname = f"{parameter.full_name}_setpoint_{i}" + spname = f'{parameter.full_name}_setpoint_{i}' if parameter.setpoint_labels: splabel = parameter.setpoint_labels[i] else: - splabel = "" + splabel = '' if parameter.setpoint_units: spunit = parameter.setpoint_units[i] else: - spunit = "" - - self._register_parameter( - name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None, - ) + spunit = '' + + self._register_parameter(name=spname, + paramtype=paramtype, + label=splabel, + unit=spunit, + setpoints=None, + basis=None) my_setpoints += [spname] - self._register_parameter( - parameter.full_name, - parameter.label, - parameter.unit, - my_setpoints, - basis, - paramtype, - ) + self._register_parameter(parameter.full_name, + parameter.label, + parameter.unit, + my_setpoints, + basis, + paramtype) def _register_parameter_with_setpoints( self, @@ -1083,34 +1029,28 @@ def _register_parameter_with_setpoints( my_setpoints = list(setpoints) if setpoints else [] for sp in parameter.setpoints: if not isinstance(sp, Parameter): - raise RuntimeError( - "The setpoints of a " - "ParameterWithSetpoints " - "must be a Parameter" - ) + raise RuntimeError("The setpoints of a " + "ParameterWithSetpoints " + "must be a Parameter") spname = sp.full_name splabel = sp.label spunit = sp.unit - self._register_parameter( - name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None, - ) + self._register_parameter(name=spname, + paramtype=paramtype, + label=splabel, + unit=spunit, + setpoints=None, + basis=None) my_setpoints.append(spname) - self._register_parameter( - parameter.full_name, - parameter.label, - parameter.unit, - my_setpoints, - basis, - paramtype, - ) + self._register_parameter(parameter.full_name, + parameter.label, + parameter.unit, + my_setpoints, + basis, + paramtype) def _register_multiparameter( self, @@ -1132,50 
+1072,40 @@ def _register_multiparameter( else: my_setpoints = list(setpoints) if setpoints else [] for j in range(len(shape)): - if ( - multiparameter.setpoint_full_names is not None - and multiparameter.setpoint_full_names[i] is not None - ): + if multiparameter.setpoint_full_names is not None and \ + multiparameter.setpoint_full_names[i] is not None: spname = multiparameter.setpoint_full_names[i][j] else: - spname = f"{name}_setpoint_{j}" - if ( - multiparameter.setpoint_labels is not None - and multiparameter.setpoint_labels[i] is not None - ): + spname = f'{name}_setpoint_{j}' + if multiparameter.setpoint_labels is not None and \ + multiparameter.setpoint_labels[i] is not None: splabel = multiparameter.setpoint_labels[i][j] else: - splabel = "" - if ( - multiparameter.setpoint_units is not None - and multiparameter.setpoint_units[i] is not None - ): + splabel = '' + if multiparameter.setpoint_units is not None and \ + multiparameter.setpoint_units[i] is not None: spunit = multiparameter.setpoint_units[i][j] else: - spunit = "" - - self._register_parameter( - name=spname, - paramtype=paramtype, - label=splabel, - unit=spunit, - setpoints=None, - basis=None, - ) + spunit = '' + + self._register_parameter(name=spname, + paramtype=paramtype, + label=splabel, + unit=spunit, + setpoints=None, + basis=None) my_setpoints += [spname] setpoints_lists.append(my_setpoints) for i, setpoints in enumerate(setpoints_lists): - self._register_parameter( - multiparameter.full_names[i], - multiparameter.labels[i], - multiparameter.units[i], - setpoints, - basis, - paramtype, - ) + self._register_parameter(multiparameter.full_names[i], + multiparameter.labels[i], + multiparameter.units[i], + setpoints, + basis, + paramtype) def register_custom_parameter( self: T, @@ -1203,9 +1133,15 @@ def register_custom_parameter( are the setpoints of this parameter paramtype: Type of the parameter, i.e. the SQL storage class """ - return self._register_parameter(name, label, unit, setpoints, basis, paramtype) - - def unregister_parameter(self, parameter: setpoints_type) -> None: + return self._register_parameter(name, + label, + unit, + setpoints, + basis, + paramtype) + + def unregister_parameter(self, + parameter: setpoints_type) -> None: """ Remove a custom/QCoDeS parameter from the dataset produced by running this measurement @@ -1215,10 +1151,8 @@ def unregister_parameter(self, parameter: setpoints_type) -> None: elif isinstance(parameter, str): param = parameter else: - raise ValueError( - "Wrong input type. Must be a QCoDeS parameter or" - " the name (a string) of a parameter." - ) + raise ValueError('Wrong input type. Must be a QCoDeS parameter or' + ' the name (a string) of a parameter.') try: paramspec: ParamSpecBase = self._interdeps[param] @@ -1227,7 +1161,7 @@ def unregister_parameter(self, parameter: setpoints_type) -> None: self._interdeps = self._interdeps.remove(paramspec) - log.info(f"Removed {param} from Measurement.") + log.info(f'Removed {param} from Measurement.') def add_before_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: """ @@ -1240,16 +1174,15 @@ def add_before_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: # some tentative cheap checking nargs = len(signature(func).parameters) if len(args) != nargs: - raise ValueError( - "Mismatch between function call signature and " - "the provided arguments." 
- ) + raise ValueError('Mismatch between function call signature and ' + 'the provided arguments.') self.enteractions.append((func, args)) return self - def add_after_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: + def add_after_run(self: T, + func: Callable[..., Any], args: Sequence[Any]) -> T: """ Add an action to be performed after the measurement. @@ -1260,10 +1193,8 @@ def add_after_run(self: T, func: Callable[..., Any], args: Sequence[Any]) -> T: # some tentative cheap checking nargs = len(signature(func).parameters) if len(args) != nargs: - raise ValueError( - "Mismatch between function call signature and " - "the provided arguments." - ) + raise ValueError('Mismatch between function call signature and ' + 'the provided arguments.') self.exitactions.append((func, args))
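
Note (illustrative only, not part of the patches): a minimal sketch of the behaviour this series introduces. It assumes a database initialised with initialise_or_create_database_at() and that load_or_create_experiment() registers the loaded experiment as the default one, as in recent QCoDeS releases; parameter names and values below are made up for the example.

    from qcodes.dataset import (
        Measurement,
        initialise_or_create_database_at,
        load_or_create_experiment,
    )
    from qcodes.parameters import Parameter

    # Point QCoDeS at a database and register a default experiment.
    initialise_or_create_database_at("./example.db")
    load_or_create_experiment("demo_experiment", sample_name="sample_a")

    # Simple in-memory parameters for the sketch.
    x = Parameter("x", set_cmd=None, get_cmd=None)
    y = Parameter("y", set_cmd=None, get_cmd=None)

    # No exp= argument: with patch 1 the Measurement looks up the default
    # experiment (get_default_experiment_id) already at construction time,
    # so meas.experiment is populated instead of being left as None.
    meas = Measurement()
    assert meas.experiment is not None

    meas.register_parameter(x)
    meas.register_parameter(y, setpoints=(x,))

    with meas.run() as datasaver:
        for xv in range(5):
            x(xv)
            datasaver.add_result((x, xv), (y, xv ** 2))

With this change, code that inspects meas.experiment before calling run() no longer has to pass an Experiment explicitly when a default experiment is available.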