Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

remove deprecated functions #5651

114 changes: 1 addition & 113 deletions src/qcodes/dataset/data_export.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
from __future__ import annotations

import logging
from collections.abc import Sequence
from typing import Any, cast

import numpy as np
from typing_extensions import TypedDict

from qcodes.dataset.data_set import load_by_id
from qcodes.dataset.data_set_protocol import DataSetProtocol
from qcodes.dataset.descriptions.param_spec import ParamSpecBase
from qcodes.utils import deprecate, list_of_data_to_maybe_ragged_nd_array
from qcodes.utils import list_of_data_to_maybe_ragged_nd_array

log = logging.getLogger(__name__)

Expand All @@ -26,69 +23,6 @@ class DSPlotData(TypedDict):
shape: tuple[int, ...] | None


@deprecate(alternative="ndarray.flatten()")
def flatten_1D_data_for_plot(
    rawdata: Sequence[Sequence[Any]] | np.ndarray,
) -> np.ndarray:
    """
    Flatten the raw values returned by a database query into a 1D array.

    Args:
        rawdata: Nested sequence (or array) of raw values as returned by
            the database query.

    Returns:
        A one-dimensional numpy array containing all values.

    """
    # ndarray.flatten always returns a fresh 1D copy, so the result is
    # identical whether the input was a list of lists or an ndarray.
    return np.asarray(rawdata).flatten()


@deprecate(alternative="dataset.get_parameter_data")
def get_data_by_id(run_id: int) -> list[list[DSPlotData]]:
    """
    Load the data of a run and reshape it into 1D arrays carrying minimal
    name, unit and label metadata.

    Only parameters that take part in a dependency relation are returned;
    standalone parameters are omitted.

    Args:
        run_id: run ID from the database

    Returns:
        A list with one entry per dependent (measured) parameter. Each
        entry is itself a list of ``DSPlotData`` dictionaries
        (``data``/``name``/``label``/``unit``): first the independent
        (setpoint) parameters the dependent one depends on, and as the
        *last* element the dependent parameter itself.

    """
    dataset = load_by_id(run_id)
    return _get_data_from_ds(dataset)


def _get_data_from_ds(ds: DataSetProtocol) -> list[list[DSPlotData]]:
dependent_parameters: tuple[ParamSpecBase, ...] = tuple(
ds.description.interdeps.dependencies.keys()
Expand Down Expand Up @@ -410,49 +344,3 @@ def reshape_2D_data(
z_to_plot[y_index, x_index] = z

return xrow, yrow, z_to_plot


@deprecate(alternative="dataset.get_parameter_data")
def get_shaped_data_by_runid(run_id: int) -> list[list[dict[str, str | np.ndarray]]]:
    """
    Get data for a given run ID, reshaped according to its nature.

    The data may get flattened, and additionally reshaped onto a 2D grid
    when the setpoints fall on one (equidistant or not).

    Args:
        run_id: The ID of the run for which to get data

    Returns:
        List of lists of dictionaries, the same as for `get_data_by_id`
    """
    all_data = get_data_by_id(run_id)

    for group in all_data:
        # Only 2D-like groups (two setpoints + one measured parameter,
        # each with at least one point) are candidates for reshaping.
        is_2d_candidate = (
            len(group) == 3
            and len(group[0]['data']) > 0
            and len(group[1]['data']) > 0
        )
        if not is_2d_candidate:
            continue

        group[0]['data'] = flatten_1D_data_for_plot(group[0]['data'])
        group[1]['data'] = flatten_1D_data_for_plot(group[1]['data'])

        setpoint_kind = datatype_from_setpoints_2d(
            cast(np.ndarray, group[0]['data']),
            cast(np.ndarray, group[1]['data']),
        )

        if setpoint_kind in ('2D_grid', '2D_equidistant'):
            (
                group[0]['data'],
                group[1]['data'],
                group[2]['data'],
            ) = reshape_2D_data(
                cast(np.ndarray, group[0]['data']),
                cast(np.ndarray, group[1]['data']),
                cast(np.ndarray, group[2]['data']),
            )

    return all_data
62 changes: 1 addition & 61 deletions src/qcodes/dataset/data_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
connect,
get_DB_location,
)
from qcodes.dataset.sqlite.queries import ( # noqa: F401 for backwards compatibility
from qcodes.dataset.sqlite.queries import (
_check_if_table_found,
_get_result_table_name_by_guid,
_query_guids_from_run_spec,
Expand All @@ -57,7 +57,6 @@
get_experiment_name_from_experiment_id,
get_guid_from_expid_and_counter,
get_guid_from_run_id,
get_guids_from_run_spec,
get_metadata_from_run_id,
get_parameter_data,
get_parent_dataset_links,
Expand All @@ -82,8 +81,6 @@
)
from qcodes.utils import (
NumpyJSONEncoder,
deprecate,
issue_deprecation_warning,
)

from .data_set_cache import DataSetCacheWithDBBackend
Expand Down Expand Up @@ -871,54 +868,6 @@ def to_pandas_dataframe_dict(
dfs_dict = load_to_dataframe_dict(datadict)
return dfs_dict

@deprecate(
    reason="This method will be removed due to inconcise naming, please "
    "use the renamed method to_pandas_dataframe_dict",
    alternative="to_pandas_dataframe_dict",
)
def get_data_as_pandas_dataframe(
    self,
    *params: str | ParamSpec | ParameterBase,
    start: int | None = None,
    end: int | None = None,
) -> dict[str, pd.DataFrame]:
    """
    Deprecated alias of :meth:`to_pandas_dataframe_dict`.

    Returns the values stored in the :class:`.DataSet` for the requested
    parameters and their dependencies as a dict of
    :py:class:`pandas.DataFrame` s, keyed by parameter name. Each frame
    has the data as a column and a :py:class:`pandas.MultiIndex` built
    from the parameter's setpoints.

    Args:
        *params: string parameter names, QCoDeS Parameter objects, and
            ParamSpec objects. If omitted, data for all parameters that
            are not a dependency of another parameter is returned.
        start: start of the selection range (by result count); ignored
            if None
        end: end of the selection range (by result count); ignored if
            None. An empty range yields empty DataFrames.

    Returns:
        Dictionary from requested parameter names to
        :py:class:`pandas.DataFrame` s.
    """
    # Pure delegation: kept only for backwards compatibility.
    return self.to_pandas_dataframe_dict(*params, start=start, end=end)

def to_pandas_dataframe(
self,
Expand Down Expand Up @@ -1460,15 +1409,6 @@ def _flush_data_to_database(self, block: bool = False) -> None:
log.debug("Waiting for write queue to empty.")
writer_status.data_write_queue.join()

@property
def export_path(self) -> str | None:
    """Deprecated: last known export path, or None if never exported."""
    issue_deprecation_warning("method export_path", alternative="export_info")
    # export_info.export_paths preserves insertion order, so the last
    # value is the most recent export destination.
    paths = list(self.export_info.export_paths.values())
    return paths[-1] if paths else None

@property
def export_info(self) -> ExportInfo:
return self._export_info
Expand Down
136 changes: 0 additions & 136 deletions src/qcodes/dataset/dond/do_nd.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
process_params_meas,
)
from qcodes.parameters import ParameterBase
from qcodes.utils import deprecate

from .sweeps import AbstractSweep, TogetherSweep

Expand Down Expand Up @@ -804,138 +803,3 @@ def _parse_dond_arguments(
else:
params_meas.append(par)
return sweep_instances, params_meas


## Unused deprecated helper functions
@deprecate("Unused internal function")
def _conditional_parameter_set(
    parameter: ParameterBase,
    value: float | complex,
) -> None:
    """
    Set ``parameter`` to ``value`` only when ``value`` differs from the
    parameter's cached value, avoiding a redundant hardware write.
    """
    cached = parameter.cache.get()
    if cached != value:
        parameter.set(value)


@deprecate("Unused internal function")
def _make_nested_setpoints(sweeps: Sequence[AbstractSweep]) -> np.ndarray:
    """
    Return the cartesian product of all sweep setpoints, one row per
    combined setpoint.
    """
    if not sweeps:
        # 0d sweep (do0d): a single, empty setpoint tuple.
        return np.array([[]])
    grids = np.meshgrid(
        *(sweep.get_setpoints() for sweep in sweeps), indexing="ij"
    )
    flat_grids = [grid.ravel(order="C") for grid in grids]
    return np.vstack(flat_grids).T


@deprecate("Unused internal function")
def _select_active_actions_delays(
    actions: Sequence[ActionsT],
    delays: Sequence[float],
    setpoints: np.ndarray,
    previous_setpoints: np.ndarray,
) -> tuple[list[ActionsT], list[float]]:
    """
    Keep each setpoint's actions and delay only when that setpoint changed
    relative to the previous iteration; unchanged setpoints get an empty
    action tuple and a zero delay.
    """
    selected_actions: list[ActionsT] = [()] * len(setpoints)
    selected_delays: list[float] = [0] * len(setpoints)
    for idx, (current, previous) in enumerate(
        zip(setpoints, previous_setpoints)
    ):
        if current == previous:
            continue
        selected_actions[idx] = actions[idx]
        selected_delays[idx] = delays[idx]
    return selected_actions, selected_delays


@deprecate("Unused internal function")
def _create_measurements(
    all_setpoint_params: Sequence[ParameterBase],
    enter_actions: ActionsT,
    exit_actions: ActionsT,
    experiments: Experiment | Sequence[Experiment] | None,
    grouped_parameters: Mapping[str, ParameterGroup],
    shapes: Shapes,
    write_period: float | None,
    log_info: str | None,
) -> tuple[Measurement, ...]:
    """
    Build one configured ``Measurement`` per entry in ``grouped_parameters``.

    Each measurement registers all setpoint parameters, then the group's
    measured parameters (declared as depending on those setpoints), and
    attaches the given write period and enter/exit actions.

    Raises:
        ValueError: if a sequence of experiments is given whose length
            does not match the number of parameter groups.
    """
    meas_list: list[Measurement] = []
    if log_info is not None:
        _extra_log_info = log_info
    else:
        _extra_log_info = "Using 'qcodes.dataset.dond'"

    # A single experiment (or None) is broadcast to every parameter group;
    # an explicit sequence must match the number of groups.
    if not isinstance(experiments, Sequence):
        experiments_internal: Sequence[Experiment | None] = [
            experiments for _ in grouped_parameters
        ]
    else:
        experiments_internal = experiments

    if len(experiments_internal) != len(grouped_parameters):
        raise ValueError(
            f"Inconsistent number of "
            f"parameter groups and experiments "
            f"got {len(grouped_parameters)} and {len(experiments_internal)}"
        )

    for group, exp in zip(grouped_parameters.values(), experiments_internal):
        meas_name = group["meas_name"]
        meas_params = group["params"]
        meas = Measurement(name=meas_name, exp=exp)
        meas._extra_log_info = _extra_log_info
        # Register setpoints first so the measured parameters below can
        # declare them as their dependencies.
        _register_parameters(meas, all_setpoint_params)
        _register_parameters(
            meas, meas_params, setpoints=all_setpoint_params, shapes=shapes
        )
        _set_write_period(meas, write_period)
        _register_actions(meas, enter_actions, exit_actions)
        meas_list.append(meas)
    return tuple(meas_list)


@deprecate("Unused internal function")
def _extract_paramters_by_type_and_group(
    measurement_name: str,
    params_meas: Sequence[ParamMeasT | Sequence[ParamMeasT]],
) -> tuple[
    tuple[ParamMeasT, ...], dict[str, ParameterGroup], tuple[ParameterBase, ...]
]:
    """
    Partition ``params_meas`` into groups for measurement registration.

    Non-sequence entries form a single implicit group ("group_0"); each
    nested sequence entry becomes its own numbered group. Returns a tuple
    of (all measured entries flattened, the named parameter groups, all
    entries that are ``ParameterBase`` instances).

    NOTE(review): strings are Sequences, so a bare ``str`` entry lands in
    the single group via the first branch, and the ``elif not isinstance
    (param, str)`` guard keeps strings out of the multi-group branch.
    """
    measured_parameters: list[ParameterBase] = []
    measured_all: list[ParamMeasT] = []
    single_group: list[ParamMeasT] = []
    multi_group: list[Sequence[ParamMeasT]] = []
    grouped_parameters: dict[str, ParameterGroup] = {}
    for param in params_meas:
        if not isinstance(param, Sequence):
            # Bare parameter/callable: goes into the implicit single group.
            single_group.append(param)
            measured_all.append(param)
            if isinstance(param, ParameterBase):
                measured_parameters.append(param)
        elif not isinstance(param, str):
            # A nested sequence of parameters: its own group.
            multi_group.append(param)
            for nested_param in param:
                measured_all.append(nested_param)
                if isinstance(nested_param, ParameterBase):
                    measured_parameters.append(nested_param)
    if single_group:
        pg: ParameterGroup = {
            "params": tuple(single_group),
            "meas_name": measurement_name,
        }
        grouped_parameters["group_0"] = pg
    if multi_group:
        # NOTE(review): when both a single group and multi groups exist,
        # index 0 here overwrites "group_0" created above — confirm this
        # aliasing is intended.
        for index, par in enumerate(multi_group):
            pg = {
                "params": tuple(par),
                "meas_name": measurement_name,
            }
            grouped_parameters[f"group_{index}"] = pg
    return tuple(measured_all), grouped_parameters, tuple(measured_parameters)
Loading
Loading