diff --git a/.github/workflows/ci-daily.yml b/.github/workflows/ci-daily.yml index 35a3cf5e47c..48d63626403 100644 --- a/.github/workflows/ci-daily.yml +++ b/.github/workflows/ci-daily.yml @@ -14,7 +14,7 @@ jobs: name: Pytest Ubuntu strategy: matrix: - python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11'] runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 @@ -44,7 +44,7 @@ jobs: name: Pytest Windows strategy: matrix: - python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11'] runs-on: windows-2019 steps: - uses: actions/checkout@v4 @@ -70,7 +70,7 @@ jobs: name: Pytest MacOS strategy: matrix: - python-version: ['3.9', '3.10', '3.11'] + python-version: ['3.10', '3.11'] runs-on: macos-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/ci-weekly.yml b/.github/workflows/ci-weekly.yml index f7f525e51db..7ec25512eb6 100644 --- a/.github/workflows/ci-weekly.yml +++ b/.github/workflows/ci-weekly.yml @@ -23,7 +23,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install -r dev_tools/requirements/isolated-base.env.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5b63193bbfa..9a1ecc74746 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Misc run: check/misc @@ -31,7 +31,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: pip install -r dev_tools/requirements/deps/packaging.txt @@ -46,7 +46,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: pip install -r dev_tools/requirements/deps/format.txt @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install mypy run: pip install -r dev_tools/requirements/mypy.env.txt @@ -74,7 +74,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install changed files test dependencies run: dev_tools/conf/pip-install-minimal-for-pytest-changed-files.sh @@ -87,7 +87,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install pylint run: pip install -r dev_tools/requirements/pylint.env.txt @@ -102,7 +102,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install -r dev_tools/requirements/dev.env.txt @@ -117,7 +117,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install -r dev_tools/requirements/deps/tensorflow-docs.txt @@ -139,7 +139,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: pip install -r 
dev_tools/requirements/isolated-base.env.txt @@ -149,7 +149,7 @@ jobs: name: Pytest Ubuntu strategy: matrix: - python-version: [ '3.9', '3.10', '3.11' ] + python-version: [ '3.10', '3.11' ] runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 @@ -178,7 +178,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install pip-tools @@ -194,7 +194,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: | @@ -210,7 +210,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - uses: actions/cache@v4 with: @@ -234,7 +234,7 @@ jobs: name: Pytest Windows strategy: matrix: - python-version: [ '3.9', '3.10', '3.11' ] + python-version: [ '3.10', '3.11' ] runs-on: windows-2019 steps: - uses: actions/checkout@v4 @@ -259,7 +259,7 @@ jobs: name: Pytest MacOS strategy: matrix: - python-version: [ '3.9', '3.10', '3.11' ] + python-version: [ '3.10', '3.11' ] runs-on: macos-latest steps: - uses: actions/checkout@v4 @@ -292,7 +292,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install -r dev_tools/requirements/isolated-base.env.txt @@ -310,7 +310,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install requirements run: pip install -r dev_tools/requirements/notebooks.env.txt diff --git a/.github/workflows/release-main.yml b/.github/workflows/release-main.yml index 607583b95f3..b5301c97dbd 100644 --- a/.github/workflows/release-main.yml +++ b/.github/workflows/release-main.yml @@ -15,7 +15,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: | diff --git a/Dockerfile b/Dockerfile index 142c212995c..64520f6d335 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-slim AS cirq_base +FROM python:3.10-slim AS cirq_base # Install dependencies. # rm -rf /var/lib/apt/lists/* cleans up apt cache. See https://docs.docker.com/develop/develop-images/dockerfile_best-practices/ diff --git a/asv.conf.json b/asv.conf.json index 18449918fee..07f27ab48a0 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -8,7 +8,7 @@ "dvcs": "git", "environment_type": "virtualenv", "show_commit_url": "https://github.com/quantumlib/Cirq/commit/", - "pythons": ["3.9"], + "pythons": ["3.10"], "matrix": {"env_nobuild": {"PYTHONOPTIMIZE": ["-O", ""]}}, "benchmark_dir": "benchmarks", "env_dir": ".asv/env", diff --git a/benchmarks/README.md b/benchmarks/README.md index 37286d8e295..0e9ef527d5f 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -13,7 +13,7 @@ To run all benchmarks, navigate to the root Cirq directory at the command line a You can also pass arguments to the script, which would be forwarded to the `asv run` command. For eg: ```bash -./check/asv_run --quick --bench bench_examples --python 3.9 +./check/asv_run --quick --bench bench_examples --python 3.10 ``` Please refer [Running Benchmarks guide by ASV](https://asv.readthedocs.io/en/stable/using.html#running-benchmarks) for more information. 
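The changes above (CI workflow matrices, isolated-environment jobs, the release workflow, the Docker base images, the asv configuration, and the benchmark example) all raise the minimum supported Python from 3.9 to 3.10. As a small illustrative check, not part of this patch, a local environment can be validated against the new floor before installing Cirq:

```python
import sys

# Illustrative only, not part of this patch: check that the local interpreter
# meets the new minimum version adopted by these changes (3.9 is dropped).
MIN_SUPPORTED = (3, 10)

if sys.version_info[:2] < MIN_SUPPORTED:
    raise RuntimeError(
        f"Cirq now targets Python {MIN_SUPPORTED[0]}.{MIN_SUPPORTED[1]}+, "
        f"but this environment runs {sys.version_info[0]}.{sys.version_info[1]}."
    )
print("Python version satisfies the new support floor.")
```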
diff --git a/cirq-core/cirq/experiments/t1_decay_experiment.py b/cirq-core/cirq/experiments/t1_decay_experiment.py index 0d44db7b412..a8ee85e70b5 100644 --- a/cirq-core/cirq/experiments/t1_decay_experiment.py +++ b/cirq-core/cirq/experiments/t1_decay_experiment.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Optional, TYPE_CHECKING +from typing import Any, Optional, Sequence, TYPE_CHECKING, cast import warnings import pandas as pd @@ -77,7 +77,12 @@ def t1_decay( var = sympy.Symbol('delay_ns') - sweep = study.Linspace(var, start=min_delay_nanos, stop=max_delay_nanos, length=num_points) + if min_delay_nanos == 0: + min_delay_nanos = 0.4 + sweep_vals_ns = np.unique( + np.round(np.logspace(np.log10(min_delay_nanos), np.log10(max_delay_nanos), num_points)) + ) + sweep = study.Points(var, cast(Sequence[float], sweep_vals_ns)) circuit = circuits.Circuit( ops.X(qubit), ops.wait(qubit, nanos=var), ops.measure(qubit, key='output') @@ -118,8 +123,8 @@ def data(self) -> pd.DataFrame: def constant(self) -> float: """The t1 decay constant.""" - def exp_decay(x, t1): - return np.exp(-x / t1) + def exp_decay(x, t1, a, b): + return a * np.exp(-x / t1) + b xs = self._data['delay_ns'] ts = self._data['true_count'] @@ -132,8 +137,8 @@ def exp_decay(x, t1): # Fit to exponential decay to find the t1 constant try: - popt, _ = optimize.curve_fit(exp_decay, xs, probs, p0=[t1_guess]) - t1 = popt[0] + self.popt, _ = optimize.curve_fit(exp_decay, xs, probs, p0=[t1_guess, 1.0, 0.0]) + t1 = self.popt[0] return t1 except RuntimeError: warnings.warn("Optimal parameters could not be found for curve fit", RuntimeWarning) @@ -166,7 +171,9 @@ def plot( ax.plot(xs, ts / (fs + ts), 'ro-', **plot_kwargs) if include_fit and not np.isnan(self.constant): - ax.plot(xs, np.exp(-xs / self.constant), label='curve fit') + t1 = self.constant + t1, a, b = self.popt + ax.plot(xs, a * np.exp(-xs / t1) + b, label='curve fit') plt.legend() ax.set_xlabel(r"Delay between initialization and measurement (nanoseconds)") diff --git a/cirq-core/cirq/experiments/t1_decay_experiment_test.py b/cirq-core/cirq/experiments/t1_decay_experiment_test.py index 64d220dd5c1..acbd7526433 100644 --- a/cirq-core/cirq/experiments/t1_decay_experiment_test.py +++ b/cirq-core/cirq/experiments/t1_decay_experiment_test.py @@ -53,7 +53,7 @@ def noisy_moment(self, moment, system_qubits): repetitions=10, max_delay=cirq.Duration(nanos=500), ) - results.plot() + results.plot(include_fit=True) def test_result_eq(): @@ -61,7 +61,7 @@ def test_result_eq(): eq.make_equality_group( lambda: cirq.experiments.T1DecayResult( data=pd.DataFrame( - columns=['delay_ns', 'false_count', 'true_count'], index=[0], data=[[100.0, 2, 8]] + columns=['delay_ns', 'false_count', 'true_count'], index=[0], data=[[100, 2, 8]] ) ) ) @@ -103,7 +103,7 @@ def noisy_moment(self, moment, system_qubits): data=pd.DataFrame( columns=['delay_ns', 'false_count', 'true_count'], index=range(4), - data=[[100.0, 0, 10], [400.0, 0, 10], [700.0, 10, 0], [1000.0, 10, 0]], + data=[[100.0, 0, 10], [215.0, 0, 10], [464.0, 0, 10], [1000.0, 10, 0]], ) ) @@ -117,13 +117,14 @@ def test_all_on_results(): min_delay=cirq.Duration(nanos=100), max_delay=cirq.Duration(micros=1), ) - assert results == cirq.experiments.T1DecayResult( + desired = cirq.experiments.T1DecayResult( data=pd.DataFrame( columns=['delay_ns', 'false_count', 'true_count'], index=range(4), - data=[[100.0, 0, 10], [400.0, 0, 10], [700.0, 0, 10], [1000.0, 0, 
10]], + data=[[100.0, 0, 10], [215.0, 0, 10], [464.0, 0, 10], [1000.0, 0, 10]], ) ) + assert results == desired, f'{results.data=} {desired.data=}' def test_all_off_results(): @@ -135,13 +136,14 @@ def test_all_off_results(): min_delay=cirq.Duration(nanos=100), max_delay=cirq.Duration(micros=1), ) - assert results == cirq.experiments.T1DecayResult( + desired = cirq.experiments.T1DecayResult( data=pd.DataFrame( columns=['delay_ns', 'false_count', 'true_count'], index=range(4), - data=[[100.0, 10, 0], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]], + data=[[100.0, 10, 0], [215.0, 10, 0], [464.0, 10, 0], [1000.0, 10, 0]], ) ) + assert results == desired, f'{results.data=} {desired.data=}' @pytest.mark.usefixtures('closefigures') @@ -150,28 +152,14 @@ def test_curve_fit_plot_works(): data=pd.DataFrame( columns=['delay_ns', 'false_count', 'true_count'], index=range(4), - data=[[100.0, 6, 4], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]], + data=[[100.0, 6, 4], [215.0, 10, 0], [464.0, 10, 0], [1000.0, 10, 0]], ) ) good_fit.plot(include_fit=True) -@pytest.mark.usefixtures('closefigures') -def test_curve_fit_plot_warning(): - bad_fit = cirq.experiments.T1DecayResult( - data=pd.DataFrame( - columns=['delay_ns', 'false_count', 'true_count'], - index=range(4), - data=[[100.0, 10, 0], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]], - ) - ) - - with pytest.warns(RuntimeWarning, match='Optimal parameters could not be found for curve fit'): - bad_fit.plot(include_fit=True) - - -@pytest.mark.parametrize('t1', [200, 500, 700]) +@pytest.mark.parametrize('t1', [200.0, 500.0, 700.0]) def test_noise_model_continous(t1): class GradualDecay(cirq.NoiseModel): def __init__(self, t1: float): @@ -196,10 +184,10 @@ def noisy_moment(self, moment, system_qubits): results = cirq.experiments.t1_decay( sampler=cirq.DensityMatrixSimulator(noise=GradualDecay(t1)), qubit=cirq.GridQubit(0, 0), - num_points=4, + num_points=10, repetitions=10, - min_delay=cirq.Duration(nanos=100), - max_delay=cirq.Duration(micros=1), + min_delay=cirq.Duration(nanos=1), + max_delay=cirq.Duration(micros=10), ) assert np.isclose(results.constant, t1, 50) diff --git a/dev_tools/packaging/packaging_test.sh b/dev_tools/packaging/packaging_test.sh index 30d1a51f553..320e2e8ed01 100755 --- a/dev_tools/packaging/packaging_test.sh +++ b/dev_tools/packaging/packaging_test.sh @@ -28,7 +28,7 @@ trap '{ rm -rf "${tmp_dir}"; }' EXIT # New virtual environment echo "Working in a fresh virtualenv at ${tmp_dir}/env" -python3.9 -m venv "${tmp_dir}/env" +python3.10 -m venv "${tmp_dir}/env" export CIRQ_PRE_RELEASE_VERSION CIRQ_PRE_RELEASE_VERSION=$(dev_tools/packaging/generate-dev-version-id.sh) diff --git a/dev_tools/pr_monitor/Dockerfile b/dev_tools/pr_monitor/Dockerfile index 04f7c57263e..10769b7c9a5 100644 --- a/dev_tools/pr_monitor/Dockerfile +++ b/dev_tools/pr_monitor/Dockerfile @@ -23,7 +23,7 @@ # value of the cirqbot-api-key secret. ######################################################################################## -FROM python:3.9-slim +FROM python:3.10-slim RUN mkdir -p /app/dev_tools/pr_monitor WORKDIR /app diff --git a/docs/dev/development.md b/docs/dev/development.md index 146e66df471..740db5efdd2 100644 --- a/docs/dev/development.md +++ b/docs/dev/development.md @@ -94,7 +94,7 @@ See the previous section for instructions. 1. Install system dependencies. - Make sure you have python 3.9 or greater. + Make sure you have python 3.10 or greater. 
You can install most other dependencies via `apt-get`: ```bash diff --git a/docs/noise/qcvv/parallel_xeb.ipynb b/docs/noise/qcvv/parallel_xeb.ipynb index 920f14b0b52..57ef5b0d34b 100644 --- a/docs/noise/qcvv/parallel_xeb.ipynb +++ b/docs/noise/qcvv/parallel_xeb.ipynb @@ -65,7 +65,7 @@ " import cirq\n", "except ImportError:\n", " print(\"installing cirq...\")\n", - " !pip install --quiet cirq\n", + " !pip install --quiet cirq --pre\n", " print(\"installed cirq.\")" ] }, @@ -88,7 +88,63 @@ "outputs": [], "source": [ "import cirq\n", - "import numpy as np" + "import numpy as np\n", + "\n", + "%matplotlib inline\n", + "from matplotlib import pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parallel XEB with library functions\n", + "The entire XEB workflow can be run by calling `cirq.experiments.parallel_two_qubit_xeb` and the combined single-qubit randomized benchmarking (RB) and XEB workflows can be run by calling `cirq.experiments.run_rb_and_xeb`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Simulation\n", + "qubits = cirq.GridQubit.rect(3, 2, 4, 3)\n", + "result = cirq.experiments.parallel_two_qubit_xeb(\n", + " sampler=cirq.DensityMatrixSimulator(noise=cirq.depolarize(5e-3)), # Any simulator or a ProcessorSampler.\n", + " qubits=qubits \n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# The returned result is an instance of the `TwoQubitXEBResult` class which provides visualization methods like \n", + "result.plot_heatmap(); # plot the heatmap of XEB errors\n", + "result.plot_fitted_exponential(*qubits[:2]); # plot the fitted model of xeb error of a qubit pair.\n", + "result.plot_histogram(); # plot a histogram of all xeb errors." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# `TwoQubitXEBResult` also has methods to retrieve errors.\n", + "print('pauli errors:', result.pauli_error())\n", + "print('xeb errors:', result.xeb_error(*qubits[:2]))\n", + "print('xeb fidelity:', result.xeb_fidelity(*qubits[:2]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `run_rb_and_xeb` method returns an object of type [InferredXEBResult](https://github.com/quantumlib/Cirq/blob/bc766606b94744f80da435c522d16a34529ae671/cirq-core/cirq/experiments/two_qubit_xeb.py#L188C7-L188C24) which is like [TwoQubitXEBResult](https://github.com/quantumlib/Cirq/blob/bc766606b94744f80da435c522d16a34529ae671/cirq-core/cirq/experiments/two_qubit_xeb.py#L56) except that it removes the single-qubit errors obtained from the single-qubit randomized benchmarking (RB) experiment to isolate the error from the two qubit gate." ] }, { @@ -97,6 +153,9 @@ "id": "ace31cc4d258" }, "source": [ + "# Step by step XEB\n", + "The rest of this notebook explains how the `parallel_two_qubit_xeb` works internally. Note that the notebook uses `SQRT_ISWAP` as the entangling gate while `parallel_two_qubit_xeb` and `run_rb_and_xeb` default to `CZ`.\n", + "\n", "## Set up Random Circuits\n", "\n", "We create a library of 10 random, two-qubit `circuits` using the sqrt(ISWAP) gate. These library circuits will be mixed-and-matched among all the pairs on the device we aim to characterize." 
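The notebook cells added above already walk through the library entry points. For reference, here is a minimal standalone sketch that strings the same calls together; the qubit layout, noise level, and method names are taken directly from the cells above, and a hardware run would swap the simulator for a `ProcessorSampler`:

```python
# Sketch consolidating the new notebook cells into one script. The layout,
# noise level, and result methods mirror the cells added in this patch.
import cirq

qubits = cirq.GridQubit.rect(3, 2, 4, 3)
result = cirq.experiments.parallel_two_qubit_xeb(
    sampler=cirq.DensityMatrixSimulator(noise=cirq.depolarize(5e-3)),
    qubits=qubits,
)

result.plot_heatmap()                        # XEB error for every qubit pair
result.plot_fitted_exponential(*qubits[:2])  # fitted decay for one pair
result.plot_histogram()                      # distribution of XEB errors

print('pauli errors:', result.pauli_error())
print('xeb error:', result.xeb_error(*qubits[:2]))
print('xeb fidelity:', result.xeb_fidelity(*qubits[:2]))
```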
@@ -224,9 +283,6 @@ }, "outputs": [], "source": [ - "%matplotlib inline\n", - "from matplotlib import pyplot as plt\n", - "\n", "fig, axes = plt.subplots(2,2, figsize=(9,6))\n", "for comb_layer, ax in zip(combs_by_layer, axes.reshape(-1)):\n", " active_qubits = np.array(comb_layer.pairs).reshape(-1)\n", @@ -500,6 +556,18 @@ "kernelspec": { "display_name": "Python 3", "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" } }, "nbformat": 4, diff --git a/docs/start/install.md b/docs/start/install.md index 156346f36ac..dc25e5e289c 100644 --- a/docs/start/install.md +++ b/docs/start/install.md @@ -12,7 +12,7 @@ If you want to create a development environment, see the [development page](../d ## Python version support -Cirq currently supports python 3.9 and later. +Cirq currently supports python 3.10 and later. We follow numpy's schedule for python version support defined in [NEP 29](https://numpy.org/neps/nep-0029-deprecation_policy.html), though we may deviate from that schedule by extending support for older python versions if they are needed by [Colab](https://colab.research.google.com/) @@ -20,7 +20,7 @@ or internal Google systems. ## Installing on Linux -0. Make sure you have python 3.9.0 or greater. +0. Make sure you have python 3.10.0 or greater. See [Installing Python 3 on Linux](https://docs.python-guide.org/starting/install3/linux/) @ the hitchhiker's guide to python. @@ -87,7 +87,7 @@ or internal Google systems. ## Installing on Mac OS X -0. Make sure you have python 3.9.0 or greater. +0. Make sure you have python 3.10.0 or greater. See [Installing Python 3 on Mac OS X](https://docs.python-guide.org/starting/install3/osx/) @ the hitchhiker's guide to python. @@ -154,7 +154,7 @@ or internal Google systems. 0. If you are using the [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/about), use the [Linux install instructions](#installing-on-linux) instead of these instructions. -1. Make sure you have python 3.9.0 or greater. +1. Make sure you have python 3.10.0 or greater. See [Installing Python 3 on Windows](https://docs.python-guide.org/starting/install3/win/) @ the hitchhiker's guide to python. diff --git a/release.md b/release.md index 1b98b1973a2..016577c40bd 100644 --- a/release.md +++ b/release.md @@ -82,7 +82,7 @@ release. ### Preparation -System requirements: Linux, python3.9 +System requirements: Linux, python3.10 For MINOR / MAJOR release: Make sure you're on an up-to-date main branch and in cirq's root directory.
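A closing note on the `t1_decay` changes earlier in this patch: the linear delay sweep is replaced by log-spaced delay points (a zero minimum delay is clamped to 0.4 ns so that `log10` stays defined), and the fit model gains amplitude and offset parameters, `a * exp(-x / t1) + b`, so the curve is no longer forced through 1 and 0. The updated test expectations follow directly from the new grid. A short sketch, using the same 100 ns to 1 us range and four points as the tests, with synthetic data used only to exercise the fit:

```python
import numpy as np
from scipy import optimize

# Reproduce the delay grid that the updated tests expect:
# min_delay = 100 ns, max_delay = 1 us, num_points = 4.
min_delay_ns, max_delay_ns, num_points = 100.0, 1000.0, 4
delays = np.unique(
    np.round(np.logspace(np.log10(min_delay_ns), np.log10(max_delay_ns), num_points))
)
print(delays)  # [ 100.  215.  464. 1000.], the values now used in the tests

# The new three-parameter decay model fitted by the experiment.
def exp_decay(x, t1, a, b):
    return a * np.exp(-x / t1) + b

# Synthetic probabilities from a known T1, just to exercise curve_fit here.
true_t1 = 300.0
probs = np.exp(-delays / true_t1)
popt, _ = optimize.curve_fit(exp_decay, delays, probs, p0=[500.0, 1.0, 0.0])
print('fitted t1:', popt[0])  # approximately 300
```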