diff --git a/.travis.yml b/.travis.yml
index 6ead6ab0ad..86ea40d6ec 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,9 +9,10 @@ matrix:
- name: "GPyTorch + PyTorch stable (Python 3.6)"
python: "3.6"
install:
- - pip install -q cython numpy
- # TODO: Remove when locked to upcoming GPyTorch release 0.3.2
- - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@8f0d338d42222689f0106f5f29e62ef4777a392f
+ # TODO: Remove once GPyTorch 0.3.2 is released and marked a dep. of botorch
+ - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@32911e3b51901917c0f14101972581c1295b5edb
+ # TODO: Remove once PyTorch 1.1 is released and marked a dep. of botorch
+ - pip install -q torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
- pip install -q -e .[test]
script:
- pytest -ra --cov=. --cov-report term-missing
@@ -22,9 +23,10 @@ matrix:
python: "3.7"
dist: xenial
install:
- - pip install -q cython numpy
- # TODO: Remove when locked to upcoming GPyTorch release 0.3.2
- - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@8f0d338d42222689f0106f5f29e62ef4777a392f
+ # TODO: Remove once GPyTorch 0.3.2 is released and marked a dep. of botorch
+ - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@32911e3b51901917c0f14101972581c1295b5edb
+ # TODO: Remove once PyTorch 1.1 is released and marked a dep. of botorch
+ - pip install -q torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
- pip install -q -e .[test]
script:
- pytest -ra --cov=. --cov-report term-missing
@@ -32,9 +34,10 @@ matrix:
- name: "GPyTorch + PyTorch latest"
python: "3.6"
install:
- - pip install -q cython numpy
- - pip install -q torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html;
+ # TODO: Fix gpytorch installation to work with torch nightly in pip,
+ # so we don't have to reinstall torch nightly after installing stable
- pip install -q git+https://github.com/cornellius-gp/gpytorch.git
+ - pip install -q torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
- pip install -q -e .[test]
script:
- pytest -ra --cov=. --cov-report term-missing
@@ -57,9 +60,10 @@ matrix:
- name: "Docs: Sphinx (Python 3.6)"
python: "3.6"
install:
- - pip install -q cython numpy
- # TODO: Remove when locked to upcoming GPyTorch release 0.3.2
- - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@8f0d338d42222689f0106f5f29e62ef4777a392f
+ # TODO: Remove once GPyTorch 0.3.2 is released and marked a dep. of botorch
+ - pip install -q git+https://github.com/cornellius-gp/gpytorch.git@32911e3b51901917c0f14101972581c1295b5edb
+ # TODO: Remove once PyTorch 1.1 is released and marked a dep. of botorch
+ - pip install -q torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
- pip install -q -e .[dev]
script:
# warnings treated as errors
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d56a13e392..7d24b93e7b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,11 +1,23 @@
-# Contributing to botorch
-We want to make contributing to botorch is as easy and transparent as possible.
+# Contributing to BoTorch
+We want to make contributing to BoTorch as easy and transparent as possible.
+
+
+## Development installation
+
+To get the development installation with all the necessary dependencies for
+linting, testing, and building the documentation, run the following:
+```bash
+git clone https://github.com/facebookexternal/botorch.git
+cd botorch
+pip install -e .[dev]
+```
+
## Our Development Process
#### Code Style
-botorch uses the [black](https://github.com/ambv/black) code formatter to
+BoTorch uses the [black](https://github.com/ambv/black) code formatter to
enforce a common code style across the code base. black is installed easily via
pip using `pip install black`, and run locally by calling
```bash
@@ -20,7 +32,7 @@ Travis will fail on your PR if it does not adhere to the black formatting style.
#### Type Hints
-botorch is fully typed using python 3.6+
+BoTorch is fully typed using Python 3.6+
[type hints](https://www.python.org/dev/peps/pep-0484/).
We expect any contributions to also use proper type annotations. While we
currently do not enforce full consistency of these in Travis (type checkers can
@@ -40,7 +52,7 @@ python -m unittest
#### Documentation
-botorch's website is also open source, and is part of this very repository (the
+BoTorch's website is also open source, and is part of this very repository (the
code can be found in the [website](../website/) folder).
It is built using [Docusaurus](https://docusaurus.io/), and consists of three
main elements:
@@ -89,5 +101,5 @@ outlined on that page and do not file a public issue.
## License
-By contributing to botorch, you agree that your contributions will be licensed
+By contributing to BoTorch, you agree that your contributions will be licensed
under the LICENSE file in the root directory of this source tree.
diff --git a/README.md b/README.md
index c626bf5e96..8ce9b304b3 100644
--- a/README.md
+++ b/README.md
@@ -1,47 +1,91 @@
-
+
+
+
[![Build Status](
https://travis-ci.com/facebookexternal/botorch.svg?token=esFvpzSw7sLSsfe1PAr1&branch=master
)](https://travis-ci.com/facebookexternal/botorch)
-BoTorch is a library for Bayesian Optimization in PyTorch.
+BoTorch is a library for Bayesian Optimization built on PyTorch.
-It is currently an alpha version under active development - be warned!
+*BoTorch is currently in alpha and under active development - be warned!*
-## Installation
+### Why BoTorch
+BoTorch
+* Provides a modular and easily extensible interface for composing Bayesian
+ optimization primitives, including probabilistic models, acquisition functions,
+ and optimizers.
+* Harnesses the power of PyTorch, including auto-differentiation, native support
+ for highly parallelized modern hardware (e.g. GPUs) using device-agnostic code,
+ and a dynamic computation graph.
+* Supports Monte Carlo-based acquisition functions via the
+ [reparameterization trick](https://arxiv.org/abs/1312.6114), which makes it
+ straightforward to implement new ideas without having to impose restrictive
+  assumptions about the underlying model (see the sketch below).
+* Enables seamless integration with deep and/or convolutional architectures in PyTorch.
+* Has first-class support for state-of-the-art probabilistic models in
+  [GPyTorch](http://www.gpytorch.ai/), including support for multi-task Gaussian
+  Processes (GPs), deep kernel learning, deep GPs, and approximate inference.
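+
+As a toy illustration of the reparameterization idea, here is a minimal sketch
+in plain PyTorch (not BoTorch's actual acquisition API; the posterior mean,
+covariance root, and incumbent value are made-up placeholders):
+```python
+import torch
+
+# hypothetical posterior over q=2 candidate points
+mean = torch.tensor([0.1, 0.3], requires_grad=True)
+cov_root = torch.eye(2)  # stand-in for a Cholesky factor of the posterior cov
+
+# reparameterize: y = mean + eps @ L^T, with fixed base samples eps ~ N(0, I)
+eps = torch.randn(512, 2)
+samples = mean + eps @ cov_root.t()
+
+# differentiable Monte Carlo estimate of q-Expected Improvement (best_f = 0.0)
+qei = (samples.max(dim=-1).values - 0.0).clamp_min(0.0).mean()
+qei.backward()  # gradients w.r.t. `mean` flow through the fixed base samples
+```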
-##### Setup Requirements (TODO: Remove once we can use torch Sobol)
-The following are required to run the setup:
+### Target Audience
-- Python >= 3.6
-- numpy
-- cython
+The primary audience for hands-on use of BoTorch is researchers and
+sophisticated practitioners in Bayesian Optimization and AI.
+
+We recommend using BoTorch as a low-level API for implementing new algorithms
+for [Ax](https://github.com/facebook/Ax). Ax is designed as an easy-to-use
+platform for end-users that is, at the same time, flexible enough
+for Bayesian Optimization researchers to plug into for handling of feature
+transformations, (meta-)data management, storage, etc.
+We recommend that end-users who are not actively doing research on Bayesian
+Optimization simply use Ax.
-##### Installation Requirements
-- PyTorch >= 1.0.1
-- gpytorch >= 0.3.0
+## Installation
+
+#### Installation Requirements
+
+- Python >= 3.6
+- PyTorch nightly (**TODO:** peg to PyTorch 1.1 once released)
+- gpytorch >= 0.3.1 (**TODO:** peg to GPyTorch 0.3.2 once released)
- scipy
-**Important:**
-You will want to have you PyTorch build link against **MKL** (the non-optimized
-version of botorch can be up to an order of magnitude slower). Setting this up
-manually can be tricky - to make sure this works please use the Anaconda
-installation instructions on https://pytorch.org/.
+**Important note for MacOS users:**
+* You will want to make sure your PyTorch build is linked against MKL (the
+ non-optimized version of BoTorch can be up to an order of magnitude slower in
+ some settings). Setting this up manually on MacOS can be tricky - to ensure
+ this works properly please follow the
+ [PyTorch installation instructions](https://pytorch.org/get-started/locally/).
+* If you need CUDA on MacOS, you will need to build PyTorch from source. Please
+ consult the PyTorch installation instructions above.
-### Install botorch
+#### Installing BoTorch
-To run the botorch setup, you'll need cython (**TODO:** Remove)
+The latest release of BoTorch is easily installed using either pip or conda:
```bash
-pip install cython
+pip install botorch
```
-We recommend installing botorch using pip via ssh:
+**TODO: Conda install**
+
+
+If you'd like to try our bleeding-edge features (and don't mind running into an
+occasional bug here or there), you can install the latest master from GitHub
+(this will also require installing the current GPyTorch master):
+```bash
+pip install git+https://github.com/cornellius-gp/gpytorch.git
+pip install git+https://github.com/facebookexternal/botorch.git
+```
+
+
+#### Installing BoTorch from the private repo **TODO: REMOVE**
+
+BoTorch is easily installed using pip:
```bash
pip install git+ssh://git@github.com/facebookexternal/botorch.git
```
@@ -63,14 +107,9 @@ To customize the installation, you can also run the following instead:
-## Installation using conda
-
-**TODO: conda install is unsupported until the repo is public**
-
-
## Contributing
See the [CONTRIBUTING](CONTRIBUTING.md) file for how to help out.
## License
-botorch is MIT licensed, as found in the LICENSE file.
+BoTorch is MIT licensed, as found in the LICENSE file.
diff --git a/botorch/__init__.py b/botorch/__init__.py
index 71bbf95461..e85b64ca93 100644
--- a/botorch/__init__.py
+++ b/botorch/__init__.py
@@ -1,5 +1,8 @@
#!/usr/bin/env python3
+import os
+import re
+
from . import acquisition, exceptions, models, optim, posteriors, test_functions
from .cross_validation import batch_cross_validation
from .fit import fit_gpytorch_model
@@ -7,6 +10,11 @@
from .utils import manual_seed
+# get version string from setup.py
+with open(os.path.join(os.path.dirname(__file__), os.pardir, "setup.py"), "r") as f:
+ __version__ = re.search(r"version=['\"]([^'\"]*)['\"]", f.read(), re.M).group(1)
+
+
__all__ = [
"acquisition",
"batch_cross_validation",
diff --git a/botorch/acquisition/sampler.py b/botorch/acquisition/sampler.py
index 472a785532..5fb66ad201 100644
--- a/botorch/acquisition/sampler.py
+++ b/botorch/acquisition/sampler.py
@@ -11,10 +11,10 @@
import torch
from torch import Tensor
from torch.nn import Module
+from torch.quasirandom import SobolEngine
from ..exceptions import UnsupportedError
from ..posteriors import Posterior
-from ..qmc.sobol import SobolEngine
from ..utils.sampling import draw_sobol_normal_samples, manual_seed
diff --git a/botorch/qmc/__init__.py b/botorch/qmc/__init__.py
index 298e7ec376..70eb934cae 100644
--- a/botorch/qmc/__init__.py
+++ b/botorch/qmc/__init__.py
@@ -1,7 +1,8 @@
#! /usr/bin/env python3
+from torch.quasirandom import SobolEngine
+
from .normal import MultivariateNormalQMCEngine, NormalQMCEngine
-from .sobol import SobolEngine
__all__ = ["MultivariateNormalQMCEngine", "NormalQMCEngine", "SobolEngine"]
diff --git a/botorch/qmc/normal.py b/botorch/qmc/normal.py
index 5744ec3e04..959afe6c60 100644
--- a/botorch/qmc/normal.py
+++ b/botorch/qmc/normal.py
@@ -11,12 +11,11 @@
"""
import math
-from typing import List, Optional, Union
+from typing import Optional
-import numpy as np
-from scipy.stats import norm
-
-from .sobol import SobolEngine
+import torch
+from torch import Tensor
+from torch.quasirandom import SobolEngine
class NormalQMCEngine:
@@ -50,32 +49,42 @@ def __init__(
else:
# to apply Box-Muller, we need an even number of dimensions
sobol_dim = 2 * math.ceil(d / 2)
- self._sobol_engine = SobolEngine(dimen=sobol_dim, scramble=True, seed=seed)
+ self._sobol_engine = SobolEngine(dimension=sobol_dim, scramble=True, seed=seed)
- def draw(self, n: int = 1) -> np.ndarray:
+ def draw(
+ self, n: int = 1, out: Optional[Tensor] = None, dtype: torch.dtype = torch.float
+ ) -> Optional[Tensor]:
r"""Draw n qMC samples from the standard Normal.
Args:
- n: The number of samples.
+ n: The number of samples to draw.
+            out: An optional output tensor. If provided, draws are put into this
+ tensor, and the function returns None.
+ dtype: The desired torch data type (ignored if `out` is provided).
Returns:
- The samples as a numpy array.
+            An `n x d` tensor of samples if `out=None`, and `None` otherwise.
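+
+        Example (illustrative usage):
+            >>> engine = NormalQMCEngine(d=3, seed=1234)
+            >>> samples = engine.draw(16)  # a `16 x 3` tensor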
"""
# get base samples
- samples = self._sobol_engine.draw(n)
+ samples = self._sobol_engine.draw(n, dtype=dtype)
if self._inv_transform:
# apply inverse transform (values too close to 0/1 result in inf values)
- return norm.ppf(0.5 + (1 - 1e-10) * (samples - 0.5))
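+            # standard Normal icdf: Phi^{-1}(v) = sqrt(2) * erfinv(2 * v - 1)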
+ v = 0.5 + (1 - 1e-10) * (samples - 0.5)
+ samples_tf = torch.erfinv(2 * v - 1) * math.sqrt(2)
else:
# apply Box-Muller transform (note: [1] indexes starting from 1)
- even = np.arange(0, samples.shape[-1], 2)
- Rs = np.sqrt(-2 * np.log(samples[:, even]))
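+            # Box-Muller: R * cos(theta) and R * sin(theta) are independent
+            # N(0, 1) draws for R = sqrt(-2 log u1) and theta = 2 * pi * u2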
+ even = torch.arange(0, samples.shape[-1], 2)
+ Rs = (-2 * torch.log(samples[:, even])).sqrt()
thetas = 2 * math.pi * samples[:, 1 + even]
- cos = np.cos(thetas)
- sin = np.sin(thetas)
- transf_samples = np.stack([Rs * cos, Rs * sin], -1).reshape(n, -1)
+ cos = torch.cos(thetas)
+ sin = torch.sin(thetas)
+ samples_tf = torch.stack([Rs * cos, Rs * sin], -1).reshape(n, -1)
# make sure we only return the number of dimensions requested
- return transf_samples[:, : self._d]
+ samples_tf = samples_tf[:, : self._d]
+ if out is None:
+ return samples_tf
+ else:
+ out.copy_(samples_tf)
class MultivariateNormalQMCEngine:
@@ -94,8 +103,8 @@ class MultivariateNormalQMCEngine:
def __init__(
self,
- mean: Union[float, List[float], np.ndarray],
- cov: Union[float, List[List[float]], np.ndarray],
+ mean: Tensor,
+ cov: Tensor,
seed: Optional[int] = None,
inv_transform: bool = False,
) -> None:
@@ -108,14 +117,12 @@ def __init__(
underlying SobolEngine.
inv_transform: If True, use inverse transform instead of Box-Muller.
"""
- # check for square/symmetric cov matrix and mean vector has the same d
- mean = np.array(mean, copy=False, ndmin=1)
- cov = np.array(cov, copy=False, ndmin=2)
+ # validate inputs
if not cov.shape[0] == cov.shape[1]:
raise ValueError("Covariance matrix is not square.")
if not mean.shape[0] == cov.shape[0]:
raise ValueError("Dimension mismatch between mean and covariance.")
- if not np.allclose(cov, cov.transpose()):
+ if not torch.allclose(cov, cov.transpose(-1, -2)):
raise ValueError("Covariance matrix is not symmetric.")
self._mean = mean
self._normal_engine = NormalQMCEngine(
@@ -123,23 +130,33 @@ def __init__(
)
# compute Cholesky decomp; if it fails, do the eigendecomposition
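+        # (both branches produce C = self._corr_matrix with C^T @ C = cov, up
+        # to PSD clamping, so draws z @ C + mean have covariance cov)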
try:
- self._corr_matrix = np.linalg.cholesky(cov).transpose()
- except np.linalg.LinAlgError:
- eigval, eigvec = np.linalg.eigh(cov)
- if not np.all(eigval >= -1.0e-8):
+ self._corr_matrix = torch.cholesky(cov).transpose(-1, -2)
+ except RuntimeError:
+ eigval, eigvec = torch.symeig(cov, eigenvectors=True)
+ if not torch.all(eigval >= -1e-8):
raise ValueError("Covariance matrix not PSD.")
- eigval = np.clip(eigval, 0.0, None)
- self._corr_matrix = (eigvec * np.sqrt(eigval)).transpose()
+ eigval_root = eigval.clamp_min(0.0).sqrt()
+ self._corr_matrix = (eigvec * eigval_root).transpose(-1, -2)
- def draw(self, n: int = 1) -> np.ndarray:
+ def draw(self, n: int = 1, out: Optional[Tensor] = None) -> Optional[Tensor]:
r"""Draw n qMC samples from the multivariate Normal.
Args:
- n: The number of samples.
+ n: The number of samples to draw.
+            out: An optional output tensor. If provided, draws are put into this
+                tensor, and the function returns None.
Returns:
- The samples as a numpy array.
+            An `n x d` tensor of samples if `out=None`, and `None` otherwise.
"""
- base_samples = self._normal_engine.draw(n)
- qmc_samples = base_samples @ self._corr_matrix + self._mean
- return qmc_samples
+ dtype = out.dtype if out is not None else self._mean.dtype
+ device = out.device if out is not None else self._mean.device
+ base_samples = self._normal_engine.draw(n, dtype=dtype).to(device=device)
+ corr_mat = self._corr_matrix.to(dtype=dtype, device=device)
+ mean = self._mean.to(dtype=dtype, device=device)
+ qmc_samples = base_samples @ corr_mat + mean
+ if out is None:
+ return qmc_samples
+ else:
+ out.copy_(qmc_samples)
diff --git a/botorch/qmc/sobol.pyx b/botorch/qmc/sobol.pyx
deleted file mode 100644
index d0ba66665e..0000000000
--- a/botorch/qmc/sobol.pyx
+++ /dev/null
@@ -1,1690 +0,0 @@
-#!/usr/bin/env python3
-
-cimport cython
-cimport numpy as cnp
-import numpy as np
-
-cdef int MAXDIM = 1111 # max number of dimensions
-cdef int MAXDEG = 13 # max polynomial degree
-cdef int MAXBIT = 30 # max number of bits
-cdef int LARGEST_NUMBER = 2 ** MAXBIT # largest possible integer
-cdef float RECIPD = 1.0 / LARGEST_NUMBER # normalization constant
-
-cdef int poly[1111]
-
-poly = [
- 1, 3, 7, 11, 13, 19, 25, 37, 59, 47, 61, 55, 41, 67, 97, 91, 109,
- 103, 115, 131, 193, 137, 145, 143, 241, 157, 185, 167, 229, 171,
- 213, 191, 253, 203, 211, 239, 247, 285, 369, 299, 301, 333, 351,
- 355, 357, 361, 391, 397, 425, 451, 463, 487, 501, 529, 539, 545,
- 557, 563, 601, 607, 617, 623, 631, 637, 647, 661, 675, 677, 687,
- 695, 701, 719, 721, 731, 757, 761, 787, 789, 799, 803, 817, 827,
- 847, 859, 865, 875, 877, 883, 895, 901, 911, 949, 953, 967, 971,
- 973, 981, 985, 995, 1001, 1019, 1033, 1051, 1063, 1069, 1125, 1135,
- 1153, 1163, 1221, 1239, 1255, 1267, 1279, 1293, 1305, 1315, 1329,
- 1341, 1347, 1367, 1387, 1413, 1423, 1431, 1441, 1479, 1509, 1527,
- 1531, 1555, 1557, 1573, 1591, 1603, 1615, 1627, 1657, 1663, 1673,
- 1717, 1729, 1747, 1759, 1789, 1815, 1821, 1825, 1849, 1863, 1869,
- 1877, 1881, 1891, 1917, 1933, 1939, 1969, 2011, 2035, 2041, 2053,
- 2071, 2091, 2093, 2119, 2147, 2149, 2161, 2171, 2189, 2197, 2207,
- 2217, 2225, 2255, 2257, 2273, 2279, 2283, 2293, 2317, 2323, 2341,
- 2345, 2363, 2365, 2373, 2377, 2385, 2395, 2419, 2421, 2431, 2435,
- 2447, 2475, 2477, 2489, 2503, 2521, 2533, 2551, 2561, 2567, 2579,
- 2581, 2601, 2633, 2657, 2669, 2681, 2687, 2693, 2705, 2717, 2727,
- 2731, 2739, 2741, 2773, 2783, 2793, 2799, 2801, 2811, 2819, 2825,
- 2833, 2867, 2879, 2881, 2891, 2905, 2911, 2917, 2927, 2941, 2951,
- 2955, 2963, 2965, 2991, 2999, 3005, 3017, 3035, 3037, 3047, 3053,
- 3083, 3085, 3097, 3103, 3159, 3169, 3179, 3187, 3205, 3209, 3223,
- 3227, 3229, 3251, 3263, 3271, 3277, 3283, 3285, 3299, 3305, 3319,
- 3331, 3343, 3357, 3367, 3373, 3393, 3399, 3413, 3417, 3427, 3439,
- 3441, 3475, 3487, 3497, 3515, 3517, 3529, 3543, 3547, 3553, 3559,
- 3573, 3589, 3613, 3617, 3623, 3627, 3635, 3641, 3655, 3659, 3669,
- 3679, 3697, 3707, 3709, 3713, 3731, 3743, 3747, 3771, 3791, 3805,
- 3827, 3833, 3851, 3865, 3889, 3895, 3933, 3947, 3949, 3957, 3971,
- 3985, 3991, 3995, 4007, 4013, 4021, 4045, 4051, 4069, 4073, 4179,
- 4201, 4219, 4221, 4249, 4305, 4331, 4359, 4383, 4387, 4411, 4431,
- 4439, 4449, 4459, 4485, 4531, 4569, 4575, 4621, 4663, 4669, 4711,
- 4723, 4735, 4793, 4801, 4811, 4879, 4893, 4897, 4921, 4927, 4941,
- 4977, 5017, 5027, 5033, 5127, 5169, 5175, 5199, 5213, 5223, 5237,
- 5287, 5293, 5331, 5391, 5405, 5453, 5523, 5573, 5591, 5597, 5611,
- 5641, 5703, 5717, 5721, 5797, 5821, 5909, 5913, 5955, 5957, 6005,
- 6025, 6061, 6067, 6079, 6081, 6231, 6237, 6289, 6295, 6329, 6383,
- 6427, 6453, 6465, 6501, 6523, 6539, 6577, 6589, 6601, 6607, 6631,
- 6683, 6699, 6707, 6761, 6795, 6865, 6881, 6901, 6923, 6931, 6943,
- 6999, 7057, 7079, 7103, 7105, 7123, 7173, 7185, 7191, 7207, 7245,
- 7303, 7327, 7333, 7355, 7365, 7369, 7375, 7411, 7431, 7459, 7491,
- 7505, 7515, 7541, 7557, 7561, 7701, 7705, 7727, 7749, 7761, 7783,
- 7795, 7823, 7907, 7953, 7963, 7975, 8049, 8089, 8123, 8125, 8137,
- 8219, 8231, 8245, 8275, 8293, 8303, 8331, 8333, 8351, 8357, 8367,
- 8379, 8381, 8387, 8393, 8417, 8435, 8461, 8469, 8489, 8495, 8507,
- 8515, 8551, 8555, 8569, 8585, 8599, 8605, 8639, 8641, 8647, 8653,
- 8671, 8675, 8689, 8699, 8729, 8741, 8759, 8765, 8771, 8795, 8797,
- 8825, 8831, 8841, 8855, 8859, 8883, 8895, 8909, 8943, 8951, 8955,
- 8965, 8999, 9003, 9031, 9045, 9049, 9071, 9073, 9085, 9095, 9101,
- 9109, 9123, 9129, 9137, 9143, 9147, 9185, 9197, 9209, 9227, 9235,
- 9247, 9253, 9257, 9277, 9297, 9303, 9313, 9325, 9343, 9347, 9371,
- 9373, 9397, 9407, 9409, 9415, 9419, 9443, 9481, 9495, 9501, 9505,
- 9517, 9529, 9555, 9557, 9571, 9585, 9591, 9607, 9611, 9621, 9625,
- 9631, 9647, 9661, 9669, 9679, 9687, 9707, 9731, 9733, 9745, 9773,
- 9791, 9803, 9811, 9817, 9833, 9847, 9851, 9863, 9875, 9881, 9905,
- 9911, 9917, 9923, 9963, 9973, 10003, 10025, 10043, 10063, 10071,
- 10077, 10091, 10099, 10105, 10115, 10129, 10145, 10169, 10183,
- 10187, 10207, 10223, 10225, 10247, 10265, 10271, 10275, 10289,
- 10299, 10301, 10309, 10343, 10357, 10373, 10411, 10413, 10431,
- 10445, 10453, 10463, 10467, 10473, 10491, 10505, 10511, 10513,
- 10523, 10539, 10549, 10559, 10561, 10571, 10581, 10615, 10621,
- 10625, 10643, 10655, 10671, 10679, 10685, 10691, 10711, 10739,
- 10741, 10755, 10767, 10781, 10785, 10803, 10805, 10829, 10857,
- 10863, 10865, 10875, 10877, 10917, 10921, 10929, 10949, 10967,
- 10971, 10987, 10995, 11009, 11029, 11043, 11045, 11055, 11063,
- 11075, 11081, 11117, 11135, 11141, 11159, 11163, 11181, 11187,
- 11225, 11237, 11261, 11279, 11297, 11307, 11309, 11327, 11329,
- 11341, 11377, 11403, 11405, 11413, 11427, 11439, 11453, 11461,
- 11473, 11479, 11489, 11495, 11499, 11533, 11545, 11561, 11567,
- 11575, 11579, 11589, 11611, 11623, 11637, 11657, 11663, 11687,
- 11691, 11701, 11747, 11761, 11773, 11783, 11795, 11797, 11817,
- 11849, 11855, 11867, 11869, 11873, 11883, 11919, 11921, 11927,
- 11933, 11947, 11955, 11961, 11999, 12027, 12029, 12037, 12041,
- 12049, 12055, 12095, 12097, 12107, 12109, 12121, 12127, 12133,
- 12137, 12181, 12197, 12207, 12209, 12239, 12253, 12263, 12269,
- 12277, 12287, 12295, 12309, 12313, 12335, 12361, 12367, 12391,
- 12409, 12415, 12433, 12449, 12469, 12479, 12481, 12499, 12505,
- 12517, 12527, 12549, 12559, 12597, 12615, 12621, 12639, 12643,
- 12657, 12667, 12707, 12713, 12727, 12741, 12745, 12763, 12769,
- 12779, 12781, 12787, 12799, 12809, 12815, 12829, 12839, 12857,
- 12875, 12883, 12889, 12901, 12929, 12947, 12953, 12959, 12969,
- 12983, 12987, 12995, 13015, 13019, 13031, 13063, 13077, 13103,
- 13137, 13149, 13173, 13207, 13211, 13227, 13241, 13249, 13255,
- 13269, 13283, 13285, 13303, 13307, 13321, 13339, 13351, 13377,
- 13389, 13407, 13417, 13431, 13435, 13447, 13459, 13465, 13477,
- 13501, 13513, 13531, 13543, 13561, 13581, 13599, 13605, 13617,
- 13623, 13637, 13647, 13661, 13677, 13683, 13695, 13725, 13729,
- 13753, 13773, 13781, 13785, 13795, 13801, 13807, 13825, 13835,
- 13855, 13861, 13871, 13883, 13897, 13905, 13915, 13939, 13941,
- 13969, 13979, 13981, 13997, 14027, 14035, 14037, 14051, 14063,
- 14085, 14095, 14107, 14113, 14125, 14137, 14145, 14151, 14163,
- 14193, 14199, 14219, 14229, 14233, 14243, 14277, 14287, 14289,
- 14295, 14301, 14305, 14323, 14339, 14341, 14359, 14365, 14375,
- 14387, 14411, 14425, 14441, 14449, 14499, 14513, 14523, 14537,
- 14543, 14561, 14579, 14585, 14593, 14599, 14603, 14611, 14641,
- 14671, 14695, 14701, 14723, 14725, 14743, 14753, 14759, 14765,
- 14795, 14797, 14803, 14831, 14839, 14845, 14855, 14889, 14895,
- 14909, 14929, 14941, 14945, 14951, 14963, 14965, 14985, 15033,
- 15039, 15053, 15059, 15061, 15071, 15077, 15081, 15099, 15121,
- 15147, 15149, 15157, 15167, 15187, 15193, 15203, 15205, 15215,
- 15217, 15223, 15243, 15257, 15269, 15273, 15287, 15291, 15313,
- 15335, 15347, 15359, 15373, 15379, 15381, 15391, 15395, 15397,
- 15419, 15439, 15453, 15469, 15491, 15503, 15517, 15527, 15531,
- 15545, 15559, 15593, 15611, 15613, 15619, 15639, 15643, 15649,
- 15661, 15667, 15669, 15681, 15693, 15717, 15721, 15741, 15745,
- 15765, 15793, 15799, 15811, 15825, 15835, 15847, 15851, 15865,
- 15877, 15881, 15887, 15899, 15915, 15935, 15937, 15955, 15973,
- 15977, 16011, 16035, 16061, 16069, 16087, 16093, 16097, 16121,
- 16141, 16153, 16159, 16165, 16183, 16189, 16195, 16197, 16201,
- 16209, 16215, 16225, 16259, 16265, 16273, 16299, 16309, 16355,
- 16375, 16381,
-]
-
-cdef int vinit[1111][13]
-
-vinit = [
- [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 3, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 3, 9, 9, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 13, 3, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 5, 11, 27, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 5, 1, 15, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 7, 3, 29, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 7, 21, 0, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 1, 9, 23, 37, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 3, 5, 19, 33, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 3, 13, 11, 7, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 7, 13, 25, 5, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 5, 11, 7, 11, 0, 0, 0, 0, 0, 0, 0],
- [1, 1, 1, 3, 13, 39, 0, 0, 0, 0, 0, 0, 0],
- [1, 3, 1, 15, 17, 63, 13, 0, 0, 0, 0, 0, 0],
- [1, 1, 5, 5, 1, 59, 33, 0, 0, 0, 0, 0, 0],
- [1, 3, 3, 3, 25, 17, 115, 0, 0, 0, 0, 0, 0],
- [1, 1, 7, 15, 29, 15, 41, 0, 0, 0, 0, 0, 0],
- [1, 3, 1, 7, 3, 23, 79, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 9, 31, 29, 17, 0, 0, 0, 0, 0, 0],
- [1, 1, 5, 13, 11, 3, 29, 0, 0, 0, 0, 0, 0],
- [1, 1, 1, 9, 5, 21, 119, 0, 0, 0, 0, 0, 0],
- [1, 1, 3, 1, 23, 13, 75, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 11, 27, 31, 73, 0, 0, 0, 0, 0, 0],
- [1, 1, 7, 7, 19, 25, 105, 0, 0, 0, 0, 0, 0],
- [1, 3, 1, 5, 21, 9, 7, 0, 0, 0, 0, 0, 0],
- [1, 1, 1, 15, 5, 49, 59, 0, 0, 0, 0, 0, 0],
- [1, 3, 1, 1, 1, 33, 65, 0, 0, 0, 0, 0, 0],
- [1, 3, 5, 15, 17, 19, 21, 0, 0, 0, 0, 0, 0],
- [1, 1, 7, 11, 13, 29, 3, 0, 0, 0, 0, 0, 0],
- [1, 3, 7, 5, 7, 11, 113, 0, 0, 0, 0, 0, 0],
- [1, 1, 5, 11, 15, 19, 61, 0, 0, 0, 0, 0, 0],
- [1, 1, 1, 1, 9, 27, 89, 7, 0, 0, 0, 0, 0],
- [1, 1, 3, 7, 31, 15, 45, 23, 0, 0, 0, 0, 0],
- [1, 3, 3, 9, 25, 25, 107, 39, 0, 0, 0, 0, 0],
- [1, 1, 7, 7, 3, 63, 21, 217, 0, 0, 0, 0, 0],
- [1, 3, 5, 7, 5, 55, 71, 141, 0, 0, 0, 0, 0],
- [1, 1, 5, 1, 23, 17, 79, 27, 0, 0, 0, 0, 0],
- [1, 1, 5, 15, 7, 63, 19, 53, 0, 0, 0, 0, 0],
- [1, 1, 3, 15, 3, 49, 71, 181, 0, 0, 0, 0, 0],
- [1, 3, 3, 15, 17, 19, 61, 169, 0, 0, 0, 0, 0],
- [1, 3, 3, 13, 23, 41, 41, 35, 0, 0, 0, 0, 0],
- [1, 1, 1, 3, 3, 59, 57, 15, 0, 0, 0, 0, 0],
- [1, 3, 1, 3, 3, 3, 121, 207, 0, 0, 0, 0, 0],
- [1, 3, 5, 15, 21, 57, 87, 45, 0, 0, 0, 0, 0],
- [1, 1, 1, 5, 25, 33, 119, 247, 0, 0, 0, 0, 0],
- [1, 1, 1, 9, 25, 49, 55, 185, 0, 0, 0, 0, 0],
- [1, 3, 5, 7, 23, 53, 85, 117, 0, 0, 0, 0, 0],
- [1, 3, 3, 13, 11, 57, 121, 41, 235, 0, 0, 0, 0],
- [1, 1, 3, 3, 19, 57, 119, 81, 307, 0, 0, 0, 0],
- [1, 3, 3, 7, 3, 39, 11, 223, 495, 0, 0, 0, 0],
- [1, 3, 3, 5, 11, 21, 23, 151, 417, 0, 0, 0, 0],
- [1, 3, 1, 11, 31, 7, 61, 81, 57, 0, 0, 0, 0],
- [1, 1, 3, 9, 7, 53, 11, 189, 151, 0, 0, 0, 0],
- [1, 3, 7, 1, 9, 9, 35, 61, 19, 0, 0, 0, 0],
- [1, 1, 5, 9, 5, 55, 33, 95, 119, 0, 0, 0, 0],
- [1, 3, 7, 1, 17, 15, 43, 185, 375, 0, 0, 0, 0],
- [1, 1, 3, 5, 23, 59, 107, 23, 451, 0, 0, 0, 0],
- [1, 1, 7, 7, 17, 19, 113, 73, 55, 0, 0, 0, 0],
- [1, 3, 1, 13, 17, 49, 101, 113, 449, 0, 0, 0, 0],
- [1, 3, 3, 9, 25, 31, 29, 239, 501, 0, 0, 0, 0],
- [1, 1, 3, 9, 13, 3, 87, 85, 53, 0, 0, 0, 0],
- [1, 1, 5, 1, 11, 39, 119, 9, 185, 0, 0, 0, 0],
- [1, 1, 1, 7, 31, 5, 97, 201, 317, 0, 0, 0, 0],
- [1, 1, 3, 3, 27, 5, 29, 83, 17, 0, 0, 0, 0],
- [1, 3, 5, 5, 19, 41, 17, 53, 21, 0, 0, 0, 0],
- [1, 1, 5, 1, 17, 9, 89, 183, 487, 0, 0, 0, 0],
- [1, 1, 7, 11, 23, 19, 5, 203, 13, 0, 0, 0, 0],
- [1, 3, 7, 11, 7, 9, 127, 91, 347, 0, 0, 0, 0],
- [1, 1, 7, 13, 5, 57, 89, 149, 393, 0, 0, 0, 0],
- [1, 1, 1, 7, 11, 25, 119, 101, 15, 0, 0, 0, 0],
- [1, 1, 1, 7, 19, 1, 117, 13, 391, 0, 0, 0, 0],
- [1, 3, 3, 9, 19, 15, 103, 111, 307, 0, 0, 0, 0],
- [1, 3, 3, 9, 7, 51, 105, 239, 189, 0, 0, 0, 0],
- [1, 1, 1, 1, 13, 11, 41, 3, 381, 0, 0, 0, 0],
- [1, 3, 1, 1, 21, 19, 83, 205, 71, 0, 0, 0, 0],
- [1, 3, 5, 3, 21, 61, 25, 253, 163, 0, 0, 0, 0],
- [1, 1, 1, 9, 7, 53, 41, 247, 99, 0, 0, 0, 0],
- [1, 3, 5, 15, 9, 29, 55, 121, 467, 0, 0, 0, 0],
- [1, 3, 7, 1, 11, 19, 69, 189, 167, 0, 0, 0, 0],
- [1, 3, 5, 5, 1, 11, 117, 169, 433, 0, 0, 0, 0],
- [1, 1, 1, 13, 5, 9, 49, 179, 337, 0, 0, 0, 0],
- [1, 3, 7, 1, 21, 21, 127, 197, 257, 0, 0, 0, 0],
- [1, 3, 5, 9, 11, 19, 29, 175, 179, 0, 0, 0, 0],
- [1, 3, 3, 9, 13, 43, 1, 217, 47, 0, 0, 0, 0],
- [1, 1, 3, 9, 25, 13, 99, 249, 385, 0, 0, 0, 0],
- [1, 3, 1, 9, 9, 13, 53, 195, 23, 0, 0, 0, 0],
- [1, 3, 5, 9, 7, 41, 83, 95, 117, 0, 0, 0, 0],
- [1, 1, 7, 13, 7, 25, 15, 63, 369, 0, 0, 0, 0],
- [1, 3, 1, 11, 27, 31, 31, 19, 425, 0, 0, 0, 0],
- [1, 3, 7, 3, 15, 9, 73, 7, 207, 0, 0, 0, 0],
- [1, 3, 5, 5, 25, 11, 115, 5, 433, 0, 0, 0, 0],
- [1, 1, 1, 11, 15, 19, 35, 75, 301, 0, 0, 0, 0],
- [1, 3, 7, 11, 21, 5, 21, 217, 147, 0, 0, 0, 0],
- [1, 1, 3, 13, 17, 53, 89, 245, 333, 0, 0, 0, 0],
- [1, 3, 1, 5, 19, 37, 5, 111, 85, 0, 0, 0, 0],
- [1, 1, 7, 3, 19, 7, 1, 189, 221, 519, 0, 0, 0],
- [1, 1, 1, 15, 21, 51, 91, 165, 423, 307, 0, 0, 0],
- [1, 3, 7, 1, 5, 45, 53, 169, 49, 931, 0, 0, 0],
- [1, 3, 3, 11, 11, 7, 35, 141, 3, 1023, 0, 0, 0],
- [1, 1, 3, 11, 3, 7, 95, 221, 43, 517, 0, 0, 0],
- [1, 3, 5, 7, 5, 61, 83, 249, 229, 771, 0, 0, 0],
- [1, 3, 7, 13, 29, 23, 19, 159, 227, 151, 0, 0, 0],
- [1, 1, 3, 15, 31, 45, 85, 253, 201, 1023, 0, 0, 0],
- [1, 1, 3, 11, 29, 7, 55, 207, 383, 539, 0, 0, 0],
- [1, 1, 5, 13, 5, 59, 51, 249, 281, 725, 0, 0, 0],
- [1, 3, 1, 9, 5, 41, 101, 219, 229, 45, 0, 0, 0],
- [1, 3, 3, 11, 1, 1, 33, 23, 207, 927, 0, 0, 0],
- [1, 1, 3, 15, 31, 29, 41, 49, 21, 707, 0, 0, 0],
- [1, 3, 1, 15, 27, 61, 55, 127, 343, 29, 0, 0, 0],
- [1, 3, 3, 13, 11, 37, 45, 237, 251, 125, 0, 0, 0],
- [1, 1, 5, 3, 13, 27, 95, 5, 397, 371, 0, 0, 0],
- [1, 3, 1, 15, 1, 47, 61, 25, 173, 275, 0, 0, 0],
- [1, 1, 3, 7, 3, 15, 27, 177, 507, 279, 0, 0, 0],
- [1, 1, 3, 9, 7, 31, 37, 37, 421, 817, 0, 0, 0],
- [1, 3, 3, 11, 11, 35, 89, 103, 443, 389, 0, 0, 0],
- [1, 3, 7, 13, 7, 31, 75, 65, 399, 453, 0, 0, 0],
- [1, 3, 1, 11, 3, 17, 57, 167, 53, 989, 0, 0, 0],
- [1, 1, 1, 9, 23, 51, 61, 81, 345, 1015, 0, 0, 0],
- [1, 1, 7, 9, 13, 13, 15, 87, 77, 29, 0, 0, 0],
- [1, 1, 3, 5, 31, 25, 117, 119, 385, 169, 0, 0, 0],
- [1, 3, 1, 13, 17, 45, 15, 45, 317, 743, 0, 0, 0],
- [1, 1, 3, 9, 1, 5, 21, 79, 155, 99, 0, 0, 0],
- [1, 1, 7, 1, 27, 5, 27, 143, 187, 923, 0, 0, 0],
- [1, 3, 5, 13, 11, 33, 25, 57, 269, 981, 0, 0, 0],
- [1, 1, 5, 7, 25, 39, 27, 79, 501, 181, 0, 0, 0],
- [1, 1, 7, 7, 1, 5, 123, 187, 19, 693, 0, 0, 0],
- [1, 3, 5, 7, 23, 47, 39, 143, 169, 309, 0, 0, 0],
- [1, 3, 5, 7, 29, 29, 109, 183, 235, 227, 0, 0, 0],
- [1, 1, 3, 7, 17, 35, 93, 75, 415, 111, 0, 0, 0],
- [1, 3, 1, 5, 25, 47, 51, 97, 61, 219, 0, 0, 0],
- [1, 1, 3, 9, 7, 63, 21, 211, 247, 897, 0, 0, 0],
- [1, 3, 3, 7, 25, 45, 91, 149, 183, 377, 0, 0, 0],
- [1, 3, 3, 13, 27, 37, 109, 175, 5, 425, 0, 0, 0],
- [1, 3, 1, 11, 17, 47, 107, 37, 257, 609, 0, 0, 0],
- [1, 3, 3, 9, 13, 59, 45, 135, 401, 227, 0, 0, 0],
- [1, 1, 3, 11, 17, 21, 15, 189, 451, 19, 0, 0, 0],
- [1, 1, 7, 15, 23, 59, 93, 225, 95, 221, 0, 0, 0],
- [1, 1, 3, 3, 5, 33, 127, 241, 455, 143, 0, 0, 0],
- [1, 3, 3, 13, 17, 51, 3, 63, 49, 581, 0, 0, 0],
- [1, 3, 1, 11, 5, 9, 53, 33, 489, 147, 0, 0, 0],
- [1, 1, 7, 1, 13, 27, 81, 43, 75, 919, 0, 0, 0],
- [1, 1, 5, 11, 11, 13, 79, 13, 459, 127, 0, 0, 0],
- [1, 3, 1, 3, 21, 25, 107, 73, 377, 725, 0, 0, 0],
- [1, 1, 7, 3, 5, 43, 79, 213, 87, 793, 0, 0, 0],
- [1, 1, 7, 9, 11, 3, 87, 57, 463, 289, 0, 0, 0],
- [1, 1, 5, 11, 5, 17, 35, 239, 155, 411, 0, 0, 0],
- [1, 1, 7, 1, 9, 21, 109, 183, 233, 835, 0, 0, 0],
- [1, 1, 5, 7, 31, 59, 73, 117, 115, 921, 0, 0, 0],
- [1, 1, 1, 1, 19, 61, 35, 21, 429, 957, 0, 0, 0],
- [1, 3, 3, 15, 17, 27, 83, 29, 211, 443, 0, 0, 0],
- [1, 1, 1, 15, 9, 47, 107, 115, 419, 349, 0, 0, 0],
- [1, 3, 7, 3, 9, 57, 1, 43, 143, 813, 0, 0, 0],
- [1, 1, 3, 1, 27, 11, 51, 205, 487, 5, 0, 0, 0],
- [1, 1, 7, 9, 21, 17, 7, 223, 195, 105, 0, 0, 0],
- [1, 1, 3, 1, 15, 39, 59, 15, 209, 457, 0, 0, 0],
- [1, 3, 5, 7, 15, 1, 33, 3, 461, 393, 7, 0, 0],
- [1, 1, 7, 13, 1, 63, 115, 159, 193, 539, 2011, 0, 0],
- [1, 3, 3, 11, 1, 21, 43, 51, 157, 101, 1001, 0, 0],
- [1, 1, 1, 3, 29, 59, 111, 101, 193, 197, 49, 0, 0],
- [1, 3, 3, 13, 5, 17, 45, 127, 363, 697, 825, 0, 0],
- [1, 3, 3, 11, 31, 13, 121, 99, 181, 27, 415, 0, 0],
- [1, 3, 3, 7, 11, 31, 105, 239, 271, 343, 1441, 0, 0],
- [1, 1, 1, 3, 17, 3, 125, 171, 445, 515, 383, 0, 0],
- [1, 1, 5, 3, 23, 31, 87, 113, 381, 69, 1581, 0, 0],
- [1, 3, 7, 5, 19, 7, 101, 171, 231, 485, 623, 0, 0],
- [1, 3, 3, 13, 21, 9, 41, 119, 135, 383, 1621, 0, 0],
- [1, 1, 3, 11, 25, 27, 95, 189, 327, 855, 1319, 0, 0],
- [1, 3, 7, 5, 15, 37, 75, 245, 403, 693, 1387, 0, 0],
- [1, 1, 7, 11, 11, 23, 1, 201, 171, 133, 619, 0, 0],
- [1, 3, 7, 1, 5, 31, 57, 27, 197, 87, 839, 0, 0],
- [1, 1, 5, 3, 5, 9, 117, 185, 181, 743, 217, 0, 0],
- [1, 1, 3, 9, 1, 45, 21, 229, 343, 747, 75, 0, 0],
- [1, 3, 1, 7, 19, 43, 27, 105, 113, 475, 1955, 0, 0],
- [1, 1, 7, 15, 19, 31, 67, 153, 313, 87, 505, 0, 0],
- [1, 3, 1, 7, 19, 63, 29, 189, 393, 469, 281, 0, 0],
- [1, 1, 3, 5, 7, 21, 53, 33, 311, 763, 1629, 0, 0],
- [1, 3, 7, 13, 13, 39, 117, 35, 415, 721, 1379, 0, 0],
- [1, 1, 5, 7, 21, 51, 63, 137, 267, 345, 53, 0, 0],
- [1, 3, 3, 9, 17, 27, 1, 77, 247, 479, 1111, 0, 0],
- [1, 1, 3, 13, 17, 7, 77, 97, 425, 965, 1399, 0, 0],
- [1, 1, 3, 15, 25, 53, 89, 17, 233, 527, 301, 0, 0],
- [1, 1, 7, 13, 23, 11, 115, 181, 289, 121, 209, 0, 0],
- [1, 3, 1, 9, 19, 1, 49, 55, 55, 271, 49, 0, 0],
- [1, 3, 1, 7, 23, 59, 127, 197, 39, 353, 155, 0, 0],
- [1, 1, 3, 15, 15, 39, 15, 201, 247, 467, 1647, 0, 0],
- [1, 3, 1, 7, 13, 23, 79, 155, 327, 177, 631, 0, 0],
- [1, 3, 5, 9, 5, 49, 81, 37, 141, 245, 129, 0, 0],
- [1, 1, 7, 5, 19, 23, 29, 197, 5, 627, 1569, 0, 0],
- [1, 3, 1, 11, 25, 7, 65, 137, 189, 113, 335, 0, 0],
- [1, 1, 3, 11, 9, 55, 103, 223, 183, 357, 67, 0, 0],
- [1, 1, 5, 13, 7, 59, 33, 25, 27, 7, 1955, 0, 0],
- [1, 1, 3, 13, 3, 3, 73, 179, 337, 691, 1611, 0, 0],
- [1, 3, 5, 9, 21, 19, 79, 91, 341, 725, 2021, 0, 0],
- [1, 1, 3, 3, 17, 35, 29, 23, 327, 355, 1305, 0, 0],
- [1, 3, 3, 5, 25, 13, 21, 235, 87, 889, 121, 0, 0],
- [1, 1, 7, 13, 1, 9, 113, 53, 429, 635, 37, 0, 0],
- [1, 1, 5, 9, 27, 13, 31, 253, 357, 737, 877, 0, 0],
- [1, 3, 5, 11, 25, 15, 33, 49, 265, 429, 835, 0, 0],
- [1, 1, 3, 15, 27, 23, 107, 181, 251, 545, 1457, 0, 0],
- [1, 1, 3, 11, 25, 9, 95, 249, 437, 925, 669, 0, 0],
- [1, 3, 1, 7, 9, 7, 111, 53, 201, 357, 1405, 0, 0],
- [1, 3, 3, 1, 13, 43, 59, 173, 29, 873, 935, 0, 0],
- [1, 1, 7, 7, 3, 55, 99, 97, 339, 187, 1735, 0, 0],
- [1, 1, 7, 13, 17, 3, 117, 247, 257, 351, 665, 0, 0],
- [1, 3, 7, 3, 25, 19, 63, 67, 377, 677, 551, 0, 0],
- [1, 3, 1, 13, 23, 9, 63, 115, 17, 999, 789, 0, 0],
- [1, 3, 5, 3, 9, 27, 99, 103, 53, 921, 1543, 0, 0],
- [1, 1, 7, 13, 25, 33, 39, 159, 327, 477, 1267, 0, 0],
- [1, 3, 1, 9, 9, 27, 9, 239, 47, 233, 1027, 0, 0],
- [1, 3, 3, 15, 13, 49, 35, 69, 375, 765, 1, 0, 0],
- [1, 3, 1, 7, 17, 23, 63, 173, 393, 495, 1911, 0, 0],
- [1, 1, 1, 13, 17, 47, 125, 217, 369, 81, 163, 0, 0],
- [1, 3, 7, 13, 3, 19, 99, 95, 403, 953, 1929, 0, 0],
- [1, 1, 1, 3, 15, 7, 45, 221, 125, 479, 67, 0, 0],
- [1, 3, 3, 13, 7, 11, 93, 247, 429, 89, 1975, 0, 0],
- [1, 1, 1, 15, 7, 55, 33, 97, 257, 173, 1681, 0, 0],
- [1, 1, 7, 15, 29, 27, 93, 91, 157, 473, 1413, 0, 0],
- [1, 1, 1, 11, 3, 35, 9, 123, 217, 131, 191, 0, 0],
- [1, 3, 5, 9, 19, 5, 105, 223, 85, 961, 1711, 0, 0],
- [1, 1, 3, 13, 29, 5, 75, 213, 267, 411, 1307, 0, 0],
- [1, 1, 5, 9, 29, 55, 51, 129, 117, 291, 401, 0, 0],
- [1, 1, 3, 15, 19, 35, 115, 181, 337, 967, 725, 0, 0],
- [1, 3, 1, 1, 29, 37, 11, 87, 447, 65, 1229, 0, 0],
- [1, 1, 1, 1, 13, 9, 37, 239, 219, 511, 1403, 0, 0],
- [1, 1, 5, 15, 15, 33, 17, 85, 501, 13, 1609, 0, 0],
- [1, 1, 5, 11, 25, 29, 41, 89, 41, 805, 2035, 0, 0],
- [1, 1, 3, 11, 27, 47, 21, 249, 41, 945, 917, 0, 0],
- [1, 1, 3, 7, 1, 25, 43, 141, 193, 369, 921, 0, 0],
- [1, 3, 5, 1, 3, 11, 73, 39, 509, 827, 1789, 0, 0],
- [1, 3, 7, 11, 9, 47, 19, 57, 131, 295, 41, 0, 0],
- [1, 3, 1, 13, 9, 53, 93, 249, 207, 163, 2003, 0, 0],
- [1, 1, 5, 9, 13, 61, 7, 71, 505, 835, 187, 0, 0],
- [1, 1, 3, 13, 31, 59, 95, 101, 421, 259, 67, 0, 0],
- [1, 1, 7, 3, 29, 3, 81, 159, 149, 207, 1635, 0, 0],
- [1, 1, 7, 5, 31, 53, 93, 33, 111, 331, 717, 0, 0],
- [1, 3, 3, 11, 5, 47, 79, 137, 177, 29, 1449, 0, 0],
- [1, 3, 5, 13, 15, 5, 81, 189, 167, 315, 277, 0, 0],
- [1, 3, 3, 9, 29, 19, 55, 71, 223, 999, 1903, 0, 0],
- [1, 1, 3, 9, 1, 59, 9, 253, 291, 133, 1179, 0, 0],
- [1, 3, 1, 13, 19, 5, 51, 205, 91, 967, 363, 0, 0],
- [1, 3, 7, 1, 5, 47, 63, 171, 29, 41, 1211, 0, 0],
- [1, 1, 3, 11, 9, 23, 45, 13, 305, 117, 1231, 0, 0],
- [1, 1, 1, 15, 19, 45, 89, 249, 151, 677, 647, 0, 0],
- [1, 1, 3, 13, 5, 53, 73, 109, 177, 471, 1261, 0, 0],
- [1, 1, 5, 3, 15, 3, 19, 131, 337, 717, 1029, 0, 0],
- [1, 3, 7, 13, 3, 49, 115, 199, 183, 881, 1485, 0, 0],
- [1, 1, 1, 7, 5, 61, 39, 189, 361, 755, 1309, 0, 0],
- [1, 1, 3, 15, 7, 47, 47, 179, 435, 351, 1149, 0, 0],
- [1, 3, 7, 1, 15, 39, 81, 31, 307, 723, 317, 0, 0],
- [1, 1, 1, 15, 17, 29, 39, 99, 507, 259, 1335, 0, 0],
- [1, 3, 5, 3, 17, 17, 5, 113, 77, 879, 171, 0, 0],
- [1, 3, 1, 3, 23, 57, 5, 41, 181, 455, 243, 0, 0],
- [1, 1, 3, 11, 11, 5, 45, 173, 507, 721, 271, 0, 0],
- [1, 1, 1, 7, 9, 17, 53, 23, 315, 289, 1055, 0, 0],
- [1, 3, 5, 13, 23, 31, 65, 189, 145, 149, 1601, 0, 0],
- [1, 3, 3, 7, 19, 23, 49, 197, 423, 199, 1129, 0, 0],
- [1, 1, 1, 7, 3, 41, 17, 3, 71, 805, 1653, 0, 0],
- [1, 1, 7, 9, 17, 39, 105, 135, 103, 987, 205, 0, 0],
- [1, 1, 1, 7, 1, 5, 13, 9, 493, 851, 1463, 0, 0],
- [1, 1, 5, 5, 27, 27, 107, 95, 271, 423, 1681, 0, 0],
- [1, 3, 5, 15, 9, 7, 5, 195, 469, 597, 1621, 0, 0],
- [1, 1, 5, 9, 9, 29, 5, 27, 339, 129, 197, 0, 0],
- [1, 3, 3, 5, 17, 29, 19, 183, 237, 11, 951, 0, 0],
- [1, 3, 7, 5, 13, 33, 73, 1, 437, 733, 573, 0, 0],
- [1, 1, 1, 7, 25, 31, 59, 123, 483, 549, 1697, 0, 0],
- [1, 3, 1, 15, 29, 41, 43, 73, 31, 153, 1265, 0, 0],
- [1, 3, 7, 13, 23, 31, 83, 53, 219, 285, 1321, 0, 0],
- [1, 1, 3, 15, 29, 29, 97, 99, 61, 451, 1805, 0, 0],
- [1, 1, 1, 5, 11, 17, 115, 197, 131, 559, 1235, 0, 0],
- [1, 1, 1, 15, 31, 29, 27, 59, 391, 377, 1853, 0, 0],
- [1, 3, 7, 5, 25, 29, 1, 27, 233, 109, 1307, 0, 0],
- [1, 3, 5, 3, 21, 9, 69, 101, 219, 357, 945, 0, 0],
- [1, 3, 7, 1, 29, 9, 103, 55, 69, 143, 1197, 0, 0],
- [1, 1, 5, 11, 19, 31, 3, 193, 57, 693, 1411, 0, 0],
- [1, 3, 7, 7, 27, 27, 99, 31, 459, 615, 833, 0, 0],
- [1, 3, 7, 1, 31, 53, 103, 61, 225, 677, 273, 0, 0],
- [1, 1, 3, 5, 3, 35, 63, 119, 421, 701, 1517, 0, 0],
- [1, 3, 7, 7, 5, 5, 67, 11, 7, 475, 1747, 0, 0],
- [1, 3, 1, 9, 3, 61, 25, 7, 461, 767, 1095, 0, 0],
- [1, 1, 3, 3, 3, 1, 121, 255, 111, 85, 1345, 0, 0],
- [1, 3, 7, 11, 13, 49, 97, 233, 451, 229, 869, 0, 0],
- [1, 1, 7, 1, 21, 13, 77, 53, 277, 509, 57, 0, 0],
- [1, 3, 3, 15, 9, 57, 13, 157, 185, 547, 1383, 0, 0],
- [1, 3, 5, 1, 29, 29, 83, 193, 193, 151, 221, 0, 0],
- [1, 3, 1, 3, 3, 5, 103, 97, 125, 389, 1713, 0, 0],
- [1, 1, 1, 15, 17, 21, 41, 83, 251, 711, 335, 0, 0],
- [1, 3, 7, 11, 11, 43, 11, 65, 199, 785, 1751, 0, 0],
- [1, 1, 1, 13, 11, 25, 27, 81, 73, 657, 1141, 0, 0],
- [1, 1, 5, 5, 9, 57, 81, 239, 71, 319, 839, 0, 0],
- [1, 3, 5, 13, 21, 49, 37, 167, 7, 509, 523, 0, 0],
- [1, 1, 5, 1, 19, 37, 33, 69, 409, 99, 1861, 0, 0],
- [1, 3, 1, 7, 7, 27, 125, 71, 417, 1007, 1105, 0, 0],
- [1, 1, 5, 1, 17, 11, 71, 109, 149, 775, 389, 0, 0],
- [1, 1, 1, 15, 31, 61, 41, 97, 193, 359, 1177, 0, 0],
- [1, 1, 7, 7, 25, 37, 41, 137, 53, 697, 1877, 0, 0],
- [1, 3, 5, 5, 1, 49, 59, 71, 437, 677, 805, 0, 0],
- [1, 3, 5, 1, 27, 5, 41, 193, 29, 85, 93, 0, 0],
- [1, 3, 7, 1, 5, 63, 87, 189, 467, 497, 1591, 0, 0],
- [1, 1, 1, 15, 15, 63, 123, 115, 229, 105, 423, 0, 0],
- [1, 1, 1, 13, 27, 3, 43, 79, 31, 615, 1835, 0, 0],
- [1, 3, 7, 11, 29, 45, 101, 205, 35, 891, 99, 0, 0],
- [1, 1, 1, 11, 29, 37, 63, 37, 75, 71, 1781, 0, 0],
- [1, 3, 7, 13, 29, 63, 45, 227, 105, 449, 1515, 0, 0],
- [1, 1, 7, 5, 25, 21, 39, 53, 503, 835, 1909, 0, 0],
- [1, 1, 1, 11, 27, 21, 21, 33, 75, 609, 1011, 0, 0],
- [1, 1, 1, 7, 25, 19, 97, 91, 317, 377, 303, 0, 0],
- [1, 1, 3, 9, 3, 27, 15, 229, 401, 693, 385, 0, 0],
- [1, 1, 3, 7, 21, 59, 97, 245, 367, 665, 1635, 0, 0],
- [1, 1, 3, 1, 17, 21, 111, 105, 131, 627, 357, 0, 0],
- [1, 3, 7, 5, 25, 45, 21, 77, 365, 215, 973, 0, 0],
- [1, 1, 7, 3, 13, 23, 49, 229, 441, 911, 1781, 0, 0],
- [1, 1, 5, 9, 15, 13, 13, 161, 433, 503, 1707, 0, 0],
- [1, 3, 3, 5, 17, 15, 17, 103, 93, 729, 1363, 0, 0],
- [1, 1, 7, 5, 13, 3, 79, 93, 377, 131, 1053, 0, 0],
- [1, 3, 3, 11, 23, 43, 91, 13, 405, 19, 649, 0, 0],
- [1, 3, 1, 5, 9, 63, 65, 161, 465, 895, 1469, 0, 0],
- [1, 1, 3, 1, 3, 39, 105, 229, 259, 199, 623, 0, 0],
- [1, 1, 7, 7, 11, 19, 75, 223, 283, 161, 1429, 0, 0],
- [1, 1, 5, 1, 7, 63, 1, 69, 443, 239, 1241, 0, 0],
- [1, 1, 3, 11, 9, 31, 45, 15, 143, 633, 1151, 0, 0],
- [1, 3, 3, 7, 9, 41, 67, 25, 445, 1013, 1055, 0, 0],
- [1, 1, 5, 9, 7, 41, 83, 23, 3, 537, 503, 0, 0],
- [1, 3, 7, 13, 17, 15, 107, 233, 461, 255, 921, 0, 0],
- [1, 1, 1, 15, 7, 43, 125, 93, 329, 23, 3, 0, 0],
- [1, 3, 1, 13, 1, 63, 87, 25, 309, 149, 349, 0, 0],
- [1, 1, 5, 3, 27, 53, 15, 217, 77, 679, 1149, 0, 0],
- [1, 1, 5, 1, 1, 1, 81, 247, 323, 1021, 293, 0, 0],
- [1, 1, 7, 11, 9, 63, 95, 61, 155, 595, 45, 0, 0],
- [1, 1, 7, 13, 5, 31, 105, 75, 347, 199, 303, 0, 0],
- [1, 3, 1, 15, 31, 7, 65, 27, 45, 557, 877, 0, 0],
- [1, 3, 1, 1, 21, 17, 45, 9, 381, 659, 1565, 0, 0],
- [1, 1, 1, 1, 25, 11, 59, 223, 315, 251, 1583, 3915, 0],
- [1, 1, 1, 11, 25, 61, 103, 213, 463, 829, 1001, 97, 0],
- [1, 1, 5, 9, 21, 31, 23, 55, 207, 727, 663, 3047, 0],
- [1, 1, 5, 13, 11, 51, 103, 197, 321, 439, 1535, 937, 0],
- [1, 1, 5, 3, 1, 37, 99, 145, 157, 495, 395, 2897, 0],
- [1, 3, 7, 13, 23, 29, 67, 89, 109, 647, 1141, 953, 0],
- [1, 3, 5, 11, 19, 59, 99, 199, 479, 223, 1481, 127, 0],
- [1, 3, 7, 15, 27, 25, 47, 41, 313, 949, 1797, 1201, 0],
- [1, 1, 1, 13, 15, 63, 117, 201, 345, 625, 643, 3819, 0],
- [1, 1, 1, 9, 3, 59, 71, 5, 167, 87, 1507, 193, 0],
- [1, 3, 3, 9, 5, 47, 89, 149, 439, 481, 465, 2053, 0],
- [1, 3, 5, 9, 23, 15, 35, 35, 307, 85, 2027, 3061, 0],
- [1, 3, 1, 5, 9, 27, 53, 119, 235, 799, 1695, 3759, 0],
- [1, 3, 3, 5, 25, 19, 73, 183, 473, 917, 367, 1553, 0],
- [1, 3, 3, 5, 7, 29, 9, 53, 79, 769, 937, 2007, 0],
- [1, 1, 7, 5, 29, 45, 115, 11, 101, 949, 719, 2493, 0],
- [1, 3, 3, 1, 11, 35, 49, 13, 245, 739, 545, 603, 0],
- [1, 3, 7, 15, 9, 55, 37, 3, 19, 115, 1991, 3343, 0],
- [1, 1, 5, 5, 13, 39, 1, 179, 381, 499, 83, 3751, 0],
- [1, 3, 3, 9, 5, 19, 35, 229, 251, 945, 819, 1059, 0],
- [1, 3, 5, 11, 11, 43, 9, 43, 35, 547, 239, 783, 0],
- [1, 3, 3, 7, 1, 21, 45, 55, 25, 225, 1791, 1789, 0],
- [1, 3, 1, 15, 3, 19, 81, 187, 107, 1015, 1461, 1589, 0],
- [1, 1, 7, 5, 31, 13, 19, 233, 187, 469, 1647, 283, 0],
- [1, 1, 1, 3, 27, 17, 127, 47, 115, 737, 1501, 1093, 0],
- [1, 1, 7, 13, 3, 51, 17, 133, 113, 495, 1161, 3919, 0],
- [1, 1, 7, 5, 17, 37, 17, 91, 321, 353, 1629, 2747, 0],
- [1, 1, 1, 3, 27, 5, 105, 47, 115, 103, 139, 277, 0],
- [1, 1, 1, 11, 11, 33, 89, 71, 445, 17, 1595, 2605, 0],
- [1, 3, 7, 5, 13, 35, 49, 93, 61, 665, 1921, 2169, 0],
- [1, 1, 7, 1, 15, 49, 101, 105, 77, 639, 1267, 2905, 0],
- [1, 1, 7, 11, 29, 25, 7, 145, 293, 525, 1415, 721, 0],
- [1, 3, 5, 13, 15, 45, 37, 45, 405, 75, 509, 4069, 0],
- [1, 1, 5, 9, 1, 1, 33, 255, 13, 447, 347, 233, 0],
- [1, 1, 1, 11, 15, 63, 11, 221, 53, 185, 777, 261, 0],
- [1, 1, 1, 3, 23, 47, 95, 115, 17, 43, 1083, 1137, 0],
- [1, 3, 7, 7, 25, 9, 95, 175, 171, 729, 363, 3993, 0],
- [1, 1, 5, 13, 13, 63, 17, 19, 299, 577, 269, 3619, 0],
- [1, 1, 5, 15, 21, 15, 111, 129, 41, 863, 1015, 2881, 0],
- [1, 1, 7, 1, 15, 25, 105, 5, 79, 735, 1809, 1275, 0],
- [1, 3, 5, 7, 3, 25, 41, 209, 3, 317, 1105, 3865, 0],
- [1, 3, 1, 11, 29, 15, 115, 197, 485, 99, 1429, 1299, 0],
- [1, 3, 1, 1, 29, 41, 5, 57, 331, 17, 1471, 3757, 0],
- [1, 1, 5, 13, 5, 13, 69, 177, 13, 477, 2019, 1193, 0],
- [1, 3, 5, 1, 25, 3, 101, 115, 257, 893, 381, 733, 0],
- [1, 1, 5, 15, 17, 19, 27, 187, 59, 537, 2025, 993, 0],
- [1, 1, 5, 1, 11, 51, 27, 119, 201, 519, 1223, 1153, 0],
- [1, 3, 5, 9, 7, 49, 101, 77, 497, 1017, 827, 2945, 0],
- [1, 3, 5, 7, 15, 37, 103, 211, 81, 375, 1733, 3163, 0],
- [1, 3, 1, 3, 5, 25, 53, 111, 451, 297, 887, 3179, 0],
- [1, 1, 3, 9, 21, 49, 9, 33, 199, 325, 1321, 437, 0],
- [1, 3, 1, 11, 7, 13, 21, 113, 171, 999, 803, 271, 0],
- [1, 3, 5, 1, 31, 53, 43, 23, 81, 353, 1951, 3493, 0],
- [1, 1, 7, 9, 13, 47, 79, 87, 253, 343, 1297, 3971, 0],
- [1, 3, 3, 13, 11, 23, 91, 137, 365, 729, 1995, 1005, 0],
- [1, 1, 3, 13, 23, 35, 65, 41, 75, 135, 833, 2615, 0],
- [1, 3, 5, 3, 5, 29, 117, 7, 451, 489, 1107, 2253, 0],
- [1, 3, 7, 11, 7, 33, 87, 83, 149, 859, 1135, 1131, 0],
- [1, 1, 3, 7, 23, 21, 125, 43, 483, 267, 1181, 585, 0],
- [1, 3, 7, 9, 27, 35, 55, 121, 81, 141, 1251, 2775, 0],
- [1, 3, 1, 1, 21, 23, 45, 145, 453, 831, 983, 2171, 0],
- [1, 3, 7, 7, 29, 3, 63, 5, 469, 141, 1389, 2383, 0],
- [1, 1, 7, 15, 15, 43, 85, 219, 485, 893, 1565, 2937, 0],
- [1, 1, 1, 9, 7, 31, 83, 27, 305, 249, 273, 2447, 0],
- [1, 3, 3, 1, 27, 63, 97, 11, 163, 807, 137, 1745, 0],
- [1, 3, 5, 5, 27, 9, 45, 111, 401, 53, 71, 663, 0],
- [1, 1, 1, 13, 19, 1, 83, 207, 15, 613, 735, 1515, 0],
- [1, 3, 5, 5, 7, 61, 87, 55, 91, 131, 1005, 3767, 0],
- [1, 1, 5, 11, 15, 43, 113, 97, 3, 547, 933, 2709, 0],
- [1, 3, 3, 3, 27, 3, 93, 63, 129, 977, 67, 1767, 0],
- [1, 1, 7, 9, 27, 11, 95, 229, 35, 131, 1471, 3185, 0],
- [1, 1, 3, 15, 19, 55, 5, 53, 239, 999, 551, 3017, 0],
- [1, 1, 7, 11, 19, 11, 17, 33, 355, 175, 457, 2815, 0],
- [1, 3, 7, 13, 9, 35, 77, 149, 211, 31, 1667, 1829, 0],
- [1, 3, 5, 5, 15, 1, 77, 23, 387, 341, 1729, 87, 0],
- [1, 3, 7, 1, 1, 63, 127, 187, 101, 739, 919, 3341, 0],
- [1, 3, 5, 7, 3, 35, 123, 153, 299, 467, 285, 793, 0],
- [1, 1, 7, 7, 29, 49, 45, 91, 67, 675, 1629, 2627, 0],
- [1, 3, 1, 5, 29, 19, 81, 193, 375, 241, 1815, 2169, 0],
- [1, 1, 1, 13, 5, 45, 85, 183, 405, 645, 653, 1875, 0],
- [1, 1, 5, 7, 27, 9, 121, 59, 357, 247, 1919, 3745, 0],
- [1, 3, 3, 7, 31, 57, 119, 211, 267, 391, 1039, 367, 0],
- [1, 1, 5, 9, 9, 51, 27, 93, 363, 583, 531, 3783, 0],
- [1, 3, 1, 5, 1, 1, 85, 139, 79, 183, 393, 783, 0],
- [1, 1, 5, 11, 7, 47, 41, 59, 83, 973, 1411, 827, 0],
- [1, 1, 3, 11, 3, 41, 49, 179, 437, 433, 359, 3253, 0],
- [1, 1, 7, 1, 19, 9, 15, 163, 457, 367, 221, 2639, 0],
- [1, 3, 1, 1, 19, 11, 107, 209, 39, 131, 699, 2955, 0],
- [1, 1, 5, 15, 29, 37, 21, 77, 97, 467, 1485, 3539, 0],
- [1, 3, 7, 3, 9, 19, 51, 39, 473, 571, 471, 1579, 0],
- [1, 1, 7, 13, 3, 55, 119, 111, 289, 309, 1357, 2109, 0],
- [1, 3, 3, 9, 21, 23, 11, 79, 179, 385, 1715, 379, 0],
- [1, 1, 5, 13, 31, 55, 87, 229, 57, 977, 595, 2939, 0],
- [1, 3, 1, 9, 29, 55, 101, 85, 23, 111, 1677, 3019, 0],
- [1, 3, 3, 9, 25, 13, 115, 237, 49, 917, 153, 1999, 0],
- [1, 3, 5, 11, 1, 7, 63, 199, 79, 935, 1903, 2253, 0],
- [1, 3, 1, 5, 3, 47, 63, 137, 71, 473, 1281, 2911, 0],
- [1, 3, 5, 5, 9, 37, 37, 147, 341, 345, 215, 3733, 0],
- [1, 3, 3, 13, 27, 11, 121, 25, 287, 411, 781, 481, 0],
- [1, 3, 3, 15, 5, 43, 109, 73, 95, 313, 543, 1767, 0],
- [1, 3, 3, 3, 27, 17, 7, 121, 229, 97, 293, 1055, 0],
- [1, 1, 7, 9, 25, 3, 43, 129, 271, 149, 1807, 4019, 0],
- [1, 3, 3, 15, 21, 25, 69, 83, 475, 959, 965, 4085, 0],
- [1, 3, 5, 3, 11, 19, 19, 87, 49, 841, 1695, 105, 0],
- [1, 3, 1, 11, 29, 55, 77, 93, 241, 839, 443, 1829, 0],
- [1, 3, 3, 11, 31, 59, 49, 205, 261, 669, 1985, 2097, 0],
- [1, 3, 7, 15, 27, 37, 71, 167, 495, 431, 321, 2379, 0],
- [1, 1, 7, 15, 21, 33, 59, 53, 353, 51, 879, 1567, 0],
- [1, 3, 3, 3, 29, 43, 35, 107, 381, 41, 1227, 2713, 0],
- [1, 1, 7, 11, 17, 1, 7, 229, 13, 301, 1915, 737, 0],
- [1, 3, 5, 15, 9, 5, 13, 213, 291, 247, 839, 3423, 0],
- [1, 3, 3, 15, 17, 21, 55, 95, 37, 1015, 1945, 3941, 0],
- [1, 3, 3, 3, 13, 5, 101, 219, 251, 377, 1993, 2659, 0],
- [1, 1, 1, 1, 11, 63, 127, 109, 105, 329, 1165, 3961, 0],
- [1, 3, 7, 3, 25, 49, 103, 175, 399, 945, 51, 1755, 0],
- [1, 1, 5, 1, 15, 61, 85, 13, 81, 269, 557, 3613, 0],
- [1, 3, 1, 3, 21, 21, 109, 209, 89, 67, 723, 1937, 0],
- [1, 1, 1, 3, 11, 51, 29, 97, 265, 979, 1491, 1559, 0],
- [1, 3, 3, 1, 19, 15, 61, 61, 507, 581, 817, 2287, 0],
- [1, 3, 7, 3, 31, 19, 67, 147, 205, 643, 1237, 2743, 0],
- [1, 1, 1, 13, 3, 43, 21, 19, 145, 823, 947, 67, 0],
- [1, 3, 7, 1, 19, 47, 111, 13, 331, 557, 1215, 2859, 0],
- [1, 3, 1, 11, 5, 17, 67, 123, 129, 91, 1911, 325, 0],
- [1, 3, 7, 5, 3, 9, 23, 73, 119, 405, 1225, 2601, 0],
- [1, 3, 3, 15, 3, 53, 57, 35, 503, 117, 1965, 1149, 0],
- [1, 3, 7, 7, 9, 45, 75, 141, 249, 801, 1889, 3259, 0],
- [1, 3, 3, 15, 13, 11, 71, 81, 1, 509, 1503, 2403, 0],
- [1, 3, 5, 9, 13, 51, 101, 19, 289, 347, 1177, 3947, 0],
- [1, 3, 7, 1, 3, 25, 123, 171, 463, 893, 73, 2011, 0],
- [1, 3, 3, 7, 29, 11, 41, 255, 163, 303, 1767, 175, 0],
- [1, 1, 5, 1, 7, 25, 107, 111, 443, 227, 303, 3389, 0],
- [1, 1, 3, 9, 5, 47, 101, 107, 63, 783, 177, 3915, 0],
- [1, 1, 1, 11, 9, 47, 107, 233, 123, 555, 1897, 1315, 0],
- [1, 1, 1, 15, 23, 1, 125, 113, 361, 867, 1401, 2447, 0],
- [1, 1, 1, 1, 13, 43, 27, 133, 261, 99, 321, 141, 0],
- [1, 1, 5, 13, 21, 29, 47, 89, 49, 703, 921, 359, 0],
- [1, 3, 7, 9, 23, 17, 119, 9, 429, 111, 217, 3609, 0],
- [1, 3, 7, 13, 21, 31, 41, 231, 137, 797, 1779, 3933, 0],
- [1, 1, 3, 11, 31, 15, 19, 95, 355, 873, 327, 729, 0],
- [1, 1, 3, 7, 11, 59, 127, 69, 175, 541, 1889, 2051, 0],
- [1, 3, 1, 3, 7, 27, 33, 33, 507, 919, 333, 1755, 0],
- [1, 3, 1, 7, 7, 63, 31, 1, 59, 513, 615, 2149, 0],
- [1, 1, 1, 3, 3, 11, 109, 253, 277, 343, 1665, 2107, 0],
- [1, 1, 5, 13, 23, 41, 7, 219, 391, 319, 1825, 1741, 0],
- [1, 1, 5, 7, 1, 51, 91, 253, 25, 517, 1639, 1051, 2319],
- [1, 3, 7, 9, 23, 29, 91, 247, 185, 135, 237, 3681, 653],
- [1, 3, 3, 7, 5, 7, 39, 129, 381, 871, 1205, 471, 1379],
- [1, 1, 1, 7, 9, 27, 125, 11, 197, 917, 361, 1055, 1675],
- [1, 1, 1, 3, 17, 63, 105, 251, 39, 285, 129, 845, 1951],
- [1, 3, 3, 3, 21, 31, 47, 221, 5, 663, 1655, 257, 7075],
- [1, 3, 3, 9, 1, 43, 125, 153, 429, 301, 983, 1559, 2087],
- [1, 3, 7, 9, 17, 3, 123, 35, 119, 15, 1089, 1061, 7147],
- [1, 3, 3, 7, 29, 29, 91, 103, 247, 763, 1171, 2803, 1427],
- [1, 1, 3, 5, 7, 39, 9, 239, 177, 89, 401, 2219, 893],
- [1, 1, 5, 11, 5, 3, 103, 7, 329, 323, 677, 1315, 171],
- [1, 3, 1, 13, 17, 59, 45, 27, 465, 757, 643, 1369, 2019],
- [1, 1, 3, 13, 13, 59, 23, 235, 421, 317, 749, 3211, 7235],
- [1, 3, 7, 7, 25, 1, 117, 181, 271, 807, 303, 4027, 5697],
- [1, 3, 3, 7, 17, 53, 9, 5, 467, 309, 1407, 105, 3615],
- [1, 1, 3, 15, 9, 63, 125, 207, 151, 1013, 1873, 11, 1961],
- [1, 3, 7, 9, 19, 23, 73, 53, 45, 345, 1579, 1077, 7517],
- [1, 3, 3, 5, 9, 63, 11, 149, 429, 499, 1491, 2857, 6849],
- [1, 1, 5, 5, 5, 47, 37, 155, 137, 279, 1393, 337, 2893],
- [1, 1, 7, 3, 7, 51, 61, 225, 471, 711, 1247, 3553, 1883],
- [1, 1, 5, 3, 21, 23, 79, 165, 11, 915, 789, 3503, 2863],
- [1, 3, 7, 13, 19, 61, 21, 137, 17, 411, 763, 3917, 2173],
- [1, 3, 7, 3, 13, 39, 5, 155, 409, 281, 49, 2665, 4543],
- [1, 3, 3, 9, 9, 47, 47, 201, 347, 193, 5, 3823, 73],
- [1, 1, 3, 3, 7, 21, 117, 97, 199, 739, 1607, 3403, 381],
- [1, 1, 5, 1, 3, 39, 67, 245, 463, 365, 1891, 3711, 3893],
- [1, 3, 1, 11, 9, 15, 53, 203, 177, 315, 735, 2085, 6045],
- [1, 3, 3, 1, 3, 3, 85, 47, 11, 375, 1557, 1103, 1643],
- [1, 3, 5, 3, 15, 9, 33, 39, 51, 809, 1909, 1641, 7669],
- [1, 3, 3, 11, 31, 57, 81, 35, 361, 469, 1765, 701, 1027],
- [1, 3, 1, 15, 29, 61, 121, 105, 95, 487, 1777, 4095, 1549],
- [1, 1, 3, 11, 29, 39, 47, 239, 497, 621, 1127, 2883, 3983],
- [1, 1, 5, 11, 25, 37, 61, 49, 163, 857, 813, 1435, 1985],
- [1, 1, 1, 11, 13, 21, 51, 15, 351, 975, 695, 653, 6589],
- [1, 3, 1, 9, 9, 51, 127, 253, 127, 537, 97, 2363, 7497],
- [1, 1, 3, 13, 21, 1, 29, 7, 395, 939, 731, 1597, 2745],
- [1, 3, 7, 7, 9, 23, 65, 237, 511, 585, 1503, 767, 2375],
- [1, 3, 7, 9, 31, 43, 45, 213, 327, 129, 1751, 869, 7047],
- [1, 1, 1, 15, 7, 27, 41, 55, 353, 625, 333, 1825, 1117],
- [1, 3, 5, 9, 15, 25, 95, 87, 49, 447, 769, 1117, 1171],
- [1, 3, 1, 11, 5, 11, 57, 199, 105, 129, 865, 1297, 1975],
- [1, 3, 3, 1, 31, 13, 73, 27, 151, 1017, 693, 501, 5199],
- [1, 3, 7, 3, 7, 21, 33, 175, 321, 133, 377, 505, 3915],
- [1, 1, 3, 3, 15, 43, 117, 49, 331, 83, 1919, 149, 3695],
- [1, 1, 7, 9, 27, 7, 61, 41, 329, 3, 957, 873, 8113],
- [1, 3, 3, 7, 25, 11, 111, 229, 509, 415, 1359, 2673, 4303],
- [1, 1, 5, 15, 19, 33, 59, 85, 107, 661, 1627, 551, 3773],
- [1, 1, 1, 13, 9, 55, 123, 3, 109, 53, 1039, 1499, 7705],
- [1, 3, 7, 13, 9, 1, 65, 149, 303, 115, 1783, 2793, 6855],
- [1, 1, 1, 7, 25, 37, 47, 179, 467, 903, 1065, 3277, 1675],
- [1, 3, 1, 15, 25, 35, 105, 129, 287, 49, 1665, 2143, 2245],
- [1, 1, 3, 9, 23, 27, 23, 185, 161, 79, 1917, 3663, 2817],
- [1, 3, 5, 13, 1, 61, 29, 249, 45, 55, 1947, 533, 1719],
- [1, 1, 3, 9, 9, 39, 107, 197, 385, 385, 991, 3991, 569],
- [1, 3, 7, 15, 7, 5, 37, 15, 289, 261, 1997, 575, 1021],
- [1, 3, 1, 13, 11, 19, 81, 97, 363, 345, 841, 1877, 2077],
- [1, 1, 5, 15, 15, 61, 67, 197, 331, 297, 459, 1009, 5945],
- [1, 1, 5, 9, 19, 61, 29, 139, 265, 199, 221, 3929, 1833],
- [1, 3, 1, 13, 15, 57, 115, 203, 407, 385, 327, 473, 2631],
- [1, 3, 1, 1, 27, 59, 119, 63, 37, 617, 1595, 3009, 4851],
- [1, 1, 3, 11, 17, 21, 75, 33, 433, 25, 1881, 2595, 6371],
- [1, 3, 1, 7, 11, 59, 73, 251, 315, 515, 1269, 3249, 833],
- [1, 3, 3, 11, 11, 61, 99, 217, 343, 275, 1007, 675, 7987],
- [1, 1, 3, 3, 31, 57, 103, 199, 63, 849, 129, 3593, 331],
- [1, 3, 7, 13, 13, 25, 7, 199, 51, 401, 1413, 2453, 1899],
- [1, 3, 1, 5, 25, 55, 57, 99, 185, 471, 475, 1567, 8093],
- [1, 1, 7, 1, 25, 27, 45, 249, 71, 377, 1105, 973, 6719],
- [1, 1, 3, 7, 9, 31, 61, 33, 27, 661, 791, 595, 6903],
- [1, 3, 1, 15, 7, 41, 95, 229, 267, 535, 1983, 1335, 5903],
- [1, 1, 7, 3, 13, 33, 49, 177, 503, 505, 1359, 1715, 5657],
- [1, 3, 3, 13, 29, 63, 101, 13, 239, 939, 503, 589, 5007],
- [1, 3, 1, 7, 19, 19, 101, 209, 293, 465, 691, 85, 2689],
- [1, 1, 7, 13, 5, 57, 35, 147, 245, 225, 659, 2265, 6637],
- [1, 1, 3, 13, 19, 35, 47, 97, 281, 929, 691, 3069, 2675],
- [1, 3, 5, 11, 31, 13, 119, 31, 297, 219, 343, 461, 1645],
- [1, 1, 3, 3, 25, 63, 39, 125, 75, 955, 1375, 1659, 1819],
- [1, 3, 5, 5, 13, 35, 67, 177, 461, 659, 1919, 2627, 689],
- [1, 1, 7, 3, 25, 17, 31, 137, 371, 441, 263, 1307, 6709],
- [1, 3, 3, 13, 15, 11, 103, 187, 129, 117, 1373, 1731, 7717],
- [1, 1, 3, 11, 5, 11, 7, 11, 189, 527, 603, 1501, 6295],
- [1, 1, 3, 9, 9, 49, 61, 91, 189, 427, 1383, 1699, 7013],
- [1, 3, 5, 9, 29, 41, 127, 223, 339, 515, 297, 3545, 7695],
- [1, 3, 1, 3, 31, 55, 87, 29, 287, 287, 781, 3803, 3705],
- [1, 1, 7, 11, 9, 5, 3, 169, 111, 191, 145, 2157, 7069],
- [1, 1, 7, 11, 29, 45, 35, 231, 111, 33, 285, 453, 2621],
- [1, 1, 1, 7, 27, 17, 29, 59, 379, 389, 767, 2813, 3631],
- [1, 3, 3, 9, 25, 35, 73, 31, 93, 197, 1739, 2047, 6571],
- [1, 3, 1, 13, 27, 5, 95, 163, 27, 825, 1715, 2999, 6259],
- [1, 1, 3, 11, 11, 31, 103, 41, 185, 63, 715, 3841, 7261],
- [1, 3, 7, 7, 17, 31, 71, 57, 347, 417, 317, 2361, 3397],
- [1, 1, 7, 15, 5, 37, 75, 87, 337, 949, 1333, 1079, 7645],
- [1, 1, 1, 13, 17, 17, 51, 247, 247, 35, 85, 573, 1115],
- [1, 3, 3, 7, 3, 45, 87, 25, 507, 571, 831, 69, 4753],
- [1, 3, 7, 5, 23, 51, 57, 127, 161, 9, 1615, 1363, 2047],
- [1, 1, 3, 3, 15, 1, 97, 101, 231, 131, 81, 1597, 7579],
- [1, 1, 1, 1, 9, 39, 11, 207, 43, 609, 1667, 3427, 2271],
- [1, 3, 5, 5, 9, 49, 105, 187, 499, 439, 1467, 2899, 5403],
- [1, 1, 3, 15, 17, 55, 87, 73, 73, 95, 1457, 2771, 4911],
- [1, 3, 1, 15, 17, 19, 41, 61, 327, 19, 1453, 1327, 7629],
- [1, 1, 1, 3, 31, 41, 73, 105, 263, 569, 1825, 1117, 4225],
- [1, 1, 1, 11, 11, 13, 109, 27, 331, 893, 109, 1523, 1209],
- [1, 1, 5, 1, 19, 5, 69, 91, 249, 451, 387, 3521, 6955],
- [1, 1, 3, 7, 25, 51, 35, 171, 493, 397, 1207, 2393, 6951],
- [1, 1, 3, 3, 13, 5, 121, 243, 37, 971, 2039, 2537, 1829],
- [1, 3, 7, 15, 23, 49, 39, 33, 25, 801, 213, 1979, 5579],
- [1, 1, 1, 11, 15, 1, 111, 3, 115, 125, 1351, 3179, 5231],
- [1, 1, 5, 5, 25, 21, 1, 1, 3, 471, 1329, 683, 1783],
- [1, 1, 3, 5, 21, 13, 77, 21, 167, 187, 1173, 2453, 4285],
- [1, 1, 5, 3, 31, 17, 39, 229, 197, 257, 57, 453, 7425],
- [1, 3, 1, 5, 19, 59, 47, 93, 127, 67, 1769, 1227, 599],
- [1, 1, 3, 5, 3, 51, 53, 71, 357, 949, 951, 779, 5785],
- [1, 3, 1, 1, 11, 11, 91, 61, 497, 621, 183, 671, 3275],
- [1, 1, 3, 15, 25, 3, 3, 37, 103, 453, 23, 3483, 5643],
- [1, 1, 1, 5, 7, 61, 17, 183, 125, 411, 451, 2135, 2263],
- [1, 3, 5, 1, 15, 1, 51, 65, 191, 621, 1155, 3139, 657],
- [1, 3, 7, 5, 19, 33, 83, 211, 165, 955, 1551, 3381, 6769],
- [1, 1, 7, 3, 7, 37, 39, 53, 55, 309, 2037, 3945, 6261],
- [1, 1, 1, 7, 5, 33, 125, 11, 101, 783, 811, 57, 1251],
- [1, 3, 1, 5, 3, 61, 85, 151, 95, 893, 635, 1541, 3249],
- [1, 1, 5, 11, 13, 25, 111, 165, 79, 597, 1671, 3405, 4447],
- [1, 3, 3, 3, 13, 27, 21, 47, 351, 377, 1451, 3381, 4111],
- [1, 1, 1, 13, 1, 59, 69, 5, 341, 753, 863, 2371, 3991],
- [1, 3, 5, 9, 23, 7, 85, 129, 43, 145, 1499, 2879, 1215],
- [1, 3, 1, 13, 5, 49, 29, 79, 125, 637, 1673, 1985, 131],
- [1, 3, 1, 15, 25, 13, 55, 101, 135, 941, 363, 987, 4397],
- [1, 1, 7, 5, 11, 63, 11, 147, 173, 593, 1029, 3017, 3487],
- [1, 3, 7, 3, 25, 3, 117, 169, 289, 317, 1077, 3031, 7585],
- [1, 3, 3, 5, 15, 33, 1, 181, 373, 555, 1525, 3839, 5565],
- [1, 3, 5, 9, 13, 3, 47, 19, 133, 375, 277, 1401, 7199],
- [1, 1, 5, 5, 21, 15, 17, 95, 421, 575, 1023, 3749, 3573],
- [1, 1, 1, 3, 11, 9, 65, 77, 241, 175, 655, 2977, 7105],
- [1, 3, 7, 11, 23, 13, 63, 139, 281, 403, 665, 681, 7409],
- [1, 3, 1, 1, 29, 35, 47, 197, 213, 571, 1869, 1175, 1671],
- [1, 3, 5, 13, 5, 39, 117, 219, 177, 555, 1255, 1519, 949],
- [1, 1, 1, 9, 17, 11, 17, 97, 363, 109, 965, 3355, 3889],
- [1, 1, 1, 15, 27, 59, 115, 239, 151, 377, 277, 907, 5971],
- [1, 1, 3, 3, 9, 59, 51, 183, 227, 931, 1601, 117, 3333],
- [1, 1, 1, 5, 19, 1, 25, 143, 145, 499, 329, 771, 225],
- [1, 3, 5, 11, 15, 57, 33, 9, 363, 649, 1603, 3741, 3647],
- [1, 1, 7, 9, 5, 11, 123, 13, 239, 653, 1901, 3337, 5403],
- [1, 3, 5, 1, 29, 5, 123, 209, 431, 329, 395, 1743, 3409],
- [1, 1, 7, 3, 23, 57, 83, 23, 81, 279, 65, 1227, 7459],
- [1, 3, 7, 15, 19, 13, 51, 215, 397, 271, 1307, 3335, 6879],
- [1, 1, 1, 9, 1, 31, 113, 53, 241, 647, 2029, 2755, 5789],
- [1, 1, 5, 9, 27, 13, 95, 137, 67, 721, 21, 1909, 6567],
- [1, 3, 1, 9, 3, 11, 121, 203, 291, 665, 1321, 3603, 5581],
- [1, 3, 1, 11, 23, 55, 51, 19, 255, 429, 543, 2397, 4919],
- [1, 1, 3, 7, 21, 45, 91, 151, 405, 957, 1569, 653, 1927],
- [1, 1, 5, 5, 19, 9, 109, 171, 421, 803, 1185, 87, 4407],
- [1, 1, 1, 13, 27, 55, 43, 133, 399, 767, 1905, 2025, 8085],
- [1, 3, 5, 1, 11, 55, 55, 219, 75, 425, 1701, 2617, 4691],
- [1, 3, 5, 15, 17, 19, 35, 231, 399, 477, 413, 3257, 611],
- [1, 1, 3, 3, 13, 25, 55, 3, 105, 995, 2041, 287, 3005],
- [1, 3, 1, 13, 27, 41, 87, 15, 329, 105, 1697, 3051, 591],
- [1, 1, 3, 9, 11, 23, 33, 253, 41, 495, 725, 3809, 753],
- [1, 3, 1, 13, 31, 45, 37, 225, 425, 575, 1417, 897, 589],
- [1, 1, 5, 5, 23, 29, 5, 33, 7, 687, 1847, 2215, 171],
- [1, 1, 5, 1, 5, 63, 3, 111, 283, 385, 411, 63, 5729],
- [1, 1, 3, 5, 9, 59, 45, 183, 375, 227, 211, 2043, 5891],
- [1, 1, 3, 1, 21, 27, 21, 213, 475, 923, 915, 1757, 1033],
- [1, 1, 3, 13, 31, 39, 105, 169, 427, 563, 1891, 3671, 3049],
- [1, 1, 3, 13, 29, 21, 127, 119, 277, 723, 17, 297, 6567],
- [1, 3, 1, 7, 11, 37, 35, 111, 209, 481, 1877, 3131, 5257],
- [1, 1, 1, 7, 21, 7, 17, 15, 411, 717, 1699, 1305, 8003],
- [1, 3, 3, 1, 17, 61, 35, 201, 3, 111, 687, 293, 1757],
- [1, 3, 1, 9, 15, 49, 37, 123, 137, 633, 1089, 3865, 4489],
- [1, 1, 3, 5, 7, 35, 97, 121, 195, 113, 1973, 3173, 4923],
- [1, 3, 5, 11, 15, 39, 97, 225, 289, 369, 1809, 3397, 6379],
- [1, 3, 5, 9, 7, 9, 21, 113, 509, 955, 851, 2269, 5171],
- [1, 3, 7, 11, 9, 29, 77, 113, 121, 253, 1495, 3673, 1757],
- [1, 1, 5, 13, 21, 7, 123, 225, 55, 321, 1257, 717, 689],
- [1, 3, 5, 3, 27, 25, 17, 161, 147, 409, 63, 3041, 3081],
- [1, 1, 7, 15, 25, 23, 89, 165, 275, 909, 1323, 3341, 1389],
- [1, 1, 5, 15, 29, 57, 53, 1, 251, 367, 1307, 3595, 4113],
- [1, 3, 7, 13, 11, 5, 105, 139, 19, 33, 609, 3819, 455],
- [1, 3, 1, 15, 3, 19, 75, 55, 129, 967, 881, 2871, 2761],
- [1, 1, 3, 7, 21, 15, 25, 3, 285, 453, 1543, 3973, 847],
- [1, 1, 7, 5, 13, 33, 125, 93, 415, 863, 177, 1129, 7575],
- [1, 3, 7, 7, 23, 49, 13, 217, 487, 449, 617, 513, 5829],
- [1, 3, 3, 9, 19, 37, 47, 193, 491, 539, 1505, 871, 633],
- [1, 1, 5, 7, 27, 25, 21, 97, 193, 781, 1747, 1485, 6629],
- [1, 1, 5, 9, 17, 17, 125, 29, 219, 911, 1537, 3977, 1103],
- [1, 1, 7, 9, 29, 45, 23, 69, 403, 113, 925, 2473, 7635],
- [1, 3, 5, 9, 25, 29, 55, 231, 23, 7, 183, 1171, 803],
- [1, 1, 5, 11, 17, 15, 63, 161, 97, 219, 77, 1143, 6175],
- [1, 3, 3, 9, 9, 25, 61, 93, 65, 725, 1723, 3063, 6587],
- [1, 3, 3, 3, 1, 3, 5, 69, 285, 1015, 1877, 3547, 2711],
- [1, 1, 3, 11, 19, 3, 17, 143, 75, 971, 1703, 2183, 3879],
- [1, 1, 1, 15, 23, 49, 93, 137, 21, 1021, 397, 3993, 67],
- [1, 3, 7, 13, 5, 11, 57, 9, 373, 525, 459, 133, 1179],
- [1, 1, 1, 13, 23, 39, 121, 87, 261, 785, 521, 2529, 4761],
- [1, 1, 5, 5, 1, 15, 69, 183, 339, 873, 257, 2699, 7281],
- [1, 3, 5, 9, 17, 19, 73, 113, 239, 191, 1177, 233, 1557],
- [1, 1, 5, 15, 17, 57, 93, 183, 495, 893, 389, 2355, 3379],
- [1, 3, 3, 1, 13, 39, 121, 73, 415, 297, 1947, 231, 2459],
- [1, 1, 3, 1, 27, 15, 105, 215, 333, 507, 1553, 3241, 4273],
- [1, 1, 5, 9, 23, 11, 75, 137, 107, 215, 1583, 611, 4127],
- [1, 1, 1, 5, 7, 3, 91, 89, 435, 21, 1831, 1309, 7147],
- [1, 3, 3, 13, 7, 57, 67, 251, 297, 153, 261, 3829, 35],
- [1, 3, 1, 3, 11, 31, 95, 163, 213, 645, 485, 1839, 3549],
- [1, 3, 3, 13, 13, 55, 75, 41, 149, 913, 289, 1495, 395],
- [1, 3, 3, 15, 17, 61, 9, 227, 463, 755, 1281, 301, 3735],
- [1, 1, 3, 3, 13, 19, 69, 145, 199, 371, 1543, 1169, 5787],
- [1, 1, 7, 1, 11, 5, 97, 57, 323, 881, 1591, 1613, 4179],
- [1, 3, 1, 3, 21, 41, 99, 81, 45, 113, 1123, 2673, 5889],
- [1, 3, 7, 11, 13, 35, 93, 57, 19, 903, 573, 243, 5057],
- [1, 1, 7, 13, 23, 59, 11, 11, 301, 225, 821, 3601, 7473],
- [1, 1, 3, 1, 1, 61, 53, 135, 121, 49, 1065, 3669, 4713],
- [1, 1, 7, 15, 27, 39, 19, 145, 499, 587, 1933, 2813, 2133],
- [1, 1, 1, 9, 13, 41, 73, 161, 187, 201, 1373, 2671, 2897],
- [1, 3, 1, 9, 9, 53, 5, 175, 229, 927, 2005, 2679, 1841],
- [1, 1, 5, 3, 7, 53, 33, 159, 63, 429, 905, 3463, 2125],
- [1, 1, 7, 1, 1, 63, 79, 25, 425, 599, 207, 2477, 1029],
- [1, 3, 1, 9, 27, 31, 107, 55, 99, 513, 173, 1795, 1695],
- [1, 3, 7, 1, 29, 9, 65, 167, 281, 97, 1573, 617, 6523],
- [1, 3, 1, 9, 5, 59, 69, 157, 35, 319, 1597, 2317, 1143],
- [1, 1, 7, 1, 13, 13, 79, 211, 125, 331, 573, 1855, 5105],
- [1, 1, 7, 13, 25, 35, 125, 97, 349, 833, 1883, 1057, 7133],
- [1, 3, 1, 11, 21, 55, 25, 247, 87, 325, 1795, 1703, 3351],
- [1, 3, 3, 15, 3, 41, 93, 249, 101, 887, 1499, 1761, 2775],
- [1, 1, 7, 7, 31, 49, 55, 23, 59, 139, 1743, 2515, 3971],
- [1, 3, 5, 11, 15, 5, 61, 129, 195, 927, 553, 801, 4503],
- [1, 3, 1, 15, 13, 41, 17, 159, 511, 399, 335, 1205, 7589],
- [1, 1, 3, 13, 3, 25, 117, 71, 355, 163, 333, 1311, 5155],
- [1, 1, 5, 15, 19, 27, 69, 197, 73, 307, 1645, 473, 4305],
- [1, 3, 5, 1, 13, 43, 97, 127, 263, 803, 791, 3963, 1641],
- [1, 3, 5, 9, 1, 5, 87, 141, 243, 169, 871, 697, 4717],
- [1, 3, 1, 9, 27, 5, 111, 219, 101, 1019, 1157, 1221, 2427],
- [1, 3, 1, 7, 15, 43, 37, 5, 165, 869, 969, 251, 5617],
- [1, 3, 7, 3, 17, 5, 93, 233, 141, 537, 557, 381, 1267],
- [1, 3, 1, 5, 1, 5, 59, 131, 11, 907, 141, 3887, 399],
- [1, 3, 7, 11, 3, 17, 79, 217, 389, 479, 223, 1761, 5831],
- [1, 1, 1, 7, 13, 5, 95, 101, 219, 335, 1129, 3093, 4305],
- [1, 3, 7, 3, 13, 15, 53, 131, 187, 697, 1685, 3721, 4241],
- [1, 3, 7, 9, 13, 27, 115, 33, 449, 479, 423, 2079, 3395],
- [1, 1, 3, 5, 31, 29, 53, 157, 447, 353, 1069, 4085, 3045],
- [1, 3, 1, 15, 29, 17, 85, 173, 393, 769, 391, 379, 4899],
- [1, 1, 1, 7, 27, 9, 85, 69, 477, 787, 99, 3601, 1713],
- [1, 3, 5, 5, 7, 3, 65, 207, 305, 1023, 95, 3845, 171],
- [1, 1, 1, 3, 7, 55, 59, 239, 221, 855, 1847, 433, 411],
- [1, 1, 5, 13, 21, 31, 23, 81, 51, 493, 531, 1781, 7099],
- [1, 3, 1, 7, 29, 1, 75, 205, 355, 883, 1859, 29, 5473],
- [1, 3, 5, 1, 15, 45, 21, 11, 209, 521, 1833, 1897, 5209],
- [1, 1, 3, 1, 17, 45, 67, 41, 499, 735, 1833, 1599, 1195],
- [1, 1, 5, 9, 17, 13, 27, 169, 479, 297, 341, 2163, 1077],
- [1, 1, 5, 15, 21, 57, 99, 65, 265, 1011, 237, 75, 1309],
- [1, 3, 5, 15, 19, 17, 79, 193, 377, 991, 1997, 3475, 2953],
- [1, 1, 5, 15, 17, 3, 27, 77, 145, 879, 1799, 3957, 7343],
- [1, 3, 5, 11, 3, 61, 3, 201, 411, 855, 409, 1641, 4887],
- [1, 3, 3, 3, 15, 15, 95, 173, 173, 591, 431, 3911, 3229],
- [1, 1, 3, 5, 5, 49, 27, 1, 11, 415, 1917, 2959, 6759],
- [1, 3, 7, 15, 27, 15, 69, 221, 433, 917, 363, 2833, 6721],
- [1, 3, 3, 13, 27, 47, 19, 157, 483, 375, 335, 1279, 6775],
- [1, 1, 3, 7, 3, 9, 75, 1, 135, 453, 1039, 1099, 675],
- [1, 3, 5, 15, 31, 37, 47, 15, 385, 553, 1085, 403, 4039],
- [1, 1, 5, 15, 31, 45, 59, 113, 341, 189, 1657, 799, 2493],
- [1, 1, 3, 11, 7, 9, 41, 147, 89, 841, 1975, 2183, 7511],
- [1, 3, 7, 11, 21, 51, 85, 137, 209, 339, 1527, 2699, 3269],
- [1, 3, 1, 9, 3, 61, 77, 205, 391, 211, 1111, 1711, 4199],
- [1, 3, 5, 5, 13, 21, 99, 225, 33, 601, 659, 2037, 6625],
- [1, 1, 7, 15, 11, 33, 55, 73, 395, 57, 389, 727, 7943],
- [1, 1, 5, 9, 17, 11, 49, 45, 319, 765, 899, 289, 2013],
- [1, 1, 1, 7, 27, 21, 93, 49, 451, 745, 595, 1785, 4145],
- [1, 3, 5, 3, 25, 63, 93, 149, 119, 621, 1439, 1575, 667],
- [1, 1, 5, 13, 1, 63, 119, 113, 341, 209, 1861, 3633, 513],
- [1, 1, 3, 1, 9, 47, 51, 253, 227, 875, 1979, 2367, 2303],
- [1, 1, 5, 1, 7, 57, 125, 99, 375, 639, 1569, 1261, 4591],
- [1, 3, 5, 5, 29, 61, 63, 17, 61, 7, 1087, 3953, 7941],
- [1, 3, 7, 1, 27, 49, 13, 119, 331, 595, 1009, 1735, 2741],
- [1, 3, 5, 3, 21, 9, 15, 105, 493, 971, 165, 171, 987],
- [1, 1, 3, 1, 23, 59, 45, 117, 411, 263, 1895, 1959, 8061],
- [1, 3, 5, 7, 13, 19, 61, 129, 293, 1009, 1481, 2867, 3161],
- [1, 3, 5, 1, 25, 29, 19, 243, 47, 201, 1583, 859, 5951],
- [1, 1, 5, 1, 29, 21, 105, 75, 203, 23, 29, 2951, 1431],
- [1, 3, 1, 5, 15, 23, 115, 203, 375, 77, 1193, 3211, 831],
- [1, 1, 5, 1, 17, 55, 17, 53, 167, 621, 1673, 15, 5559],
- [1, 1, 5, 11, 29, 23, 83, 29, 395, 33, 1075, 1279, 7405],
- [1, 3, 5, 11, 9, 43, 7, 247, 155, 535, 301, 1323, 1357],
- [1, 3, 5, 9, 15, 41, 7, 35, 5, 963, 1081, 599, 4319],
- [1, 3, 1, 9, 3, 57, 11, 247, 237, 661, 1377, 1651, 4235],
- [1, 1, 3, 5, 21, 9, 61, 171, 361, 523, 1747, 3951, 5421],
- [1, 3, 5, 13, 15, 39, 37, 31, 489, 263, 1497, 1011, 2559],
- [1, 3, 3, 7, 17, 27, 63, 199, 127, 917, 1103, 315, 4415],
- [1, 1, 1, 7, 17, 41, 89, 213, 21, 103, 1789, 3513, 2439],
- [1, 1, 7, 7, 31, 35, 95, 29, 345, 623, 887, 3351, 823],
- [1, 1, 5, 1, 9, 61, 119, 251, 101, 231, 739, 1725, 1725],
- [1, 3, 5, 1, 9, 29, 113, 7, 371, 47, 1577, 3793, 6219],
- [1, 1, 7, 9, 23, 57, 67, 251, 233, 301, 313, 2399, 4903],
- [1, 3, 1, 9, 19, 63, 123, 187, 431, 549, 1367, 287, 6699],
- [1, 3, 5, 11, 25, 21, 91, 91, 109, 337, 1299, 4017, 5451],
- [1, 3, 3, 11, 3, 31, 33, 11, 119, 675, 1801, 3571, 349],
- [1, 3, 3, 15, 1, 59, 37, 149, 277, 189, 1131, 1007, 7703],
- [1, 3, 1, 7, 11, 35, 99, 13, 125, 357, 1837, 541, 2927],
- [1, 3, 5, 5, 27, 49, 43, 205, 263, 1005, 73, 3115, 7809],
- [1, 3, 3, 5, 29, 3, 11, 37, 73, 789, 1865, 429, 6179],
- [1, 3, 7, 3, 1, 49, 33, 249, 135, 189, 1065, 1585, 1417],
- [1, 1, 1, 11, 31, 47, 65, 137, 123, 319, 843, 1285, 5987],
- [1, 3, 7, 1, 29, 49, 81, 139, 83, 721, 635, 755, 3017],
- [1, 3, 5, 3, 25, 33, 79, 9, 123, 1005, 55, 1211, 4983],
- [1, 1, 1, 7, 29, 21, 81, 7, 405, 525, 1655, 3047, 3479],
- [1, 3, 1, 13, 1, 19, 107, 113, 69, 675, 913, 915, 4525],
- [1, 1, 3, 7, 23, 21, 63, 183, 75, 539, 1037, 3611, 4643],
- [1, 1, 1, 7, 29, 35, 63, 205, 287, 191, 223, 2697, 4911],
- [1, 3, 1, 7, 25, 11, 55, 187, 401, 813, 1871, 2129, 227],
- [1, 3, 7, 3, 13, 17, 89, 39, 23, 917, 1161, 3669, 5475],
- [1, 3, 1, 15, 3, 37, 91, 3, 283, 51, 461, 81, 2287],
- [1, 1, 5, 15, 31, 23, 25, 79, 393, 167, 479, 3939, 5581],
- [1, 3, 5, 11, 25, 59, 93, 155, 41, 415, 511, 2437, 6817],
- [1, 3, 3, 9, 5, 13, 101, 227, 379, 579, 1721, 915, 1937],
- [1, 3, 7, 3, 5, 37, 27, 89, 431, 755, 1107, 779, 1421],
- [1, 3, 3, 9, 11, 35, 55, 185, 11, 605, 389, 3567, 4415],
- [1, 3, 7, 3, 3, 55, 75, 51, 475, 721, 151, 3701, 7977],
- [1, 1, 5, 15, 21, 57, 121, 127, 505, 837, 35, 2479, 1789],
- [1, 3, 3, 13, 9, 1, 79, 63, 19, 529, 375, 3807, 3907],
- [1, 3, 1, 5, 23, 29, 43, 83, 365, 31, 1099, 1893, 6815],
- [1, 3, 1, 3, 7, 45, 125, 41, 265, 327, 937, 3927, 6789],
- [1, 1, 3, 3, 11, 11, 73, 133, 271, 799, 1185, 2619, 6003],
- [1, 1, 1, 3, 23, 1, 27, 183, 499, 961, 1701, 2543, 5609],
- [1, 1, 3, 5, 11, 15, 109, 181, 489, 279, 769, 3633, 4507],
- [1, 3, 5, 9, 1, 9, 35, 127, 443, 409, 639, 2007, 337],
- [1, 3, 5, 15, 1, 33, 21, 19, 165, 847, 1633, 3857, 7427],
- [1, 1, 7, 9, 3, 19, 71, 255, 91, 649, 1609, 3837, 7943],
- [1, 3, 5, 9, 23, 53, 113, 219, 83, 241, 379, 487, 3075],
- [1, 3, 3, 1, 25, 43, 89, 59, 291, 285, 1613, 1769, 6427],
- [1, 1, 7, 5, 23, 39, 59, 251, 319, 545, 2031, 3759, 1019],
- [1, 3, 7, 9, 1, 23, 95, 3, 199, 407, 685, 3105, 7121],
- [1, 1, 7, 9, 23, 7, 41, 187, 107, 161, 289, 2727, 4763],
- [1, 3, 3, 15, 3, 13, 45, 57, 245, 591, 975, 3155, 81],
- [1, 1, 7, 5, 27, 13, 113, 217, 389, 73, 671, 2479, 3587],
- [1, 3, 3, 15, 9, 1, 119, 115, 143, 313, 1599, 1341, 2929],
- [1, 1, 7, 7, 27, 19, 113, 217, 137, 811, 1447, 1657, 1795],
- [1, 3, 1, 9, 3, 41, 39, 229, 89, 17, 871, 2767, 8067],
- [1, 3, 3, 1, 23, 55, 59, 181, 125, 663, 647, 2541, 2415],
- [1, 3, 1, 9, 25, 1, 73, 185, 281, 269, 99, 577, 1265],
- [1, 3, 7, 9, 19, 13, 15, 149, 381, 261, 139, 2105, 4025],
- [1, 3, 7, 5, 29, 15, 13, 83, 215, 37, 1427, 799, 5599],
- [1, 3, 1, 11, 29, 59, 59, 115, 131, 783, 959, 17, 4771],
- [1, 1, 7, 5, 13, 55, 67, 11, 299, 127, 89, 2871, 3025],
- [1, 1, 3, 15, 27, 15, 121, 123, 249, 917, 117, 3637, 2313],
- [1, 3, 7, 15, 5, 3, 27, 19, 375, 231, 841, 953, 6129],
- [1, 1, 3, 11, 9, 57, 7, 109, 455, 577, 891, 65, 7611],
- [1, 3, 7, 7, 29, 37, 105, 165, 43, 975, 1959, 69, 6881],
- [1, 1, 3, 7, 29, 31, 15, 103, 73, 793, 223, 2897, 5253],
- [1, 1, 7, 7, 13, 17, 59, 123, 281, 921, 1697, 3841, 4413],
- [1, 1, 3, 1, 17, 1, 59, 219, 217, 343, 1145, 3559, 7869],
- [1, 1, 5, 1, 3, 3, 35, 129, 297, 751, 499, 4067, 105],
- [1, 1, 1, 11, 23, 21, 91, 155, 229, 139, 1435, 2335, 3173],
- [1, 3, 1, 11, 19, 29, 89, 207, 431, 221, 1809, 3409, 1629],
- [1, 1, 7, 13, 7, 25, 23, 177, 357, 79, 1413, 1087, 2537],
- [1, 1, 3, 15, 13, 55, 125, 9, 81, 817, 1445, 425, 1023],
- [1, 1, 1, 3, 3, 9, 97, 49, 357, 393, 1675, 2813, 4409],
- [1, 3, 5, 13, 19, 37, 53, 181, 171, 545, 171, 1705, 7209],
- [1, 1, 5, 5, 23, 33, 41, 231, 451, 11, 1073, 1701, 4413],
- [1, 3, 7, 1, 5, 53, 91, 33, 481, 781, 1349, 1237, 7107],
- [1, 1, 1, 7, 29, 41, 111, 233, 13, 71, 1545, 821, 7469],
- [1, 1, 5, 1, 29, 51, 29, 67, 387, 1, 2039, 1375, 33],
- [1, 3, 5, 11, 13, 19, 31, 155, 491, 699, 1027, 3673, 1955],
- [1, 3, 5, 3, 13, 57, 3, 41, 489, 767, 1563, 2693, 2881],
- [1, 3, 7, 13, 5, 13, 103, 9, 439, 917, 859, 3925, 5167],
- [1, 1, 1, 15, 19, 63, 61, 95, 385, 9, 215, 1541, 6451],
- [1, 3, 5, 3, 5, 43, 71, 123, 487, 107, 1673, 1871, 4211],
- [1, 1, 5, 5, 17, 19, 35, 65, 177, 341, 1919, 2285, 179],
- [1, 3, 1, 3, 9, 7, 7, 117, 393, 587, 1633, 847, 5573],
- [1, 1, 5, 5, 11, 13, 119, 249, 33, 903, 779, 4035, 7879],
- [1, 1, 5, 7, 11, 37, 29, 85, 71, 965, 411, 1101, 3387],
- [1, 3, 3, 3, 29, 33, 45, 169, 375, 599, 1845, 2029, 7759],
- [1, 1, 1, 9, 27, 19, 49, 129, 443, 507, 1477, 855, 5455],
- [1, 3, 3, 9, 23, 15, 111, 241, 129, 843, 1489, 2733, 7157],
- [1, 3, 1, 5, 19, 63, 41, 173, 407, 739, 447, 2503, 1891],
- [1, 1, 7, 1, 17, 51, 109, 251, 395, 579, 1545, 121, 5683],
- [1, 3, 3, 7, 25, 11, 59, 225, 127, 397, 351, 2855, 5689],
- [1, 1, 1, 11, 13, 49, 125, 147, 65, 397, 1989, 1069, 6535],
- [1, 3, 3, 9, 1, 23, 13, 165, 333, 325, 495, 3463, 3109],
- [1, 3, 5, 3, 13, 57, 27, 69, 309, 775, 183, 3505, 6555],
- [1, 1, 7, 5, 3, 47, 19, 81, 119, 565, 1639, 1539, 6873],
- [1, 3, 7, 11, 11, 51, 79, 239, 197, 925, 1385, 607, 1249],
- [1, 3, 7, 13, 1, 15, 9, 95, 435, 75, 1805, 1349, 4251],
- [1, 1, 1, 13, 17, 53, 75, 23, 497, 55, 1097, 575, 6437],
- [1, 3, 1, 13, 29, 41, 83, 83, 373, 979, 1249, 2301, 49],
- [1, 3, 7, 9, 1, 1, 81, 227, 71, 931, 1431, 2321, 2745],
- [1, 3, 3, 15, 13, 15, 33, 249, 379, 93, 1571, 1101, 1201],
- [1, 3, 1, 5, 17, 37, 91, 143, 509, 957, 591, 333, 7327],
- [1, 3, 5, 7, 9, 61, 109, 171, 387, 857, 697, 291, 4179],
- [1, 3, 5, 1, 17, 11, 33, 193, 159, 753, 1509, 2171, 6783],
- [1, 1, 5, 15, 21, 35, 29, 9, 265, 965, 709, 4085, 623],
- [1, 3, 1, 11, 1, 29, 107, 21, 477, 795, 31, 2173, 2779],
- [1, 1, 1, 9, 11, 33, 111, 57, 463, 67, 1563, 2541, 5963],
- [1, 1, 1, 15, 1, 23, 101, 73, 449, 5, 165, 1195, 2585],
- [1, 3, 1, 15, 1, 55, 107, 97, 47, 87, 513, 925, 6927],
- [1, 3, 1, 13, 25, 11, 109, 57, 353, 909, 1425, 4039, 5333],
- [1, 3, 5, 13, 5, 59, 65, 29, 249, 97, 1299, 1379, 4033],
- [1, 1, 3, 13, 7, 19, 59, 239, 335, 995, 1081, 699, 285],
- [1, 1, 5, 1, 29, 61, 43, 151, 505, 271, 145, 1979, 7467],
- [1, 3, 1, 11, 29, 61, 37, 159, 89, 875, 1841, 275, 4443],
- [1, 3, 3, 9, 19, 45, 1, 191, 141, 671, 1211, 953, 4917],
- [1, 3, 5, 15, 19, 13, 9, 47, 55, 613, 941, 1755, 3],
- [1, 3, 3, 9, 1, 49, 15, 51, 235, 33, 609, 1643, 4319],
- [1, 3, 1, 5, 29, 13, 109, 1, 187, 351, 845, 325, 5517],
- [1, 3, 1, 15, 13, 63, 37, 223, 87, 69, 1169, 101, 3449],
- [1, 3, 1, 5, 3, 5, 111, 251, 363, 811, 1865, 2263, 813],
- [1, 1, 1, 7, 1, 61, 113, 251, 93, 669, 1593, 3329, 5499],
- [1, 3, 3, 3, 31, 5, 119, 151, 363, 729, 347, 3673, 2515],
- [1, 3, 7, 11, 15, 31, 79, 41, 101, 401, 293, 3413, 5771],
- [1, 3, 3, 3, 13, 17, 73, 119, 67, 647, 1277, 1977, 3357],
- [1, 3, 7, 15, 3, 61, 65, 127, 215, 241, 157, 2727, 2073],
- [1, 1, 5, 7, 1, 63, 71, 131, 321, 435, 211, 2313, 4395],
- [1, 3, 7, 13, 11, 13, 93, 33, 331, 447, 93, 1419, 4925],
- [1, 1, 1, 11, 19, 27, 17, 209, 305, 721, 1679, 887, 2643],
- [1, 3, 5, 7, 5, 57, 101, 123, 261, 271, 1799, 609, 7215],
- [1, 3, 5, 3, 29, 1, 87, 53, 411, 745, 527, 2475, 5817],
- [1, 3, 7, 7, 13, 21, 97, 241, 491, 53, 41, 591, 1199],
- [1, 1, 5, 13, 29, 5, 43, 25, 479, 775, 473, 2613, 1597],
- [1, 3, 3, 5, 23, 11, 23, 31, 65, 99, 563, 2081, 1619],
- [1, 1, 3, 13, 3, 39, 75, 183, 307, 343, 187, 3805, 7535],
- [1, 3, 7, 15, 1, 57, 109, 107, 469, 451, 1525, 3435, 4833],
- [1, 1, 5, 5, 31, 51, 41, 25, 415, 427, 575, 2409, 609],
- [1, 1, 3, 13, 13, 53, 49, 115, 131, 593, 1579, 111, 4797],
- [1, 1, 1, 9, 19, 39, 53, 39, 315, 339, 857, 3557, 8171],
- [1, 3, 1, 1, 17, 25, 31, 11, 487, 845, 703, 3607, 6847],
- [1, 3, 3, 15, 5, 41, 97, 213, 83, 243, 1211, 903, 793],
- [1, 1, 1, 11, 5, 39, 105, 239, 455, 345, 647, 231, 6757],
- [1, 3, 3, 5, 1, 37, 109, 219, 19, 17, 709, 3059, 8165],
- [1, 1, 1, 5, 29, 23, 119, 109, 113, 573, 981, 473, 3371],
- [1, 1, 1, 1, 23, 31, 51, 185, 163, 421, 285, 2959, 2431],
- [1, 3, 3, 11, 3, 25, 9, 35, 503, 517, 697, 2925, 5235],
- [1, 3, 7, 3, 19, 33, 53, 133, 99, 971, 163, 3861, 4739],
- [1, 1, 1, 3, 25, 17, 113, 123, 499, 499, 981, 2043, 7703],
- [1, 3, 7, 7, 19, 57, 97, 185, 251, 435, 153, 3887, 7223],
- [1, 1, 1, 1, 27, 29, 73, 27, 239, 769, 1515, 351, 6525],
- [1, 1, 1, 9, 9, 27, 89, 55, 81, 75, 47, 2865, 5891],
- [1, 1, 5, 7, 27, 23, 79, 245, 167, 203, 1553, 369, 5605],
- [1, 1, 1, 15, 13, 47, 49, 61, 391, 793, 599, 1377, 4433],
- [1, 3, 7, 9, 15, 41, 61, 75, 255, 985, 225, 2639, 3533],
- [1, 1, 5, 9, 29, 29, 105, 205, 317, 343, 1147, 1261, 5267],
- [1, 3, 3, 3, 23, 19, 13, 213, 363, 955, 381, 3625, 5125],
- [1, 1, 7, 11, 13, 47, 99, 169, 359, 735, 135, 3279, 5037],
- [1, 1, 3, 15, 25, 41, 53, 163, 395, 523, 821, 2201, 225],
- [1, 3, 5, 7, 25, 25, 71, 63, 419, 659, 1965, 2949, 6717],
- [1, 1, 3, 1, 17, 5, 7, 55, 307, 703, 609, 3049, 1121],
- [1, 3, 1, 3, 19, 51, 87, 49, 251, 303, 1033, 449, 5741],
- [1, 1, 1, 1, 17, 43, 21, 83, 267, 421, 983, 1297, 2013],
- [1, 3, 5, 1, 15, 39, 101, 195, 171, 951, 503, 897, 4327],
- [1, 3, 5, 1, 27, 29, 5, 51, 461, 405, 1117, 1891, 4839],
- [1, 3, 1, 9, 3, 7, 71, 31, 183, 631, 327, 411, 569],
- [1, 3, 7, 1, 25, 31, 31, 41, 465, 825, 453, 2773, 5227],
- [1, 3, 7, 5, 17, 45, 123, 15, 165, 735, 2005, 749, 7677],
- [1, 3, 3, 15, 27, 51, 121, 203, 163, 433, 1257, 2753, 4315],
- [1, 1, 7, 15, 3, 49, 121, 41, 293, 841, 343, 1825, 2391],
- [1, 3, 3, 7, 27, 55, 73, 63, 477, 485, 1649, 853, 5551],
- [1, 3, 7, 5, 31, 17, 79, 127, 223, 49, 1199, 2775, 859],
- [1, 3, 1, 5, 23, 43, 115, 161, 403, 749, 599, 3547, 3627],
- [1, 3, 5, 7, 13, 49, 13, 5, 389, 107, 1877, 3923, 6377],
- [1, 1, 1, 9, 31, 45, 39, 143, 97, 669, 569, 3923, 3903],
- [1, 3, 5, 7, 11, 9, 101, 7, 335, 211, 695, 987, 4311],
- [1, 3, 3, 15, 15, 29, 19, 199, 357, 497, 1587, 3723, 6527],
- [1, 1, 7, 13, 7, 3, 37, 251, 297, 143, 1475, 2189, 7573],
- [1, 3, 3, 13, 21, 5, 51, 95, 19, 99, 187, 3877, 4905],
- [1, 3, 5, 11, 19, 47, 83, 75, 469, 57, 973, 3577, 7731],
- [1, 3, 7, 1, 27, 9, 97, 101, 501, 277, 233, 297, 1909],
- [1, 3, 7, 9, 19, 15, 55, 15, 249, 969, 511, 2763, 1555],
- [1, 3, 7, 11, 21, 19, 81, 43, 85, 107, 51, 1845, 3279],
- [1, 1, 3, 1, 29, 51, 91, 237, 213, 397, 1083, 3083, 1949],
- [1, 1, 3, 13, 7, 45, 127, 197, 311, 563, 665, 2951, 1887],
- [1, 1, 1, 1, 31, 57, 105, 117, 265, 551, 1321, 483, 6675],
- [1, 1, 1, 7, 13, 63, 89, 167, 379, 447, 531, 2169, 5509],
- [1, 3, 5, 15, 9, 9, 63, 155, 297, 381, 1875, 3985, 2033],
- [1, 3, 5, 15, 9, 21, 47, 21, 283, 187, 1939, 245, 5473],
- [1, 3, 3, 5, 7, 59, 49, 83, 393, 57, 859, 3655, 3539],
- [1, 1, 7, 5, 21, 3, 75, 205, 449, 405, 1507, 3441, 5033],
- [1, 3, 1, 1, 13, 9, 37, 255, 463, 731, 1979, 1023, 5935],
- [1, 3, 1, 11, 11, 13, 77, 49, 289, 769, 1203, 235, 6095],
- [1, 1, 1, 3, 9, 45, 15, 101, 159, 923, 1965, 835, 4761],
- [1, 1, 3, 9, 11, 23, 49, 213, 289, 955, 737, 3693, 1771],
- [1, 3, 5, 11, 29, 15, 107, 237, 499, 915, 921, 3585, 1271],
- [1, 3, 3, 9, 19, 31, 23, 135, 407, 737, 1565, 327, 1717],
- [1, 1, 1, 9, 11, 21, 23, 135, 129, 595, 1943, 1003, 4415],
- [1, 1, 1, 9, 19, 15, 35, 21, 137, 341, 819, 543, 5083],
- [1, 3, 3, 1, 21, 51, 19, 73, 221, 253, 223, 3059, 6277],
- [1, 3, 3, 9, 5, 35, 69, 93, 43, 823, 365, 2637, 3147],
- [1, 1, 7, 3, 29, 9, 17, 115, 89, 197, 167, 2923, 7695],
- [1, 3, 5, 5, 13, 11, 59, 7, 403, 321, 1705, 87, 2461],
- [1, 1, 1, 15, 7, 61, 63, 85, 271, 315, 413, 3617, 4783],
- [1, 1, 1, 1, 19, 23, 73, 223, 75, 181, 1577, 1031, 4539],
- [1, 3, 3, 1, 19, 53, 29, 237, 83, 885, 745, 1043, 5833],
- [1, 1, 7, 9, 27, 29, 125, 79, 445, 497, 1573, 903, 5583],
- [1, 3, 1, 7, 23, 51, 61, 89, 453, 159, 655, 2913, 651],
- [1, 3, 5, 3, 31, 45, 65, 5, 389, 571, 1633, 2177, 1419],
- [1, 3, 7, 3, 1, 31, 95, 57, 149, 981, 1003, 2641, 2605],
- [1, 3, 3, 1, 27, 29, 101, 239, 143, 899, 91, 3279, 5511],
- [1, 3, 7, 9, 21, 5, 81, 67, 423, 785, 1123, 389, 3913],
- [1, 1, 5, 9, 7, 35, 57, 65, 499, 947, 477, 2009, 5795],
- [1, 3, 5, 11, 3, 29, 69, 201, 317, 217, 1741, 525, 2333],
- [1, 1, 7, 9, 7, 53, 83, 155, 445, 217, 1663, 4085, 2329],
- [1, 1, 3, 9, 11, 35, 37, 71, 157, 135, 35, 3299, 4431],
- [1, 3, 5, 13, 23, 17, 11, 85, 137, 753, 715, 987, 3725],
- [1, 3, 3, 13, 13, 59, 37, 195, 453, 623, 37, 2409, 6069],
- [1, 3, 1, 3, 29, 55, 95, 89, 163, 565, 1513, 813, 2699],
- [1, 3, 5, 13, 11, 27, 1, 181, 87, 717, 815, 2683, 7055],
- [1, 1, 3, 11, 31, 51, 73, 119, 23, 903, 941, 373, 6879],
- [1, 3, 1, 13, 19, 59, 27, 135, 391, 581, 1379, 2695, 1017],
- [1, 1, 1, 5, 1, 27, 29, 147, 119, 955, 263, 3775, 3121],
- [1, 1, 7, 1, 5, 47, 57, 237, 427, 621, 1831, 2375, 2547],
- [1, 3, 5, 5, 5, 15, 7, 173, 323, 361, 1735, 1119, 4603],
- [1, 3, 1, 5, 11, 29, 65, 41, 173, 869, 1111, 2791, 2385],
- [1, 3, 7, 9, 5, 37, 83, 155, 89, 87, 1449, 223, 6915],
- [1, 3, 3, 9, 3, 7, 99, 67, 259, 943, 353, 325, 6103],
- [1, 3, 7, 3, 27, 49, 69, 113, 377, 907, 1941, 587, 5669],
- [1, 3, 5, 13, 5, 55, 19, 111, 511, 853, 1655, 1379, 7833],
- [1, 1, 1, 13, 7, 5, 103, 21, 249, 353, 1349, 2877, 2001],
- [1, 1, 7, 9, 11, 19, 43, 183, 31, 335, 877, 2867, 4287],
- [1, 3, 1, 15, 31, 45, 95, 23, 363, 197, 285, 3793, 6619],
- [1, 1, 7, 9, 1, 29, 25, 103, 229, 771, 1723, 655, 955],
- [1, 3, 7, 11, 27, 19, 19, 207, 353, 433, 125, 831, 2761],
- [1, 1, 1, 7, 31, 57, 103, 253, 329, 743, 1753, 3425, 5711],
- [1, 1, 1, 11, 31, 33, 41, 69, 493, 195, 985, 1663, 6291],
- [1, 3, 7, 9, 23, 53, 125, 219, 427, 91, 723, 1681, 3415],
- [1, 1, 1, 13, 5, 45, 97, 205, 57, 1023, 175, 2657, 3909],
- [1, 1, 5, 9, 21, 21, 71, 195, 205, 63, 439, 1865, 2841],
- [1, 1, 5, 1, 27, 9, 105, 43, 389, 301, 791, 3943, 5627],
- [1, 1, 1, 15, 9, 3, 83, 197, 91, 647, 1051, 2977, 4939],
- [1, 3, 1, 9, 25, 35, 83, 229, 83, 205, 1261, 1979, 7671],
- [1, 3, 7, 7, 3, 29, 61, 139, 13, 485, 717, 2271, 6059],
- [1, 1, 5, 7, 15, 43, 39, 177, 219, 927, 1555, 3247, 6275],
- [1, 1, 7, 1, 19, 31, 9, 129, 439, 1003, 1757, 1267, 6517],
- [1, 3, 1, 7, 1, 39, 45, 69, 45, 987, 1777, 1747, 1931],
- [1, 1, 5, 9, 19, 3, 117, 97, 35, 359, 577, 811, 4583],
- [1, 1, 3, 9, 9, 45, 63, 201, 371, 577, 1583, 159, 7301],
- [1, 1, 5, 15, 5, 1, 31, 163, 441, 147, 1957, 429, 1267],
- [1, 3, 3, 1, 25, 41, 5, 189, 17, 141, 873, 2001, 7509],
- [1, 1, 3, 11, 21, 29, 117, 11, 267, 1017, 331, 1195, 1435],
- [1, 3, 7, 1, 15, 5, 67, 99, 501, 701, 1163, 3065, 2169],
- [1, 1, 1, 13, 25, 59, 125, 91, 53, 273, 313, 553, 6939],
- [1, 1, 5, 13, 29, 41, 41, 253, 25, 89, 1, 1499, 3515],
- [1, 3, 1, 15, 15, 33, 117, 239, 333, 589, 1963, 3529, 2985],
- [1, 3, 1, 9, 21, 35, 43, 91, 17, 487, 963, 1081, 2787],
- [1, 1, 5, 13, 11, 27, 77, 145, 201, 859, 1905, 2877, 2123],
- [1, 3, 5, 7, 19, 19, 97, 19, 475, 343, 821, 3077, 1969],
- [1, 1, 3, 15, 15, 13, 15, 179, 257, 91, 1677, 845, 3307],
- [1, 1, 3, 3, 3, 25, 29, 231, 417, 847, 185, 1793, 353],
- [1, 3, 7, 9, 7, 27, 5, 121, 345, 341, 709, 2409, 4359],
- [1, 3, 5, 3, 13, 43, 59, 7, 381, 173, 545, 3995, 7059],
- [1, 3, 5, 1, 11, 33, 25, 225, 377, 287, 1723, 2559, 5273],
- [1, 3, 1, 13, 25, 35, 63, 237, 55, 1003, 215, 4081, 5873],
- [1, 3, 1, 7, 17, 17, 87, 125, 403, 289, 1885, 1195, 6657],
- [1, 1, 1, 5, 1, 17, 39, 191, 77, 639, 1249, 2955, 6765],
- [1, 3, 3, 9, 5, 23, 39, 119, 389, 983, 583, 1117, 6229],
- [1, 1, 1, 3, 31, 7, 77, 59, 347, 685, 1803, 1409, 3179],
- [1, 1, 5, 1, 13, 35, 85, 175, 363, 697, 839, 785, 1583],
- [1, 1, 7, 7, 29, 15, 37, 237, 211, 35, 885, 287, 6237],
- [1, 3, 7, 1, 23, 61, 81, 131, 413, 701, 485, 1521, 2155],
- [1, 1, 1, 1, 9, 61, 73, 79, 419, 645, 413, 1607, 371],
- [1, 1, 7, 13, 5, 53, 89, 43, 5, 911, 1767, 85, 273],
- [1, 1, 5, 3, 29, 5, 29, 45, 167, 501, 425, 3055, 7491],
- [1, 3, 7, 3, 7, 15, 125, 205, 219, 705, 129, 3123, 3309],
- [1, 1, 3, 11, 17, 23, 109, 199, 201, 873, 1035, 2533, 6805],
- [1, 1, 7, 1, 27, 11, 21, 251, 285, 763, 329, 2329, 3015],
- [1, 3, 3, 7, 7, 13, 23, 153, 425, 745, 1263, 3477, 6831],
- [1, 1, 1, 13, 17, 43, 119, 207, 11, 657, 1881, 799, 7819],
- [1, 3, 3, 15, 31, 55, 105, 37, 77, 559, 1779, 3683, 713],
- [1, 3, 7, 15, 9, 47, 43, 179, 269, 699, 1565, 3715, 4747],
- [1, 3, 3, 5, 31, 25, 93, 113, 489, 315, 359, 337, 3935],
- [1, 3, 1, 7, 9, 43, 97, 255, 281, 347, 367, 3139, 4109],
- [1, 3, 5, 13, 9, 15, 15, 107, 403, 429, 453, 3311, 1311],
- [1, 1, 5, 13, 7, 57, 125, 217, 79, 197, 707, 431, 709],
- [1, 1, 3, 15, 21, 45, 29, 61, 425, 165, 1419, 3511, 3089],
- [1, 1, 5, 11, 3, 1, 51, 7, 125, 955, 831, 2299, 7059],
- [1, 3, 1, 13, 3, 49, 69, 181, 81, 859, 1889, 365, 4247],
- [1, 3, 3, 1, 3, 63, 37, 247, 331, 167, 887, 2941, 2989],
- [1, 3, 5, 13, 9, 57, 45, 31, 437, 303, 1871, 3067, 1509],
- [1, 3, 5, 13, 11, 15, 31, 13, 271, 833, 1869, 1331, 4919],
- [1, 1, 5, 3, 21, 31, 75, 113, 397, 531, 747, 1081, 1841],
- [1, 3, 1, 9, 11, 31, 109, 145, 299, 473, 223, 1097, 3045],
- [1, 3, 1, 15, 31, 7, 119, 107, 475, 635, 1547, 2853, 3821],
- [1, 3, 7, 15, 9, 53, 53, 233, 271, 641, 1799, 2299, 6929],
- [1, 3, 7, 11, 25, 27, 5, 233, 249, 195, 433, 495, 4655],
- [1, 1, 1, 15, 5, 15, 101, 43, 413, 589, 1441, 1745, 1333],
- [1, 1, 5, 9, 1, 47, 125, 79, 233, 821, 553, 749, 6429],
- [1, 3, 5, 15, 31, 23, 121, 23, 261, 205, 2021, 3819, 6649],
- [1, 3, 1, 1, 13, 7, 35, 169, 495, 3, 1303, 619, 2131],
- [1, 3, 3, 13, 29, 29, 29, 137, 171, 635, 1505, 1059, 5265],
- [1, 1, 5, 15, 9, 53, 7, 129, 69, 371, 1735, 3559, 1051],
- [1, 3, 1, 1, 29, 47, 63, 183, 27, 891, 1619, 183, 261],
- [1, 1, 5, 1, 1, 9, 17, 53, 409, 249, 1065, 3743, 8057],
- [1, 1, 3, 5, 11, 53, 63, 91, 21, 123, 1161, 723, 3379],
- [1, 3, 5, 11, 19, 3, 13, 55, 421, 77, 2047, 949, 2179],
- [1, 3, 3, 5, 7, 25, 69, 103, 367, 623, 347, 3501, 1993],
- [1, 1, 3, 1, 27, 55, 15, 223, 81, 993, 867, 733, 5655],
- [1, 3, 7, 11, 13, 45, 105, 87, 483, 401, 881, 2599, 3063],
- [1, 3, 5, 11, 31, 63, 51, 177, 255, 525, 1447, 3983, 6381],
- [1, 1, 7, 5, 7, 21, 127, 157, 15, 427, 329, 3961, 3587],
- [1, 1, 3, 3, 31, 17, 105, 79, 219, 71, 781, 911, 7417],
- [1, 1, 7, 9, 7, 23, 9, 213, 365, 655, 1065, 1899, 1579],
- [1, 1, 3, 1, 25, 31, 57, 139, 497, 951, 219, 985, 1541],
- [1, 1, 1, 3, 23, 27, 95, 183, 181, 357, 589, 2493, 2107],
- [1, 3, 3, 5, 21, 27, 59, 231, 75, 851, 645, 1795, 5085],
- [1, 1, 7, 13, 29, 43, 109, 205, 431, 899, 1257, 653, 2873],
- [1, 1, 7, 9, 11, 63, 35, 143, 99, 535, 1833, 157, 6141],
- [1, 3, 3, 7, 11, 55, 49, 129, 325, 493, 749, 433, 955],
- [1, 3, 3, 7, 13, 63, 23, 243, 407, 323, 1841, 2361, 3537],
- [1, 1, 1, 1, 11, 45, 33, 205, 229, 1003, 1733, 3093, 2157],
- [1, 1, 1, 9, 27, 51, 107, 93, 281, 343, 1179, 3119, 841],
- [1, 1, 3, 9, 1, 15, 55, 59, 63, 515, 1191, 3679, 1999],
- [1, 3, 3, 15, 23, 27, 33, 15, 83, 859, 1025, 2367, 1465],
- [1, 1, 3, 7, 31, 5, 57, 89, 493, 1017, 1639, 1701, 5171],
- [1, 1, 3, 5, 21, 37, 79, 9, 5, 5, 1955, 1445, 5651],
- [1, 3, 3, 5, 23, 43, 73, 11, 113, 423, 1423, 1321, 1535],
- [1, 3, 5, 15, 21, 11, 69, 47, 15, 315, 1685, 2397, 7235],
- [1, 1, 5, 13, 19, 27, 59, 133, 271, 1011, 1711, 1241, 4349],
- [1, 3, 3, 9, 31, 5, 107, 227, 37, 703, 493, 3305, 1263],
- [1, 3, 3, 7, 5, 27, 55, 75, 87, 41, 549, 3985, 1453],
- [1, 3, 3, 13, 31, 59, 11, 9, 451, 777, 783, 2349, 1005],
- [1, 3, 1, 3, 25, 21, 63, 91, 299, 163, 1653, 4067, 6893],
- [1, 3, 3, 13, 25, 7, 95, 19, 83, 95, 397, 3805, 2919],
- [1, 3, 5, 11, 19, 39, 103, 171, 451, 831, 895, 3073, 1947],
- [1, 3, 7, 13, 17, 27, 23, 163, 311, 79, 233, 2837, 1635],
- [1, 3, 7, 7, 11, 63, 125, 79, 441, 975, 759, 1567, 3963],
- [1, 1, 1, 9, 25, 35, 91, 7, 47, 235, 1505, 3783, 397],
- [1, 1, 5, 13, 7, 47, 31, 103, 455, 633, 677, 451, 969],
- [1, 3, 7, 13, 13, 55, 91, 5, 47, 723, 1449, 2441, 4569],
- [1, 3, 3, 13, 1, 17, 51, 119, 253, 297, 1573, 1181, 655],
- [1, 1, 7, 15, 29, 17, 65, 155, 13, 589, 1297, 487, 6737],
- [1, 1, 1, 9, 17, 17, 61, 75, 109, 317, 1821, 543, 2995],
- [1, 3, 1, 5, 23, 3, 75, 11, 369, 679, 1691, 1201, 7235],
- [1, 1, 3, 5, 15, 19, 69, 71, 347, 981, 791, 3735, 7713],
- [1, 3, 5, 3, 7, 21, 107, 95, 11, 195, 289, 2517, 973],
- [1, 3, 7, 3, 29, 13, 65, 17, 409, 399, 1187, 733, 4821],
- [1, 3, 5, 3, 17, 49, 101, 13, 275, 1003, 867, 1535, 2377],
- [1, 3, 3, 1, 13, 61, 59, 243, 63, 121, 1535, 2175, 1673],
- [1, 3, 3, 3, 3, 39, 35, 207, 441, 501, 575, 3613, 1],
- [1, 1, 3, 15, 17, 15, 15, 187, 15, 155, 183, 3019, 6541],
-]
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef int bit_length(const int n):
- cdef int bits = 0
- cdef int nloc = n
- while nloc != 0:
- nloc >>= 1
- bits += 1
- return bits
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef int low_0_bit(const int x) nogil:
- """Get the position of the right-most 0 bit for an integer.
-
- Examples:
- >>> low_0_bit(0)
- 1
- >>> low_0_bit(1)
- 2
- >>> low_0_bit(2)
- 1
- >>> low_0_bit(5)
- 2
- >>> low_0_bit(7)
- 4
-
- Args:
- x (int): an integer.
-
- Returns:
- int: position of the right-most 0 bit.
-
- """
- cdef int z = x
- cdef int i = 0
- while True:
- i += 1
- if z % 2 == 0:
- break
- z = z // 2
- return i
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef int ibits(const int x, const int pos, const int length) nogil:
- """Extract a sequence of bits from the bit representation of an integer.
-
- Extract the `length` bits from position `pos` (inclusive) to `pos + length`
- (exclusive), counting from the least significant bit.
-
- Examples:
- >>> ibits(1, 0, 1)
- 1
- >>> ibits(1, 1, 1)
- 0
- >>> ibits(2, 0, 1)
- 0
- >>> ibits(2, 0, 2)
- 2
- >>> ibits(25, 1, 5)
- 12
-
- Args:
- x (int): integer to convert to bit representation.
- pos (int): starting position of sequence in bit representation of integer.
- length (int): length of sequence (number of bits).
-
- Returns:
- int: integer value corresponding to bit sequence.
-
- """
- return (x >> pos) & ((1 << length) - 1)
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void initialize_v(cnp.int_t[:, :] v, const int dimen):
- cdef int d, i, j, k, m, p, newv, pow2
-
- if dimen == 0:
- return
-
- # first row of v is all 1s
- for i in range(MAXBIT):
- v[0, i] = 1
-
- # Remaining rows of v (rows 2 through dimen, indexed by [1:dimen])
- for d in range(1, dimen):
- p = poly[d]
- m = bit_length(p) - 1
-
- # First m elements of row d come from vinit
- for j in range(m):
- v[d, j] = vinit[d][j]
-
- # Fill in remaining elements of v per Bratley and Fox, Section 2
- for j in range(m, MAXBIT):
- newv = v[d, j - m]
- pow2 = 1
- for k in range(m):
- pow2 = pow2 << 1
- if (p >> (m - 1 - k)) & 1:
- newv = newv ^ (pow2 * v[d, j - k - 1])
- v[d, j] = newv
-
- # Multiply each column of v by power of 2:
- # v * [2^(maxbit-1), 2^(maxbit-2),..., 2, 1]
- pow2 = 1
- for d in range(MAXBIT):
- for i in range(dimen):
- v[i, MAXBIT - 1 - d] *= pow2
- pow2 = pow2 << 1
-
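
For reference, the loop in initialize_v implements the Bratley-Fox recurrence: given a primitive polynomial p of degree m and m seed direction numbers, each further number is the XOR of the number m steps back with polynomial-selected, power-of-two-shifted earlier numbers. A standalone pure-Python sketch (function name and sample inputs are illustrative, not part of this module):

def bratley_fox_directions(p, v_init, nbits):
    # p: primitive polynomial encoded as an int (leading bit gives degree m)
    # v_init: the first m direction numbers (e.g. a row of vinit above)
    m = p.bit_length() - 1
    v = list(v_init)
    for j in range(m, nbits):
        newv = v[j - m]
        pow2 = 1
        for k in range(m):
            pow2 <<= 1
            if (p >> (m - 1 - k)) & 1:  # k-th polynomial coefficient
                newv ^= pow2 * v[j - k - 1]
        v.append(newv)
    return v

# e.g. bratley_fox_directions(7, [1, 3], 6) extends two seed values for
# the degree-2 polynomial x^2 + x + 1 (p = 0b111)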
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void _draw(
- const int n,
- const int num_gen,
- const int dimen,
- cnp.int_t[:, :] sv,
- cnp.int_t[:] quasi,
- cnp.float_t[:, :] result,
-) nogil:
- cdef int i, j, l, qtmp
- cdef int num_gen_loc = num_gen
- for i in range(n):
- l = low_0_bit(num_gen_loc)
- for j in range(dimen):
- qtmp = quasi[j] ^ sv[j, l - 1]
- quasi[j] = qtmp
- result[i, j] = qtmp * RECIPD
- num_gen_loc += 1
-
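
_draw above is the Antonov-Saleev (Gray-code) variant of Sobol generation: each point differs from its predecessor by a single XOR with the direction number indexed by the lowest zero bit of the running counter. A pure-Python sketch of the same loop, assuming RECIPD = 2**-MAXBIT as defined earlier in the module:

def draw_points(n, counter, v_scaled, quasi, maxbit):
    # quasi: per-dimension integer state, updated in place
    # v_scaled: the column-scaled direction numbers (rows of sv)
    recipd = 2.0 ** (-maxbit)
    points = []
    for _ in range(n):
        l = low_0_bit(counter)  # 1-based position of the lowest zero bit
        for j in range(len(quasi)):
            quasi[j] ^= v_scaled[j][l - 1]
        points.append([q * recipd for q in quasi])
        counter += 1
    return points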
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void _fast_forward(
- const int n,
- const int num_gen,
- const int dimen,
- cnp.int_t[:, :] sv,
- cnp.int_t[:] quasi,
-) nogil:
- cdef int i, j, l
- cdef int num_gen_loc = num_gen
- for i in range(n):
- l = low_0_bit(num_gen_loc)
- for j in range(dimen):
- quasi[j] = quasi[j] ^ sv[j, l - 1]
- num_gen_loc += 1
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef int cdot_pow2(cnp.int_t[:] a) nogil:
- cdef int i
- cdef int size = a.shape[0]
- cdef int z = 0
- cdef int pow2 = 1
- for i in range(size):
- z += a[size - 1 - i] * pow2
- pow2 *= 2
- return z
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void _cscramble(
- const int dimen,
- cnp.int_t[:, :, :] ltm,
- cnp.int_t[:, :] sv,
-) nogil:
- cdef int d, i, j, k, l, lsmdp, p, t1, t2, vdj
-
- # Set diagonals of maxbit x maxbit arrays to 1
- for d in range(dimen):
- for i in range(MAXBIT):
- ltm[d, i, i] = 1
-
- for d in range(dimen):
- for j in range(MAXBIT):
- vdj = sv[d, j]
- l = 1
- t2 = 0
- for p in range(MAXBIT - 1, -1, -1):
- lsmdp = cdot_pow2(ltm[d, p, :])
- t1 = 0
- for k in range(MAXBIT):
- t1 += ibits(lsmdp, k, 1) * ibits(vdj, k, 1)
- t1 = t1 % 2
- t2 = t2 + t1 * l
- l = 2 * l
- sv[d, j] = t2
-
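
_cscramble performs a linear matrix scramble: the bit vector of every direction number is multiplied, over GF(2), by a per-dimension random lower-triangular matrix (the random digital shift is added separately in SobolEngine._scramble below). A sketch of the per-entry operation, with each matrix row pre-packed into an int the way cdot_pow2 does:

def scramble_entry(vdj, ltm_rows, maxbit):
    # ltm_rows[p]: row p of the lower-triangular matrix, packed into an int
    t2, l = 0, 1
    for p in range(maxbit - 1, -1, -1):
        t1 = bin(ltm_rows[p] & vdj).count("1") % 2  # GF(2) dot product
        t2 += t1 * l  # the bit for row p lands at position maxbit - 1 - p
        l *= 2
    return t2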
-
-class SobolEngine:
- """Engine for generating (scrambled) Sobol sequences.
-
- Sobol sequences are quasi-random, low-discrepancy sequences. The direction
- numbers for up to 1111 dimensions are taken from:
- http://web.maths.unsw.edu.au/~fkuo/sobol/joe-kuo-old.1111.
-
- References:
- Art B. Owen. Scrambling Sobol and Niederreiter-Xing points. Journal of
- Complexity, 14(4):466-489, December 1998.
-
- I. M. Sobol. The distribution of points in a cube and the accurate
- evaluation of integrals. Zh. Vychisl. Mat. i Mat. Fiz., 7:784-802, 1967.
-
- Args:
- dimen (int): dimensionality of the sequence. Max dimensionality is 1111.
- scramble (bool, optional): if True, use Owen scrambling.
- seed (int, optional): seed for reproducibility of scrambling.
-
- """
-
- MAXDIM = 1111
-
- def __init__(self, dimen, scramble=False, seed=None):
- # type: (int, bool, Optional[int]) -> None
- if dimen > self.MAXDIM:
- raise ValueError("Maximum supported dimensionality is 1111.")
- self.dimen = dimen
-
- # v is dimen x MAXBIT matrix;
- self._sv = np.zeros((dimen, MAXBIT), dtype=np.int)
- initialize_v(self._sv, dimen)
-
- if not scramble:
- self._shift = np.zeros(dimen, dtype=np.int)
- else:
- self._scramble(seed=seed)
-
- self._quasi = self._shift.copy()
- self.num_generated = 0
-
- def _scramble(self, seed=None):
- # type: (Optional[int]) -> None
- rs = np.random.RandomState(seed=seed)
- # Generate shift vector
- self._shift = np.dot(
- rs.randint(2, size=(self.dimen, MAXBIT)),
- np.array([2**j for j in range(MAXBIT)]),
- )
- self._quasi = self._shift.copy()
- # Generate lower triangular matrices (stacked across dimensions)
- ltm = np.tril(rs.randint(2, size=(self.dimen, MAXBIT, MAXBIT)))
- _cscramble(self.dimen, ltm, self._sv)
- self.num_generated = 0
-
- def draw(self, n=1):
- # type: (int) -> np.ndarray
- """Draw next point(s) in the Sobol sequence.
-
- Args:
- n (int, optional): number of points to return. Default is 1.
-
- Returns:
- np.ndarray: `n x D` array, where `n` is the number of points requested
- and `D` is the dimensionality of the sequence.
-
- """
- result = np.empty((n, self.dimen), dtype=np.float)
- _draw(n, self.num_generated, self.dimen, self._sv, self._quasi, result)
- self.num_generated += n
- return result
-
- def reset(self):
- # type: () -> SobolEngine
- """Reset the engine to base state.
-
- Returns:
- SobolEngine: engine, reset to its base state.
-
- """
- self._quasi = self._shift.copy()
- self.num_generated = 0
- return self
-
- def fast_forward(self, n):
- # type: (int) -> SobolEngine
- """Fast-forward the sequence by n positions.
-
- Args:
- n (int): number of points to skip in the sequence.
-
- Returns:
- SobolEngine: the fast-forwarded engine.
-
- """
- _fast_forward(n, self.num_generated, self.dimen, self._sv, self._quasi)
- self.num_generated += n
- return self
-
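
Typical usage of this engine looked as follows (values illustrative):

engine = SobolEngine(dimen=3, scramble=True, seed=12345)
x = engine.draw(4)       # 4 x 3 array of points in [0, 1)^3
engine.fast_forward(8)   # skip the next 8 points of the sequence
engine.reset()           # return to the post-scramble base state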
-
-def multinomial_qmc(n, pvals, seed=None):
- # type: (int, Iterable[float], Optional[int]) -> np.ndarray
- """Draw low-discreancy quasi-random samples from multinomial distribution.
-
- Args:
- n (int): Number of experiments.
- pvals (Iterable[float]): float vector of probabilities of size `p`.
- Elements must be non-negative and sum to 1.
- seed (int, optional): The seed for the random number generator.
-
- Returns:
- np.ndarray: int vector of size `p` summing to `n`.
-
- """
- if np.min(pvals) < 0:
- raise ValueError('Elements of pvals must be non-negative')
- if not np.isclose(np.sum(pvals), 1):
- raise ValueError('Elements of pvals must sum to 1')
- sobol = SobolEngine(1, scramble=True, seed=seed)
- draws = sobol.draw(n).ravel()
- p_cumulative = np.empty_like(pvals, dtype=np.float)
- _fill_p_cumulative(np.array(pvals, dtype=np.float), p_cumulative)
- result = np.zeros_like(pvals, dtype=np.int)
- _categorize(draws, p_cumulative, result)
- return result
-
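
For example, a quasi-random allocation of 100 trials across three categories (illustrative values):

counts = multinomial_qmc(100, [0.2, 0.3, 0.5], seed=12345)
# counts is an int vector of length 3 with counts.sum() == 100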
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void _fill_p_cumulative(
- cnp.float_t[:] p,
- cnp.float_t[:] p_cumulative,
-) nogil:
- cdef int i
- cdef int len_p = p.shape[0]
- cdef float tot = 0
- cdef float t
- for i in range(len_p):
- t = tot + p[i]
- p_cumulative[i] = t
- tot = t
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef void _categorize(
- cnp.float_t[:] draws,
- cnp.float_t[:] p_cumulative,
- cnp.int_t[:] result,
-) nogil:
- cdef int i, j
- cdef int n_p = p_cumulative.shape[0]
- for i in range(draws.shape[0]):
- j = _find_index(p_cumulative, n_p, draws[i])
- result[j] = result[j] + 1
-
-
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef int _find_index(
- cnp.float_t[:] p_cumulative,
- const int size,
- const float value,
-) nogil:
- cdef int l = 0
- cdef int r = size - 1
- cdef int m
- while r > l:
- m = (l + r) // 2
- if value > p_cumulative[m]:
- l = m + 1
- else:
- r = m
- return r
-
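
_find_index is a plain binary search for the left-most cumulative weight that is >= value; for values not exceeding the last entry it agrees with numpy's searchsorted:

import numpy as np

p_cum = np.array([0.2, 0.5, 1.0])
# _test_find_index(p_cum, 3, 0.6) == np.searchsorted(p_cum, 0.6, side="left") == 2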
-
-def _test_find_index(p_cumulative, size, value):
- # type: (np.ndarray, int, float) -> int
- """Wrapper for testing in python"""
- return _find_index(p_cumulative, size, value)
-
-
-def star_L2(draw):
- # type: (np.ndarray) -> float
- """Star L2-discrepancy.
-
- Args:
- draw (np.ndarray): numpy array of draws.
-
- Returns:
- float: the star L2-discrepancy.
-
- References:
-
- T. T. Warnock. Computational investigations of low-discrepancy point sets.
- In S. K. Zaremba (editor), Applications of Number Theory to Numerical
- Analysis, Academic Press, New York, 1972.
-
- """
- n, d = draw.shape
- return np.sqrt(
- 3**(-d) - 2**(1 - d) / n * np.sum(np.prod(1 - draw**2, axis=1)) +
- np.sum([
- np.prod(1 - np.maximum(draw[k, :], draw[j, :]))
- for k in range(n) for j in range(n)
- ]) / n**2
- )
-
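
As a sanity check on the formula, a scrambled Sobol draw should score visibly lower than an i.i.d. uniform sample of the same size; a sketch using the torch engine this PR switches to:

import numpy as np
from torch.quasirandom import SobolEngine

x_iid = np.random.RandomState(0).rand(256, 2)
x_qmc = SobolEngine(2, scramble=True, seed=0).draw(256).numpy().astype(np.float64)
# star_L2(x_qmc) should come out well below star_L2(x_iid)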
-
-def centered_L2(draw):
- # type: (np.ndarray) -> float
- """Centered L2-discrepancy.
-
- Args:
- draw (np.ndarray): numpy array of draws.
-
- Returns:
- float: the centered L2-discrepancy.
-
- References:
-
- Fred J. Hickernell, A generalized discrepancy and quadrature error bound,
- Mathematics of Computation, v.67 n.221, p.299-322, Jan. 1998.
-
- """
- n, d = draw.shape
- return np.sqrt(
- (13 / 12)**d - 2 / n * np.sum(
- [np.prod(_dev_half(draw[k, :])) for k in range(n)]
- ) +
- np.sum([
- np.prod(_joint_dev(draw[k, :], draw[j, :]))
- for k in range(n) for j in range(n)
- ]) / n**2
- )
-
-
-def _dev_half(points):
- return 1 + 0.5 * np.abs(points - 0.5) - 0.5 * (points - 0.5)**2
-
-
-def _joint_dev(points1, points2):
- return (
- 1 +
- 0.5 * np.abs(points1 - 0.5) +
- 0.5 * np.abs(points2 - 0.5) -
- 0.5 * np.abs(points1 - points2)
- )
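
The entire Cython module above is superseded by torch.quasirandom.SobolEngine, which exposes the same draw / reset / fast_forward interface but returns tensors directly, eliminating the numpy round-trip along with the cython/numpy build dependencies. A minimal sketch of the replacement:

import torch
from torch.quasirandom import SobolEngine

engine = SobolEngine(dimension=3, scramble=True, seed=12345)
x = engine.draw(8, dtype=torch.double)  # 8 x 3 tensor of points in [0, 1)^3
engine.fast_forward(16)
engine.reset()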
diff --git a/botorch/utils/sampling.py b/botorch/utils/sampling.py
index 022015bef2..fa5d16972b 100644
--- a/botorch/utils/sampling.py
+++ b/botorch/utils/sampling.py
@@ -10,14 +10,12 @@
import torch
from torch import Tensor
+from torch.quasirandom import SobolEngine
from ..exceptions.warnings import SamplingWarning
from ..posteriors.posterior import Posterior
from ..qmc.normal import NormalQMCEngine
-# TODO: Use torch Sobol engine in torch 1.1
-from ..qmc.sobol import SobolEngine
-
def construct_base_samples(
batch_shape: torch.Size,
@@ -127,10 +125,8 @@ def draw_sobol_samples(
lower = bounds[0]
rng = bounds[1] - bounds[0]
sobol_engine = SobolEngine(d, scramble=True, seed=seed)
- samples_np = sobol_engine.draw(n * q).reshape(n, q, d)
- samples_raw = torch.from_numpy(samples_np).to(
- device=lower.device, dtype=lower.dtype
- )
+ samples_raw = sobol_engine.draw(n * q, dtype=lower.dtype).view(n, q, d)
+ samples_raw = samples_raw.to(device=lower.device)
return lower + rng * samples_raw
@@ -162,11 +158,8 @@ def draw_sobol_normal_samples(
>>> samples = draw_sobol_normal_samples(2, 10, seed=1234)
"""
normal_qmc_engine = NormalQMCEngine(d=d, seed=seed, inv_transform=True)
- samples_np = normal_qmc_engine.draw(n)
- return torch.from_numpy(samples_np).to(
- dtype=torch.float if dtype is None else dtype,
- device=device, # None here will leave it on the cpu
- )
+ samples = normal_qmc_engine.draw(n, dtype=torch.float if dtype is None else dtype)
+ return samples.to(device=device)
@contextmanager
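
After this hunk, draw_sobol_samples stays in torch end to end: it draws n * q points on the d-dimensional unit cube and rescales them into the requested bounds. The shape bookkeeping, spelled out with illustrative values:

import torch
from torch.quasirandom import SobolEngine

n, q, d = 4, 2, 2
bounds = torch.tensor([[0.0, -1.0], [1.0, 1.0]], dtype=torch.double)
lower, rng = bounds[0], bounds[1] - bounds[0]
samples_raw = SobolEngine(d, scramble=True, seed=0).draw(n * q, dtype=lower.dtype)
samples = lower + rng * samples_raw.view(n, q, d)  # n x q x d, inside bounds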
diff --git a/setup.py b/setup.py
index cfdd90819b..0c1ea17f30 100755
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,6 @@
import sys
from setuptools import find_packages, setup
-from setuptools.extension import Extension
REQUIRED_MAJOR = 3
@@ -35,20 +34,6 @@ def missing(package_name):
)
-# check for numpy (required for building Sobol cython)
-try:
- import numpy
-except ImportError:
- missing("numpy")
-
-
-# check for Cython itself
-try:
- from Cython.Build import cythonize
-except ImportError:
- missing("cython")
-
-
# error out if setup dependencies not met
if fatals:
sys.exit(
@@ -57,10 +42,6 @@ def missing(package_name):
)
-# TODO: Use torch Sobol once torch 1.1 is released
-EXTENSIONS = [Extension("botorch.qmc.sobol", ["botorch/qmc/sobol.pyx"])]
-
-
TEST_REQUIRES = ["pytest>=3.6", "pytest-cov"]
DEV_REQUIRES = TEST_REQUIRES + ["black", "flake8", "sphinx", "sphinx-autodoc-typehints"]
@@ -83,9 +64,7 @@ def missing(package_name):
python_requires=">=3.6",
setup_requires=["cython", "numpy"],
install_requires=["torch>=1.0.1", "gpytorch>=0.3.1", "scipy"],
- include_dirs=[numpy.get_include()],
packages=find_packages(),
- ext_modules=cythonize(EXTENSIONS),
extras_require={
"dev": DEV_REQUIRES,
"test": TEST_REQUIRES,
diff --git a/test/qmc/test_normal.py b/test/qmc/test_normal.py
index 53398e2bb0..651d3afe7e 100644
--- a/test/qmc/test_normal.py
+++ b/test/qmc/test_normal.py
@@ -1,9 +1,12 @@
#!/usr/bin/env python3
+import math
import unittest
import numpy as np
+import torch
from botorch.qmc import MultivariateNormalQMCEngine, NormalQMCEngine
+from botorch.utils.sampling import manual_seed
from scipy.stats import shapiro
@@ -12,305 +15,517 @@ def test_NormalQMCEngine(self):
# d = 1
engine = NormalQMCEngine(d=1)
samples = engine.draw()
- self.assertEqual(samples.shape, (1, 1))
+ self.assertEqual(samples.dtype, torch.float)
+ self.assertEqual(samples.shape, torch.Size([1, 1]))
samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 1))
+ self.assertEqual(samples.shape, torch.Size([5, 1]))
# d = 2
engine = NormalQMCEngine(d=2)
samples = engine.draw()
- self.assertEqual(samples.shape, (1, 2))
+ self.assertEqual(samples.shape, torch.Size([1, 2]))
samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 2))
+ self.assertEqual(samples.shape, torch.Size([5, 2]))
+ # test double dtype
+ samples = engine.draw(dtype=torch.double)
+ self.assertEqual(samples.dtype, torch.double)
def test_NormalQMCEngineInvTransform(self):
# d = 1
engine = NormalQMCEngine(d=1, inv_transform=True)
samples = engine.draw()
- self.assertEqual(samples.shape, (1, 1))
+ self.assertEqual(samples.dtype, torch.float)
+ self.assertEqual(samples.shape, torch.Size([1, 1]))
samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 1))
+ self.assertEqual(samples.shape, torch.Size([5, 1]))
# d = 2
engine = NormalQMCEngine(d=2, inv_transform=True)
samples = engine.draw()
- self.assertEqual(samples.shape, (1, 2))
+ self.assertEqual(samples.shape, torch.Size([1, 2]))
samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 2))
+ self.assertEqual(samples.shape, torch.Size([5, 2]))
+ # test double dtype
+ samples = engine.draw(dtype=torch.double)
+ self.assertEqual(samples.dtype, torch.double)
def test_NormalQMCEngineSeeded(self):
# test even dimension
engine = NormalQMCEngine(d=2, seed=12345)
samples = engine.draw(n=2)
- samples_expected = np.array(
+ self.assertEqual(samples.dtype, torch.float)
+ samples_expected = torch.tensor(
[[-0.63099602, -1.32950772], [0.29625805, 1.86425618]]
)
- self.assertTrue(np.allclose(samples, samples_expected))
+ self.assertTrue(torch.allclose(samples, samples_expected))
# test odd dimension
engine = NormalQMCEngine(d=3, seed=12345)
samples = engine.draw(n=2)
- samples_expected = np.array(
+ samples_expected = torch.tensor(
[
[1.83169884, -1.40473647, 0.24334828],
[0.36596099, 1.2987395, -1.47556275],
]
)
- self.assertTrue(np.allclose(samples, samples_expected))
+ self.assertTrue(torch.allclose(samples, samples_expected))
+
+ def test_NormalQMCEngineSeededOut(self):
+ # test even dimension
+ engine = NormalQMCEngine(d=2, seed=12345)
+ out = torch.empty(2, 2)
+ self.assertIsNone(engine.draw(n=2, out=out))
+ samples_expected = torch.tensor(
+ [[-0.63099602, -1.32950772], [0.29625805, 1.86425618]]
+ )
+ self.assertTrue(torch.allclose(out, samples_expected))
+ # test odd dimension
+ engine = NormalQMCEngine(d=3, seed=12345)
+ out = torch.empty(2, 3)
+ self.assertIsNone(engine.draw(n=2, out=out))
+ samples_expected = torch.tensor(
+ [
+ [1.83169884, -1.40473647, 0.24334828],
+ [0.36596099, 1.2987395, -1.47556275],
+ ]
+ )
+ self.assertTrue(torch.allclose(out, samples_expected))
def test_NormalQMCEngineSeededInvTransform(self):
# test even dimension
engine = NormalQMCEngine(d=2, seed=12345, inv_transform=True)
samples = engine.draw(n=2)
- samples_expected = np.array(
+ self.assertEqual(samples.dtype, torch.float)
+ samples_expected = torch.tensor(
[[-0.41622922, 0.46622792], [-0.96063897, -0.75568963]]
)
- self.assertTrue(np.allclose(samples, samples_expected))
+ self.assertTrue(torch.allclose(samples, samples_expected))
# test odd dimension
engine = NormalQMCEngine(d=3, seed=12345, inv_transform=True)
samples = engine.draw(n=2)
- samples_expected = np.array(
+ samples_expected = torch.tensor(
[
[-1.40525266, 1.37652443, -0.8519666],
[-0.166497, -2.3153681, -0.15975676],
]
)
- self.assertTrue(np.allclose(samples, samples_expected))
+ self.assertTrue(torch.allclose(samples, samples_expected))
def test_NormalQMCEngineShapiro(self):
engine = NormalQMCEngine(d=2, seed=12345)
samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0)) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - 1) < 1e-2))
+ self.assertEqual(samples.dtype, torch.float)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0)) < 1e-2))
+ self.assertTrue(torch.all(torch.abs(samples.std(dim=0) - 1) < 1e-2))
# perform Shapiro-Wilk test for normality
for i in (0, 1):
_, pval = shapiro(samples[:, i])
self.assertGreater(pval, 0.9)
# make sure samples are uncorrelated
- cov = np.cov(samples.transpose())
+ cov = np.cov(samples.numpy().transpose())
self.assertLess(np.abs(cov[0, 1]), 1e-2)
def test_NormalQMCEngineShapiroInvTransform(self):
engine = NormalQMCEngine(d=2, seed=12345, inv_transform=True)
samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0)) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - 1) < 1e-2))
+ self.assertEqual(samples.dtype, torch.float)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0)) < 1e-2))
+ self.assertTrue(torch.all(torch.abs(samples.std(dim=0) - 1) < 1e-2))
# perform Shapiro-Wilk test for normality
for i in (0, 1):
_, pval = shapiro(samples[:, i])
self.assertGreater(pval, 0.9)
# make sure samples are uncorrelated
- cov = np.cov(samples.transpose())
+ cov = np.cov(samples.numpy().transpose())
self.assertLess(np.abs(cov[0, 1]), 1e-2)
class MultivariateNormalQMCTests(unittest.TestCase):
- def test_MultivariateNormalQMCEngineNonPSD(self):
- # try with non-psd, non-pd cov and expect an assertion error
- self.assertRaises(
- ValueError, MultivariateNormalQMCEngine, [0, 0], [[1, 2], [2, 1]]
- )
+ def test_MultivariateNormalQMCEngineNonPSD(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # try with non-psd, non-pd cov and expect an assertion error
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = torch.tensor([[1, 2], [2, 1]], device=device, dtype=dtype)
+ with self.assertRaises(ValueError):
+ MultivariateNormalQMCEngine(mean=mean, cov=cov)
- def test_MultivariateNormalQMCEngineNonPD(self):
- # try with non-pd but psd cov; should work
- engine = MultivariateNormalQMCEngine(
- [0, 0, 0], [[1, 0, 1], [0, 1, 1], [1, 1, 2]]
- )
- self.assertTrue(engine._corr_matrix is not None)
+ def test_MultivariateNormalQMCEngineNonPSD_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineNonPSD(cuda=True)
- def test_MultivariateNormalQMCEngineSymmetric(self):
- # try with non-symmetric cov and expect an error
- self.assertRaises(
- ValueError, MultivariateNormalQMCEngine, [0, 0], [[1, 0], [2, 1]]
- )
+ def test_MultivariateNormalQMCEngineNonPD(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ mean = torch.zeros(3, device=device, dtype=dtype)
+ cov = torch.tensor(
+ [[1, 0, 1], [0, 1, 1], [1, 1, 2]], device=device, dtype=dtype
+ )
+ # try with non-pd but psd cov; should work
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov)
+ self.assertTrue(engine._corr_matrix is not None)
- def test_MultivariateNormalQMCEngine(self):
- # d = 1 scalar
- engine = MultivariateNormalQMCEngine(mean=0, cov=5)
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 1))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 1))
+ def test_MultivariateNormalQMCEngineNonPD_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineNonPD(cuda=True)
- # d = 2 list
- engine = MultivariateNormalQMCEngine(mean=[0, 1], cov=[[1, 0], [0, 1]])
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 2))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 2))
+ def test_MultivariateNormalQMCEngineSymmetric(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # try with non-symmetric cov and expect an error
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = torch.tensor([[1, 0], [2, 1]], device=device, dtype=dtype)
+ with self.assertRaises(ValueError):
+ MultivariateNormalQMCEngine(mean=mean, cov=cov)
- # d = 3 np.array
- mean = np.array([0, 1, 2])
- cov = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
- engine = MultivariateNormalQMCEngine(mean, cov)
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 3))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 3))
+ def test_MultivariateNormalQMCEngineSymmetric_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineSymmetric(cuda=True)
- def test_MultivariateNormalQMCEngineInvTransform(self):
- # d = 1 scalar
- engine = MultivariateNormalQMCEngine(mean=0, cov=5, inv_transform=True)
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 1))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 1))
+ def test_MultivariateNormalQMCEngine(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
- # d = 2 list
- engine = MultivariateNormalQMCEngine(
- mean=[0, 1], cov=[[1, 0], [0, 1]], inv_transform=True
- )
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 2))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 2))
+ # d = 1 scalar
+ mean = torch.tensor([0], device=device, dtype=dtype)
+ cov = torch.tensor([[5]], device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 1]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 1]))
- # d = 3 np.array
- mean = np.array([0, 1, 2])
- cov = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
- engine = MultivariateNormalQMCEngine(mean, cov, inv_transform=True)
- samples = engine.draw()
- self.assertEqual(samples.shape, (1, 3))
- samples = engine.draw(n=5)
- self.assertEqual(samples.shape, (5, 3))
+ # d = 2 list
+ mean = torch.tensor([0, 1], device=device, dtype=dtype)
+ cov = torch.eye(2, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 2]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 2]))
- def test_MultivariateNormalQMCEngineSeeded(self):
- # test even dimension
- np.random.seed(54321)
- a = np.random.randn(2, 2)
- A = a @ a.transpose() + np.diag(np.random.rand(2))
- engine = MultivariateNormalQMCEngine(np.array([0, 0]), A, seed=12345)
- samples = engine.draw(n=2)
- samples_expected = np.array(
- [[-0.67595995, -2.27437872], [0.317369, 2.66203577]]
- )
- self.assertTrue(np.allclose(samples, samples_expected))
+ # d = 3 Tensor
+ mean = torch.tensor([0, 1, 2], device=device, dtype=dtype)
+ cov = torch.eye(3, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 3]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 3]))
- # test odd dimension
- np.random.seed(54321)
- a = np.random.randn(3, 3)
- A = a @ a.transpose() + np.diag(np.random.rand(3))
- engine = MultivariateNormalQMCEngine(np.array([0, 0, 0]), A, seed=12345)
- samples = engine.draw(n=2)
- samples_expected = np.array(
- [
- [2.05178452, -6.35744194, 0.67944512],
- [0.40993262, 2.60517697, -1.69415825],
- ]
- )
- self.assertTrue(np.allclose(samples, samples_expected))
+ def test_MultivariateNormalQMCEngine_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngine(cuda=True)
- def test_MultivariateNormalQMCEngineSeededInvTransform(self):
- # test even dimension
- np.random.seed(54321)
- a = np.random.randn(2, 2)
- A = a @ a.transpose() + np.diag(np.random.rand(2))
- engine = MultivariateNormalQMCEngine(
- np.array([0, 0]), A, seed=12345, inv_transform=True
- )
- samples = engine.draw(n=2)
- samples_expected = np.array(
- [[-0.44588916, 0.22657776], [-1.02909281, -1.83193033]]
- )
- self.assertTrue(np.allclose(samples, samples_expected))
+ def test_MultivariateNormalQMCEngineInvTransform(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
- # test odd dimension
- np.random.seed(54321)
- a = np.random.randn(3, 3)
- A = a @ a.transpose() + np.diag(np.random.rand(3))
- engine = MultivariateNormalQMCEngine(
- np.array([0, 0, 0]), A, seed=12345, inv_transform=True
- )
- samples = engine.draw(n=2)
- samples_expected = np.array(
- [
- [-1.5740992, 5.61057598, -1.28218525],
- [-0.18650226, -5.41662685, 0.023199],
- ]
- )
- self.assertTrue(np.allclose(samples, samples_expected))
+ # d = 1
+ mean = torch.tensor([0], device=device, dtype=dtype)
+ cov = torch.tensor([[5]], device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, inv_transform=True)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 1]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 1]))
- def test_MultivariateNormalQMCEngineShapiro(self):
- # test the standard case
- engine = MultivariateNormalQMCEngine(
- mean=[0, 0], cov=[[1, 0], [0, 1]], seed=12345
- )
- samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0)) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - 1) < 1e-2))
- # perform Shapiro-Wilk test for normality
- for i in (0, 1):
- _, pval = shapiro(samples[:, i])
- self.assertGreater(pval, 0.9)
- # make sure samples are uncorrelated
- cov = np.cov(samples.transpose())
- self.assertLess(np.abs(cov[0, 1]), 1e-2)
+ # d = 2
+ mean = torch.tensor([0, 1], device=device, dtype=dtype)
+ cov = torch.eye(2, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, inv_transform=True)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 2]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 2]))
- # test the correlated, non-zero mean case
- engine = MultivariateNormalQMCEngine(
- mean=[1.0, 2.0], cov=[[1.5, 0.5], [0.5, 1.5]], seed=12345
- )
- samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0) - [1, 2]) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - np.sqrt(1.5)) < 1e-2))
- # perform Shapiro-Wilk test for normality
- for i in (0, 1):
- _, pval = shapiro(samples[:, i])
- self.assertGreater(pval, 0.9)
- # check covariance
- cov = np.cov(samples.transpose())
- self.assertLess(np.abs(cov[0, 1] - 0.5), 1e-2)
-
- def test_MultivariateNormalQMCEngineShapiroInvTransform(self):
- # test the standard case
- engine = MultivariateNormalQMCEngine(
- mean=[0, 0], cov=[[1, 0], [0, 1]], seed=12345, inv_transform=True
- )
- samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0)) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - 1) < 1e-2))
- # perform Shapiro-Wilk test for normality
- for i in (0, 1):
- _, pval = shapiro(samples[:, i])
- self.assertGreater(pval, 0.9)
- # make sure samples are uncorrelated
- cov = np.cov(samples.transpose())
- self.assertLess(np.abs(cov[0, 1]), 1e-2)
+ # d = 3
+ mean = torch.tensor([0, 1, 2], device=device, dtype=dtype)
+ cov = torch.eye(3, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, inv_transform=True)
+ samples = engine.draw()
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertEqual(samples.shape, torch.Size([1, 3]))
+ samples = engine.draw(n=5)
+ self.assertEqual(samples.shape, torch.Size([5, 3]))
- # test the correlated, non-zero mean case
- engine = MultivariateNormalQMCEngine(
- mean=[1.0, 2.0],
- cov=[[1.5, 0.5], [0.5, 1.5]],
- seed=12345,
- inv_transform=True,
- )
- samples = engine.draw(n=250)
- self.assertTrue(all(np.abs(samples.mean(axis=0) - [1, 2]) < 1e-2))
- self.assertTrue(all(np.abs(samples.std(axis=0) - np.sqrt(1.5)) < 1e-2))
- # perform Shapiro-Wilk test for normality
- for i in (0, 1):
- _, pval = shapiro(samples[:, i])
- self.assertGreater(pval, 0.9)
- # check covariance
- cov = np.cov(samples.transpose())
- self.assertLess(np.abs(cov[0, 1] - 0.5), 1e-2)
-
- def test_MultivariateNormalQMCEngineDegenerate(self):
- # X, Y iid standard Normal and Z = X + Y, random vector (X, Y, Z)
- engine = MultivariateNormalQMCEngine(
- mean=[0.0, 0.0, 0.0],
- cov=[[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [1.0, 1.0, 2.0]],
- seed=12345,
- )
- samples = engine.draw(n=2000)
- self.assertTrue(all(np.abs(samples.mean(axis=0)) < 1e-2))
- self.assertTrue(np.abs(np.std(samples[:, 0]) - 1) < 1e-2)
- self.assertTrue(np.abs(np.std(samples[:, 1]) - 1) < 1e-2)
- self.assertTrue(np.abs(np.std(samples[:, 2]) - np.sqrt(2)) < 1e-2)
- for i in (0, 1, 2):
- _, pval = shapiro(samples[:, i])
- self.assertGreater(pval, 0.9)
- cov = np.cov(samples.transpose())
- self.assertLess(np.abs(cov[0, 1]), 1e-2)
- self.assertLess(np.abs(cov[0, 2] - 1), 1e-2)
- # check to see if X + Y = Z almost exactly
- self.assertTrue(
- all(np.abs(samples[:, 0] + samples[:, 1] - samples[:, 2]) < 1e-5)
- )
+ def test_MultivariateNormalQMCEngineInvTransform_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineInvTransform(cuda=True)
+
+ def test_MultivariateNormalQMCEngineSeeded(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # test even dimension
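+ # a @ a.T is positive semi-definite; adding a random positive diagonal makes the covariance strictly positive definite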
+ with manual_seed(54321):
+ a = torch.randn(2, 2)
+ cov = a @ a.transpose(-1, -2) + torch.rand(2).diag()
+
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ samples = engine.draw(n=2)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
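+ # regression values, presumably recorded from an earlier run with this seed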
+ samples_expected = torch.tensor(
+ [[-0.849047422, -0.713852942], [0.398635030, 1.350660801]],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(samples, samples_expected))
+
+ # test odd dimension
+ with manual_seed(54321):
+ a = torch.randn(3, 3)
+ cov = a @ a.transpose(-1, -2) + torch.rand(3).diag()
+
+ mean = torch.zeros(3, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ samples = engine.draw(n=2)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ samples_expected = torch.tensor(
+ [
+ [3.113158941, -3.262257099, -0.819938779],
+ [0.621987879, 2.352285624, -1.992680788],
+ ],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(samples, samples_expected))
+
+ def test_MultivariateNormalQMCEngineSeeded_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineSeeded(cuda=True)
+
+ def test_MultivariateNormalQMCEngineSeededOut(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # test even dimension
+ with manual_seed(54321):
+ a = torch.randn(2, 2)
+ cov = a @ a.transpose(-1, -2) + torch.rand(2).diag()
+
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ out = torch.empty(2, 2, device=device, dtype=dtype)
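+ # drawing into a preallocated out tensor fills it in place and returns None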
+ self.assertIsNone(engine.draw(n=2, out=out))
+ samples_expected = torch.tensor(
+ [[-0.849047422, -0.713852942], [0.398635030, 1.350660801]],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(out, samples_expected))
+
+ # test odd dimension
+ with manual_seed(54321):
+ a = torch.randn(3, 3)
+ cov = a @ a.transpose(-1, -2) + torch.rand(3).diag()
+
+ mean = torch.zeros(3, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ out = torch.empty(2, 3, device=device, dtype=dtype)
+ self.assertIsNone(engine.draw(n=2, out=out))
+ samples_expected = torch.tensor(
+ [
+ [3.113158941, -3.262257099, -0.819938779],
+ [0.621987879, 2.352285624, -1.992680788],
+ ],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(out, samples_expected))
+
+ def test_MultivariateNormalQMCEngineSeededOut_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineSeededOut(cuda=True)
+
+ def test_MultivariateNormalQMCEngineSeededInvTransform(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # test even dimension
+ with manual_seed(54321):
+ a = torch.randn(2, 2)
+ cov = a @ a.transpose(-1, -2) + torch.rand(2).diag()
+
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(
+ mean=mean, cov=cov, seed=12345, inv_transform=True
+ )
+ samples = engine.draw(n=2)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ samples_expected = torch.tensor(
+ [[-0.560064316, 0.629113674], [-1.292604208, -0.048077226]],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(samples, samples_expected))
+
+ # test odd dimension
+ with manual_seed(54321):
+ a = torch.randn(3, 3)
+ cov = a @ a.transpose(-1, -2) + torch.rand(3).diag()
+
+ mean = torch.zeros(3, device=device, dtype=dtype)
+ cov = cov.to(device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(
+ mean=mean, cov=cov, seed=12345, inv_transform=True
+ )
+ samples = engine.draw(n=2)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ samples_expected = torch.tensor(
+ [
+ [-2.388370037, 3.071142435, -0.319439292],
+ [-0.282978594, -4.350236893, -1.085214734],
+ ],
+ device=device,
+ dtype=dtype,
+ )
+ self.assertTrue(torch.allclose(samples, samples_expected))
+
+ def test_MultivariateNormalQMCEngineSeededInvTransform_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineSeededInvTransform(cuda=True)
+
+ def test_MultivariateNormalQMCEngineShapiro(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # test the standard case
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = torch.eye(2, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ samples = engine.draw(n=250)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0)) < 1e-2))
+ self.assertTrue(torch.all(torch.abs(samples.std(dim=0) - 1) < 1e-2))
+ # perform Shapiro-Wilk test for normality
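+ # (a large p-value means the normality hypothesis is not rejected)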
+ samples = samples.cpu().numpy()
+ for i in (0, 1):
+ _, pval = shapiro(samples[:, i])
+ self.assertGreater(pval, 0.9)
+ # make sure samples are uncorrelated
+ cov = np.cov(samples.transpose())
+ self.assertLess(np.abs(cov[0, 1]), 1e-2)
+
+ # test the correlated, non-zero mean case
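+ # both marginal variances are 1.5, so the marginal stds should be sqrt(1.5)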
+ mean = torch.tensor([1.0, 2.0], device=device, dtype=dtype)
+ cov = torch.tensor([[1.5, 0.5], [0.5, 1.5]], device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ samples = engine.draw(n=250)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0) - mean) < 1e-2))
+ self.assertTrue(
+ torch.all(torch.abs(samples.std(dim=0) - math.sqrt(1.5)) < 1e-2)
+ )
+ # perform Shapiro-Wilk test for normality
+ samples = samples.cpu().numpy()
+ for i in (0, 1):
+ _, pval = shapiro(samples[:, i])
+ self.assertGreater(pval, 0.9)
+ # check covariance
+ cov = np.cov(samples.transpose())
+ self.assertLess(np.abs(cov[0, 1] - 0.5), 1e-2)
+
+ def test_MultivariateNormalQMCEngineShapiro_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineShapiro(cuda=True)
+
+ def test_MultivariateNormalQMCEngineShapiroInvTransform(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # test the standard case
+ mean = torch.zeros(2, device=device, dtype=dtype)
+ cov = torch.eye(2, device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(
+ mean=mean, cov=cov, seed=12345, inv_transform=True
+ )
+ samples = engine.draw(n=250)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0)) < 1e-2))
+ self.assertTrue(torch.all(torch.abs(samples.std(dim=0) - 1) < 1e-2))
+ # perform Shapiro-Wilk test for normality
+ samples = samples.cpu().numpy()
+ for i in (0, 1):
+ _, pval = shapiro(samples[:, i])
+ self.assertGreater(pval, 0.9)
+ # make sure samples are uncorrelated
+ cov = np.cov(samples.transpose())
+ self.assertLess(np.abs(cov[0, 1]), 1e-2)
+
+ # test the correlated, non-zero mean case
+ mean = torch.tensor([1.0, 2.0], device=device, dtype=dtype)
+ cov = torch.tensor([[1.5, 0.5], [0.5, 1.5]], device=device, dtype=dtype)
+ engine = MultivariateNormalQMCEngine(
+ mean=mean, cov=cov, seed=12345, inv_transform=True
+ )
+ samples = engine.draw(n=250)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0) - mean) < 1e-2))
+ self.assertTrue(
+ torch.all(torch.abs(samples.std(dim=0) - math.sqrt(1.5)) < 1e-2)
+ )
+ # perform Shapiro-Wilk test for normality
+ samples = samples.cpu().numpy()
+ for i in (0, 1):
+ _, pval = shapiro(samples[:, i])
+ self.assertGreater(pval, 0.9)
+ # check covariance
+ cov = np.cov(samples.transpose())
+ self.assertLess(np.abs(cov[0, 1] - 0.5), 1e-2)
+
+ def test_MultivariateNormalQMCEngineShapiroInvTransform_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineShapiroInvTransform(cuda=True)
+
+ def test_MultivariateNormalQMCEngineDegenerate(self, cuda=False):
+ device = torch.device("cuda") if cuda else torch.device("cpu")
+ for dtype in (torch.float, torch.double):
+ # X, Y iid standard Normal and Z = X + Y, random vector (X, Y, Z)
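+ # the covariance below is singular (rank 2), so this exercises sampling from a degenerate distribution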
+ mean = torch.zeros(3, device=device, dtype=dtype)
+ cov = torch.tensor(
+ [[1, 0, 1], [0, 1, 1], [1, 1, 2]], device=device, dtype=dtype
+ )
+ engine = MultivariateNormalQMCEngine(mean=mean, cov=cov, seed=12345)
+ samples = engine.draw(n=2000)
+ self.assertEqual(samples.dtype, dtype)
+ self.assertEqual(samples.device.type, device.type)
+ self.assertTrue(torch.all(torch.abs(samples.mean(dim=0)) < 1e-2))
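+ # Var(Z) = Var(X) + Var(Y) = 2, so std(Z) should be close to sqrt(2)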
+ self.assertTrue(torch.abs(torch.std(samples[:, 0]) - 1) < 1e-2)
+ self.assertTrue(torch.abs(torch.std(samples[:, 1]) - 1) < 1e-2)
+ self.assertTrue(torch.abs(torch.std(samples[:, 2]) - math.sqrt(2)) < 1e-2)
+ for i in (0, 1, 2):
+ _, pval = shapiro(samples[:, i].cpu().numpy())
+ self.assertGreater(pval, 0.9)
+ cov = np.cov(samples.cpu().numpy().transpose())
+ self.assertLess(np.abs(cov[0, 1]), 1e-2)
+ self.assertLess(np.abs(cov[0, 2] - 1), 1e-2)
+ # check that X + Y = Z holds almost exactly
+ self.assertTrue(
+ torch.all(
+ torch.abs(samples[:, 0] + samples[:, 1] - samples[:, 2]) < 1e-5
+ )
+ )
+
+ def test_MultivariateNormalQMCEngineDegenerate_cuda(self):
+ if torch.cuda.is_available():
+ self.test_MultivariateNormalQMCEngineDegenerate(cuda=True)
diff --git a/test/qmc/test_sobol.py b/test/qmc/test_sobol.py
deleted file mode 100644
index 4c0822a2c6..0000000000
--- a/test/qmc/test_sobol.py
+++ /dev/null
@@ -1,232 +0,0 @@
-#!/usr/bin/env python3
-
-import unittest
-from collections import Counter
-
-import numpy as np
-from botorch.qmc.sobol import SobolEngine, _test_find_index, multinomial_qmc
-
-
-class SobolTests(unittest.TestCase):
- # set maxDiff to None to show all differences when tests fail
- maxDiff = None
-
- def setUp(self):
- engine_unscrambled_1d = SobolEngine(1)
- self.draws_unscrambled_1d = engine_unscrambled_1d.draw(10)
- engine_unscrambled_3d = SobolEngine(3)
- self.draws_unscrambled_3d = engine_unscrambled_3d.draw(10)
- engine_scrambled_1d = SobolEngine(1, scramble=True, seed=12345)
- self.draws_scrambled_1d = engine_scrambled_1d.draw(10)
- engine_scrambled_3d = SobolEngine(3, scramble=True, seed=12345)
- self.draws_scrambled_3d = engine_scrambled_3d.draw(10)
-
- def test_Unscrambled1DSobol(self):
- expected = [0.5, 0.75, 0.25, 0.375, 0.875, 0.625, 0.125, 0.1875, 0.6875, 0.9375]
- self.assertEqual(self.draws_unscrambled_1d.shape[0], 10)
- self.assertEqual(self.draws_unscrambled_1d.shape[1], 1)
- self.assertTrue(
- np.array_equal(self.draws_unscrambled_1d.flatten(), np.array(expected))
- )
-
- def test_Unscrambled3DSobol(self):
- expected_dim3 = [
- 0.5,
- 0.75,
- 0.25,
- 0.625,
- 0.125,
- 0.375,
- 0.875,
- 0.3125,
- 0.8125,
- 0.5625,
- ]
- self.assertEqual(self.draws_unscrambled_3d.shape[0], 10)
- self.assertEqual(self.draws_unscrambled_3d.shape[1], 3)
- self.assertTrue(
- np.array_equal(self.draws_unscrambled_3d[:, 2], np.array(expected_dim3))
- )
- self.assertTrue(
- np.array_equal(
- self.draws_unscrambled_3d[:, 0], self.draws_unscrambled_1d.flatten()
- )
- )
-
- def test_Unscrambled3DAsyncSobol(self):
- engine_unscrambled_3d = SobolEngine(3)
- draws = np.vstack([engine_unscrambled_3d.draw() for i in range(10)])
- self.assertTrue(np.array_equal(self.draws_unscrambled_3d, draws))
-
- def test_UnscrambledFastForwardAndResetSobol(self):
- engine_unscrambled_3d = SobolEngine(3).fast_forward(5)
- draws = engine_unscrambled_3d.draw(5)
- self.assertTrue(np.array_equal(self.draws_unscrambled_3d[5:10, :], draws))
-
- engine_unscrambled_3d.reset()
- even_draws = []
- for i in range(10):
- if i % 2 == 0:
- even_draws.append(engine_unscrambled_3d.draw())
- else:
- engine_unscrambled_3d.fast_forward(1)
- self.assertTrue(
- np.array_equal(
- self.draws_unscrambled_3d[[i for i in range(10) if i % 2 == 0],],
- np.vstack(even_draws),
- )
- )
-
- def test_UnscrambledHighDimSobol(self):
- engine = SobolEngine(1111)
- count1 = Counter(engine.draw().flatten().tolist())
- count2 = Counter(engine.draw().flatten().tolist())
- count3 = Counter(engine.draw().flatten().tolist())
- self.assertEqual(count1, Counter({0.5: 1111}))
- self.assertEqual(count2, Counter({0.25: 580, 0.75: 531}))
- self.assertEqual(count3, Counter({0.25: 531, 0.75: 580}))
-
- def test_UnscrambledSobolBounds(self):
- engine = SobolEngine(1111)
- draws = engine.draw(1000)
- self.assertTrue(np.all(draws >= 0))
- self.assertTrue(np.all(draws <= 1))
-
- def test_UnscrambledDistributionSobol(self):
- engine = SobolEngine(1111)
- draws = engine.draw(1000)
- self.assertTrue(
- np.allclose(np.mean(draws, axis=0), np.repeat(0.5, 1111), atol=0.01)
- )
- self.assertTrue(
- np.allclose(
- np.percentile(draws, 25, axis=0), np.repeat(0.25, 1111), atol=0.01
- )
- )
- self.assertTrue(
- np.allclose(
- np.percentile(draws, 75, axis=0), np.repeat(0.75, 1111), atol=0.01
- )
- )
-
- def test_Scrambled1DSobol(self):
- expected = [
- 0.46784395,
- 0.03562005,
- 0.91319746,
- 0.86014303,
- 0.23796839,
- 0.25856809,
- 0.63636296,
- 0.69455189,
- 0.316758,
- 0.18673652,
- ]
- print(self.draws_scrambled_1d.flatten())
- self.assertEqual(self.draws_scrambled_1d.shape[0], 10)
- self.assertEqual(self.draws_scrambled_1d.shape[1], 1)
- self.assertTrue(
- np.allclose(self.draws_scrambled_1d.flatten(), np.array(expected))
- )
-
- def test_Scrambled3DSobol(self):
- expected_dim3 = [
- 0.19711632,
- 0.43653634,
- 0.79965184,
- 0.08670237,
- 0.70811484,
- 0.90994149,
- 0.29499525,
- 0.83833538,
- 0.46057166,
- 0.15769824,
- ]
- self.assertEqual(self.draws_scrambled_3d.shape[0], 10)
- self.assertEqual(self.draws_scrambled_3d.shape[1], 3)
- self.assertTrue(
- np.allclose(
- self.draws_scrambled_3d[:, 2], np.array(expected_dim3), atol=1e-5
- )
- )
-
- def test_Scrambled3DAsyncSobol(self):
- engine_unscrambled_3d = SobolEngine(3)
- draws = np.vstack([engine_unscrambled_3d.draw() for i in range(10)])
- self.assertTrue(np.array_equal(self.draws_unscrambled_3d, draws))
-
- def test_ScrambledSobolBounds(self):
- engine = SobolEngine(100, scramble=True)
- draws = engine.draw(1000)
- self.assertTrue(np.all(draws >= 0))
- self.assertTrue(np.all(draws <= 1))
-
- def test_ScrambledFastForwardAndResetSobol(self):
- engine_scrambled_3d = SobolEngine(3, scramble=True, seed=12345).fast_forward(5)
- draws = engine_scrambled_3d.draw(5)
- self.assertTrue(np.array_equal(self.draws_scrambled_3d[5:10,], draws))
-
- engine_scrambled_3d.reset()
- even_draws = []
- for i in range(10):
- if i % 2 == 0:
- even_draws.append(engine_scrambled_3d.draw())
- else:
- engine_scrambled_3d.fast_forward(1)
- self.assertTrue(
- np.array_equal(
- self.draws_scrambled_3d[[i for i in range(10) if i % 2 == 0],],
- np.vstack(even_draws),
- )
- )
-
- def test_ScrambledDistributionSobol(self):
- engine = SobolEngine(10, scramble=True, seed=12345)
- draws = engine.draw(1000)
- self.assertTrue(
- np.allclose(np.mean(draws, axis=0), np.repeat(0.5, 10), atol=0.01)
- )
- self.assertTrue(
- np.allclose(
- np.percentile(draws, 25, axis=0), np.repeat(0.25, 10), atol=0.01
- )
- )
- self.assertTrue(
- np.allclose(
- np.percentile(draws, 75, axis=0), np.repeat(0.75, 10), atol=0.01
- )
- )
-
- def test_0Dim(self):
- engine = SobolEngine(0)
- draws = engine.draw(5)
- self.assertTrue(np.array_equal(np.empty((5, 0)), draws))
-
-
-class MultinomialQMCTests(unittest.TestCase):
- def test_MultinomialNegativePs(self):
- p = np.array([0.12, 0.26, -0.05, 0.35, 0.22])
- self.assertRaises(ValueError, multinomial_qmc, 10, p)
-
- def test_MultinomialSumOfPTooLarge(self):
- p = np.array([0.12, 0.26, 0.1, 0.35, 0.22])
- self.assertRaises(ValueError, multinomial_qmc, 10, p)
-
- def test_MultinomialBasicDraw(self):
- p = np.array([0.12, 0.26, 0.05, 0.35, 0.22])
- expected = np.array([12, 25, 6, 34, 23])
- self.assertTrue(np.array_equal(multinomial_qmc(100, p, seed=12345), expected))
-
- def test_MultinomialDistribution(self):
- p = np.array([0.12, 0.26, 0.05, 0.35, 0.22])
- draws = multinomial_qmc(10000, p, seed=12345)
- np.testing.assert_almost_equal(draws / np.sum(draws), p, decimal=4)
-
- def test_FindIndex(self):
- p_cumulative = np.array([0.1, 0.4, 0.45, 0.6, 0.75, 0.9, 0.99, 1.0])
- size = len(p_cumulative)
- self.assertEqual(_test_find_index(p_cumulative, size, 0.0), 0)
- self.assertEqual(_test_find_index(p_cumulative, size, 0.4), 2)
- self.assertEqual(_test_find_index(p_cumulative, size, 0.44999), 2)
- self.assertEqual(_test_find_index(p_cumulative, size, 0.45001), 3)
- self.assertEqual(_test_find_index(p_cumulative, size, 1.0), size - 1)
diff --git a/test/utils/test_sampling.py b/test/utils/test_sampling.py
index 0ab808968d..8279ccd5db 100644
--- a/test/utils/test_sampling.py
+++ b/test/utils/test_sampling.py
@@ -6,13 +6,13 @@
import torch
from botorch.exceptions.warnings import SamplingWarning
from botorch.posteriors.gpytorch import GPyTorchPosterior
-from botorch.qmc.sobol import SobolEngine
from botorch.utils.sampling import (
construct_base_samples,
construct_base_samples_from_posterior,
manual_seed,
)
from gpytorch.distributions import MultitaskMultivariateNormal, MultivariateNormal
+from torch.quasirandom import SobolEngine
class TestConstructBaseSamples(unittest.TestCase):