diff --git a/CMakeLists.txt b/CMakeLists.txt
index 665906bc..d7c5d772 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -5,9 +5,6 @@
 add_compile_definitions(PERFECT_MATCHING_DOUBLE)
 
 file(GLOB SOURCES "src/pymatching/*.cpp")
 
-add_subdirectory(lib/blossom5-v2.05.src)
-target_compile_options(Blossom5 PRIVATE "-fPIC")
-
 add_subdirectory(lib/pybind11)
 pybind11_add_module(_cpp_mwpm ${SOURCES})
@@ -19,11 +16,8 @@
 target_link_libraries(_cpp_mwpm PRIVATE lemon)
 
 include_directories(${CMAKE_SOURCE_DIR}/lib/boost_1_72_0)
-target_link_libraries(_cpp_mwpm PRIVATE Blossom5)
-
 target_include_directories(_cpp_mwpm PRIVATE
                            "${PROJECT_BINARY_DIR}"
-                           "${PROJECT_SOURCE_DIR}/lib/blossom5-v2.05.src"
                            "${CMAKE_SOURCE_DIR}/lib/lemon"
                            "${CMAKE_BINARY_DIR}/lib/lemon"
                            )
diff --git a/docs/_static/pymatching_vs_networkx.png b/docs/_static/pymatching_vs_networkx.png
index 9fbe75c2..f430b872 100644
Binary files a/docs/_static/pymatching_vs_networkx.png and b/docs/_static/pymatching_vs_networkx.png differ
diff --git a/docs/index.rst b/docs/index.rst
index 11268ea8..6ab32457 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -14,24 +14,31 @@
 While a Python library such as NetworkX can also be used
 to implement MWPM, it is far too slow to be used for large
 fault-tolerance simulations, which often require matching
 graphs with many thousands of nodes. On the other hand,
-the excellent C++ BlossomV library is fast, but using it to decode
+the widely used C++ BlossomV library is fast, but using it to decode
 quantum codes also requires path-finding algorithms, which must
 also be implemented in C++ for a fast implementation.
 Furthermore, attempting to solve the full matching problem
 even with BlossomV can become prohibitively expensive for matching
 graphs with more than a few thousand nodes, since the average complexity is
-empirically roughly quadratic in the number of nodes.
+empirically roughly quadratic in the number of nodes. BlossomV
+is also not open-source, since it does not have a permissive license.
+
 PyMatching is intended to provide the best of both
-worlds: the algorithms and data structures are implemented
-in C++ for good performance (with the help of BlossomV and the Boost Graph library),
+worlds: all the
+core functionality of PyMatching is usable via the Python
+bindings, making it easy to use
+in conjunction with numpy, scipy and NetworkX. However, the
+core algorithms and data structures are implemented
+in C++ for good performance (with the help of the open-source
+Lemon and the Boost Graph libraries),
 using a local variant of the matching decoder given in the
 Appendix of https://arxiv.org/abs/2010.09626, which empirically
 has an average runtime roughly linear in the number of nodes
-and gives the same output as full matching in practice.
-Furthermore, Python bindings are provided for all the
-core functionality of PyMatching, making it easy to use
-in conjunction with numpy, scipy and NetworkX.
-PyMatching can be applied to any quantum code for which
+and gives the same output as full matching in practice. Since
+PyMatching uses the open-source Lemon C++ library for matching,
+which has similar performance to BlossomV, both PyMatching and
+its dependencies have permissive licenses. PyMatching can be
+applied to any quantum code for which
 defects come in pairs (or in isolation at a boundary), and
 does not require knowledge of the specific geometry used.
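To make the Python-level workflow described in the documentation above concrete, a minimal usage sketch follows. It assumes the `Matching` class and `decode` method exposed by the `pymatching` Python package; the repetition-code check matrix is a toy example chosen purely for illustration.

    # Minimal sketch of decoding with the Python bindings (toy example).
    import numpy as np
    from pymatching import Matching

    # Parity-check matrix of a 3-bit repetition code: each row is a check,
    # each column a qubit.
    H = np.array([[1, 1, 0],
                  [0, 1, 1]])
    m = Matching(H)

    # Syndrome produced by an error on the middle qubit (both checks fire).
    syndrome = np.array([1, 1])
    correction = m.decode(syndrome)
    print(correction)  # expected: [0 1 0]

Here minimum-weight matching selects the single-qubit correction on the middle qubit rather than the two-qubit alternative, since it has lower total weight.
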
diff --git a/lib/blossom5-v2.05.src/.gitignore b/lib/blossom5-v2.05.src/.gitignore
deleted file mode 100644
index 8f3a283f..00000000
--- a/lib/blossom5-v2.05.src/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-!CMakeLists.txt
diff --git a/lib/blossom5-v2.05.src/CMakeLists.txt b/lib/blossom5-v2.05.src/CMakeLists.txt
deleted file mode 100644
index 813746de..00000000
--- a/lib/blossom5-v2.05.src/CMakeLists.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-file(GLOB_RECURSE BLOSSOM_SOURCES "*.cpp")
-
-add_library(Blossom5 ${BLOSSOM_SOURCES})
-if(UNIX AND NOT APPLE)
-    target_link_libraries(Blossom5 rt)
-endif()
diff --git a/setup.py b/setup.py
index 3df3a567..718ea557 100644
--- a/setup.py
+++ b/setup.py
@@ -29,13 +29,6 @@ def download_and_extract(pkg_url, pkg_fn, pkg_orig_dir=None, pkg_new_dir=None):
     if os.path.isfile(pkg_fn):
         os.remove(pkg_fn)
 
-blossom5_url = "https://pub.ist.ac.at/~vnk/software/blossom5-v2.05.src.tar.gz"
-blossom5_fn = os.path.join(root_dir, "blossom5-v2.05.src.tar.gz")
-blossom5_dir = os.path.join(lib_dir, "blossom5-v2.05.src")
-
-if not os.path.isfile(os.path.join(blossom5_dir, "PerfectMatching.h")):
-    download_and_extract(blossom5_url, blossom5_fn)
-
 lemon_url = "http://lemon.cs.elte.hu/pub/sources/lemon-1.3.1.tar.gz"
 lemon_fn = os.path.join(root_dir, "lemon-1.3.1.tar.gz")
 lemon_old_dir = os.path.join(lib_dir, "lemon-1.3.1")
diff --git a/src/pymatching/__init__.py b/src/pymatching/__init__.py
index ec7f083b..94a6045d 100644
--- a/src/pymatching/__init__.py
+++ b/src/pymatching/__init__.py
@@ -1,5 +1,4 @@
-from ._cpp_mwpm import (PerfectMatching, Options, randomize, set_seed,
-                        rand_float)
+from ._cpp_mwpm import (randomize, set_seed, rand_float)
 from .matching import *
 
 randomize()  # Set random seed using std::random_device
diff --git a/src/pymatching/bindings.cpp b/src/pymatching/bindings.cpp
index 38ec9cf6..8d888a72 100644
--- a/src/pymatching/bindings.cpp
+++ b/src/pymatching/bindings.cpp
@@ -1,36 +1,14 @@
 #include
 #include
-#include "mwpm.h"
-#include
 #include "stabiliser_graph.h"
 #include "weighted_stabiliser_graph.h"
 #include "rand_gen.h"
-#include "boost_mwpm.h"
 #include "lemon_mwpm.h"
 
 namespace py = pybind11;
 using namespace pybind11::literals;
 
 PYBIND11_MODULE(_cpp_mwpm, m) {
-    py::class_(m, "PerfectMatching")
-        .def(py::init(), "nodeNum"_a, "edgeNumMax"_a)
-        .def("add_edge", &PerfectMatching::AddEdge,
-             "i"_a, "j"_a, "cost"_a)
-        .def("solve", &PerfectMatching::Solve, "finish"_a=true)
-        .def("get_match", &PerfectMatching::GetMatch, "i"_a)
-        .def("get_solution", &PerfectMatching::GetSolution, "e"_a)
-        .def_readwrite("options", &PerfectMatching::options);
-
-    py::class_(m, "Options")
-        .def(py::init<>())
-        .def_readwrite("fractional_jumpstart", &PerfectMatching::Options::fractional_jumpstart)
-        .def_readwrite("dual_greedy_update_option", &PerfectMatching::Options::dual_greedy_update_option)
-        .def_readwrite("dual_LP_threshold", &PerfectMatching::Options::dual_LP_threshold)
-        .def_readwrite("update_duals_before", &PerfectMatching::Options::update_duals_before)
-        .def_readwrite("update_duals_after", &PerfectMatching::Options::update_duals_after)
-        .def_readwrite("single_tree_threshold", &PerfectMatching::Options::single_tree_threshold)
-        .def_readwrite("verbose", &PerfectMatching::Options::verbose);
-
     py::class_(m, "IStabiliserGraph")
         .def("distance", &IStabiliserGraph::Distance, "node1"_a, "node2"_a)
         .def("space_time_distance", &IStabiliserGraph::SpaceTimeDistance, "node1"_a, "node2"_a)
@@ -59,16 +37,10 @@ PYBIND11_MODULE(_cpp_mwpm, m) {
              "source"_a, "num_neighbours"_a,
              "defect_id"_a)
        .def("get_path", &WeightedStabiliserGraph::GetPath,
             "source"_a, "target"_a);
-    m.def("bv_decode", &Decode, "sg"_a, "defects"_a);
-    m.def("bv_decode_match_neighbourhood", &DecodeMatchNeighbourhood,
-          "sg"_a ,"defects"_a, "num_neighbours"_a);
     m.def("randomize", &randomize);
     m.def("set_seed", &set_seed, "s"_a);
     m.def("rand_float", &rand_float, "from"_a, "to"_a);
-    m.def("boost_decode_match_neighbourhood", &BoostDecodeMatchNeighbourhood, "sg"_a ,"defects"_a, "num_neighbours"_a);
-    m.def("boost_decode", &BoostDecode, "sg"_a, "defects"_a);
-
     m.def("decode_match_neighbourhood", &LemonDecodeMatchNeighbourhood);
     m.def("decode", &LemonDecode);
 }
diff --git a/src/pymatching/boost_mwpm.cpp b/src/pymatching/boost_mwpm.cpp
deleted file mode 100644
index 4eef3ee3..00000000
--- a/src/pymatching/boost_mwpm.cpp
+++ /dev/null
@@ -1,122 +0,0 @@
-#include
-#include
-#include
-#include "weighted_stabiliser_graph.h"
-#include
-#include
-#include "boost_mwpm.h"
-#include "stabiliser_graph.h"
-#include
-#include
-#include
-#include
-
-typedef boost::property< boost::edge_weight_t, float, boost::property< boost::edge_index_t, int > >
-    Weight;
-typedef boost::adjacency_list< boost::vecS, boost::vecS,
-    boost::undirectedS, boost::no_property, Weight >
-    defect_graph;
-
-py::array_t BoostDecodeMatchNeighbourhood(WeightedStabiliserGraph& sg, const py::array_t& defects, int num_neighbours){
-    auto d = defects.unchecked<1>();
-    int num_defects = d.shape(0);
-
-    int num_nodes = boost::num_vertices(sg.stabiliser_graph);
-
-    std::vector defect_id(num_nodes, -1);
-    for (int i=0; i::vertex_descriptor > match(num_defects);
-    std::vector> neighbours;
-    std::vector> adj_list(num_defects);
-    int j;
-    bool is_in;
-    for (int i=0; i(N, 0);
-
-    std::set remaining_defects;
-    for (int i=0; i path;
-    int i;
-    std::set qids;
-    while (remaining_defects.size() > 0){
-        i = *remaining_defects.begin();
-        remaining_defects.erase(remaining_defects.begin());
-        j = match[i];
-        remaining_defects.erase(j);
-        path = sg.GetPath(d(i), d(j));
-        for (std::vector::size_type k=0; k= 0) && (qid < N)){
-                (*correction)[qid] = ((*correction)[qid] + 1) % 2;
-                }
-            }
-        }
-    }
-    auto capsule = py::capsule(correction, [](void *correction) { delete reinterpret_cast*>(correction); });
-    return py::array_t(correction->size(), correction->data(), capsule);
-}
-
-py::array_t BoostDecode(IStabiliserGraph& sg, const py::array_t& defects){
-    if (!sg.HasComputedAllPairsShortestPaths()){
-        sg.ComputeAllPairsShortestPaths();
-    }
-    auto d = defects.unchecked<1>();
-    int num_nodes = d.shape(0);
-    int num_edges = num_nodes*(num_nodes-1)/2;
-
-    defect_graph g(num_nodes);
-    std::vector< boost::graph_traits< defect_graph >::vertex_descriptor > match(num_nodes);
-
-    for (py::size_t i = 0; i(N, 0);
-    std::set qids;
-    for (py::size_t i = 0; i path = sg.SpaceTimeShortestPath(d(i), d(j));
-            for (std::vector::size_type k=0; k= 0) && (qid < N)){
-                    (*correction)[qid] = ((*correction)[qid] + 1) % 2;
-                    }
-                }
-            }
-        }
-    }
-
-    auto capsule = py::capsule(correction, [](void *correction) { delete reinterpret_cast*>(correction); });
-    return py::array_t(correction->size(), correction->data(), capsule);
-}
\ No newline at end of file
diff --git a/src/pymatching/boost_mwpm.h b/src/pymatching/boost_mwpm.h
deleted file mode 100644
index 541604fc..00000000
--- a/src/pymatching/boost_mwpm.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#include
-#include
-#include "weighted_stabiliser_graph.h"
-
-namespace py = pybind11;
-
-py::array_t BoostDecodeMatchNeighbourhood(WeightedStabiliserGraph& sg, const py::array_t& defects, int num_neighbours);
-py::array_t BoostDecode(IStabiliserGraph& sg, const py::array_t& defects);
\ No newline at end of file
diff --git a/src/pymatching/mwpm.cpp b/src/pymatching/mwpm.cpp
deleted file mode 100644
index 97ceeb76..00000000
--- a/src/pymatching/mwpm.cpp
+++ /dev/null
@@ -1,117 +0,0 @@
-#include "mwpm.h"
-#include
-#include "stabiliser_graph.h"
-#include "weighted_stabiliser_graph.h"
-#include
-#include
-#include
-#include
-#include
-#include
-
-
-py::array_t Decode(IStabiliserGraph& sg, const py::array_t& defects){
-    if (!sg.HasComputedAllPairsShortestPaths()){
-        sg.ComputeAllPairsShortestPaths();
-    }
-    auto d = defects.unchecked<1>();
-    int num_nodes = d.shape(0);
-    int num_edges = num_nodes*(num_nodes-1)/2;
-
-    PerfectMatching *pm = new PerfectMatching(num_nodes, num_edges);
-    for (py::size_t i = 0; iAddEdge(i, j, sg.SpaceTimeDistance(d(i), d(j)));
-        }
-    };
-    pm->options.verbose = false;
-    pm->Solve();
-    // after_solve = std::clock();
-    int N = sg.GetNumQubits();
-    auto correction = new std::vector(N, 0);
-    std::set qids;
-    for (py::size_t i = 0; iGetMatch(i);
-        if (i path = sg.SpaceTimeShortestPath(d(i), d(j));
-            for (std::vector::size_type k=0; k= 0) && (qid < N)){
-                (*correction)[qid] = ((*correction)[qid] + 1) % 2;
-                }
-            }
-        }
-    }
-    delete pm;
-
-    auto capsule = py::capsule(correction, [](void *correction) { delete reinterpret_cast*>(correction); });
-    return py::array_t(correction->size(), correction->data(), capsule);
-}
-
-
-py::array_t DecodeMatchNeighbourhood(WeightedStabiliserGraph& sg, const py::array_t& defects, int num_neighbours){
-    auto d = defects.unchecked<1>();
-    int num_defects = d.shape(0);
-
-    int num_nodes = boost::num_vertices(sg.stabiliser_graph);
-
-    std::vector defect_id(num_nodes, -1);
-    for (int i=0; i> neighbours;
-    std::vector> adj_list(num_defects);
-    int j;
-    bool is_in;
-    for (int i=0; iAddEdge(i, j, neighbour.second);
-            adj_list[i].insert(j);
-            adj_list[j].insert(i);
-            }
-        }
-    }
-
-    pm->options.verbose = false;
-    pm->Solve();
-
-    int N = sg.GetNumQubits();
-    auto correction = new std::vector(N, 0);
-
-    std::set remaining_defects;
-    for (int i=0; i path;
-    int i;
-    std::set qids;
-    while (remaining_defects.size() > 0){
-        i = *remaining_defects.begin();
-        remaining_defects.erase(remaining_defects.begin());
-        j = pm->GetMatch(i);
-        remaining_defects.erase(j);
-        path = sg.GetPath(d(i), d(j));
-        for (std::vector::size_type k=0; k= 0) && (qid < N)){
-                (*correction)[qid] = ((*correction)[qid] + 1) % 2;
-                }
-            }
-        }
-    }
-    delete pm;
-    auto capsule = py::capsule(correction, [](void *correction) { delete reinterpret_cast*>(correction); });
-    return py::array_t(correction->size(), correction->data(), capsule);
-}
\ No newline at end of file
diff --git a/src/pymatching/mwpm.h b/src/pymatching/mwpm.h
deleted file mode 100644
index f6483ff2..00000000
--- a/src/pymatching/mwpm.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#pragma once
-
-#include
-#include
-#include "stabiliser_graph.h"
-#include "weighted_stabiliser_graph.h"
-
-namespace py = pybind11;
-
-py::array_t Decode(IStabiliserGraph& sg, const py::array_t& defects);
-py::array_t DecodeMatchNeighbourhood(WeightedStabiliserGraph& sg, const py::array_t& defects, int num_neighbours);
\ No newline at end of file
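For reference, the local matching strategy shared by the removed BlossomV/Boost decoders and the retained Lemon-based decode_match_neighbourhood can be sketched in Python. This is an illustrative approximation only: networkx.max_weight_matching stands in for the C++ matching solver, and the precomputed `distances` lookup stands in for path-finding on the stabiliser graph; none of these names are part of the PyMatching API.

    # Illustrative sketch only (not part of the PyMatching API): each defect is
    # connected to its `num_neighbours` nearest defects, and a matching is found
    # on that sparse graph instead of the complete graph over all defects.
    import networkx as nx
    import numpy as np

    def local_matching(defects, distances, num_neighbours=10):
        """Approximate minimum-weight perfect matching of `defects`, where
        `distances[u][v]` is a precomputed shortest-path distance and
        distances[u][u] == 0."""
        g = nx.Graph()
        for u in defects:
            # Sort the other defects by distance from u and keep the nearest few
            # (index 0 is u itself, so it is skipped).
            order = np.argsort([distances[u][v] for v in defects])
            for j in order[1:num_neighbours + 1]:
                v = defects[j]
                # Negating distances turns max-weight matching into min-weight matching.
                g.add_edge(u, v, weight=-distances[u][v])
        # maxcardinality=True asks for as many defects as possible to be matched.
        return nx.max_weight_matching(g, maxcardinality=True)

Restricting each defect to a fixed number of neighbours is what keeps the average runtime roughly linear in the number of nodes, as described in the documentation changes above, while in practice giving the same output as matching on the full graph.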