diff --git a/src/libspark/bpplus.cpp b/src/libspark/bpplus.cpp index 393b04425e..0241045795 100644 --- a/src/libspark/bpplus.cpp +++ b/src/libspark/bpplus.cpp @@ -351,6 +351,18 @@ bool BPPlus::verify(const std::vector>& unpadded_C, co scalars.emplace_back(ZERO); } + std::vector> serialized_Gi; + serialized_Gi.resize(Gi.size()); + std::vector> serialized_Hi; + serialized_Hi.resize(Hi.size()); + // Serialize and cache the Gi and Hi vectors + for (std::size_t i = 0; i < Gi.size(); i++) { + serialized_Gi[i].resize(GroupElement::serialize_size); + Gi[i].serialize(serialized_Gi[i].data()); + serialized_Hi[i].resize(GroupElement::serialize_size); + Hi[i].serialize(serialized_Hi[i].data()); + } + // Process each proof and add to the batch for (std::size_t k_proofs = 0; k_proofs < N_proofs; k_proofs++) { const BPPlusProof proof = proofs[k_proofs]; @@ -367,8 +379,8 @@ bool BPPlus::verify(const std::vector>& unpadded_C, co Transcript transcript(LABEL_TRANSCRIPT_BPPLUS); transcript.add("G", G); transcript.add("H", H); - transcript.add("Gi", Gi); - transcript.add("Hi", Hi); + transcript.add("Gi", serialized_Gi); + transcript.add("Hi", serialized_Hi); transcript.add("N", Scalar(N)); transcript.add("C", unpadded_C[k_proofs]); transcript.add("A", proof.A); diff --git a/src/libspark/spend_transaction.cpp b/src/libspark/spend_transaction.cpp index d98dc67a15..acb747c1dc 100644 --- a/src/libspark/spend_transaction.cpp +++ b/src/libspark/spend_transaction.cpp @@ -14,6 +14,7 @@ SpendTransaction::SpendTransaction( const SpendKey& spend_key, const std::vector& inputs, const std::unordered_map& cover_set_data, + const std::unordered_map>& cover_sets, const uint64_t f, const uint64_t vout, const std::vector& outputs @@ -55,10 +56,10 @@ SpendTransaction::SpendTransaction( // Parse out cover set data for this spend uint64_t set_id = inputs[u].cover_set_id; this->cover_set_ids.emplace_back(set_id); - if (cover_set_data.count(set_id) == 0) + if (cover_set_data.count(set_id) == 0 || 
cover_sets.count(set_id) == 0) throw std::invalid_argument("Required set is not passed"); - const auto& cover_set = cover_set_data.at(set_id).cover_set; + const auto& cover_set = cover_sets.at(set_id); std::size_t set_size = cover_set.size(); if (set_size > N) throw std::invalid_argument("Wrong set size"); diff --git a/src/libspark/spend_transaction.h b/src/libspark/spend_transaction.h index 24da12bea2..220a3d580c 100644 --- a/src/libspark/spend_transaction.h +++ b/src/libspark/spend_transaction.h @@ -26,7 +26,7 @@ struct InputCoinData { }; struct CoverSetData { - std::vector cover_set; // set of coins used as a cover set for the spend + std::size_t cover_set_size; // size of the set of coins used as a cover set for the spend std::vector cover_set_representation; // a unique representation for the ordered elements of the partial `cover_set` used in the spend }; @@ -47,6 +47,7 @@ class SpendTransaction { const SpendKey& spend_key, const std::vector& inputs, const std::unordered_map& cover_set_data, + const std::unordered_map>& cover_sets, const uint64_t f, const uint64_t vout, const std::vector& outputs @@ -97,7 +98,7 @@ class SpendTransaction { void setCoverSets(const std::unordered_map& cover_set_data) { for (const auto& data : cover_set_data) { - this->cover_set_sizes[data.first] = data.second.cover_set.size(); + this->cover_set_sizes[data.first] = data.second.cover_set_size; this->cover_set_representations[data.first] = data.second.cover_set_representation; } } diff --git a/src/libspark/test/spend_transaction_test.cpp b/src/libspark/test/spend_transaction_test.cpp index 4527b26893..146c392c91 100644 --- a/src/libspark/test/spend_transaction_test.cpp +++ b/src/libspark/test/spend_transaction_test.cpp @@ -65,6 +65,8 @@ BOOST_AUTO_TEST_CASE(generate_verify) std::vector spend_coin_data; std::unordered_map cover_set_data; const std::size_t w = spend_indices.size(); + std::unordered_map> cover_sets; + for (std::size_t u = 0; u < w; u++) { IdentifiedCoinData identified_coin_data = 
in_coins[spend_indices[u]].identify(incoming_view_key); RecoveredCoinData recovered_coin_data = in_coins[spend_indices[u]].recover(full_view_key, identified_coin_data); @@ -74,9 +76,10 @@ BOOST_AUTO_TEST_CASE(generate_verify) spend_coin_data.back().cover_set_id = cover_set_id; CoverSetData setData; - setData.cover_set = in_coins; + setData.cover_set_size = in_coins.size(); setData.cover_set_representation = random_char_vector(); cover_set_data[cover_set_id] = setData; + cover_sets[cover_set_id] = in_coins; spend_coin_data.back().index = spend_indices[u]; spend_coin_data.back().k = identified_coin_data.k; spend_coin_data.back().s = recovered_coin_data.s; @@ -118,6 +121,7 @@ BOOST_AUTO_TEST_CASE(generate_verify) spend_key, spend_coin_data, cover_set_data, + cover_sets, f, 0, out_coin_data @@ -125,9 +129,6 @@ BOOST_AUTO_TEST_CASE(generate_verify) // Verify transaction.setCoverSets(cover_set_data); - std::unordered_map> cover_sets; - for (const auto set_data : cover_set_data) - cover_sets[set_data.first] = set_data.second.cover_set; BOOST_CHECK(SpendTransaction::verify(transaction, cover_sets)); } diff --git a/src/libspark/transcript.cpp b/src/libspark/transcript.cpp index 5cd67c63c0..b75f8c4d31 100644 --- a/src/libspark/transcript.cpp +++ b/src/libspark/transcript.cpp @@ -95,6 +95,16 @@ void Transcript::add(const std::string label, const std::vector& include_data(data); } +// Add arbitrary data, such as serialized group elements or scalars +void Transcript::add(const std::string label, const std::vector>& data) { + include_flag(FLAG_VECTOR); + size(data.size()); + include_label(label); + for (std::size_t i = 0; i < data.size(); i++) { + include_data(data[i]); + } +} + // Produce a challenge Scalar Transcript::challenge(const std::string label) { // Ensure we can properly populate a scalar diff --git a/src/libspark/transcript.h b/src/libspark/transcript.h index eef2f9f59b..a0dd360720 100644 --- a/src/libspark/transcript.h +++ b/src/libspark/transcript.h @@ -17,6 +17,7 
@@ class Transcript { void add(const std::string, const GroupElement&); void add(const std::string, const std::vector&); void add(const std::string, const std::vector&); + void add(const std::string label, const std::vector>& data); Scalar challenge(const std::string); private: diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index 4c767b4fe5..79c973703b 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -1451,6 +1451,7 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( std::vector inputs; std::map idAndBlockHashes; std::unordered_map cover_set_data; + std::unordered_map> cover_sets; for (auto& coin : estimated.second) { spark::CSparkState::SparkCoinGroupInfo nextCoinGroupInfo; uint64_t groupId = coin.nId; @@ -1475,10 +1476,11 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( _("Has to have at least two mint coins with at least 1 confirmation in order to spend a coin")); spark::CoverSetData coverSetData; - coverSetData.cover_set = set; + coverSetData.cover_set_size = set.size(); coverSetData.cover_set_representation = setHash; coverSetData.cover_set_representation.insert(coverSetData.cover_set_representation.end(), sig.begin(), sig.end()); cover_set_data[groupId] = coverSetData; + cover_sets[groupId] = set; idAndBlockHashes[groupId] = blockHash; } @@ -1486,7 +1488,7 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( spark::InputCoinData inputCoinData; inputCoinData.cover_set_id = groupId; std::size_t index = 0; - if (!getIndex(coin.coin, cover_set_data[groupId].cover_set, index)) + if (!getIndex(coin.coin, cover_sets[groupId], index)) throw std::runtime_error( _("No such coin in set")); inputCoinData.index = index; @@ -1507,7 +1509,7 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( } - spark::SpendTransaction spendTransaction(params, fullViewKey, spendKey, inputs, cover_set_data, fee, transparentOut, privOutputs); + spark::SpendTransaction spendTransaction(params, fullViewKey, spendKey, inputs, 
cover_set_data, cover_sets, fee, transparentOut, privOutputs); spendTransaction.setBlockHashes(idAndBlockHashes); CDataStream serialized(SER_NETWORK, PROTOCOL_VERSION); serialized << spendTransaction; diff --git a/src/spark/state.cpp b/src/spark/state.cpp index 70acd8e0d5..5359a7a9a0 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -462,14 +462,13 @@ bool CheckSparkSMintTransaction( CSparkTxInfo* sparkTxInfo) { LogPrintf("CheckSparkSMintTransaction txHash = %s\n", hashTx.ToString()); - out_coins.clear(); for (const auto& out : vout) { const auto& script = out.scriptPubKey; - if (script.IsSparkMint() || script.IsSparkSMint()) { + if (script.IsSparkSMint()) { try { spark::Coin coin(Params::get_default()); ParseSparkMintCoin(script, coin); - out_coins.push_back(coin); + out_coins.emplace_back(coin); } catch (const std::exception &) { return state.DoS(100, false, @@ -557,11 +556,11 @@ bool CheckSparkSpendTransaction( bool passVerify = false; uint64_t Vout = 0; - std::vector vout; + std::size_t private_num = 0; for (const CTxOut &txout : tx.vout) { const auto& script = txout.scriptPubKey; if (!script.empty() && script.IsSparkSMint()) { - vout.push_back(txout); + private_num++; } else if (script.IsSparkMint() || script.IsLelantusMint() || script.IsLelantusJMint() || @@ -570,19 +569,23 @@ bool CheckSparkSpendTransaction( } else { Vout += txout.nValue; } - } - if (vout.size() > ::Params().GetConsensus().nMaxSparkOutLimitPerTx) + if (private_num > ::Params().GetConsensus().nMaxSparkOutLimitPerTx) return false; std::vector out_coins; - if (!CheckSparkSMintTransaction(vout, state, hashTx, fStatefulSigmaCheck, out_coins, sparkTxInfo)) + out_coins.reserve(private_num); + if (!CheckSparkSMintTransaction(tx.vout, state, hashTx, fStatefulSigmaCheck, out_coins, sparkTxInfo)) return false; spend->setOutCoins(out_coins); std::unordered_map> cover_sets; std::unordered_map cover_set_data; const auto idAndBlockHashes = spend->getBlockHashes(); + + BatchProofContainer* 
batchProofContainer = BatchProofContainer::get_instance(); + bool useBatching = batchProofContainer->fCollectProofs && !isVerifyDB && !isCheckWallet && sparkTxInfo && !sparkTxInfo->fInfoIsComplete; + for (const auto& idAndHash : idAndBlockHashes) { CSparkState::SparkCoinGroupInfo coinGroup; if (!sparkState.GetCoinGroupInfo(idAndHash.first, coinGroup)) @@ -598,6 +601,8 @@ bool CheckSparkSpendTransaction( std::vector set_hash = GetAnonymitySetHash(index, idAndHash.first); std::vector cover_set; + cover_set.reserve(coinGroup.nCoins); + std::size_t set_size = 0; // Build a vector with all the public coins with given id before // the block on which the spend occurred. // This list of public coins is required by function "Verify" of spend. @@ -613,7 +618,9 @@ bool CheckSparkSpendTransaction( BOOST_FOREACH( const auto& coin, index->sparkMintedCoins[id]) { - cover_set.push_back(coin); + set_size++; + if (!useBatching) + cover_set.push_back(coin); } } } @@ -624,12 +631,12 @@ bool CheckSparkSpendTransaction( } CoverSetData setData; - setData.cover_set = cover_set; + setData.cover_set_size = set_size; if (!set_hash.empty()) setData.cover_set_representation = set_hash; setData.cover_set_representation.insert(setData.cover_set_representation.end(), txHashForMetadata.begin(), txHashForMetadata.end()); - cover_sets[idAndHash.first] = cover_set; + cover_sets[idAndHash.first] = std::move(cover_set); cover_set_data [idAndHash.first] = setData; } spend->setCoverSets(cover_set_data); @@ -641,9 +648,6 @@ bool CheckSparkSpendTransaction( return state.DoS(100, error("CheckSparkSpendTransaction: No cover set found.")); } - - BatchProofContainer* batchProofContainer = BatchProofContainer::get_instance(); - bool useBatching = batchProofContainer->fCollectProofs && !isVerifyDB && !isCheckWallet && sparkTxInfo && !sparkTxInfo->fInfoIsComplete; // if we are collecting proofs, skip verification and collect proofs // add proofs into container @@ -1216,7 +1220,7 @@ int 
CSparkState::GetCoinSetForSpend( } SparkCoinGroupInfo &coinGroup = coinGroups[coinGroupID]; - + coins_out.reserve(coinGroup.nCoins); int numberOfCoins = 0; for (CBlockIndex *block = coinGroup.lastBlock;; block = block->pprev) {