From 06b6a58cfd92639deecdb6f1e1529d0b8fa53727 Mon Sep 17 00:00:00 2001 From: tamirms Date: Wed, 15 Jan 2020 19:37:42 +0100 Subject: [PATCH] services/horizon/internal/db2/history: Remove exp_history_* tables (#2118) * Add migration to remove exp_history_* tables * Remove CheckExp functions in the processors * Modify db ingestion functions to insert into history_ * tables instead of exp_history_* tables --- .../horizon/internal/db2/history/account.go | 18 +- .../internal/db2/history/account_test.go | 49 +++ .../horizon/internal/db2/history/asset.go | 8 +- .../internal/db2/history/asset_test.go | 14 +- .../horizon/internal/db2/history/effect.go | 96 +----- .../history/effect_batch_insert_builder.go | 4 +- .../effect_batch_insert_builder_test.go | 11 +- .../internal/db2/history/effect_test.go | 181 ----------- .../horizon/internal/db2/history/ingestion.go | 26 +- .../internal/db2/history/ingestion_test.go | 50 ++-- .../horizon/internal/db2/history/ledger.go | 57 +--- .../internal/db2/history/ledger_test.go | 96 +----- .../internal/db2/history/mock_q_effects.go | 7 +- .../internal/db2/history/mock_q_ledgers.go | 7 +- .../internal/db2/history/mock_q_operations.go | 6 - .../db2/history/mock_q_participants.go | 7 +- .../internal/db2/history/mock_q_trades.go | 9 +- .../db2/history/mock_q_transactions.go | 5 - .../horizon/internal/db2/history/operation.go | 164 +--------- .../history/operation_batch_insert_builder.go | 4 +- .../operation_batch_insert_builder_test.go | 8 +- ...ration_participant_batch_insert_builder.go | 4 +- ...n_participant_batch_insert_builder_test.go | 2 +- .../internal/db2/history/operation_test.go | 280 ------------------ .../internal/db2/history/participants.go | 125 +------- .../internal/db2/history/participants_test.go | 241 +-------------- .../horizon/internal/db2/history/trade.go | 78 +---- .../db2/history/trade_batch_insert_builder.go | 6 +- .../internal/db2/history/trade_test.go | 139 +-------- .../internal/db2/history/transaction.go | 72 +---- .../transaction_batch_insert_builder.go | 4 +- .../internal/db2/history/transaction_test.go | 154 +--------- .../horizon/internal/db2/schema/bindata.go | 181 +++++++---- .../migrations/32_drop_exp_history_tables.sql | 131 ++++++++ services/horizon/internal/expingest/main.go | 2 +- .../internal/expingest/pipeline_hooks_test.go | 12 +- .../horizon/internal/expingest/pipelines.go | 4 +- .../processors/database_processor.go | 23 +- .../expingest/processors/effects_processor.go | 23 +- .../processors/effects_processor_test.go | 70 +---- .../processors/ledgers_processor_test.go | 78 +---- .../internal/expingest/processors/main.go | 2 +- .../processors/operations_processor.go | 19 -- .../processors/operations_processor_test.go | 64 +--- .../processors/participants_processor.go | 20 +- .../processors/participants_processor_test.go | 58 +--- .../expingest/processors/trades_processor.go | 27 +- .../processors/trades_processor_test.go | 28 +- .../processors/transactions_processor.go | 19 -- .../processors/transactions_processor_test.go | 80 +---- .../internal/expingest/run_ingestion_test.go | 4 +- services/horizon/internal/ingest/ingestion.go | 8 +- 52 files changed, 483 insertions(+), 2302 deletions(-) delete mode 100644 services/horizon/internal/db2/history/effect_test.go create mode 100644 services/horizon/internal/db2/schema/migrations/32_drop_exp_history_tables.sql diff --git a/services/horizon/internal/db2/history/account.go b/services/horizon/internal/db2/history/account.go index c015968542..68780e787a 100644 --- 
a/services/horizon/internal/db2/history/account.go +++ b/services/horizon/internal/db2/history/account.go @@ -55,16 +55,26 @@ func (q *Q) AccountsByAddresses(dest interface{}, addresses []string) error { return q.Select(dest, sql) } -// CreateAccounts creates rows for addresses in history_accounts table and -// put. `ON CONFLICT` is required when running a distributed ingestion. -func (q *Q) CreateAccounts(dest interface{}, addresses []string) error { +// CreateAccounts creates rows in the history_accounts table for a given list of addresses. +// CreateAccounts returns a mapping of account address to its corresponding id in the history_accounts table +func (q *Q) CreateAccounts(addresses []string) (map[string]int64, error) { + var accounts []Account sql := sq.Insert("history_accounts").Columns("address") for _, address := range addresses { sql = sql.Values(address) } sql = sql.Suffix("ON CONFLICT (address) DO UPDATE SET address=EXCLUDED.address RETURNING *") - return q.Select(dest, sql) + err := q.Select(&accounts, sql) + if err != nil { + return nil, err + } + + addressToID := map[string]int64{} + for _, account := range accounts { + addressToID[account.Address] = account.ID + } + return addressToID, nil } // Return id for account. If account doesn't exist, it will be created and the new id returned. diff --git a/services/horizon/internal/db2/history/account_test.go b/services/horizon/internal/db2/history/account_test.go index 97af9a8469..801db0d1aa 100644 --- a/services/horizon/internal/db2/history/account_test.go +++ b/services/horizon/internal/db2/history/account_test.go @@ -49,3 +49,52 @@ func TestIsAuthImmutable(t *testing.T) { account = AccountEntry{Flags: 0} tt.False(account.IsAuthImmutable()) } + +func assertAccountsContainAddresses(tt *test.T, accounts map[string]int64, addresses []string) { + tt.Assert.Len(accounts, len(addresses)) + set := map[int64]bool{} + for _, address := range addresses { + accountID, ok := accounts[address] + tt.Assert.True(ok) + tt.Assert.False(set[accountID]) + set[accountID] = true + } +} + +func TestCreateAccounts(t *testing.T) { + tt := test.Start(t) + defer tt.Finish() + test.ResetHorizonDB(t, tt.HorizonDB) + q := &Q{tt.HorizonSession()} + + addresses := []string{ + "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", + "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", + } + accounts, err := q.CreateAccounts(addresses) + tt.Assert.NoError(err) + tt.Assert.Len(accounts, 2) + assertAccountsContainAddresses(tt, accounts, addresses) + + dupAccounts, err := q.CreateAccounts([]string{ + "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", + "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", + }) + tt.Assert.NoError(err) + tt.Assert.Equal(accounts, dupAccounts) + + addresses = []string{ + "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", + "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", + "GCYVFGI3SEQJGBNQQG7YCMFWEYOHK3XPVOVPA6C566PXWN4SN7LILZSM", + "GBYSBDAJZMHL5AMD7QXQ3JEP3Q4GLKADWIJURAAHQALNAWD6Z5XF2RAC", + } + accounts, err = q.CreateAccounts(addresses) + tt.Assert.NoError(err) + assertAccountsContainAddresses(tt, accounts, addresses) + for address, accountID := range dupAccounts { + id, ok := accounts[address] + tt.Assert.True(ok) + tt.Assert.Equal(id, accountID) + } +} diff --git a/services/horizon/internal/db2/history/asset.go b/services/horizon/internal/db2/history/asset.go index b6c05af4ab..5c60e81f74 100644 --- a/services/horizon/internal/db2/history/asset.go +++ 
b/services/horizon/internal/db2/history/asset.go @@ -86,12 +86,12 @@ func (q *Q) GetCreateAssetID( return } -// CreateExpAssets creates rows in the exp_history_assets table for a given list of assets. -func (q *Q) CreateExpAssets(assets []xdr.Asset) (map[string]Asset, error) { +// CreateAssets creates rows in the history_assets table for a given list of assets. +func (q *Q) CreateAssets(assets []xdr.Asset) (map[string]Asset, error) { searchStrings := make([]string, 0, len(assets)) assetToKey := map[[3]string]string{} - sql := sq.Insert("exp_history_assets").Columns("asset_type", "asset_code", "asset_issuer") + sql := sq.Insert("history_assets").Columns("asset_type", "asset_code", "asset_issuer") for _, asset := range assets { var assetType, assetCode, assetIssuer string @@ -118,7 +118,7 @@ func (q *Q) CreateExpAssets(assets []xdr.Asset) (map[string]Asset, error) { } var rows []Asset - err = q.Select(&rows, sq.Select("*").From("exp_history_assets").Where(sq.Eq{ + err = q.Select(&rows, sq.Select("*").From("history_assets").Where(sq.Eq{ "concat(asset_type, '/', asset_code, '/', asset_issuer)": searchStrings, })) if err != nil { diff --git a/services/horizon/internal/db2/history/asset_test.go b/services/horizon/internal/db2/history/asset_test.go index 67c7bddd0d..120957d7f6 100644 --- a/services/horizon/internal/db2/history/asset_test.go +++ b/services/horizon/internal/db2/history/asset_test.go @@ -7,18 +7,18 @@ import ( "github.com/stellar/go/xdr" ) -func TestCreateExpAssetIDs(t *testing.T) { +func TestCreateAssets(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) q := &Q{tt.HorizonSession()} - // CreateExpAssets creates new rows + // CreateAssets creates new rows assets := []xdr.Asset{ nativeAsset, eurAsset, } - assetMap, err := q.CreateExpAssets(assets) + assetMap, err := q.CreateAssets(assets) tt.Assert.NoError(err) tt.Assert.Len(assetMap, len(assets)) @@ -37,8 +37,8 @@ func TestCreateExpAssetIDs(t *testing.T) { tt.Assert.Equal(row.Issuer, assetIssuer) } - // CreateExpAssets handles duplicates - assetMap, err = q.CreateExpAssets([]xdr.Asset{ + // CreateAssets handles duplicates + assetMap, err = q.CreateAssets([]xdr.Asset{ nativeAsset, nativeAsset, eurAsset, eurAsset, nativeAsset, nativeAsset, eurAsset, eurAsset, }) @@ -58,9 +58,9 @@ func TestCreateExpAssetIDs(t *testing.T) { tt.Assert.Equal(row.Issuer, assetIssuer) } - // CreateExpAssets handles duplicates and new rows + // CreateAssets handles duplicates and new rows assets = append(assets, usdAsset) - assetMap, err = q.CreateExpAssets(assets) + assetMap, err = q.CreateAssets(assets) tt.Assert.NoError(err) tt.Assert.Len(assetMap, len(assets)) diff --git a/services/horizon/internal/db2/history/effect.go b/services/horizon/internal/db2/history/effect.go index c8ef5733cd..9053eac5a7 100644 --- a/services/horizon/internal/db2/history/effect.go +++ b/services/horizon/internal/db2/history/effect.go @@ -216,102 +216,12 @@ func (q *EffectsQ) orderBookFilter(a xdr.Asset, prefix string) { q.sql = q.sql.Where(clause, typ, code, iss) } -func (q *Q) findOperationEffects( - effectsTable, - accountTable string, - seq int32, -) ([]Effect, error) { - from := toid.ID{LedgerSequence: int32(seq)}.ToInt64() - to := toid.ID{LedgerSequence: int32(seq + 1)}.ToInt64() - effects := []Effect{} - - sql := effectFields. - From( - fmt.Sprintf("%s heff", effectsTable), - ). - Join( - fmt.Sprintf( - "%s hacc ON hacc.id = heff.history_account_id", - accountTable, - ), - ). - Where("heff.history_operation_id >= ? 
AND heff.history_operation_id <= ? ", from, to). - OrderBy( - "heff.history_operation_id asc, heff.order asc", - ) - - err := q.Select(&effects, sql) - - return effects, err -} - -// CheckExpOperationEffects checks that the effects in exp_history_effects for -// the given ledger matches the same effects as in history_effects -func (q *Q) CheckExpOperationEffects(seq int32) (bool, error) { - expEffects, err := q.findOperationEffects( - "exp_history_effects", - "exp_history_accounts", - seq, - ) - - if err != nil { - return false, errors.Wrapf( - err, - "could not load exp_history_effects for ledger: %v", - seq, - ) - } - - effects, err := q.findOperationEffects( - "history_effects", - "history_accounts", - seq, - ) - - if err != nil { - return false, errors.Wrapf( - err, - "could not load history_effects for ledger: %v", - seq, - ) - } - - // We only proceed with the comparison if we have data in both the - // legacy ingestion system and the experimental ingestion system. - // If there is no data in either the legacy ingestion system or the - // experimental ingestion system we skip the check. - if len(expEffects) == 0 || len(effects) == 0 { - return true, nil - } - - if len(expEffects) != len(effects) { - return false, nil - } - - for i, expEffect := range expEffects { - effect := effects[i] - - // Make HistoryAccountID the same since we don't care about this value - expEffect.HistoryAccountID = 0 - effect.HistoryAccountID = 0 - - if expEffect != effect { - return false, nil - } - } - - return true, nil -} - -// QEffects defines exp_history_effects related queries. +// QEffects defines history_effects related queries. type QEffects interface { NewEffectBatchInsertBuilder(maxBatchSize int) EffectBatchInsertBuilder - CreateExpAccounts(addresses []string) (map[string]int64, error) - CheckExpOperationEffects(seq int32) (bool, error) + CreateAccounts(addresses []string) (map[string]int64, error) } -var effectFields = sq.Select("heff.*, hacc.address") - -var selectEffect = effectFields. +var selectEffect = sq.Select("heff.*, hacc.address"). From("history_effects heff"). 
LeftJoin("history_accounts hacc ON hacc.id = heff.history_account_id") diff --git a/services/horizon/internal/db2/history/effect_batch_insert_builder.go b/services/horizon/internal/db2/history/effect_batch_insert_builder.go index f861d8762e..90c463a1e3 100644 --- a/services/horizon/internal/db2/history/effect_batch_insert_builder.go +++ b/services/horizon/internal/db2/history/effect_batch_insert_builder.go @@ -5,7 +5,7 @@ import ( ) // EffectBatchInsertBuilder is used to insert effects into the -// exp_history_effects table +// history_effects table type EffectBatchInsertBuilder interface { Add( accountID int64, @@ -26,7 +26,7 @@ type effectBatchInsertBuilder struct { func (q *Q) NewEffectBatchInsertBuilder(maxBatchSize int) EffectBatchInsertBuilder { return &effectBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_effects"), + Table: q.GetTable("history_effects"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/effect_batch_insert_builder_test.go b/services/horizon/internal/db2/history/effect_batch_insert_builder_test.go index c615bd4103..ba00214750 100644 --- a/services/horizon/internal/db2/history/effect_batch_insert_builder_test.go +++ b/services/horizon/internal/db2/history/effect_batch_insert_builder_test.go @@ -15,7 +15,7 @@ func TestAddEffect(t *testing.T) { q := &Q{tt.HorizonSession()} address := "GBXGQJWVLWOYHFLVTKWV5FGHA3LNYY2JQKM7OAJAUEQFU6LPCSEFVXON" - accounIDs, err := q.CreateExpAccounts([]string{address}) + accounIDs, err := q.CreateAccounts([]string{address}) tt.Assert.NoError(err) builder := q.NewEffectBatchInsertBuilder(2) @@ -38,14 +38,7 @@ func TestAddEffect(t *testing.T) { tt.Assert.NoError(err) effects := []Effect{} - err = q.Select( - &effects, - effectFields. - From("exp_history_effects heff"). 
- LeftJoin("exp_history_accounts hacc ON hacc.id = heff.history_account_id"), - ) - - tt.Assert.NoError(err) + tt.Assert.NoError(q.Effects().Select(&effects)) tt.Assert.Len(effects, 1) effect := effects[0] diff --git a/services/horizon/internal/db2/history/effect_test.go b/services/horizon/internal/db2/history/effect_test.go deleted file mode 100644 index e3e9287882..0000000000 --- a/services/horizon/internal/db2/history/effect_test.go +++ /dev/null @@ -1,181 +0,0 @@ -package history - -import ( - "testing" - - "github.com/guregu/null" - "github.com/stellar/go/services/horizon/internal/test" - "github.com/stellar/go/services/horizon/internal/toid" - "github.com/stellar/go/support/db" -) - -func TestCheckExpOperationEffects(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - sequence := int32(57) - - valid, err := q.CheckExpOperationEffects(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - effects := []Effect{ - Effect{ - Account: "GCQZP3IU7XU6EJ63JZXKCQOYT2RNXN3HB5CNHENNUEUHSMA4VUJJJSEN", - HistoryOperationID: toid.New(sequence, 1, 1).ToInt64(), - DetailsString: null.StringFrom( - "{\"starting_balance\":\"1000.0000000\"}", - ), - Type: EffectAccountCreated, - Order: int32(1), - }, - Effect{ - Account: "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - HistoryOperationID: toid.New(sequence, 1, 1).ToInt64(), - DetailsString: null.StringFrom( - "{\"amount\":\"1000.0000000\",\"asset_type\":\"native\"}", - ), - Type: EffectAccountDebited, - Order: int32(2), - }, - Effect{ - Account: "GCQZP3IU7XU6EJ63JZXKCQOYT2RNXN3HB5CNHENNUEUHSMA4VUJJJSEN", - HistoryOperationID: toid.New(sequence, 1, 1).ToInt64(), - DetailsString: null.StringFrom( - "{\"public_key\": \"GCQZP3IU7XU6EJ63JZXKCQOYT2RNXN3HB5CNHENNUEUHSMA4VUJJJSEN\", \"weight\": 1}", - ), - Type: EffectSignerCreated, - Order: int32(3), - }, - Effect{ - Account: "GANFZDRBCNTUXIODCJEYMACPMCSZEVE4WZGZ3CZDZ3P2SXK4KH75IK6Y", - DetailsString: null.StringFrom( - "{\"amount\":\"10.0000000\",\"asset_type\":\"native\"}", - ), - Type: EffectAccountCredited, - HistoryOperationID: toid.New(sequence, 2, 1).ToInt64(), - Order: int32(1), - }, - Effect{ - Account: "GANFZDRBCNTUXIODCJEYMACPMCSZEVE4WZGZ3CZDZ3P2SXK4KH75IK6Y", - DetailsString: null.StringFrom( - "{\"amount\":\"10.0000000\",\"asset_type\": \"native\"}", - ), - Type: EffectAccountDebited, - HistoryOperationID: toid.New(sequence, 2, 1).ToInt64(), - Order: int32(2), - }, - Effect{ - Account: "GCQZP3IU7XU6EJ63JZXKCQOYT2RNXN3HB5CNHENNUEUHSMA4VUJJJSEN", - Type: EffectSequenceBumped, - HistoryOperationID: toid.New(sequence+1, 1, 1).ToInt64(), - Order: int32(1), - DetailsString: null.StringFrom( - "{\"new_seq\": 300000000000}", - ), - }, - } - - addresses := []string{ - "GANFZDRBCNTUXIODCJEYMACPMCSZEVE4WZGZ3CZDZ3P2SXK4KH75IK6Y", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - "GCQZP3IU7XU6EJ63JZXKCQOYT2RNXN3HB5CNHENNUEUHSMA4VUJJJSEN", - } - expAccounts, err := q.CreateExpAccounts(addresses) - tt.Assert.NoError(err) - - for i, effect := range effects { - effect.HistoryAccountID = expAccounts[effect.Account] - effects[i] = effect - } - - batch := q.NewEffectBatchInsertBuilder(0) - for _, effect := range effects { - tt.Assert.NoError( - batch.Add( - effect.HistoryAccountID, - effect.HistoryOperationID, - uint32(effect.Order), - effect.Type, - []byte(effect.DetailsString.String), - ), - ) - } - tt.Assert.NoError(batch.Exec()) - - valid, err = q.CheckExpOperationEffects(sequence) - 
tt.Assert.NoError(err) - tt.Assert.True(valid) - - // addresses = append(addresses, "GBXGQJWVLWOYHFLVTKWV5FGHA3LNYY2JQKM7OAJAUEQFU6LPCSEFVXON") - // transactionIDs = append(transactionIDs, toid.New(sequence, 3, 0).ToInt64()) - var accounts []Account - tt.Assert.NoError(q.CreateAccounts(&accounts, addresses)) - accountsMap := map[string]int64{} - for _, account := range accounts { - accountsMap[account.Address] = account.ID - } - - historyBatch := effectBatchInsertBuilder{ - builder: db.BatchInsertBuilder{ - Table: q.GetTable("history_effects"), - MaxBatchSize: 0, - }, - } - - for _, effect := range effects { - tt.Assert.NoError( - historyBatch.Add( - accountsMap[effect.Account], - effect.HistoryOperationID, - uint32(effect.Order), - effect.Type, - []byte(effect.DetailsString.String), - ), - ) - } - - tt.Assert.NoError(historyBatch.Exec()) - - valid, err = q.CheckExpOperationEffects(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - // Add a new operation effect to history_effects but not to - // exp_history_effects, which should make the comparison fail - tt.Assert.NoError( - historyBatch.Add( - accountsMap[addresses[0]], - toid.New(sequence, 3, 1).ToInt64(), - 1, - EffectSequenceBumped, - []byte("{\"new_seq\": 300000000000}"), - ), - ) - - tt.Assert.NoError(historyBatch.Exec()) - - valid, err = q.CheckExpOperationEffects(sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) - - // Add previous effect to exp_history_effects, but make it different to - // history_effects - tt.Assert.NoError( - batch.Add( - expAccounts[addresses[0]], - toid.New(sequence, 3, 1).ToInt64(), - 1, - EffectSequenceBumped, - []byte("{\"new_seq\": 3000}"), - ), - ) - - tt.Assert.NoError(batch.Exec()) - - valid, err = q.CheckExpOperationEffects(sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) -} diff --git a/services/horizon/internal/db2/history/ingestion.go b/services/horizon/internal/db2/history/ingestion.go index daa36a2714..eb3dfce6a4 100644 --- a/services/horizon/internal/db2/history/ingestion.go +++ b/services/horizon/internal/db2/history/ingestion.go @@ -21,9 +21,9 @@ func (q *Q) TruncateExpingestStateTables() error { }) } -// ExpIngestRemovalSummary describes how many rows in the experimental ingestion -// history tables have been deleted by RemoveExpIngestHistory() -type ExpIngestRemovalSummary struct { +// IngestHistoryRemovalSummary describes how many rows in the ingestion +// history tables have been deleted by RemoveIngestHistory() +type IngestHistoryRemovalSummary struct { LedgersRemoved int64 TransactionsRemoved int64 TransactionParticipantsRemoved int64 @@ -33,13 +33,13 @@ type ExpIngestRemovalSummary struct { EffectsRemoved int64 } -// RemoveExpIngestHistory removes all rows in the experimental ingestion +// RemoveIngestHistory removes all rows in the ingestion // history tables which have a ledger sequence higher than `newerThanSequence` -func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSummary, error) { - summary := ExpIngestRemovalSummary{} +func (q *Q) RemoveIngestHistory(newerThanSequence uint32) (IngestHistoryRemovalSummary, error) { + summary := IngestHistoryRemovalSummary{} result, err := q.Exec( - sq.Delete("exp_history_ledgers"). + sq.Delete("history_ledgers"). Where("sequence > ?", newerThanSequence), ) if err != nil { @@ -52,7 +52,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_transactions"). + sq.Delete("history_transactions"). 
Where("ledger_sequence > ?", newerThanSequence), ) if err != nil { @@ -65,7 +65,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_transaction_participants"). + sq.Delete("history_transaction_participants"). Where("history_transaction_id >= ?", toid.ID{LedgerSequence: int32(newerThanSequence + 1)}.ToInt64()), ) if err != nil { @@ -78,7 +78,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_operations"). + sq.Delete("history_operations"). Where("id >= ?", toid.ID{LedgerSequence: int32(newerThanSequence + 1)}.ToInt64()), ) if err != nil { @@ -91,7 +91,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_operation_participants"). + sq.Delete("history_operation_participants"). Where("history_operation_id >= ?", toid.ID{LedgerSequence: int32(newerThanSequence + 1)}.ToInt64()), ) if err != nil { @@ -104,7 +104,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_trades"). + sq.Delete("history_trades"). Where("history_operation_id >= ?", toid.ID{LedgerSequence: int32(newerThanSequence + 1)}.ToInt64()), ) if err != nil { @@ -117,7 +117,7 @@ func (q *Q) RemoveExpIngestHistory(newerThanSequence uint32) (ExpIngestRemovalSu } result, err = q.Exec( - sq.Delete("exp_history_effects"). + sq.Delete("history_effects"). Where("history_operation_id >= ?", toid.ID{LedgerSequence: int32(newerThanSequence + 1)}.ToInt64()), ) if err != nil { diff --git a/services/horizon/internal/db2/history/ingestion_test.go b/services/horizon/internal/db2/history/ingestion_test.go index 9c1ef80a4e..cd883e75d1 100644 --- a/services/horizon/internal/db2/history/ingestion_test.go +++ b/services/horizon/internal/db2/history/ingestion_test.go @@ -22,14 +22,14 @@ func assertCountRows(tt *test.T, q *Q, tables []string, expectedCount int) { } } -func TestRemoveExpIngestHistory(t *testing.T) { +func TestRemoveIngestHistory(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) q := &Q{tt.HorizonSession()} - summary, err := q.RemoveExpIngestHistory(69859) - tt.Assert.Equal(ExpIngestRemovalSummary{}, summary) + summary, err := q.RemoveIngestHistory(69859) + tt.Assert.Equal(IngestHistoryRemovalSummary{}, summary) tt.Assert.NoError(err) txInsertBuilder := q.NewTransactionBatchInsertBuilder(0) @@ -41,14 +41,14 @@ func TestRemoveExpIngestHistory(t *testing.T) { effectsInsertBuilder := q.NewEffectBatchInsertBuilder(0) accountID := int64(1223) - expTables := []string{ - "exp_history_ledgers", - "exp_history_transactions", - "exp_history_transaction_participants", - "exp_history_operations", - "exp_history_operation_participants", - "exp_history_trades", - "exp_history_effects", + historyIngestionTables := []string{ + "history_ledgers", + "history_transactions", + "history_transaction_participants", + "history_operations", + "history_operation_participants", + "history_trades", + "history_effects", } ledger := Ledger{ @@ -78,7 +78,7 @@ func TestRemoveExpIngestHistory(t *testing.T) { "8db1e4f145e9ee75162040d26284795e0697e2e84084624e7c6c723ebbf80118", } - accountIDs, assetIDs := createExpAccountsAndAssets( + accountIDs, assetIDs := createAccountsAndAssets( tt, q, []string{ "GB2QIYT2IAUFMRXKLSLLPRECC6OCOGJMADSPTRK7TGNT2SFR2YGWDARD", @@ -94,7 +94,7 @@ func TestRemoveExpIngestHistory(t 
*testing.T) { ledger.PreviousLedgerHash = null.NewString(hashes[i-1], true) } - insertSQL := sq.Insert("exp_history_ledgers").SetMap(ledgerToMap(ledger)) + insertSQL := sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger)) _, err = q.Exec(insertSQL) tt.Assert.NoError(err) @@ -157,18 +157,18 @@ func TestRemoveExpIngestHistory(t *testing.T) { ledger.Sequence++ } - assertCountRows(tt, q, expTables, 5) + assertCountRows(tt, q, historyIngestionTables, 5) - summary, err = q.RemoveExpIngestHistory(69863) - tt.Assert.Equal(ExpIngestRemovalSummary{}, summary) + summary, err = q.RemoveIngestHistory(69863) + tt.Assert.Equal(IngestHistoryRemovalSummary{}, summary) tt.Assert.NoError(err) - assertCountRows(tt, q, expTables, 5) + assertCountRows(tt, q, historyIngestionTables, 5) cutoffSequence := 69861 - summary, err = q.RemoveExpIngestHistory(uint32(cutoffSequence)) + summary, err = q.RemoveIngestHistory(uint32(cutoffSequence)) tt.Assert.Equal( - ExpIngestRemovalSummary{ + IngestHistoryRemovalSummary{ LedgersRemoved: 2, TransactionsRemoved: 2, TransactionParticipantsRemoved: 2, @@ -182,12 +182,12 @@ func TestRemoveExpIngestHistory(t *testing.T) { tt.Assert.NoError(err) var ledgers []Ledger - err = q.Select(&ledgers, selectLedgerFields.From("exp_history_ledgers hl")) + err = q.Ledgers().Select(&ledgers) tt.Assert.NoError(err) tt.Assert.Len(ledgers, 3) var transactions []Transaction - err = q.Select(&transactions, selectExpTransaction) + err = q.Transactions().Select(&transactions) tt.Assert.NoError(err) tt.Assert.Len(transactions, 3) @@ -195,7 +195,7 @@ func TestRemoveExpIngestHistory(t *testing.T) { tt.Assert.Len(txParticipants, 3) var operations []Operation - err = q.Select(&operations, selectExpOperation) + err = q.Select(&operations, selectOperation) tt.Assert.NoError(err) tt.Assert.Len(operations, 3) @@ -207,17 +207,17 @@ func TestRemoveExpIngestHistory(t *testing.T) { err = q.Select(&opParticipants, sq.Select( "hopp.history_operation_id, "+ "hopp.history_account_id"). - From("exp_history_operation_participants hopp"), + From("history_operation_participants hopp"), ) tt.Assert.NoError(err) tt.Assert.Len(opParticipants, 3) var trades []Trade - err = q.expTrades().Select(&trades) + err = q.Trades().Select(&trades) tt.Assert.NoError(err) var effects []Effect - err = q.Select(&effects, sq.Select("*").From("exp_history_effects")) + err = q.Select(&effects, sq.Select("*").From("history_effects")) tt.Assert.NoError(err) tt.Assert.Len(effects, 3) diff --git a/services/horizon/internal/db2/history/ledger.go b/services/horizon/internal/db2/history/ledger.go index 8d238d3784..70c3ada387 100644 --- a/services/horizon/internal/db2/history/ledger.go +++ b/services/horizon/internal/db2/history/ledger.go @@ -3,7 +3,6 @@ package history import ( "encoding/hex" "fmt" - "reflect" "time" sq "github.com/Masterminds/squirrel" @@ -23,18 +22,6 @@ func (q *Q) LedgerBySequence(dest interface{}, seq int32) error { return q.Get(dest, sql) } -// expLedgerBySequence returns a row from the exp_history_ledgers table -func (q *Q) expLedgerBySequence(seq int32) (Ledger, error) { - sql := selectLedgerFields. - From("exp_history_ledgers hl"). - Limit(1). - Where("sequence = ?", seq) - - var dest Ledger - err := q.Get(&dest, sql) - return dest, err -} - // Ledgers provides a helper to filter rows from the `history_ledgers` table // with pre-defined filters. See `LedgersQ` methods for the available filters. 
func (q *Q) Ledgers() *LedgersQ { @@ -98,10 +85,9 @@ func (q *LedgersQ) Select(dest interface{}) error { return q.Err } -// QExpLedgers defines experimental ingestion ledger related queries. -type QExpLedgers interface { - CheckExpLedger(seq int32) (bool, error) - InsertExpLedger( +// QLedgers defines ingestion ledger related queries. +type QLedgers interface { + InsertLedger( ledger xdr.LedgerHeaderHistoryEntry, successTxsCount int, failedTxsCount int, @@ -110,35 +96,9 @@ type QExpLedgers interface { ) (int64, error) } -// CheckExpLedger checks that the ledger in exp_history_ledgers -// matches the one in history_ledgers for a given sequence number -func (q *Q) CheckExpLedger(seq int32) (bool, error) { - expLedger, err := q.expLedgerBySequence(seq) - if err != nil { - return false, err - } - - var ledger Ledger - err = q.LedgerBySequence(&ledger, seq) - if err != nil { - return false, err - } - - // ignore importer version created time, and updated time - expLedger.ImporterVersion = ledger.ImporterVersion - expLedger.CreatedAt = ledger.CreatedAt - expLedger.UpdatedAt = ledger.UpdatedAt - - // compare ClosedAt separately because reflect.DeepEqual does not handle time.Time - expClosedAt := expLedger.ClosedAt - expLedger.ClosedAt = ledger.ClosedAt - - return expClosedAt.Equal(ledger.ClosedAt) && reflect.DeepEqual(expLedger, ledger), nil -} - -// InsertExpLedger creates a row in the exp_history_ledgers table. +// InsertLedger creates a row in the history_ledgers table. // Returns number of rows affected and error. -func (q *Q) InsertExpLedger( +func (q *Q) InsertLedger( ledger xdr.LedgerHeaderHistoryEntry, successTxsCount int, failedTxsCount int, @@ -156,7 +116,7 @@ func (q *Q) InsertExpLedger( return 0, err } - sql := sq.Insert("exp_history_ledgers").SetMap(m) + sql := sq.Insert("history_ledgers").SetMap(m) result, err := q.Exec(sql) if err != nil { return 0, err @@ -200,7 +160,7 @@ func ledgerHeaderToMap( }, nil } -var selectLedgerFields = sq.Select( +var selectLedger = sq.Select( "hl.id", "hl.sequence", "hl.importer_version", @@ -220,5 +180,4 @@ var selectLedgerFields = sq.Select( "hl.max_tx_set_size", "hl.protocol_version", "hl.ledger_header", -) -var selectLedger = selectLedgerFields.From("history_ledgers hl") +).From("history_ledgers hl") diff --git a/services/horizon/internal/db2/history/ledger_test.go b/services/horizon/internal/db2/history/ledger_test.go index 2037f64e84..c2ca64b104 100644 --- a/services/horizon/internal/db2/history/ledger_test.go +++ b/services/horizon/internal/db2/history/ledger_test.go @@ -6,7 +6,6 @@ import ( "testing" "time" - sq "github.com/Masterminds/squirrel" "github.com/guregu/null" "github.com/stellar/go/services/horizon/internal/test" "github.com/stellar/go/services/horizon/internal/toid" @@ -108,7 +107,7 @@ func TestInsertLedger(t *testing.T) { tt.Assert.NoError(err) expectedLedger.LedgerHeaderXDR = null.NewString(ledgerHeaderBase64, true) - rowsAffected, err := q.InsertExpLedger( + rowsAffected, err := q.InsertLedger( ledgerEntry, 12, 3, @@ -118,7 +117,8 @@ func TestInsertLedger(t *testing.T) { tt.Assert.NoError(err) tt.Assert.Equal(rowsAffected, int64(1)) - ledgerFromDB, err := q.expLedgerBySequence(69859) + var ledgerFromDB Ledger + err = q.LedgerBySequence(&ledgerFromDB, 69859) tt.Assert.NoError(err) expectedLedger.CreatedAt = ledgerFromDB.CreatedAt @@ -157,93 +157,3 @@ func ledgerToMap(ledger Ledger) map[string]interface{} { "ledger_header": ledger.LedgerHeaderXDR, } } - -func TestCheckExpLedger(t *testing.T) { - tt := test.Start(t) - defer 
tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - ledger := Ledger{ - Sequence: 69859, - LedgerHash: "4db1e4f145e9ee75162040d26284795e0697e2e84084624e7c6c723ebbf80118", - PreviousLedgerHash: null.NewString("4b0b8bace3b2438b2404776ce57643966855487ba6384724a3c664c7aa4cd9e4", true), - TotalOrderID: TotalOrderID{toid.New(int32(69859), 0, 0).ToInt64()}, - ImporterVersion: 321, - TransactionCount: 12, - SuccessfulTransactionCount: new(int32), - FailedTransactionCount: new(int32), - OperationCount: 23, - TotalCoins: 23451, - FeePool: 213, - BaseReserve: 687, - MaxTxSetSize: 345, - ProtocolVersion: 12, - BaseFee: 100, - ClosedAt: time.Now().UTC().Truncate(time.Second), - LedgerHeaderXDR: null.NewString("temp", true), - } - *ledger.SuccessfulTransactionCount = 12 - *ledger.FailedTransactionCount = 3 - - _, err := q.CheckExpLedger(ledger.Sequence) - tt.Assert.Equal(err, sql.ErrNoRows) - - insertSQL := sq.Insert("exp_history_ledgers").SetMap(ledgerToMap(ledger)) - _, err = q.Exec(insertSQL) - tt.Assert.NoError(err) - - _, err = q.CheckExpLedger(ledger.Sequence) - tt.Assert.Equal(err, sql.ErrNoRows) - - ledger.CreatedAt = time.Now() - ledger.UpdatedAt = time.Now() - ledger.ImporterVersion = 123 - - insertSQL = sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger)) - _, err = q.Exec(insertSQL) - tt.Assert.NoError(err) - - valid, err := q.CheckExpLedger(ledger.Sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - for fieldName, value := range map[string]interface{}{ - "closed_at": time.Now().Add(time.Minute).UTC().Truncate(time.Second), - "ledger_hash": "hash", - "previous_ledger_hash": "previous", - "id": 999, - "total_coins": 9999, - "fee_pool": 9999, - "base_fee": 9999, - "base_reserve": 9999, - "max_tx_set_size": 9999, - "transaction_count": 9999, - "successful_transaction_count": 9999, - "failed_transaction_count": 9999, - "operation_count": 9999, - "protocol_version": 9999, - "ledger_header": "ledger header", - } { - updateSQL := sq.Update("history_ledgers"). - Set(fieldName, value). 
- Where("sequence = ?", ledger.Sequence) - _, err = q.Exec(updateSQL) - tt.Assert.NoError(err) - - valid, err = q.CheckExpLedger(ledger.Sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) - - _, err = q.Exec(sq.Delete("history_ledgers").Where("sequence = ?", ledger.Sequence)) - tt.Assert.NoError(err) - - insertSQL = sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger)) - _, err = q.Exec(insertSQL) - tt.Assert.NoError(err) - - valid, err := q.CheckExpLedger(ledger.Sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - } -} diff --git a/services/horizon/internal/db2/history/mock_q_effects.go b/services/horizon/internal/db2/history/mock_q_effects.go index cc976adf16..541e965fb0 100644 --- a/services/horizon/internal/db2/history/mock_q_effects.go +++ b/services/horizon/internal/db2/history/mock_q_effects.go @@ -14,12 +14,7 @@ func (m *MockQEffects) NewEffectBatchInsertBuilder(maxBatchSize int) EffectBatch return a.Get(0).(EffectBatchInsertBuilder) } -func (m *MockQEffects) CreateExpAccounts(addresses []string) (map[string]int64, error) { +func (m *MockQEffects) CreateAccounts(addresses []string) (map[string]int64, error) { a := m.Called(addresses) return a.Get(0).(map[string]int64), a.Error(1) } - -func (m *MockQEffects) CheckExpOperationEffects(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} diff --git a/services/horizon/internal/db2/history/mock_q_ledgers.go b/services/horizon/internal/db2/history/mock_q_ledgers.go index 908ba04f3a..bd1695a43d 100644 --- a/services/horizon/internal/db2/history/mock_q_ledgers.go +++ b/services/horizon/internal/db2/history/mock_q_ledgers.go @@ -9,7 +9,7 @@ type MockQLedgers struct { mock.Mock } -func (m *MockQLedgers) InsertExpLedger( +func (m *MockQLedgers) InsertLedger( ledger xdr.LedgerHeaderHistoryEntry, successTxsCount int, failedTxsCount int, @@ -19,8 +19,3 @@ func (m *MockQLedgers) InsertExpLedger( a := m.Called(ledger, successTxsCount, failedTxsCount, opCount, ingestVersion) return a.Get(0).(int64), a.Error(1) } - -func (m *MockQLedgers) CheckExpLedger(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} diff --git a/services/horizon/internal/db2/history/mock_q_operations.go b/services/horizon/internal/db2/history/mock_q_operations.go index 7ca42f33a2..08a97c6da9 100644 --- a/services/horizon/internal/db2/history/mock_q_operations.go +++ b/services/horizon/internal/db2/history/mock_q_operations.go @@ -12,9 +12,3 @@ func (m *MockQOperations) NewOperationBatchInsertBuilder(maxBatchSize int) Opera a := m.Called(maxBatchSize) return a.Get(0).(OperationBatchInsertBuilder) } - -// CheckExpOperations mock -func (m *MockQOperations) CheckExpOperations(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} diff --git a/services/horizon/internal/db2/history/mock_q_participants.go b/services/horizon/internal/db2/history/mock_q_participants.go index 028bd883c4..45dee5b537 100644 --- a/services/horizon/internal/db2/history/mock_q_participants.go +++ b/services/horizon/internal/db2/history/mock_q_participants.go @@ -9,12 +9,7 @@ type MockQParticipants struct { mock.Mock } -func (m *MockQParticipants) CheckExpParticipants(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} - -func (m *MockQParticipants) CreateExpAccounts(addresses []string) (map[string]int64, error) { +func (m *MockQParticipants) CreateAccounts(addresses []string) (map[string]int64, error) { a := m.Called(addresses) return 
a.Get(0).(map[string]int64), a.Error(1) } diff --git a/services/horizon/internal/db2/history/mock_q_trades.go b/services/horizon/internal/db2/history/mock_q_trades.go index 92f5db0bd0..132e2102ac 100644 --- a/services/horizon/internal/db2/history/mock_q_trades.go +++ b/services/horizon/internal/db2/history/mock_q_trades.go @@ -9,17 +9,12 @@ type MockQTrades struct { mock.Mock } -func (m *MockQTrades) CheckExpTrades(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} - -func (m *MockQTrades) CreateExpAccounts(addresses []string) (map[string]int64, error) { +func (m *MockQTrades) CreateAccounts(addresses []string) (map[string]int64, error) { a := m.Called(addresses) return a.Get(0).(map[string]int64), a.Error(1) } -func (m *MockQTrades) CreateExpAssets(assets []xdr.Asset) (map[string]Asset, error) { +func (m *MockQTrades) CreateAssets(assets []xdr.Asset) (map[string]Asset, error) { a := m.Called(assets) return a.Get(0).(map[string]Asset), a.Error(1) } diff --git a/services/horizon/internal/db2/history/mock_q_transactions.go b/services/horizon/internal/db2/history/mock_q_transactions.go index e0b89b8c4f..6fdac71c0f 100644 --- a/services/horizon/internal/db2/history/mock_q_transactions.go +++ b/services/horizon/internal/db2/history/mock_q_transactions.go @@ -11,8 +11,3 @@ func (m *MockQTransactions) NewTransactionBatchInsertBuilder(maxBatchSize int) T a := m.Called(maxBatchSize) return a.Get(0).(TransactionBatchInsertBuilder) } - -func (m *MockQTransactions) CheckExpTransactions(seq int32) (bool, error) { - a := m.Called(seq) - return a.Get(0).(bool), a.Error(1) -} diff --git a/services/horizon/internal/db2/history/operation.go b/services/horizon/internal/db2/history/operation.go index c597e6e9b5..2800143ed0 100644 --- a/services/horizon/internal/db2/history/operation.go +++ b/services/horizon/internal/db2/history/operation.go @@ -2,8 +2,6 @@ package history import ( "encoding/json" - "fmt" - "reflect" sq "github.com/Masterminds/squirrel" "github.com/go-errors/errors" @@ -281,162 +279,12 @@ func (q *OperationsQ) Fetch() ([]Operation, []Transaction, error) { return operations, transactions, nil } -// CheckExpOperations checks that the operations in exp_history_operations -// for the given ledger matches the same operations in history_operations -func (q *Q) CheckExpOperations(seq int32) (bool, error) { - var operations, expOperations []Operation - - err := q.Select( - &operations, - selectOperation. - Where("ht.ledger_sequence = ?", seq). - OrderBy("hop.id asc"), - ) - if err != nil { - return false, err - } - - err = q.Select( - &expOperations, - selectExpOperation. - Where("ht.ledger_sequence = ?", seq). - OrderBy("hop.id asc"), - ) - if err != nil { - return false, err - } - - // We only proceed with the comparison if we have operations data in both the - // legacy ingestion system and the experimental ingestion system. - // If there are no operations in either the legacy ingestion system or the - // experimental ingestion system we skip the check. 
- if len(operations) == 0 || len(expOperations) == 0 { - return true, nil - } - - if len(operations) != len(expOperations) { - return false, nil - } - - for i, operation := range operations { - expOperation := expOperations[i] - if !reflect.DeepEqual(operation, expOperation) { - return false, nil - } - } - - return true, nil -} - -type operationParticipant struct { - OperationID int64 `db:"history_operation_id"` - Address string `db:"address"` -} - -func (q *Q) findOperationParticipants( - participantTable, - accountTable, - operationsTable string, - seq int32, -) ([]operationParticipant, error) { - from := toid.ID{LedgerSequence: int32(seq)}.ToInt64() - to := toid.ID{LedgerSequence: int32(seq + 1)}.ToInt64() - participants := []operationParticipant{} - - fields := sq.Select( - "hop.history_operation_id, " + - "ha.address as address") - - sql := fields. - From( - fmt.Sprintf("%s hop", participantTable), - ). - Join( - fmt.Sprintf( - "%s ho ON hop.history_operation_id = ho.id", - operationsTable, - ), - ). - Join( - fmt.Sprintf( - "%s ha ON hop.history_account_id = ha.id", - accountTable, - ), - ). - Where("ho.id >= ? AND ho.id <= ? ", from, to). - OrderBy( - "hop.history_operation_id asc, ha.address asc", - ) - - err := q.Select(&participants, sql) - - if err != nil { - return participants, errors.Errorf( - "could not load exp_history_operation_participants for ledger: %v", - seq, - ) - } - - return participants, nil -} - -// checkExpOperationParticipants checks that the participants in -// exp_history_operation_participants for the given ledger matches the same -// participants as in history_operation_participants -func checkExpOperationParticipants(q *Q, seq int32) (bool, error) { - expParticipants, err := q.findOperationParticipants( - "exp_history_operation_participants", - "exp_history_accounts", - "exp_history_operations", - seq, - ) - - if err != nil { - return false, errors.Errorf( - "could not load exp_history_operation_participants for ledger: %v", - seq, - ) - } - - participants, err := q.findOperationParticipants( - "history_operation_participants", - "history_accounts", - "history_operations", - seq, - ) - - if err != nil { - return false, errors.Errorf( - "could not load history_operation_participants for ledger: %v", - seq, - ) - } - - if len(expParticipants) == 0 || len(participants) == 0 { - return true, nil - } - - if len(expParticipants) != len(participants) { - return false, nil - } - - for i, expParticipant := range expParticipants { - participant := participants[i] - if expParticipant != participant { - return false, nil - } - } - - return true, nil -} - -// QOperations defines exp_history_operation related queries. +// QOperations defines history_operation related queries. type QOperations interface { NewOperationBatchInsertBuilder(maxBatchSize int) OperationBatchInsertBuilder - CheckExpOperations(seq int32) (bool, error) } -var selectOperationFields = sq.Select( +var selectOperation = sq.Select( "hop.id, " + "hop.transaction_id, " + "hop.application_order, " + @@ -445,12 +293,6 @@ var selectOperationFields = sq.Select( "hop.source_account, " + "ht.transaction_hash, " + "ht.tx_result, " + - "ht.successful as transaction_successful") - -var selectOperation = selectOperationFields. + "ht.successful as transaction_successful"). From("history_operations hop"). LeftJoin("history_transactions ht ON ht.id = hop.transaction_id") - -var selectExpOperation = selectOperationFields. - From("exp_history_operations hop"). 
- LeftJoin("exp_history_transactions ht ON ht.id = hop.transaction_id") diff --git a/services/horizon/internal/db2/history/operation_batch_insert_builder.go b/services/horizon/internal/db2/history/operation_batch_insert_builder.go index 2a082f0aff..78b0116b39 100644 --- a/services/horizon/internal/db2/history/operation_batch_insert_builder.go +++ b/services/horizon/internal/db2/history/operation_batch_insert_builder.go @@ -6,7 +6,7 @@ import ( ) // OperationBatchInsertBuilder is used to insert a transaction's operations into the -// exp_history_operations table +// history_operations table type OperationBatchInsertBuilder interface { Add( id int64, @@ -28,7 +28,7 @@ type operationBatchInsertBuilder struct { func (q *Q) NewOperationBatchInsertBuilder(maxBatchSize int) OperationBatchInsertBuilder { return &operationBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_operations"), + Table: q.GetTable("history_operations"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/operation_batch_insert_builder_test.go b/services/horizon/internal/db2/history/operation_batch_insert_builder_test.go index 7deda3297f..0c310b1109 100644 --- a/services/horizon/internal/db2/history/operation_batch_insert_builder_test.go +++ b/services/horizon/internal/db2/history/operation_batch_insert_builder_test.go @@ -15,6 +15,8 @@ func TestAddOperation(t *testing.T) { test.ResetHorizonDB(t, tt.HorizonDB) q := &Q{tt.HorizonSession()} + txBatch := q.NewTransactionBatchInsertBuilder(0) + builder := q.NewOperationBatchInsertBuilder(1) transactionHash := "2a805712c6d10f9e74bb0ccf54ae92a2b4b1e586451fe8133a2433816f6b567c" @@ -32,8 +34,8 @@ func TestAddOperation(t *testing.T) { ) sequence := int32(56) - - insertTransaction(tt, q, "exp_history_transactions", transaction, sequence) + tt.Assert.NoError(txBatch.Add(transaction, uint32(sequence))) + tt.Assert.NoError(txBatch.Exec()) details, err := json.Marshal(map[string]string{ "to": "GANFZDRBCNTUXIODCJEYMACPMCSZEVE4WZGZ3CZDZ3P2SXK4KH75IK6Y", @@ -57,7 +59,7 @@ func TestAddOperation(t *testing.T) { tt.Assert.NoError(err) ops := []Operation{} - err = q.Select(&ops, selectExpOperation) + err = q.Select(&ops, selectOperation) if tt.Assert.NoError(err) { tt.Assert.Len(ops, 1) diff --git a/services/horizon/internal/db2/history/operation_participant_batch_insert_builder.go b/services/horizon/internal/db2/history/operation_participant_batch_insert_builder.go index 4bfc0bf5b1..c0aa2da3c2 100644 --- a/services/horizon/internal/db2/history/operation_participant_batch_insert_builder.go +++ b/services/horizon/internal/db2/history/operation_participant_batch_insert_builder.go @@ -5,7 +5,7 @@ import ( ) // OperationParticipantBatchInsertBuilder is used to insert a transaction's operations into the -// exp_history_operations table +// history_operations table type OperationParticipantBatchInsertBuilder interface { Add( operationID int64, @@ -23,7 +23,7 @@ type operationParticipantBatchInsertBuilder struct { func (q *Q) NewOperationParticipantBatchInsertBuilder(maxBatchSize int) OperationParticipantBatchInsertBuilder { return &operationParticipantBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_operation_participants"), + Table: q.GetTable("history_operation_participants"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/operation_participant_batch_insert_builder_test.go b/services/horizon/internal/db2/history/operation_participant_batch_insert_builder_test.go 
index 84cb23ca79..612acba3bd 100644 --- a/services/horizon/internal/db2/history/operation_participant_batch_insert_builder_test.go +++ b/services/horizon/internal/db2/history/operation_participant_batch_insert_builder_test.go @@ -29,7 +29,7 @@ func TestAddOperationParticipants(t *testing.T) { err = q.Select(&ops, sq.Select( "hopp.history_operation_id, "+ "hopp.history_account_id"). - From("exp_history_operation_participants hopp"), + From("history_operation_participants hopp"), ) if tt.Assert.NoError(err) { diff --git a/services/horizon/internal/db2/history/operation_test.go b/services/horizon/internal/db2/history/operation_test.go index b9f489a3b8..b1cd8ee434 100644 --- a/services/horizon/internal/db2/history/operation_test.go +++ b/services/horizon/internal/db2/history/operation_test.go @@ -1,16 +1,10 @@ package history import ( - "encoding/json" "testing" - sq "github.com/Masterminds/squirrel" - "github.com/stellar/go/exp/ingest/io" "github.com/stellar/go/services/horizon/internal/db2" "github.com/stellar/go/services/horizon/internal/test" - "github.com/stellar/go/services/horizon/internal/toid" - "github.com/stellar/go/support/db" - "github.com/stellar/go/xdr" ) func TestOperationQueries(t *testing.T) { @@ -299,277 +293,3 @@ func TestOperationIncludeTransactions(t *testing.T) { tt.Assert.Equal(*transaction, expectedTransactions[0]) assertOperationMatchesTransaction(tt, op, *transaction) } - -func TestCheckExpOperations(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - sequence := int32(56) - - transaction := buildLedgerTransaction( - t, - testTransaction{ - index: 1, - envelopeXDR: "AAAAABpcjiETZ0uhwxJJhgBPYKWSVJy2TZ2LI87fqV1cUf/UAAAAZAAAADcAAAABAAAAAAAAAAAAAAABAAAAAAAAAAEAAAAAGlyOIRNnS6HDEkmGAE9gpZJUnLZNnYsjzt+pXVxR/9QAAAAAAAAAAAX14QAAAAAAAAAAAVxR/9QAAABAK6pcXYMzAEmH08CZ1LWmvtNDKauhx+OImtP/Lk4hVTMJRVBOebVs5WEPj9iSrgGT0EswuDCZ2i5AEzwgGof9Ag==", - resultXDR: "AAAAAAAAAGQAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAA=", - metaXDR: "AAAAAQAAAAIAAAADAAAAOAAAAAAAAAAAGlyOIRNnS6HDEkmGAE9gpZJUnLZNnYsjzt+pXVxR/9QAAAACVAvjnAAAADcAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAOAAAAAAAAAAAGlyOIRNnS6HDEkmGAE9gpZJUnLZNnYsjzt+pXVxR/9QAAAACVAvjnAAAADcAAAABAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAA==", - feeChangesXDR: "AAAAAgAAAAMAAAA3AAAAAAAAAAAaXI4hE2dLocMSSYYAT2ClklSctk2diyPO36ldXFH/1AAAAAJUC+QAAAAANwAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAA4AAAAAAAAAAAaXI4hE2dLocMSSYYAT2ClklSctk2diyPO36ldXFH/1AAAAAJUC+OcAAAANwAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - hash: "2a805712c6d10f9e74bb0ccf54ae92a2b4b1e586451fe8133a2433816f6b567c", - }, - ) - - // second transaction - secondTransaction := buildLedgerTransaction( - t, - testTransaction{ - index: 2, - envelopeXDR: "AAAAAGL8HQvQkbK2HA3WVjRrKmjX00fG8sLI7m0ERwJW/AX3AAAAZAAAAAAAAAAaAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAoZftFP3p4ifbTm6hQdieotu3Zw9E05GtoSh5MBytEpQAAAACVAvkAAAAAAAAAAABVvwF9wAAAEDHU95E9wxgETD8TqxUrkgC0/7XHyNDts6Q5huRHfDRyRcoHdv7aMp/sPvC3RPkXjOMjgbKJUX7SgExUeYB5f8F", - resultXDR: "AAAAAAAAAGQAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAA=", - metaXDR: 
"AAAAAQAAAAIAAAADAAAAOQAAAAAAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcLGrZY9dZxbAAAAAAAAAAZAAAAAAAAAAEAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAOQAAAAAAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcLGrZY9dZxbAAAAAAAAAAaAAAAAAAAAAEAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAwAAAAMAAAA5AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatlj11nFsAAAAAAAAABoAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAA5AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatlahyo1sAAAAAAAAABoAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5AAAAAAAAAAChl+0U/eniJ9tObqFB2J6i27dnD0TTka2hKHkwHK0SlAAAAAJUC+QAAAAAOQAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - feeChangesXDR: "AAAAAgAAAAMAAAA3AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatlj11nHQAAAAAAAAABkAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAA5AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatlj11nFsAAAAAAAAABkAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - hash: "0e5bd332291e3098e49886df2cdb9b5369a5f9e0a9973f0d9e1a9489c6581ba2", - }, - ) - - // third transaction - thirdTransaction := buildLedgerTransaction( - t, - testTransaction{ - index: 3, - envelopeXDR: "AAAAAGL8HQvQkbK2HA3WVjRrKmjX00fG8sLI7m0ERwJW/AX3AAAAZAAAAAAAAAAXAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAABJeTmKR1qr+CZoIyjAfGxrIXZ/tI1VId2OfZkRowDz4AAAACVAvkAAAAAAAAAAABVvwF9wAAAEDyHwhW9GXQVXG1qibbeqSjxYzhv5IC08K2vSkxzYTwJykvQ8l0+e4M4h2guoK89s8HUfIqIOzDmoGsNTaLcYUG", - resultXDR: "AAAAAAAAAGQAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAA=", - metaXDR: "AAAAAQAAAAIAAAADAAAANQAAAAAAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcLGrZdne46/AAAAAAAAAAWAAAAAAAAAAEAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAANQAAAAAAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcLGrZdne46/AAAAAAAAAAXAAAAAAAAAAEAAAAAYvwdC9CRsrYcDdZWNGsqaNfTR8bywsjubQRHAlb8BfcAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAwAAAAMAAAA1AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatl2d7jr8AAAAAAAAABcAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAA1AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatltJ4lb8AAAAAAAAABcAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1AAAAAAAAAAAEl5OYpHWqv4JmgjKMB8bGshdn+0jVUh3Y59mRGjAPPgAAAAJUC+QAAAAANQAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - feeChangesXDR: "AAAAAgAAAAMAAAAwAAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatl2d7jtgAAAAAAAAABYAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAA1AAAAAAAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wsatl2d7jr8AAAAAAAAABYAAAAAAAAAAQAAAABi/B0L0JGythwN1lY0aypo19NHxvLCyO5tBEcCVvwF9wAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - hash: "df5f0e8b3b533dd9cda0ff7540bef3e9e19369060f8a4b0414b0e3c1b4315b1c", - }, - ) - - insertTransaction(tt, q, "exp_history_transactions", transaction, sequence) - insertTransaction(tt, q, "exp_history_transactions", secondTransaction, sequence) - insertTransaction(tt, q, "exp_history_transactions", thirdTransaction, sequence) - insertTransaction(tt, q, "history_transactions", transaction, sequence) - insertTransaction(tt, q, "history_transactions", secondTransaction, sequence) - insertTransaction(tt, q, 
"history_transactions", thirdTransaction, sequence) - - operationBatch := q.NewOperationBatchInsertBuilder(100) - - txs := []io.LedgerTransaction{ - transaction, - secondTransaction, - thirdTransaction, - } - - details, err := json.Marshal(map[string]interface{}{}) - tt.Assert.NoError(err) - - for _, t := range txs { - err = operationBatch.Add( - toid.New(sequence, int32(t.Index), 1).ToInt64(), - toid.New(sequence, int32(t.Index), 0).ToInt64(), - 1, - xdr.OperationTypePayment, - details, - "GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - ) - tt.Assert.NoError(err) - } - - err = operationBatch.Exec() - tt.Assert.NoError(err) - - batchBuilder := operationBatchInsertBuilder{ - builder: db.BatchInsertBuilder{ - Table: q.GetTable("history_operations"), - MaxBatchSize: 100, - }, - } - - for _, t := range txs { - err = batchBuilder.Add( - toid.New(sequence, int32(t.Index), 1).ToInt64(), - toid.New(sequence, int32(t.Index), 0).ToInt64(), - 1, - xdr.OperationTypePayment, - details, - "GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - ) - tt.Assert.NoError(err) - } - - err = batchBuilder.Exec() - tt.Assert.NoError(err) - - valid, err := q.CheckExpOperations(sequence) - tt.Assert.True(valid) - tt.Assert.NoError(err) - - operationID := toid.New(sequence, int32(thirdTransaction.Index), 1).ToInt64() - for fieldName, value := range map[string]interface{}{ - "application_order": 100, - "type": 13, - "details": "{\"bump_to\": \"300000000003\"}", - "source_account": "source_account", - } { - updateSQL := sq.Update("history_operations"). - Set(fieldName, value). - Where( - "id = ?", - operationID, - ) - _, err = q.Exec(updateSQL) - tt.Assert.NoError(err) - - valid, err = q.CheckExpOperations(sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) - - _, err = q.Exec(sq.Delete("history_operations"). 
- Where("id = ?", operationID)) - tt.Assert.NoError(err) - - err = batchBuilder.Add( - toid.New(sequence, int32(thirdTransaction.Index), 1).ToInt64(), - toid.New(sequence, int32(thirdTransaction.Index), 0).ToInt64(), - 1, - xdr.OperationTypePayment, - details, - "GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - ) - tt.Assert.NoError(err) - err = batchBuilder.Exec() - tt.Assert.NoError(err) - - valid, err := q.CheckExpOperations(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - } -} - -func TestCheckExpOperationParticipants(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - sequence := int32(20) - - valid, err := checkExpOperationParticipants(q, sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - addresses := []string{ - "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - "GCYVFGI3SEQJGBNQQG7YCMFWEYOHK3XPVOVPA6C566PXWN4SN7LILZSM", - "GBYSBDAJZMHL5AMD7QXQ3JEP3Q4GLKADWIJURAAHQALNAWD6Z5XF2RAC", - } - expAccounts, err := q.CreateExpAccounts(addresses) - tt.Assert.NoError(err) - - operationIDs := []int64{ - toid.New(sequence, 1, 1).ToInt64(), - toid.New(sequence, 2, 1).ToInt64(), - toid.New(sequence, 1, 1).ToInt64(), - toid.New(sequence+1, 1, 1).ToInt64(), - } - - historyOperations := map[int64]map[string]interface{}{ - operationIDs[0]: map[string]interface{}{ - "id": operationIDs[0], - "transaction_id": toid.New(sequence, 1, 0).ToInt64(), - "application_order": 1, - "type": 1, - "details": "{}", - "source_account": addresses[0], - }, - operationIDs[1]: map[string]interface{}{ - "id": operationIDs[1], - "transaction_id": toid.New(sequence, 2, 0).ToInt64(), - "application_order": 1, - "type": 1, - "details": "{}", - "source_account": addresses[0], - }, - // We skip operationIDs[2] since it is the same operation as operationIDs[0] - operationIDs[3]: map[string]interface{}{ - "id": operationIDs[3], - "transaction_id": toid.New(sequence+1, 1, 0).ToInt64(), - "application_order": 1, - "type": 1, - "details": "{}", - "source_account": addresses[0], - }, - } - - sql := sq.Insert("exp_history_operations") - - for _, historyOperation := range historyOperations { - _, err = q.Exec(sql.SetMap(historyOperation)) - tt.Assert.NoError(err) - } - - batch := q.NewOperationParticipantBatchInsertBuilder(0) - for i, address := range addresses { - tt.Assert.NoError( - batch.Add(operationIDs[i], expAccounts[address]), - ) - } - tt.Assert.NoError(batch.Exec()) - - valid, err = checkExpOperationParticipants(q, sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - addresses = append(addresses, "GBXGQJWVLWOYHFLVTKWV5FGHA3LNYY2JQKM7OAJAUEQFU6LPCSEFVXON") - operationIDs = append(operationIDs, toid.New(sequence, 3, 1).ToInt64()) - - historyOperations[operationIDs[4]] = map[string]interface{}{ - "id": operationIDs[4], - "transaction_id": toid.New(sequence, 3, 0).ToInt64(), - "application_order": 1, - "type": 1, - "details": "{}", - "source_account": addresses[0], - } - - var accounts []Account - tt.Assert.NoError(q.CreateAccounts(&accounts, addresses)) - - accountsMap := map[string]int64{} - for _, account := range accounts { - accountsMap[account.Address] = account.ID - } - - for i, address := range addresses { - _, err = q.Exec(sq.Insert("history_operation_participants"). 
- SetMap(map[string]interface{}{ - "history_operation_id": operationIDs[i], - "history_account_id": accountsMap[address], - })) - tt.Assert.NoError(err) - - historyOperation, ok := historyOperations[operationIDs[i]] - - if ok { - _, err = q.Exec(sq.Insert("history_operations"). - SetMap(historyOperation). - Suffix("ON CONFLICT (id) DO NOTHING")) - - tt.Assert.NoError(err) - } - - valid, err = checkExpOperationParticipants(q, sequence) - tt.Assert.NoError(err) - // The first 3 operations all belong to ledger `sequence`. - // The 4th operatino belongs to the next ledger so it is - // ignored by CheckExpOperationParticipants. - // The last operation belongs to `sequence`, however, it is - // not present in exp_history_operation_participants so - // we expect CheckExpOperationParticipants to fail after the last - // transaction is added to history_operation_participants - expected := i == 2 || i == 3 - tt.Assert.Equal(expected, valid) - } -} diff --git a/services/horizon/internal/db2/history/participants.go b/services/horizon/internal/db2/history/participants.go index ac51cea0f9..c95478f1e1 100644 --- a/services/horizon/internal/db2/history/participants.go +++ b/services/horizon/internal/db2/history/participants.go @@ -1,137 +1,18 @@ package history import ( - "fmt" - - sq "github.com/Masterminds/squirrel" - "github.com/stellar/go/services/horizon/internal/toid" "github.com/stellar/go/support/db" ) // QParticipants defines ingestion participant related queries. type QParticipants interface { - CheckExpParticipants(seq int32) (bool, error) - CreateExpAccounts(addresses []string) (map[string]int64, error) + CreateAccounts(addresses []string) (map[string]int64, error) NewTransactionParticipantsBatchInsertBuilder(maxBatchSize int) TransactionParticipantsBatchInsertBuilder NewOperationParticipantBatchInsertBuilder(maxBatchSize int) OperationParticipantBatchInsertBuilder } -// CreateExpAccounts creates rows in the exp_history_accounts table for a given list of addresses. -// CreateExpAccounts returns a mapping of account address to its corresponding id in the exp_history_accounts table -func (q *Q) CreateExpAccounts(addresses []string) (map[string]int64, error) { - var accounts []Account - sql := sq.Insert("exp_history_accounts").Columns("address") - for _, address := range addresses { - sql = sql.Values(address) - } - sql = sql.Suffix("ON CONFLICT (address) DO UPDATE SET address=EXCLUDED.address RETURNING *") - - err := q.Select(&accounts, sql) - if err != nil { - return nil, err - } - - addressToID := map[string]int64{} - for _, account := range accounts { - addressToID[account.Address] = account.ID - } - return addressToID, nil -} - -type transactionParticipantPair struct { - ID int64 `db:"history_transaction_id"` - Address string `db:"address"` -} - -func (q *Q) findTransactionParticipants( - participantTable, accountTable string, seq int32, -) ([]transactionParticipantPair, error) { - var participants []transactionParticipantPair - from := toid.ID{LedgerSequence: int32(seq)}.ToInt64() - to := toid.ID{LedgerSequence: int32(seq + 1)}.ToInt64() - - err := q.Select( - &participants, - sq.Select( - "htp.history_transaction_id", - "ha.address", - ).From( - fmt.Sprintf("%s htp", participantTable), - ).Join( - fmt.Sprintf("%s ha ON ha.id = htp.history_account_id", accountTable), - ).Where( - "htp.history_transaction_id >= ? AND htp.history_transaction_id < ? 
", from, to, - ).OrderBy( - "htp.history_transaction_id asc, ha.address asc", - ), - ) - - return participants, err -} - -type ingestionCheckFn func(*Q, int32) (bool, error) - -var participantChecks = []ingestionCheckFn{ - checkExpTransactionParticipants, - checkExpOperationParticipants, -} - -// CheckExpParticipants checks that the participants in the -// experimental ingestion tables matches the participants in the -// legacy ingestion tables -func (q *Q) CheckExpParticipants(seq int32) (bool, error) { - for _, checkFn := range participantChecks { - if valid, err := checkFn(q, seq); err != nil { - return false, err - } else if !valid { - return false, nil - } - } - return true, nil -} - -// checkExpTransactionParticipants checks that the participants in -// exp_history_transaction_participants for the given ledger matches -// the same participants in history_transaction_participants -func checkExpTransactionParticipants(q *Q, seq int32) (bool, error) { - participants, err := q.findTransactionParticipants( - "history_transaction_participants", "history_accounts", seq, - ) - if err != nil { - return false, err - } - - expParticipants, err := q.findTransactionParticipants( - "exp_history_transaction_participants", "exp_history_accounts", seq, - ) - if err != nil { - return false, err - } - - // We only proceed with the comparison if we have data in both the - // legacy ingestion system and the experimental ingestion system. - // If there are no participants in either the legacy ingestion system or the - // experimental ingestion system we skip the check. - if len(participants) == 0 || len(expParticipants) == 0 { - return true, nil - } - - if len(participants) != len(expParticipants) { - return false, nil - } - - for i, participant := range participants { - expParticipant := expParticipants[i] - if participant != expParticipant { - return false, nil - } - } - - return true, nil -} - // TransactionParticipantsBatchInsertBuilder is used to insert transaction participants into the -// exp_history_transaction_participants table +// history_transaction_participants table type TransactionParticipantsBatchInsertBuilder interface { Add(transactionID, accountID int64) error Exec() error @@ -145,7 +26,7 @@ type transactionParticipantsBatchInsertBuilder struct { func (q *Q) NewTransactionParticipantsBatchInsertBuilder(maxBatchSize int) TransactionParticipantsBatchInsertBuilder { return &transactionParticipantsBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_transaction_participants"), + Table: q.GetTable("history_transaction_participants"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/participants_test.go b/services/horizon/internal/db2/history/participants_test.go index 443f6420b5..1c656ef883 100644 --- a/services/horizon/internal/db2/history/participants_test.go +++ b/services/horizon/internal/db2/history/participants_test.go @@ -1,63 +1,12 @@ package history import ( - "fmt" "testing" sq "github.com/Masterminds/squirrel" "github.com/stellar/go/services/horizon/internal/test" - "github.com/stellar/go/services/horizon/internal/toid" ) -func assertAccountsContainAddresses(tt *test.T, accounts map[string]int64, addresses []string) { - tt.Assert.Len(accounts, len(addresses)) - set := map[int64]bool{} - for _, address := range addresses { - accountID, ok := accounts[address] - tt.Assert.True(ok) - tt.Assert.False(set[accountID]) - set[accountID] = true - } -} - -func TestCreateExpAccounts(t *testing.T) { - tt := test.Start(t) - 
defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - addresses := []string{ - "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - } - accounts, err := q.CreateExpAccounts(addresses) - tt.Assert.NoError(err) - tt.Assert.Len(accounts, 2) - assertAccountsContainAddresses(tt, accounts, addresses) - - dupAccounts, err := q.CreateExpAccounts([]string{ - "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - }) - tt.Assert.NoError(err) - tt.Assert.Equal(accounts, dupAccounts) - - addresses = []string{ - "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - "GCYVFGI3SEQJGBNQQG7YCMFWEYOHK3XPVOVPA6C566PXWN4SN7LILZSM", - "GBYSBDAJZMHL5AMD7QXQ3JEP3Q4GLKADWIJURAAHQALNAWD6Z5XF2RAC", - } - accounts, err = q.CreateExpAccounts(addresses) - tt.Assert.NoError(err) - assertAccountsContainAddresses(tt, accounts, addresses) - for address, accountID := range dupAccounts { - id, ok := accounts[address] - tt.Assert.True(ok) - tt.Assert.Equal(id, accountID) - } -} - type transactionParticipant struct { TransactionID int64 `db:"history_transaction_id"` AccountID int64 `db:"history_account_id"` @@ -66,7 +15,7 @@ type transactionParticipant struct { func getTransactionParticipants(tt *test.T, q *Q) []transactionParticipant { var participants []transactionParticipant sql := sq.Select("history_transaction_id", "history_account_id"). - From("exp_history_transaction_participants"). + From("history_transaction_participants"). OrderBy("(history_transaction_id, history_account_id) asc") err := q.Select(&participants, sql) @@ -107,191 +56,3 @@ func TestTransactionParticipantsBatch(t *testing.T) { participants, ) } - -func TestCheckExpTransactionParticipants(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - sequence := int32(20) - - valid, err := checkExpTransactionParticipants(q, sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - addresses := []string{ - "GAOQJGUAB7NI7K7I62ORBXMN3J4SSWQUQ7FOEPSDJ322W2HMCNWPHXFB", - "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H", - "GCYVFGI3SEQJGBNQQG7YCMFWEYOHK3XPVOVPA6C566PXWN4SN7LILZSM", - "GBYSBDAJZMHL5AMD7QXQ3JEP3Q4GLKADWIJURAAHQALNAWD6Z5XF2RAC", - } - expAccounts, err := q.CreateExpAccounts(addresses) - tt.Assert.NoError(err) - - transactionIDs := []int64{ - toid.New(sequence, 1, 0).ToInt64(), - toid.New(sequence, 2, 0).ToInt64(), - toid.New(sequence, 1, 0).ToInt64(), - toid.New(sequence+1, 1, 0).ToInt64(), - } - - batch := q.NewTransactionParticipantsBatchInsertBuilder(0) - for i, address := range addresses { - tt.Assert.NoError( - batch.Add(transactionIDs[i], expAccounts[address]), - ) - } - tt.Assert.NoError(batch.Exec()) - - valid, err = checkExpTransactionParticipants(q, sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - addresses = append(addresses, "GBXGQJWVLWOYHFLVTKWV5FGHA3LNYY2JQKM7OAJAUEQFU6LPCSEFVXON") - transactionIDs = append(transactionIDs, toid.New(sequence, 3, 0).ToInt64()) - var accounts []Account - tt.Assert.NoError(q.CreateAccounts(&accounts, addresses)) - accountsMap := map[string]int64{} - for _, account := range accounts { - accountsMap[account.Address] = account.ID - } - - for i, address := range addresses { - _, err := q.Exec(sq.Insert("history_transaction_participants"). 
- SetMap(map[string]interface{}{ - "history_transaction_id": transactionIDs[i], - "history_account_id": accountsMap[address], - })) - tt.Assert.NoError(err) - - valid, err = checkExpTransactionParticipants(q, sequence) - tt.Assert.NoError(err) - // The first 3 transactions all belong to ledger `sequence`. - // The 4th transaction belongs to the next ledger so it is - // ignored by CheckExpParticipants. - // The last transaction belongs to `sequence`, however, it is - // not present in exp_history_transaction_participants so - // we expect CheckExpParticipants to fail after the last - // transaction is added to history_transaction_participants - expected := i == 2 || i == 3 - tt.Assert.Equal(expected, valid) - } -} - -func makeCheck(valid bool, err error) ingestionCheckFn { - return func(*Q, int32) (bool, error) { - return valid, err - } -} - -func TestCheckExpParticipants(t *testing.T) { - originalChecks := participantChecks - defer func() { - participantChecks = originalChecks - }() - q := &Q{} - - firstErr := fmt.Errorf("first error") - middleErr := fmt.Errorf("middle error") - lastErr := fmt.Errorf("last error") - - for _, testCase := range []struct { - name string - checks []ingestionCheckFn - expectedBool bool - expectedError error - }{ - { - "all checks pass", - []ingestionCheckFn{ - makeCheck(true, nil), - makeCheck(true, nil), - makeCheck(true, nil), - }, - true, - nil, - }, - { - "first check fails", - []ingestionCheckFn{ - makeCheck(false, nil), - makeCheck(true, nil), - makeCheck(true, nil), - }, - false, - nil, - }, - { - "middle check fails", - []ingestionCheckFn{ - makeCheck(true, nil), - makeCheck(false, nil), - makeCheck(true, nil), - }, - false, - nil, - }, - { - "last check fails", - []ingestionCheckFn{ - makeCheck(true, nil), - makeCheck(true, nil), - makeCheck(false, nil), - }, - false, - nil, - }, - { - "first check returns error", - []ingestionCheckFn{ - makeCheck(true, firstErr), - makeCheck(true, nil), - makeCheck(true, nil), - }, - false, - firstErr, - }, - { - "middle check returns error", - []ingestionCheckFn{ - makeCheck(true, nil), - makeCheck(false, middleErr), - makeCheck(true, nil), - }, - false, - middleErr, - }, - { - "last check returns error", - []ingestionCheckFn{ - makeCheck(true, nil), - makeCheck(true, nil), - makeCheck(true, lastErr), - }, - false, - lastErr, - }, - { - "all checks returns error", - []ingestionCheckFn{ - makeCheck(true, firstErr), - makeCheck(false, middleErr), - makeCheck(true, lastErr), - }, - false, - firstErr, - }, - } { - t.Run(testCase.name, func(t *testing.T) { - participantChecks = testCase.checks - valid, err := q.CheckExpParticipants(int32(1)) - if valid != testCase.expectedBool { - t.Fatalf("expected %v but got %v", testCase.expectedBool, valid) - } - if err != testCase.expectedError { - t.Fatalf("expected %v but got %v", testCase.expectedError, err) - } - }) - } -} diff --git a/services/horizon/internal/db2/history/trade.go b/services/horizon/internal/db2/history/trade.go index f3cd088a6e..b3f396d17f 100644 --- a/services/horizon/internal/db2/history/trade.go +++ b/services/horizon/internal/db2/history/trade.go @@ -3,7 +3,6 @@ package history import ( "fmt" "math" - "reflect" sq "github.com/Masterminds/squirrel" "github.com/stellar/go/services/horizon/internal/db2" @@ -38,19 +37,6 @@ func (q *Q) Trades() *TradesQ { } } -func (q *Q) expTrades() *TradesQ { - return &TradesQ{ - parent: q, - sql: joinTradeAssets( - joinTradeAccounts( - selectTradeFields.From("exp_history_trades htrd"), - "exp_history_accounts", - ), - 
"exp_history_assets", - ), - } -} - // ReverseTrades provides a helper to filter rows from the `history_trades` table // with pre-defined filters and reversed base/counter. See `TradesQ` methods for the available filters. func (q *Q) ReverseTrades() *TradesQ { @@ -348,67 +334,7 @@ func getCanonicalAssetOrder(assetId1 int64, assetId2 int64) (orderPreserved bool } type QTrades interface { - CreateExpAccounts(addresses []string) (map[string]int64, error) + CreateAccounts(addresses []string) (map[string]int64, error) NewTradeBatchInsertBuilder(maxBatchSize int) TradeBatchInsertBuilder - CheckExpTrades(seq int32) (bool, error) - CreateExpAssets(assets []xdr.Asset) (map[string]Asset, error) -} - -// CheckExpTrades checks that the trades in exp_history_trades -// for the given ledger matches the same transactions in history_trades -func (q *Q) CheckExpTrades(seq int32) (bool, error) { - var trades, expTrades []Trade - - err := q.Trades().ForLedger(seq, "asc").Select(&trades) - if err != nil { - return false, err - } - - err = q.expTrades().ForLedger(seq, "asc").Select(&expTrades) - if err != nil { - return false, err - } - - // We only proceed with the comparison if we have trade data in both the - // legacy ingestion system and the experimental ingestion system. - // If there are no trades in either the legacy ingestion system or the - // experimental ingestion system we skip the check. - if len(trades) == 0 || len(expTrades) == 0 { - return true, nil - } - - if len(trades) != len(expTrades) { - return false, nil - } - - for i, trade := range trades { - expTrade := expTrades[i] - - // compare LedgerCloseTime separately - expClosedAt := expTrade.LedgerCloseTime - expTrade.LedgerCloseTime = trade.LedgerCloseTime - if expClosedAt.Unix() != trade.LedgerCloseTime.Unix() { - return false, nil - } - - // a given set of assets may not have the same ordering in history_assets and exp_history_assets - // the ordering affects the value of BaseIsSeller (see how getCanonicalAssetOrder() is used above) - if expTrade.BaseIsSeller != trade.BaseIsSeller { - expTrade.BaseOfferID, expTrade.CounterOfferID = expTrade.CounterOfferID, expTrade.BaseOfferID - expTrade.BaseAccount, expTrade.CounterAccount = expTrade.CounterAccount, expTrade.BaseAccount - expTrade.BaseAssetType, expTrade.CounterAssetType = expTrade.CounterAssetType, expTrade.BaseAssetType - expTrade.BaseAssetCode, expTrade.CounterAssetCode = expTrade.CounterAssetCode, expTrade.BaseAssetCode - expTrade.BaseAssetIssuer, expTrade.CounterAssetIssuer = expTrade.CounterAssetIssuer, expTrade.BaseAssetIssuer - expTrade.BaseAmount, expTrade.CounterAmount = expTrade.CounterAmount, expTrade.BaseAmount - expTrade.PriceN, expTrade.PriceD = expTrade.PriceD, expTrade.PriceN - - expTrade.BaseIsSeller = trade.BaseIsSeller - } - - if !reflect.DeepEqual(expTrade, trade) { - return false, nil - } - } - - return true, nil + CreateAssets(assets []xdr.Asset) (map[string]Asset, error) } diff --git a/services/horizon/internal/db2/history/trade_batch_insert_builder.go b/services/horizon/internal/db2/history/trade_batch_insert_builder.go index e4dc9e5e16..f017ab7874 100644 --- a/services/horizon/internal/db2/history/trade_batch_insert_builder.go +++ b/services/horizon/internal/db2/history/trade_batch_insert_builder.go @@ -9,7 +9,7 @@ import ( ) // InsertTrade represents the arguments to TradeBatchInsertBuilder.Add() which is used to insert -// rows into the exp_history_trades table +// rows into the history_trades table type InsertTrade struct { HistoryOperationID int64 Order int32 
@@ -25,7 +25,7 @@ type InsertTrade struct { } // TradeBatchInsertBuilder is used to insert trades into the -// exp_history_trades table +// history_trades table type TradeBatchInsertBuilder interface { Add(entries ...InsertTrade) error Exec() error @@ -40,7 +40,7 @@ type tradeBatchInsertBuilder struct { func (q *Q) NewTradeBatchInsertBuilder(maxBatchSize int) TradeBatchInsertBuilder { return &tradeBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_trades"), + Table: q.GetTable("history_trades"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/trade_test.go b/services/horizon/internal/db2/history/trade_test.go index 17c220bfa2..fc530c1452 100644 --- a/services/horizon/internal/db2/history/trade_test.go +++ b/services/horizon/internal/db2/history/trade_test.go @@ -4,7 +4,6 @@ import ( "testing" "time" - sq "github.com/Masterminds/squirrel" "github.com/guregu/null" "github.com/stellar/go/services/horizon/internal/db2" "github.com/stellar/go/services/horizon/internal/test" @@ -127,10 +126,10 @@ func createInsertTrades( return first, second, third } -func createExpAccountsAndAssets( +func createAccountsAndAssets( tt *test.T, q *Q, accounts []string, assets []xdr.Asset, ) ([]int64, []int64) { - addressToAccounts, err := q.CreateExpAccounts(accounts) + addressToAccounts, err := q.CreateAccounts(accounts) tt.Assert.NoError(err) accountIDs := []int64{} @@ -138,7 +137,7 @@ func createExpAccountsAndAssets( accountIDs = append(accountIDs, addressToAccounts[account]) } - assetMap, err := q.CreateExpAssets(assets) + assetMap, err := q.CreateAssets(assets) tt.Assert.NoError(err) assetIDs := []int64{} @@ -174,7 +173,7 @@ func buildIDtoAssetMapping(assets []xdr.Asset, ids []int64) map[int64]xdr.Asset return idToAsset } -func TestInsertExpTrade(t *testing.T) { +func TestBatchInsertTrade(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) @@ -185,7 +184,7 @@ func TestInsertExpTrade(t *testing.T) { "GAXMF43TGZHW3QN3REOUA2U5PW5BTARXGGYJ3JIFHW3YT6QRKRL3CPPU", } assets := []xdr.Asset{eurAsset, usdAsset, nativeAsset} - accountIDs, assetIDs := createExpAccountsAndAssets( + accountIDs, assetIDs := createAccountsAndAssets( tt, q, addresses, assets, @@ -200,7 +199,7 @@ func TestInsertExpTrade(t *testing.T) { tt.Assert.NoError(builder.Exec()) var rows []Trade - tt.Assert.NoError(q.expTrades().Select(&rows)) + tt.Assert.NoError(q.Trades().Select(&rows)) idToAccount := buildIDtoAccountMapping(addresses, accountIDs) idToAsset := buildIDtoAssetMapping(assets, assetIDs) @@ -339,129 +338,3 @@ func createTradeRows( tt.Assert.NoError(err) } } - -func TestCheckExpTrades(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - sequence := int32(56) - valid, err := q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - addresses := []string{ - "GB2QIYT2IAUFMRXKLSLLPRECC6OCOGJMADSPTRK7TGNT2SFR2YGWDARD", - "GAXMF43TGZHW3QN3REOUA2U5PW5BTARXGGYJ3JIFHW3YT6QRKRL3CPPU", - } - assets := []xdr.Asset{ - xdr.MustNewCreditAsset("CHF", issuer.Address()), - eurAsset, usdAsset, nativeAsset, - xdr.MustNewCreditAsset("BTC", issuer.Address()), - } - - expAccountIDs, expAssetIDs := createExpAccountsAndAssets( - tt, q, - addresses, - assets, - ) - - chfAssetID, btcAssetID := expAssetIDs[0], expAssetIDs[4] - assets = assets[1:4] - expAssetIDs = expAssetIDs[1:4] - - idToAccount := buildIDtoAccountMapping(addresses, expAccountIDs) - idToAsset := 
buildIDtoAssetMapping(assets, expAssetIDs) - - first, second, third := createInsertTrades( - expAccountIDs, expAssetIDs, sequence, - ) - - builder := q.NewTradeBatchInsertBuilder(1) - tt.Assert.NoError( - builder.Add(first, second, third), - ) - tt.Assert.NoError(builder.Exec()) - - valid, err = q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - // create different asset id ordering in history_assets compared to exp_history_assets - _, err = q.GetCreateAssetID(assets[1]) - tt.Assert.NoError(err) - _, err = q.GetCreateAssetID(assets[0]) - tt.Assert.NoError(err) - _, err = q.GetCreateAssetID(assets[2]) - tt.Assert.NoError(err) - createTradeRows( - tt, q, idToAccount, idToAsset, first, second, third, - ) - - valid, err = q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - tradeForOtherLedger, _, _ := createInsertTrades( - expAccountIDs, expAssetIDs, sequence+1, - ) - tt.Assert.NoError( - builder.Add(tradeForOtherLedger), - ) - tt.Assert.NoError(builder.Exec()) - - valid, err = q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - - newAddress := "GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY" - newAccounts, err := q.CreateExpAccounts([]string{newAddress}) - tt.Assert.NoError(err) - newAccountID := newAccounts[newAddress] - - for fieldName, value := range map[string]interface{}{ - "ledger_closed_at": time.Now().Add(time.Hour), - "offer_id": 67, - "base_offer_id": 67, - "base_account_id": newAccountID, - "base_asset_id": chfAssetID, - "base_amount": 67, - "counter_offer_id": 67, - "counter_account_id": newAccountID, - "counter_asset_id": btcAssetID, - "counter_amount": 67, - "base_is_seller": second.SoldAssetID >= second.BoughtAssetID, - "price_n": 67, - "price_d": 67, - } { - updateSQL := sq.Update("exp_history_trades"). - Set(fieldName, value). - Where( - "history_operation_id = ? AND \"order\" = ?", - second.HistoryOperationID, second.Order, - ) - _, err = q.Exec(updateSQL) - tt.Assert.NoError(err) - - valid, err = q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) - - _, err = q.Exec(sq.Delete("exp_history_trades"). - Where( - "history_operation_id = ? AND \"order\" = ?", - second.HistoryOperationID, second.Order, - )) - tt.Assert.NoError(err) - - tt.Assert.NoError( - builder.Add(second), - ) - tt.Assert.NoError(builder.Exec()) - - valid, err := q.CheckExpTrades(sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - } -} diff --git a/services/horizon/internal/db2/history/transaction.go b/services/horizon/internal/db2/history/transaction.go index 61548f9514..aa4d62f56f 100644 --- a/services/horizon/internal/db2/history/transaction.go +++ b/services/horizon/internal/db2/history/transaction.go @@ -1,8 +1,6 @@ package history import ( - "reflect" - sq "github.com/Masterminds/squirrel" "github.com/stellar/go/services/horizon/internal/db2" "github.com/stellar/go/services/horizon/internal/toid" @@ -165,72 +163,12 @@ func (q *TransactionsQ) Select(dest interface{}) error { return nil } -// CheckExpTransactions checks that the transactions in exp_history_transactions -// for the given ledger matches the same transactions in history_transactions -func (q *Q) CheckExpTransactions(seq int32) (bool, error) { - var transactions, expTransactions []Transaction - - err := q.Select( - &transactions, - selectTransaction. - Where("ht.ledger_sequence = ?", seq). 
- OrderBy("ht.application_order asc"), - ) - if err != nil { - return false, err - } - - err = q.Select( - &expTransactions, - selectExpTransaction. - Where("ht.ledger_sequence = ?", seq). - OrderBy("ht.application_order asc"), - ) - if err != nil { - return false, err - } - - // We only proceed with the comparison if we have transaction data in both the - // legacy ingestion system and the experimental ingestion system. - // If there are no transactions in either the legacy ingestion system or the - // experimental ingestion system we skip the check. - if len(transactions) == 0 || len(expTransactions) == 0 { - return true, nil - } - - if len(transactions) != len(expTransactions) { - return false, nil - } - - for i, transaction := range transactions { - expTransaction := expTransactions[i] - - // ignore created time and updated time - expTransaction.CreatedAt = transaction.CreatedAt - expTransaction.UpdatedAt = transaction.UpdatedAt - - // compare ClosedAt separately because reflect.DeepEqual does not handle time.Time - expClosedAt := expTransaction.LedgerCloseTime - expTransaction.LedgerCloseTime = transaction.LedgerCloseTime - - equal := expClosedAt.Equal(transaction.LedgerCloseTime) && - reflect.DeepEqual(transaction, expTransaction) - - if !equal { - return false, nil - } - } - - return true, nil -} - // QTransactions defines transaction related queries. type QTransactions interface { NewTransactionBatchInsertBuilder(maxBatchSize int) TransactionBatchInsertBuilder - CheckExpTransactions(seq int32) (bool, error) } -var selectTransactionFields = sq.Select( +var selectTransaction = sq.Select( "ht.id, " + "ht.transaction_hash, " + "ht.ledger_sequence, " + @@ -254,12 +192,6 @@ var selectTransactionFields = sq.Select( "ht.memo, " + "lower(ht.time_bounds) AS valid_after, " + "upper(ht.time_bounds) AS valid_before, " + - "hl.closed_at AS ledger_close_time") - -var selectTransaction = selectTransactionFields. + "hl.closed_at AS ledger_close_time"). From("history_transactions ht"). LeftJoin("history_ledgers hl ON ht.ledger_sequence = hl.sequence") - -var selectExpTransaction = selectTransactionFields. - From("exp_history_transactions ht"). 
- LeftJoin("exp_history_ledgers hl ON ht.ledger_sequence = hl.sequence") diff --git a/services/horizon/internal/db2/history/transaction_batch_insert_builder.go b/services/horizon/internal/db2/history/transaction_batch_insert_builder.go index c5d41e9195..f5eec9f9da 100644 --- a/services/horizon/internal/db2/history/transaction_batch_insert_builder.go +++ b/services/horizon/internal/db2/history/transaction_batch_insert_builder.go @@ -20,7 +20,7 @@ import ( ) // TransactionBatchInsertBuilder is used to insert transactions into the -// exp_history_transactions table +// history_transactions table type TransactionBatchInsertBuilder interface { Add(transaction io.LedgerTransaction, sequence uint32) error Exec() error @@ -35,7 +35,7 @@ type transactionBatchInsertBuilder struct { func (q *Q) NewTransactionBatchInsertBuilder(maxBatchSize int) TransactionBatchInsertBuilder { return &transactionBatchInsertBuilder{ builder: db.BatchInsertBuilder{ - Table: q.GetTable("exp_history_transactions"), + Table: q.GetTable("history_transactions"), MaxBatchSize: maxBatchSize, }, } diff --git a/services/horizon/internal/db2/history/transaction_test.go b/services/horizon/internal/db2/history/transaction_test.go index bc1824a099..25a086e0ff 100644 --- a/services/horizon/internal/db2/history/transaction_test.go +++ b/services/horizon/internal/db2/history/transaction_test.go @@ -9,7 +9,6 @@ import ( sq "github.com/Masterminds/squirrel" "github.com/guregu/null" "github.com/stellar/go/exp/ingest/io" - "github.com/stellar/go/services/horizon/internal/db2/sqx" "github.com/stellar/go/services/horizon/internal/test" "github.com/stellar/go/services/horizon/internal/toid" "github.com/stellar/go/xdr" @@ -137,16 +136,6 @@ func TestExtraChecksTransactionSuccessfulFalseResultTrue(t *testing.T) { tt.Assert.Contains(err.Error(), "Corrupted data! 
`successful=false` but returned transaction is success") } -func insertTransaction( - tt *test.T, q *Q, tableName string, transaction io.LedgerTransaction, sequence int32, -) { - m, err := transactionToMap(transaction, uint32(sequence)) - tt.Assert.NoError(err) - insertSQL := sq.Insert(tableName).SetMap(m) - _, err = q.Exec(insertSQL) - tt.Assert.NoError(err) -} - type testTransaction struct { index uint32 envelopeXDR string @@ -182,128 +171,7 @@ func buildLedgerTransaction(t *testing.T, tx testTransaction) io.LedgerTransacti return transaction } -func TestCheckExpTransactions(t *testing.T) { - tt := test.Start(t) - defer tt.Finish() - test.ResetHorizonDB(t, tt.HorizonDB) - q := &Q{tt.HorizonSession()} - - transaction := buildLedgerTransaction( - tt.T, - testTransaction{ - index: 1, - envelopeXDR: "AAAAACiSTRmpH6bHC6Ekna5e82oiGY5vKDEEUgkq9CB//t+rAAAAZAEXUhsAADDGAAAAAQAAAAAAAAAAAAAAAF3v3WAAAAABAAAACjEwOTUzMDMyNTAAAAAAAAEAAAAAAAAAAQAAAAAOr5CG1ax6qG2fBEgXJlF0sw5W0irOS6N/NRDbavBm4QAAAAAAAAAAE32fwAAAAAAAAAABf/7fqwAAAEAkWgyAgV5tF3m1y1TIDYkNXP8pZLAwcxhWEi4f3jcZJK7QrKSXhKoawVGrp5NNs4y9dgKt8zHZ8KbJreFBUsIB", - resultXDR: "AAAAAAAAAGQAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAA=", - feeChangesXDR: "AAAAAgAAAAMBnyVBAAAAAAAAAAAokk0ZqR+mxwuhJJ2uXvNqIhmObygxBFIJKvQgf/7fqwAALNdjj1x7ARdSGwAAMMUAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEBnye/AAAAAAAAAAAokk0ZqR+mxwuhJJ2uXvNqIhmObygxBFIJKvQgf/7fqwAALNdjj1wXARdSGwAAMMUAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - metaXDR: "AAAAAAAAAAMAAAACAAAAAAAAAAMAAAAAAAAAABbxCy3mLg3hiTqX4VUEEp60pFOrJNxYM1JtxXTwXhY2AAAAAAvrwgAAAAADAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAMAAAAAAAAAAAGUcmKO5465JxTSLQOQljwk2SfqAJmZSG6JH6wtqpwhDeC2s5t4PNQAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAwAAAAEAAAADAAAAAAAAAAABlHJijueOuScU0i0DkJY8JNkn6gCZmUhuiR+sLaqcIQAAAAAL68IAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAMAAAADAAAAAAAAAAAW8Qst5i4N4Yk6l+FVBBKetKRTqyTcWDNSbcV08F4WNgAAAAAL68IAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAADAAAAAAAAAAAW8Qst5i4N4Yk6l+FVBBKetKRTqyTcWDNSbcV08F4WNg3gtrObeDzUAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAABAAAAAwAAAAAAAAAAAZRyYo7njrknFNItA5CWPCTZJ+oAmZlIbokfrC2qnCEAAAAAC+vCAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", - hash: "ea1e96dd4aa8a16e357842b0fcdb66c1ab03fade7dcd3a99f88ed28ea8c30f6a", - }, - ) - - otherTransaction := buildLedgerTransaction( - tt.T, - testTransaction{ - index: 2, - envelopeXDR: "AAAAAAGUcmKO5465JxTSLQOQljwk2SfqAJmZSG6JH6wtqpwhAAABLAAAAAAAAAABAAAAAAAAAAEAAAALaGVsbG8gd29ybGQAAAAAAwAAAAAAAAAAAAAAABbxCy3mLg3hiTqX4VUEEp60pFOrJNxYM1JtxXTwXhY2AAAAAAvrwgAAAAAAAAAAAQAAAAAW8Qst5i4N4Yk6l+FVBBKetKRTqyTcWDNSbcV08F4WNgAAAAAN4Lazj4x61AAAAAAAAAAFAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABLaqcIQAAAEBKwqWy3TaOxoGnfm9eUjfTRBvPf34dvDA0Nf+B8z4zBob90UXtuCqmQqwMCyH+okOI3c05br3khkH0yP4kCwcE", - resultXDR: "AAAAAAAAASwAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAFAAAAAAAAAAA=", - feeChangesXDR: "AAAAAgAAAAMBnyVBAAAAAAAAAAAokk0ZqR+mxwuhJJ2uXvNqIhmObygxBFIJKvQgf/7fqwAALNdjj1x7ARdSGwAAMMUAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEBnye/AAAAAAAAAAAokk0ZqR+mxwuhJJ2uXvNqIhmObygxBFIJKvQgf/7fqwAALNdjj1wXARdSGwAAMMUAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA==", - metaXDR: 
"AAAAAAAAAAMAAAACAAAAAAAAHqEAAAAAAAAAAB+lHtRjj4+h2/0Tj8iBQiaUDzLo4oRCLyUnytFHzAyIAAAAAAvrwgAAAB6hAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAHqEAAAAAAAAAABbxCy3mLg3hiTqX4VUEEp60pFOrJNxYM1JtxXTwXhY2DeC2s4+MeHwAAAADAAAAAgAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAB6hAAAAAAAAAACzMOD+8iU8qo+qbTYewT8lxKE/s1cE3FOCVWxsqJ74GwAAAAAL68IAAAAeoQAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAB6hAAAAAAAAAAAW8Qst5i4N4Yk6l+FVBBKetKRTqyTcWDNSbcV08F4WNg3gtrODoLZ8AAAAAwAAAAIAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAAeoQAAAAAAAAAASZcLtOTqf+cdbsq8HmLMkeqU06LN94UTWXuSBem5Z88AAAAAC+vCAAAAHqEAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAeoQAAAAAAAAAAFvELLeYuDeGJOpfhVQQSnrSkU6sk3FgzUm3FdPBeFjYN4Lazd7T0fAAAAAMAAAACAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAA=", - hash: "3389e9f0f1a65f19736cacf544c2e825313e8447f569233bb8db39aa607c8889", - }, - ) - - sequence := int32(123) - valid, err := q.CheckExpTransactions(sequence) - tt.Assert.True(valid) - tt.Assert.NoError(err) - - insertTransaction(tt, q, "exp_history_transactions", transaction, sequence) - insertTransaction(tt, q, "exp_history_transactions", otherTransaction, sequence) - insertTransaction(tt, q, "history_transactions", transaction, sequence) - - ledger := Ledger{ - Sequence: sequence, - LedgerHash: "4db1e4f145e9ee75162040d26284795e0697e2e84084624e7c6c723ebbf80118", - PreviousLedgerHash: null.NewString("4b0b8bace3b2438b2404776ce57643966855487ba6384724a3c664c7aa4cd9e4", true), - TotalOrderID: TotalOrderID{toid.New(int32(69859), 0, 0).ToInt64()}, - ImporterVersion: 321, - TransactionCount: 12, - SuccessfulTransactionCount: new(int32), - FailedTransactionCount: new(int32), - OperationCount: 23, - TotalCoins: 23451, - FeePool: 213, - BaseReserve: 687, - MaxTxSetSize: 345, - ProtocolVersion: 12, - BaseFee: 100, - ClosedAt: time.Now().UTC().Truncate(time.Second), - LedgerHeaderXDR: null.NewString("temp", true), - } - *ledger.SuccessfulTransactionCount = 12 - *ledger.FailedTransactionCount = 3 - _, err = q.Exec(sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger))) - tt.Assert.NoError(err) - _, err = q.Exec(sq.Insert("exp_history_ledgers").SetMap(ledgerToMap(ledger))) - tt.Assert.NoError(err) - - valid, err = q.CheckExpTransactions(sequence) - tt.Assert.False(valid) - tt.Assert.NoError(err) - - insertTransaction(tt, q, "history_transactions", otherTransaction, sequence) - - valid, err = q.CheckExpTransactions(sequence) - tt.Assert.True(valid) - tt.Assert.NoError(err) - - for fieldName, value := range map[string]interface{}{ - "id": 999, - "transaction_hash": "hash", - "account": "account", - "account_sequence": "999", - "max_fee": 999, - "fee_charged": 999, - "operation_count": 999, - "tx_envelope": "envelope", - "tx_result": "result", - "tx_meta": "meta", - "tx_fee_meta": "fee_meta", - "signatures": sqx.StringArray([]string{"sig1", "sig2"}), - "time_bounds": sq.Expr("int8range(?,?)", 123, 456), - "memo_type": "invalid", - "memo": "invalid-memo", - "successful": false, - } { - updateSQL := sq.Update("history_transactions"). - Set(fieldName, value). - Where( - "ledger_sequence = ? AND application_order = ?", - sequence, otherTransaction.Index, - ) - _, err = q.Exec(updateSQL) - tt.Assert.NoError(err) - - valid, err = q.CheckExpTransactions(sequence) - tt.Assert.NoError(err) - tt.Assert.False(valid) - - _, err = q.Exec(sq.Delete("history_transactions"). - Where( - "ledger_sequence = ? 
AND application_order = ?", - sequence, otherTransaction.Index, - )) - tt.Assert.NoError(err) - - insertTransaction(tt, q, "history_transactions", otherTransaction, sequence) - - valid, err := q.CheckExpTransactions(ledger.Sequence) - tt.Assert.NoError(err) - tt.Assert.True(valid) - } -} - -func TestInsertExpTransactionDoesNotAllowDuplicateIndex(t *testing.T) { +func TestInsertTransactionDoesNotAllowDuplicateIndex(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) @@ -335,9 +203,9 @@ func TestInsertExpTransactionDoesNotAllowDuplicateIndex(t *testing.T) { tt.Assert.NoError(insertBuilder.Add(secondTransaction, sequence)) tt.Assert.EqualError( insertBuilder.Exec(), - "error adding values while inserting to exp_history_transactions: "+ + "error adding values while inserting to history_transactions: "+ "exec failed: pq: duplicate key value violates unique constraint "+ - "\"exp_history_transactions_id_idx\"", + "\"hs_transaction_by_id\"", ) ledger := Ledger{ @@ -361,11 +229,11 @@ func TestInsertExpTransactionDoesNotAllowDuplicateIndex(t *testing.T) { } *ledger.SuccessfulTransactionCount = 12 *ledger.FailedTransactionCount = 3 - _, err := q.Exec(sq.Insert("exp_history_ledgers").SetMap(ledgerToMap(ledger))) + _, err := q.Exec(sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger))) tt.Assert.NoError(err) var transactions []Transaction - tt.Assert.NoError(q.Select(&transactions, selectExpTransaction)) + tt.Assert.NoError(q.Transactions().Select(&transactions)) tt.Assert.Len(transactions, 1) tt.Assert.Equal( "19aaa18db88605aedec04659fb45e06f240b022eb2d429e05133e4d53cd945ba", @@ -373,7 +241,7 @@ func TestInsertExpTransactionDoesNotAllowDuplicateIndex(t *testing.T) { ) } -func TestInsertExpTransaction(t *testing.T) { +func TestInsertTransaction(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) @@ -403,8 +271,6 @@ func TestInsertExpTransaction(t *testing.T) { *ledger.FailedTransactionCount = 3 _, err := q.Exec(sq.Insert("history_ledgers").SetMap(ledgerToMap(ledger))) tt.Assert.NoError(err) - _, err = q.Exec(sq.Insert("exp_history_ledgers").SetMap(ledgerToMap(ledger))) - tt.Assert.NoError(err) insertBuilder := q.NewTransactionBatchInsertBuilder(0) success := new(bool) @@ -784,7 +650,7 @@ func TestInsertExpTransaction(t *testing.T) { tt.Assert.NoError(insertBuilder.Exec()) var transactions []Transaction - tt.Assert.NoError(q.Select(&transactions, selectExpTransaction)) + tt.Assert.NoError(q.Transactions().IncludeFailed().Select(&transactions)) tt.Assert.Len(transactions, 1) transaction := transactions[0] @@ -794,13 +660,13 @@ func TestInsertExpTransaction(t *testing.T) { transaction.UpdatedAt = testCase.expected.UpdatedAt // compare ClosedAt separately because reflect.DeepEqual does not handle time.Time - expClosedAt := transaction.LedgerCloseTime + closedAt := transaction.LedgerCloseTime transaction.LedgerCloseTime = testCase.expected.LedgerCloseTime - tt.Assert.True(expClosedAt.Equal(testCase.expected.LedgerCloseTime)) + tt.Assert.True(closedAt.Equal(testCase.expected.LedgerCloseTime)) tt.Assert.Equal(transaction, testCase.expected) - _, err = q.Exec(sq.Delete("exp_history_transactions")) + _, err = q.Exec(sq.Delete("history_transactions")) tt.Assert.NoError(err) }) } diff --git a/services/horizon/internal/db2/schema/bindata.go b/services/horizon/internal/db2/schema/bindata.go index 30050a9389..a5de4153e8 100644 --- a/services/horizon/internal/db2/schema/bindata.go +++ b/services/horizon/internal/db2/schema/bindata.go @@ 
-24,6 +24,7 @@ // migrations/2_index_participants_by_toid.sql (277B) // migrations/30_exp_history_trades.sql (2.297kB) // migrations/31_exp_history_effects.sql (209B) +// migrations/32_drop_exp_history_tables.sql (3.826kB) // migrations/3_use_sequence_in_history_accounts.sql (447B) // migrations/4_add_protocol_version.sql (188B) // migrations/5_create_trades_table.sql (1.1kB) @@ -115,7 +116,7 @@ func migrations10_add_trades_priceSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/10_add_trades_price.sql", size: 1220, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/10_add_trades_price.sql", size: 1220, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x85, 0x1e, 0x7f, 0x86, 0xc2, 0x2c, 0xc9, 0x91, 0xe2, 0x3f, 0x1a, 0xb2, 0x15, 0xea, 0x6c, 0x5, 0xfa, 0x1f, 0x99, 0xd2, 0xbb, 0x1, 0x5e, 0x75, 0x91, 0x8b, 0xb, 0x46, 0xa, 0x6, 0xbc, 0x61}} return a, nil } @@ -135,7 +136,7 @@ func migrations11_add_trades_account_indexSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/11_add_trades_account_index.sql", size: 273, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/11_add_trades_account_index.sql", size: 273, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x98, 0xa4, 0x60, 0x40, 0x6b, 0xa6, 0x5e, 0xcc, 0x67, 0xb3, 0x85, 0x82, 0xce, 0x39, 0xc6, 0xbb, 0x2c, 0xa7, 0x2e, 0xb6, 0x9a, 0xd, 0xba, 0x91, 0x28, 0x80, 0x77, 0x46, 0x8c, 0x67, 0x55, 0x9f}} return a, nil } @@ -155,7 +156,7 @@ func migrations12_asset_stats_amount_stringSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/12_asset_stats_amount_string.sql", size: 197, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/12_asset_stats_amount_string.sql", size: 197, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x3a, 0xd1, 0x4c, 0x37, 0xe7, 0xfd, 0xdb, 0x3a, 0xf2, 0x37, 0x9b, 0x8d, 0x77, 0x99, 0x61, 0x15, 0x10, 0x51, 0xe5, 0xe5, 0x7f, 0xec, 0x7e, 0x7, 0xe5, 0x18, 0x8a, 0xf2, 0xb4, 0x66, 0x17, 0x60}} return a, nil } @@ -175,7 +176,7 @@ func migrations13_trade_offer_idsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/13_trade_offer_ids.sql", size: 484, mode: os.FileMode(0644), modTime: time.Unix(1571945053, 0)} + info := bindataFileInfo{name: "migrations/13_trade_offer_ids.sql", size: 484, mode: os.FileMode(0644), modTime: time.Unix(1572426892, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x77, 0x71, 0x79, 0x45, 0x9b, 0x9e, 0x80, 0x36, 0x80, 0x5a, 0xc2, 0x1f, 0xb4, 0xba, 0xdd, 0x85, 0x65, 0xd0, 0x5c, 0x47, 0x47, 0xf, 0x4d, 0xc7, 0x9b, 0x92, 0x36, 0xd7, 0xe6, 0x57, 0x46, 0xa}} return a, nil } @@ -195,7 +196,7 @@ func migrations14_fix_asset_toml_fieldSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/14_fix_asset_toml_field.sql", size: 156, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/14_fix_asset_toml_field.sql", size: 156, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xdf, 0x28, 0x86, 0x8, 0xa5, 0xec, 0x44, 0x7b, 0xf7, 0x88, 0x3b, 0x6d, 0xf6, 0x5c, 0x6a, 0x17, 0x92, 0xbc, 0xd2, 
0x88, 0x94, 0xd4, 0x42, 0xb1, 0xc2, 0xac, 0x97, 0xb6, 0xd0, 0xeb, 0xd8, 0xa7}} return a, nil } @@ -215,7 +216,7 @@ func migrations15_ledger_failed_txsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/15_ledger_failed_txs.sql", size: 333, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/15_ledger_failed_txs.sql", size: 333, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xeb, 0x7, 0xd7, 0xc9, 0x5d, 0xbe, 0xd2, 0x21, 0xc1, 0xb0, 0x20, 0xc8, 0x5f, 0x3a, 0xe9, 0x99, 0xba, 0x5d, 0x83, 0xb4, 0xe8, 0x9f, 0x2d, 0xc3, 0x9a, 0xe1, 0x46, 0xf2, 0xd1, 0x1b, 0x48, 0x7a}} return a, nil } @@ -235,7 +236,7 @@ func migrations16_ingest_failed_transactionsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/16_ingest_failed_transactions.sql", size: 509, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/16_ingest_failed_transactions.sql", size: 509, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf9, 0x24, 0xd, 0x8a, 0x56, 0x1d, 0x41, 0x6a, 0x4, 0x7b, 0xe1, 0x9f, 0xfb, 0x78, 0x2, 0xec, 0xe2, 0x98, 0xac, 0xef, 0xc7, 0xc0, 0x96, 0xd1, 0xbf, 0x8f, 0xc6, 0x16, 0xa7, 0x3c, 0x4a, 0x33}} return a, nil } @@ -255,7 +256,7 @@ func migrations17_transaction_fee_paidSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/17_transaction_fee_paid.sql", size: 287, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/17_transaction_fee_paid.sql", size: 287, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x64, 0x93, 0x1c, 0x21, 0xf6, 0x63, 0xac, 0x5c, 0x8, 0x6b, 0xe9, 0x4f, 0xcb, 0xcb, 0xae, 0xdf, 0xa, 0x28, 0xa8, 0x6a, 0xfd, 0x61, 0x29, 0xc2, 0x60, 0xaf, 0xb7, 0x74, 0xfd, 0x0, 0x85, 0xb7}} return a, nil } @@ -275,7 +276,7 @@ func migrations18_account_for_signersSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/18_account_for_signers.sql", size: 481, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/18_account_for_signers.sql", size: 481, mode: os.FileMode(0644), modTime: time.Unix(1566913223, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x37, 0xdc, 0xe4, 0xb4, 0xd5, 0xcc, 0x53, 0xd9, 0x42, 0x8c, 0x12, 0x37, 0xcf, 0x13, 0x2c, 0x47, 0xe, 0xc7, 0xba, 0xe5, 0xc2, 0x17, 0x73, 0xe2, 0xc, 0xd2, 0x4a, 0xb3, 0x62, 0x75, 0x54, 0x7f}} return a, nil } @@ -295,7 +296,7 @@ func migrations19_offersSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/19_offers.sql", size: 1064, mode: os.FileMode(0644), modTime: time.Unix(1568706917, 0)} + info := bindataFileInfo{name: "migrations/19_offers.sql", size: 1064, mode: os.FileMode(0644), modTime: time.Unix(1572426892, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xbd, 0xff, 0xef, 0xfe, 0xb0, 0x5b, 0xa3, 0x92, 0xac, 0x9d, 0x98, 0x6c, 0xd4, 0x90, 0x9c, 0xe9, 0xae, 0x89, 0xfc, 0x54, 0x2b, 0xe3, 0x33, 0x1f, 0x0, 0xd6, 0x24, 0x9a, 0x1, 0xe1, 0xa6, 0x39}} return a, nil } @@ -315,7 +316,7 @@ func migrations1_initial_schemaSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/1_initial_schema.sql", size: 9977, mode: os.FileMode(0644), modTime: time.Unix(1571945053, 
0)} + info := bindataFileInfo{name: "migrations/1_initial_schema.sql", size: 9977, mode: os.FileMode(0644), modTime: time.Unix(1576519637, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x6d, 0xdd, 0xc3, 0x1d, 0x39, 0xb7, 0x5e, 0x6a, 0x64, 0x2b, 0xd5, 0x66, 0x83, 0x61, 0x79, 0xf7, 0x59, 0xce, 0x51, 0x66, 0x3c, 0xd0, 0xf3, 0x14, 0x35, 0xcd, 0x21, 0xc0, 0xae, 0x24, 0x84, 0x35}} return a, nil } @@ -335,7 +336,7 @@ func migrations20_account_for_signer_indexSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/20_account_for_signer_index.sql", size: 140, mode: os.FileMode(0644), modTime: time.Unix(1571945053, 0)} + info := bindataFileInfo{name: "migrations/20_account_for_signer_index.sql", size: 140, mode: os.FileMode(0644), modTime: time.Unix(1576519637, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x7, 0xd5, 0x6a, 0x4f, 0x6a, 0x95, 0xe0, 0x16, 0xa8, 0x25, 0xd7, 0x4, 0xec, 0x85, 0xe9, 0x13, 0x33, 0x4c, 0x9a, 0xa8, 0x74, 0xf9, 0x63, 0x52, 0xa5, 0x18, 0xaa, 0xdb, 0x4, 0xb5, 0xf7, 0x6e}} return a, nil } @@ -355,7 +356,7 @@ func migrations21_trades_remove_zero_amount_constraintsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/21_trades_remove_zero_amount_constraints.sql", size: 765, mode: os.FileMode(0644), modTime: time.Unix(1571945053, 0)} + info := bindataFileInfo{name: "migrations/21_trades_remove_zero_amount_constraints.sql", size: 765, mode: os.FileMode(0644), modTime: time.Unix(1572426892, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x24, 0x1a, 0x4b, 0x15, 0xd5, 0xce, 0x4, 0xe9, 0x43, 0x61, 0x69, 0xce, 0xed, 0x82, 0x11, 0x4e, 0xc7, 0x58, 0xef, 0x4a, 0x46, 0xef, 0x2a, 0x28, 0x13, 0x5e, 0x59, 0xf0, 0x69, 0x50, 0x45, 0x84}} return a, nil } @@ -375,7 +376,7 @@ func migrations22_trust_linesSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/22_trust_lines.sql", size: 955, mode: os.FileMode(0644), modTime: time.Unix(1576794170, 0)} + info := bindataFileInfo{name: "migrations/22_trust_lines.sql", size: 955, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe7, 0xd9, 0x90, 0x83, 0xc9, 0xb3, 0x1b, 0xc4, 0xe9, 0xc4, 0xbb, 0xcb, 0xb5, 0x92, 0x15, 0xaa, 0xef, 0x5d, 0x4e, 0xcf, 0x16, 0x6b, 0x49, 0xef, 0x85, 0x1a, 0xbf, 0xb6, 0x71, 0xb3, 0x92, 0x33}} return a, nil } @@ -395,7 +396,7 @@ func migrations23_exp_asset_statsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/23_exp_asset_stats.sql", size: 883, mode: os.FileMode(0644), modTime: time.Unix(1574194052, 0)} + info := bindataFileInfo{name: "migrations/23_exp_asset_stats.sql", size: 883, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x5f, 0x23, 0x96, 0xcb, 0x81, 0x52, 0xd5, 0xb, 0x3c, 0xd4, 0xb9, 0xd9, 0x24, 0xd3, 0x1a, 0x3d, 0x1a, 0xe0, 0xd2, 0x4, 0x40, 0xf5, 0x75, 0xe2, 0x1d, 0x26, 0xd3, 0x19, 0xcf, 0x70, 0xf, 0x36}} return a, nil } @@ -415,7 +416,7 @@ func migrations24_accountsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/24_accounts.sql", size: 1402, mode: os.FileMode(0644), modTime: time.Unix(1576794170, 0)} + info := bindataFileInfo{name: "migrations/24_accounts.sql", size: 1402, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa5, 0xf8, 0xf7, 0xeb, 0xe2, 0x3d, 0xda, 0xe, 0xc2, 0x78, 0x88, 0x16, 0x22, 0xbf, 0x22, 0xa8, 0x5a, 
0x17, 0x72, 0xd9, 0xab, 0x56, 0xa8, 0x55, 0x5a, 0x3f, 0x47, 0xf6, 0x18, 0xfa, 0x43, 0xa7}} return a, nil } @@ -435,7 +436,7 @@ func migrations25_expingest_rename_columnsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/25_expingest_rename_columns.sql", size: 641, mode: os.FileMode(0644), modTime: time.Unix(1576794170, 0)} + info := bindataFileInfo{name: "migrations/25_expingest_rename_columns.sql", size: 641, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x80, 0x44, 0x81, 0x62, 0xeb, 0xcf, 0x82, 0xaa, 0x9, 0x14, 0x4c, 0xb6, 0xd2, 0x2c, 0x41, 0x2d, 0xf0, 0x34, 0x4a, 0x18, 0x5a, 0x95, 0x3e, 0x8d, 0x60, 0xbe, 0x5, 0x10, 0xf5, 0xc2, 0x9c, 0x3a}} return a, nil } @@ -455,7 +456,7 @@ func migrations26_exp_history_ledgersSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/26_exp_history_ledgers.sql", size: 209, mode: os.FileMode(0644), modTime: time.Unix(1576794170, 0)} + info := bindataFileInfo{name: "migrations/26_exp_history_ledgers.sql", size: 209, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb0, 0xf9, 0xc6, 0xfd, 0xcb, 0x11, 0xc3, 0xc0, 0xbf, 0xac, 0x33, 0x8a, 0xc5, 0x8d, 0x47, 0xfd, 0x59, 0xd1, 0x1d, 0x69, 0x17, 0xba, 0xc7, 0xbb, 0xe6, 0x40, 0x58, 0x32, 0x14, 0x97, 0x96, 0x76}} return a, nil } @@ -475,7 +476,7 @@ func migrations27_exp_history_transactionsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/27_exp_history_transactions.sql", size: 630, mode: os.FileMode(0644), modTime: time.Unix(1578352512, 0)} + info := bindataFileInfo{name: "migrations/27_exp_history_transactions.sql", size: 630, mode: os.FileMode(0644), modTime: time.Unix(1579093592, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x13, 0xbf, 0x14, 0x32, 0x4a, 0xc, 0x39, 0x8a, 0xff, 0x9d, 0xaf, 0x3f, 0x8e, 0xb4, 0xe, 0xf2, 0x2b, 0x26, 0x67, 0xf6, 0xcd, 0x20, 0xe2, 0x5b, 0x98, 0x78, 0xc2, 0x10, 0xcc, 0x52, 0xd6, 0xc3}} return a, nil } @@ -495,7 +496,7 @@ func migrations28_exp_history_operationsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/28_exp_history_operations.sql", size: 439, mode: os.FileMode(0644), modTime: time.Unix(1578352512, 0)} + info := bindataFileInfo{name: "migrations/28_exp_history_operations.sql", size: 439, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x59, 0x2d, 0xc6, 0x70, 0x76, 0x72, 0xc5, 0xb1, 0xc6, 0xe7, 0xa0, 0xb2, 0xfc, 0x78, 0x71, 0xa, 0x43, 0x8e, 0x53, 0x9, 0x19, 0x10, 0xcb, 0x75, 0x3b, 0x66, 0x1, 0x57, 0x43, 0xd2, 0x8e, 0x89}} return a, nil } @@ -515,7 +516,7 @@ func migrations29_exp_history_assetsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/29_exp_history_assets.sql", size: 206, mode: os.FileMode(0644), modTime: time.Unix(1578594301, 0)} + info := bindataFileInfo{name: "migrations/29_exp_history_assets.sql", size: 206, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x25, 0xdf, 0x37, 0x6, 0x2c, 0x6e, 0x52, 0x9b, 0x16, 0x17, 0x9c, 0x29, 0x41, 0x41, 0xa6, 0x14, 0x59, 0xff, 0x29, 0xfa, 0x12, 0x2a, 0x8d, 0xce, 0x42, 0xad, 0xa6, 0x26, 0xef, 0x6f, 0x27, 0x17}} return a, nil } @@ -535,7 +536,7 @@ func migrations2_index_participants_by_toidSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: 
"migrations/2_index_participants_by_toid.sql", size: 277, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/2_index_participants_by_toid.sql", size: 277, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xdd, 0x9f, 0x5c, 0xe6, 0xd0, 0x43, 0x82, 0xa3, 0x8d, 0xb3, 0x64, 0xb1, 0x2, 0x4b, 0xe1, 0x96, 0x3, 0x92, 0xb3, 0xea, 0x3c, 0x2e, 0xb2, 0xad, 0x47, 0xcd, 0x92, 0x4c, 0x6c, 0x5c, 0x46, 0xfd}} return a, nil } @@ -555,7 +556,7 @@ func migrations30_exp_history_tradesSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/30_exp_history_trades.sql", size: 2297, mode: os.FileMode(0644), modTime: time.Unix(1578594301, 0)} + info := bindataFileInfo{name: "migrations/30_exp_history_trades.sql", size: 2297, mode: os.FileMode(0644), modTime: time.Unix(1578941604, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xaf, 0x32, 0xe, 0xc2, 0x9, 0x37, 0x62, 0x5f, 0x79, 0xfe, 0x3e, 0x11, 0x22, 0x66, 0x56, 0x36, 0x8, 0x0, 0x76, 0x70, 0xc3, 0x5c, 0x16, 0xe3, 0x2b, 0x73, 0x57, 0xe8, 0x38, 0x6c, 0x62, 0x1e}} return a, nil } @@ -575,11 +576,31 @@ func migrations31_exp_history_effectsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/31_exp_history_effects.sql", size: 209, mode: os.FileMode(0644), modTime: time.Unix(1578594271, 0)} + info := bindataFileInfo{name: "migrations/31_exp_history_effects.sql", size: 209, mode: os.FileMode(0644), modTime: time.Unix(1578949976, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xdf, 0x34, 0xf6, 0x6e, 0x86, 0x85, 0x4e, 0xfb, 0x74, 0xee, 0x7d, 0x4, 0xd2, 0xc7, 0x83, 0xae, 0x97, 0xab, 0xca, 0xe1, 0xb3, 0x7c, 0x25, 0xa8, 0x7f, 0x65, 0x2b, 0x1e, 0xdb, 0x88, 0x9b, 0xf}} return a, nil } +var _migrations32_drop_exp_history_tablesSql = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xbc\x57\xd1\x6e\xda\x30\x14\x7d\xe7\x2b\xae\xfa\x04\x1a\x4c\x7b\x5e\xb7\x49\x1d\xcd\x36\x54\x14\x36\x0a\xd2\xde\x22\xc7\xbe\x10\xab\xc1\x8e\x6c\x23\xda\x7d\xfd\x94\xa4\x69\x1d\xc7\xa4\x66\x43\xbc\xfa\x5c\x9f\x7b\x7c\x7c\x7c\x03\x93\x09\xbc\xdb\xf1\xad\x22\x06\x61\x5d\x0c\x06\xb7\xcb\xc5\x4f\x58\xdd\x7c\x9d\x47\x80\x8f\x45\x92\x71\x6d\xa4\x7a\x4a\x70\xb3\x41\x6a\x34\x50\xa2\x29\x61\x78\x7d\xb4\xd0\x28\xc2\x30\xa0\x8e\x68\x8d\x21\x7c\xb2\x40\x45\x0c\x97\x22\x29\x88\x32\x9c\xf2\x82\x88\x93\xf6\x85\x69\x16\x9a\xd0\x7f\xe8\x42\x28\x95\xfb\xa0\x4a\xab\x47\x40\x75\x8e\x6c\x8b\xca\x2e\xb4\xef\xe9\x56\x1e\xc4\x60\x30\x5d\x46\x37\xab\xa8\x67\xf3\x70\x00\x00\x30\x9f\xdd\x45\xe0\x40\x15\xc0\x05\xcd\xf7\x8c\x8b\x2d\x30\xdc\x90\x7d\x6e\xdc\x65\x2a\x85\x36\x8a\x70\xd1\x41\xb8\x60\xf8\x88\x7a\x30\xba\xee\x91\xd1\x3a\xb1\x47\x8b\x8d\x5f\x44\xd0\xcb\x65\x79\xc4\x34\xd8\xa5\x9d\x69\xe7\xad\xdf\xa5\x56\xed\x45\x84\x5a\x8f\xc8\x23\xed\x15\xbd\xac\x98\x37\x3d\xf3\x57\x5e\x26\x63\xf5\x58\xf3\x25\xac\x42\xce\x29\xa2\x9c\x09\x07\x04\x4a\x84\x90\x06\xa8\xc2\x72\x36\x78\x66\x31\xd1\x1f\xab\xda\xbe\x3c\x96\x75\xc3\xb2\xc8\x17\x40\x86\xba\x81\x3c\xd2\x3b\x88\xad\xbe\x03\x36\x07\x98\x4c\x60\x54\x8f\xb5\x14\x29\xd9\x6b\x04\x93\xa1\xd3\x14\x0c\x49\x73\x84\x8c\x68\x50\xb8\x41\x85\x82\xa2\xcd\x0e\x46\x76\x1e\x30\x10\xc1\x5c\xcf\x27\x93\x6a\xf5\x80\xc0\x24\x94\x5e\x1d\x88\x30\xe5\x66\x2a\x8b\x27\x30\x99\xd4\x2d\xda\xf7\xc0\x85\x36\x48\xd8\xb8\xdc\xd2\xd4\xbe\x2a\x30\x19\x96\x94\xde\xd1\x52\xf6\xf1\xe4\xa1\x3a\x88\xee\x1f\x09\xf5\x15\x94\x6e\x75\x73\xcc\x19\xa4\x7c\xcb\x85\x81\x78\xb1\x82\x78\x3d\x9f\x8f\xab\xca\x2b\xa9\x18\xaa\x2b\xe0\xc2\xe0\x16\x95\x83\xd6\xd3\x3e\xa1\xb9\xd4\xc8\x12\x62\xc0\xf0\x1d\x6a\x43\x76\x05\x1c\xb8\xc9\xe4\xbe\x5e\x81\x3f\x52\xa0\xb3\x55\x6e\x36\xa8\x8e\xb6\x4d\x89\xc6\xe6\xcc\x9e\x22\x58\x46\xdf\xa2\x65\x14\x4f\xa3\x7b\xaf\x4b\x43\xce\x46\x36\x51\xe9\xd1\x29\x34\x95\xa7\x2e\xc9\xae\xa4\xf6\xcb\xad\xba\xa2\x3a\x8b\xe2\x17\xae\xff\x14\xfd\xc2\xd3\xa3\xbb\x3a\x18\xd7\x89\xc6\x3c\x47\x05\xa9\x94\x39\x12\x51\x63\x85\xe2\x14\x13\xf1\xbc\xd1\x5e\x63\xad\xb5\x8a\xc3\xb9\xcd\xb6\x02\x2f\x38\x5d\xc4\xf7\xab\xe5\xcd\x2c\x5e\x79\x62\x9a\x58\x8e\x27\x34\x43\xfa\x00\xd3\x1f\xd1\xf4\x0e\x86\x43\xfb\x2e\xbe\x7c\x86\x0f\xa3\x51\x08\x9f\x8f\xa3\xf1\xf7\x53\xc7\xf2\x40\xce\x96\xc1\x4e\x0b\xc7\xfd\x5a\x69\x3d\x54\x9f\x9f\xe8\x2c\xbe\x8d\x7e\xd7\xbc\x46\xb1\x24\x7d\x4a\xec\xd0\xc3\x22\xf6\x3d\xdf\xf5\xfd\x2c\xfe\x0e\xa9\x51\x88\x30\x74\x1e\x89\xf5\xd9\x38\xc2\x5d\x5d\x44\x30\x73\x73\x6d\xbd\xbc\x4e\xf2\x43\xc8\xbb\x8f\x25\xa8\x43\xb0\x78\x37\x76\xbd\xec\xc1\xac\x6f\xb3\x35\x7d\x73\x29\x1f\xf6\xc5\x49\x4e\x34\xb9\x3b\xca\x5d\x10\xae\x92\x72\x94\x9e\xc0\xde\x4a\xf9\xb8\x13\xf2\x71\x67\x7a\x5b\xed\xd7\xf1\xec\xd7\xba\xab\x82\xb3\x90\xc6\xbe\xaf\xcb\xb8\xf9\x92\x1c\x3f\xe3\x89\xc7\xeb\x51\x7f\xfc\xdf\xa5\xe7\x67\xd3\x33\x74\xc6\xdf\x4d\x7f\x03\x00\x00\xff\xff\xf1\x72\xe9\x29\xf2\x0e\x00\x00") + +func migrations32_drop_exp_history_tablesSqlBytes() ([]byte, error) { + return bindataRead( + _migrations32_drop_exp_history_tablesSql, + "migrations/32_drop_exp_history_tables.sql", + ) +} + +func migrations32_drop_exp_history_tablesSql() (*asset, error) { + bytes, err := migrations32_drop_exp_history_tablesSqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "migrations/32_drop_exp_history_tables.sql", size: 3826, mode: os.FileMode(0644), modTime: time.Unix(1579093894, 0)} + a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1d, 0xf2, 0xbb, 
0xf, 0x6d, 0x53, 0xdf, 0x60, 0x63, 0x87, 0x3c, 0xe9, 0xdb, 0x18, 0x23, 0x51, 0x40, 0xa4, 0x38, 0xdf, 0xb2, 0x9a, 0xb, 0xa0, 0xe6, 0xbe, 0x91, 0x3b, 0x41, 0x53, 0x69, 0x9d}} + return a, nil +} + var _migrations3_use_sequence_in_history_accountsSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x91\x4d\x6b\xb3\x40\x14\x85\xf7\xf3\x2b\xce\x2e\xca\xfb\x66\x91\x6d\x5c\x4d\xc6\x1b\x22\x8c\x63\x3b\x5e\xdb\x64\x25\xa2\x43\x3a\x90\x6a\xeb\xd8\xaf\x7f\x5f\x48\xd3\x0f\x08\x6d\xa1\xcb\x73\x78\xe0\x39\xdc\x3b\x9f\xe3\xdf\xad\xdf\x8f\xcd\xe4\x50\xdd\x09\x65\x49\x32\xa1\xa4\xcb\x8a\x8c\x22\xdc\xf8\x30\x0d\xe3\x4b\xdd\xb4\xed\xf0\xd0\x4f\xa1\xf6\x5d\x1d\xdc\xbd\x00\x80\x92\xa5\x65\x5c\x67\xbc\xc1\xe2\x58\x64\x46\x59\xca\xc9\x30\x56\xbb\x53\x65\x0a\xe4\x99\xb9\x92\xba\xa2\x8f\x2c\xb7\x9f\x59\x49\xb5\x21\x2c\x12\x51\x92\x26\xc5\x08\x6e\x7a\x6c\x0e\xd1\xec\x1b\xef\xec\x3f\xa2\x13\x99\xcb\x6d\xe4\xbb\x18\x6b\x5b\xe4\x67\x33\xe3\x38\x11\x52\x33\x59\xb0\x5c\x69\x42\x61\xf4\xee\x0c\xc2\x1b\xa1\x0a\x5d\xe5\x06\xbe\x43\x49\x8c\x94\xd6\xb2\xd2\x8c\xde\x3d\xff\xbc\x64\xb9\x1c\xdd\xbe\x3d\x34\x21\xc4\x89\x10\x5f\xcf\x98\x0e\x4f\xfd\x1f\xec\xa9\x2d\x2e\xde\xf5\x89\x38\xa6\xdf\xde\x90\x88\xd7\x00\x00\x00\xff\xff\x55\xe2\xdd\x2c\xbf\x01\x00\x00") func migrations3_use_sequence_in_history_accountsSqlBytes() ([]byte, error) { @@ -595,7 +616,7 @@ func migrations3_use_sequence_in_history_accountsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/3_use_sequence_in_history_accounts.sql", size: 447, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/3_use_sequence_in_history_accounts.sql", size: 447, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x5, 0xcc, 0xcc, 0x51, 0xec, 0x12, 0x8, 0x85, 0x4d, 0x85, 0x25, 0x32, 0x18, 0x23, 0x54, 0xc5, 0x10, 0x42, 0x5b, 0x51, 0x28, 0xe4, 0x9, 0xa2, 0x56, 0xdc, 0xb5, 0xf8, 0x41, 0x1b, 0x28, 0x8}} return a, nil } @@ -615,7 +636,7 @@ func migrations4_add_protocol_versionSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/4_add_protocol_version.sql", size: 188, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/4_add_protocol_version.sql", size: 188, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x26, 0x7f, 0xb6, 0x87, 0x30, 0xa5, 0x8c, 0xee, 0x55, 0xbb, 0x12, 0x6, 0x1b, 0xee, 0xfc, 0x6a, 0xa0, 0x71, 0x60, 0xcc, 0xf7, 0x36, 0x56, 0xb3, 0x39, 0x1f, 0x1a, 0xd2, 0x6, 0xe4, 0x58, 0x8e}} return a, nil } @@ -635,7 +656,7 @@ func migrations5_create_trades_tableSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/5_create_trades_table.sql", size: 1100, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/5_create_trades_table.sql", size: 1100, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x4f, 0x51, 0x6e, 0x49, 0x10, 0xd6, 0xf1, 0x48, 0xc6, 0x8d, 0xe5, 0xbe, 0x2, 0x94, 0xba, 0x20, 0x37, 0x7b, 0x10, 0x8b, 0x84, 0x7, 0xac, 0x1b, 0xb4, 0xac, 0xc3, 0x6d, 0xbc, 0x54, 0x81, 0xe3}} return a, nil } @@ -655,7 +676,7 @@ func migrations6_create_assets_tableSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/6_create_assets_table.sql", size: 366, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info 
:= bindataFileInfo{name: "migrations/6_create_assets_table.sql", size: 366, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb1, 0x3f, 0x47, 0xea, 0x8a, 0x5d, 0x60, 0xc8, 0x90, 0x36, 0xc8, 0x4f, 0x68, 0xc5, 0xd3, 0xa0, 0xcd, 0xae, 0x5a, 0xc3, 0x75, 0xbd, 0xb4, 0xbb, 0x51, 0xf2, 0x68, 0x54, 0x79, 0xac, 0xa2, 0x8a}} return a, nil } @@ -675,7 +696,7 @@ func migrations7_modify_trades_tableSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/7_modify_trades_table.sql", size: 2303, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/7_modify_trades_table.sql", size: 2303, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb1, 0x2e, 0xba, 0x36, 0x5e, 0x3a, 0x3f, 0x3a, 0x8f, 0xe4, 0xfd, 0xc6, 0xb8, 0xeb, 0xbf, 0xda, 0x2b, 0xc6, 0xcd, 0xe3, 0xb5, 0x9a, 0x78, 0xf9, 0x9c, 0x2d, 0xcf, 0xe7, 0xb1, 0x6e, 0xa0, 0x3e}} return a, nil } @@ -695,7 +716,7 @@ func migrations8_add_aggregatorsSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/8_add_aggregators.sql", size: 907, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/8_add_aggregators.sql", size: 907, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xae, 0xba, 0x87, 0x6f, 0x41, 0xf3, 0xf6, 0x28, 0x25, 0xc2, 0x19, 0xdf, 0x41, 0x9a, 0x4b, 0xf3, 0x8, 0x37, 0x29, 0x2b, 0x92, 0x12, 0x9f, 0xb5, 0x9f, 0x9d, 0x50, 0x82, 0x6, 0xa5, 0xbb, 0xf6}} return a, nil } @@ -715,7 +736,7 @@ func migrations8_create_asset_stats_tableSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/8_create_asset_stats_table.sql", size: 441, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/8_create_asset_stats_table.sql", size: 441, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x2d, 0x21, 0x5, 0xfc, 0x3f, 0xe0, 0xe1, 0xe6, 0x50, 0x2c, 0x25, 0xc0, 0x23, 0x87, 0xa3, 0x99, 0xad, 0xc8, 0xc1, 0x67, 0xca, 0x65, 0xc0, 0x91, 0xf6, 0x5c, 0x29, 0xab, 0x78, 0xbe, 0xe4, 0x5e}} return a, nil } @@ -735,7 +756,7 @@ func migrations9_add_header_xdrSql() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "migrations/9_add_header_xdr.sql", size: 161, mode: os.FileMode(0644), modTime: time.Unix(1566075719, 0)} + info := bindataFileInfo{name: "migrations/9_add_header_xdr.sql", size: 161, mode: os.FileMode(0644), modTime: time.Unix(1566825316, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x44, 0xe5, 0x20, 0x47, 0xc8, 0x66, 0xd0, 0x16, 0xfa, 0xeb, 0xe, 0xba, 0x80, 0xbd, 0xc3, 0xa6, 0x0, 0x9e, 0xc, 0xb5, 0x45, 0xb9, 0x78, 0x26, 0x8, 0xef, 0x94, 0x23, 0xbe, 0x85, 0x2c, 0xe4}} return a, nil } @@ -831,38 +852,71 @@ func AssetNames() []string { // _bindata is a table, holding each asset generator, mapped to its name. 
var _bindata = map[string]func() (*asset, error){ - "migrations/10_add_trades_price.sql": migrations10_add_trades_priceSql, - "migrations/11_add_trades_account_index.sql": migrations11_add_trades_account_indexSql, - "migrations/12_asset_stats_amount_string.sql": migrations12_asset_stats_amount_stringSql, - "migrations/13_trade_offer_ids.sql": migrations13_trade_offer_idsSql, - "migrations/14_fix_asset_toml_field.sql": migrations14_fix_asset_toml_fieldSql, - "migrations/15_ledger_failed_txs.sql": migrations15_ledger_failed_txsSql, - "migrations/16_ingest_failed_transactions.sql": migrations16_ingest_failed_transactionsSql, - "migrations/17_transaction_fee_paid.sql": migrations17_transaction_fee_paidSql, - "migrations/18_account_for_signers.sql": migrations18_account_for_signersSql, - "migrations/19_offers.sql": migrations19_offersSql, - "migrations/1_initial_schema.sql": migrations1_initial_schemaSql, - "migrations/20_account_for_signer_index.sql": migrations20_account_for_signer_indexSql, + "migrations/10_add_trades_price.sql": migrations10_add_trades_priceSql, + + "migrations/11_add_trades_account_index.sql": migrations11_add_trades_account_indexSql, + + "migrations/12_asset_stats_amount_string.sql": migrations12_asset_stats_amount_stringSql, + + "migrations/13_trade_offer_ids.sql": migrations13_trade_offer_idsSql, + + "migrations/14_fix_asset_toml_field.sql": migrations14_fix_asset_toml_fieldSql, + + "migrations/15_ledger_failed_txs.sql": migrations15_ledger_failed_txsSql, + + "migrations/16_ingest_failed_transactions.sql": migrations16_ingest_failed_transactionsSql, + + "migrations/17_transaction_fee_paid.sql": migrations17_transaction_fee_paidSql, + + "migrations/18_account_for_signers.sql": migrations18_account_for_signersSql, + + "migrations/19_offers.sql": migrations19_offersSql, + + "migrations/1_initial_schema.sql": migrations1_initial_schemaSql, + + "migrations/20_account_for_signer_index.sql": migrations20_account_for_signer_indexSql, + "migrations/21_trades_remove_zero_amount_constraints.sql": migrations21_trades_remove_zero_amount_constraintsSql, - "migrations/22_trust_lines.sql": migrations22_trust_linesSql, - "migrations/23_exp_asset_stats.sql": migrations23_exp_asset_statsSql, - "migrations/24_accounts.sql": migrations24_accountsSql, - "migrations/25_expingest_rename_columns.sql": migrations25_expingest_rename_columnsSql, - "migrations/26_exp_history_ledgers.sql": migrations26_exp_history_ledgersSql, - "migrations/27_exp_history_transactions.sql": migrations27_exp_history_transactionsSql, - "migrations/28_exp_history_operations.sql": migrations28_exp_history_operationsSql, - "migrations/29_exp_history_assets.sql": migrations29_exp_history_assetsSql, - "migrations/2_index_participants_by_toid.sql": migrations2_index_participants_by_toidSql, - "migrations/30_exp_history_trades.sql": migrations30_exp_history_tradesSql, - "migrations/31_exp_history_effects.sql": migrations31_exp_history_effectsSql, - "migrations/3_use_sequence_in_history_accounts.sql": migrations3_use_sequence_in_history_accountsSql, - "migrations/4_add_protocol_version.sql": migrations4_add_protocol_versionSql, - "migrations/5_create_trades_table.sql": migrations5_create_trades_tableSql, - "migrations/6_create_assets_table.sql": migrations6_create_assets_tableSql, - "migrations/7_modify_trades_table.sql": migrations7_modify_trades_tableSql, - "migrations/8_add_aggregators.sql": migrations8_add_aggregatorsSql, - "migrations/8_create_asset_stats_table.sql": migrations8_create_asset_stats_tableSql, - 
"migrations/9_add_header_xdr.sql": migrations9_add_header_xdrSql, + + "migrations/22_trust_lines.sql": migrations22_trust_linesSql, + + "migrations/23_exp_asset_stats.sql": migrations23_exp_asset_statsSql, + + "migrations/24_accounts.sql": migrations24_accountsSql, + + "migrations/25_expingest_rename_columns.sql": migrations25_expingest_rename_columnsSql, + + "migrations/26_exp_history_ledgers.sql": migrations26_exp_history_ledgersSql, + + "migrations/27_exp_history_transactions.sql": migrations27_exp_history_transactionsSql, + + "migrations/28_exp_history_operations.sql": migrations28_exp_history_operationsSql, + + "migrations/29_exp_history_assets.sql": migrations29_exp_history_assetsSql, + + "migrations/2_index_participants_by_toid.sql": migrations2_index_participants_by_toidSql, + + "migrations/30_exp_history_trades.sql": migrations30_exp_history_tradesSql, + + "migrations/31_exp_history_effects.sql": migrations31_exp_history_effectsSql, + + "migrations/32_drop_exp_history_tables.sql": migrations32_drop_exp_history_tablesSql, + + "migrations/3_use_sequence_in_history_accounts.sql": migrations3_use_sequence_in_history_accountsSql, + + "migrations/4_add_protocol_version.sql": migrations4_add_protocol_versionSql, + + "migrations/5_create_trades_table.sql": migrations5_create_trades_tableSql, + + "migrations/6_create_assets_table.sql": migrations6_create_assets_tableSql, + + "migrations/7_modify_trades_table.sql": migrations7_modify_trades_tableSql, + + "migrations/8_add_aggregators.sql": migrations8_add_aggregatorsSql, + + "migrations/8_create_asset_stats_table.sql": migrations8_create_asset_stats_tableSql, + + "migrations/9_add_header_xdr.sql": migrations9_add_header_xdrSql, } // AssetDir returns the file names below a certain @@ -931,6 +985,7 @@ var _bintree = &bintree{nil, map[string]*bintree{ "2_index_participants_by_toid.sql": &bintree{migrations2_index_participants_by_toidSql, map[string]*bintree{}}, "30_exp_history_trades.sql": &bintree{migrations30_exp_history_tradesSql, map[string]*bintree{}}, "31_exp_history_effects.sql": &bintree{migrations31_exp_history_effectsSql, map[string]*bintree{}}, + "32_drop_exp_history_tables.sql": &bintree{migrations32_drop_exp_history_tablesSql, map[string]*bintree{}}, "3_use_sequence_in_history_accounts.sql": &bintree{migrations3_use_sequence_in_history_accountsSql, map[string]*bintree{}}, "4_add_protocol_version.sql": &bintree{migrations4_add_protocol_versionSql, map[string]*bintree{}}, "5_create_trades_table.sql": &bintree{migrations5_create_trades_tableSql, map[string]*bintree{}}, diff --git a/services/horizon/internal/db2/schema/migrations/32_drop_exp_history_tables.sql b/services/horizon/internal/db2/schema/migrations/32_drop_exp_history_tables.sql new file mode 100644 index 0000000000..25394fbdfb --- /dev/null +++ b/services/horizon/internal/db2/schema/migrations/32_drop_exp_history_tables.sql @@ -0,0 +1,131 @@ +-- +migrate Up + +DROP TABLE exp_history_effects cascade; + +DROP TABLE exp_history_trades cascade; + +DROP TABLE exp_history_assets cascade; + +DROP TABLE exp_history_operation_participants cascade; + +DROP TABLE exp_history_operations cascade; + +DROP TABLE exp_history_transaction_participants cascade; + +DROP TABLE exp_history_accounts cascade; + +DROP TABLE exp_history_transactions cascade; + +DROP TABLE exp_history_ledgers cascade; + +-- +migrate Down + +CREATE TABLE exp_history_ledgers ( + LIKE history_ledgers + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_transactions ( + LIKE 
history_transactions + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_accounts ( + LIKE history_accounts + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_transaction_participants ( + LIKE history_transaction_participants + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_operations ( + LIKE history_operations + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_operation_participants ( + LIKE history_operation_participants + including defaults + including constraints + including indexes +); + +CREATE TABLE exp_history_assets ( + LIKE history_assets + including defaults + including constraints + including indexes +); + + +-- we cannot create exp_history_trades as: + +-- CREATE TABLE exp_history_trades ( +-- LIKE history_trades +-- including defaults +-- including constraints +-- including indexes +-- ); + +-- because the history_trades table has reference constraints to history_accounts and history_assets +-- and we do not want to copy those constraints. instead, we want to reference the +-- exp_history_accounts and exp_history_assets tables + +CREATE TABLE exp_history_trades ( + history_operation_id bigint NOT NULL, + "order" integer NOT NULL, + ledger_closed_at timestamp without time zone NOT NULL, + offer_id bigint NOT NULL, + base_account_id bigint NOT NULL REFERENCES exp_history_accounts(id), + base_asset_id bigint NOT NULL REFERENCES exp_history_assets(id), + base_amount bigint NOT NULL, + counter_account_id bigint NOT NULL REFERENCES exp_history_accounts(id), + counter_asset_id bigint NOT NULL REFERENCES exp_history_assets(id), + counter_amount bigint NOT NULL, + base_is_seller boolean, + price_n bigint, + price_d bigint, + base_offer_id bigint, + counter_offer_id bigint, + CONSTRAINT exp_history_trades_base_amount_check CHECK ((base_amount >= 0)), + CONSTRAINT exp_history_trades_check CHECK ((base_asset_id < counter_asset_id)), + CONSTRAINT exp_history_trades_counter_amount_check CHECK ((counter_amount >= 0)) +); + + +CREATE INDEX exp_htrd_by_base_account ON exp_history_trades USING btree (base_account_id); + +CREATE INDEX exp_htrd_by_base_offer ON exp_history_trades USING btree (base_offer_id); + +CREATE INDEX exp_htrd_by_counter_account ON exp_history_trades USING btree (counter_account_id); + +CREATE INDEX exp_htrd_by_counter_offer ON exp_history_trades USING btree (counter_offer_id); + +CREATE INDEX exp_htrd_by_offer ON exp_history_trades USING btree (offer_id); + +CREATE INDEX exp_htrd_counter_lookup ON exp_history_trades USING btree (counter_asset_id); + +CREATE INDEX exp_htrd_pair_time_lookup ON exp_history_trades USING btree (base_asset_id, counter_asset_id, ledger_closed_at); + +CREATE UNIQUE INDEX exp_htrd_pid ON exp_history_trades USING btree (history_operation_id, "order"); + +CREATE INDEX exp_htrd_time_lookup ON exp_history_trades USING btree (ledger_closed_at); + +CREATE TABLE exp_history_effects ( + LIKE history_effects + including defaults + including constraints + including indexes +); diff --git a/services/horizon/internal/expingest/main.go b/services/horizon/internal/expingest/main.go index 995881316d..a32b400ab0 100644 --- a/services/horizon/internal/expingest/main.go +++ b/services/horizon/internal/expingest/main.go @@ -78,7 +78,7 @@ type dbQ interface { GetExpStateInvalid() (bool, error) GetAllOffers() ([]history.Offer, error) TruncateExpingestStateTables() error - 
RemoveExpIngestHistory(uint32) (history.ExpIngestRemovalSummary, error) + RemoveIngestHistory(uint32) (history.IngestHistoryRemovalSummary, error) } type dbSession interface { diff --git a/services/horizon/internal/expingest/pipeline_hooks_test.go b/services/horizon/internal/expingest/pipeline_hooks_test.go index 1e38cbade8..a985586fcd 100644 --- a/services/horizon/internal/expingest/pipeline_hooks_test.go +++ b/services/horizon/internal/expingest/pipeline_hooks_test.go @@ -47,8 +47,8 @@ func (s *PreProcessingHookTestSuite) TearDownTest() { func (s *PreProcessingHookTestSuite) TestStateHookSucceedsWithPreExistingTx() { s.historyQ.On("GetTx").Return(&sqlx.Tx{}).Once() s.historyQ.On("GetLastLedgerExpIngest").Return(uint32(0), nil).Once() - s.historyQ.On("RemoveExpIngestHistory", s.ledgerSeqFromContext).Return( - history.ExpIngestRemovalSummary{3, 3, 3, 3, 3, 3, 3}, nil, + s.historyQ.On("RemoveIngestHistory", s.ledgerSeqFromContext).Return( + history.IngestHistoryRemovalSummary{3, 3, 3, 3, 3, 3, 3}, nil, ) newCtx, err := preProcessingHook(s.ctx, statePipeline, s.system, s.historyQ) @@ -62,8 +62,8 @@ func (s *PreProcessingHookTestSuite) TestStateHookSucceedsWithoutPreExistingTx() s.historyQ.On("GetTx").Return(nilTx).Once() s.historyQ.On("Begin").Return(nil).Once() s.historyQ.On("GetLastLedgerExpIngest").Return(uint32(0), nil).Once() - s.historyQ.On("RemoveExpIngestHistory", s.ledgerSeqFromContext).Return( - history.ExpIngestRemovalSummary{3, 3, 3, 3, 3, 3, 3}, nil, + s.historyQ.On("RemoveIngestHistory", s.ledgerSeqFromContext).Return( + history.IngestHistoryRemovalSummary{3, 3, 3, 3, 3, 3, 3}, nil, ) newCtx, err := preProcessingHook(s.ctx, statePipeline, s.system, s.historyQ) @@ -86,8 +86,8 @@ func (s *PreProcessingHookTestSuite) TestStateHookRollsbackOnGetLastLedgerExpIng func (s *PreProcessingHookTestSuite) TestStateHookRollsbackOnRemoveExpIngestHistoryError() { s.historyQ.On("GetTx").Return(&sqlx.Tx{}).Once() s.historyQ.On("GetLastLedgerExpIngest").Return(uint32(0), nil).Once() - s.historyQ.On("RemoveExpIngestHistory", s.ledgerSeqFromContext).Return( - history.ExpIngestRemovalSummary{}, errors.New("transient error"), + s.historyQ.On("RemoveIngestHistory", s.ledgerSeqFromContext).Return( + history.IngestHistoryRemovalSummary{}, errors.New("transient error"), ) s.historyQ.On("Rollback").Return(nil).Once() diff --git a/services/horizon/internal/expingest/pipelines.go b/services/horizon/internal/expingest/pipelines.go index 64ed7d97e4..7f7b9fa7d5 100644 --- a/services/horizon/internal/expingest/pipelines.go +++ b/services/horizon/internal/expingest/pipelines.go @@ -203,8 +203,8 @@ func preProcessingHook( // State pipeline is always fully run because loading offers // from a database is done outside the pipeline. 
updateDatabase = true - var summary history.ExpIngestRemovalSummary - summary, err = historyQ.RemoveExpIngestHistory(ledgerSeq) + var summary history.IngestHistoryRemovalSummary + summary, err = historyQ.RemoveIngestHistory(ledgerSeq) if err != nil { return ctx, errors.Wrap(err, "Error removing exp ingest history") } diff --git a/services/horizon/internal/expingest/processors/database_processor.go b/services/horizon/internal/expingest/processors/database_processor.go index 0a590f4a74..feb8728f32 100644 --- a/services/horizon/internal/expingest/processors/database_processor.go +++ b/services/horizon/internal/expingest/processors/database_processor.go @@ -459,7 +459,7 @@ func (p *DatabaseProcessor) ingestLedgerHeader( return nil } - rowsAffected, err := p.LedgersQ.InsertExpLedger( + rowsAffected, err := p.LedgersQ.InsertLedger( r.GetHeader(), successTxCount, failedTxCount, @@ -482,27 +482,6 @@ func (p *DatabaseProcessor) ingestLedgerHeader( ) } - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - seq := int32(r.GetSequence() - 10) - - valid, err := p.LedgersQ.CheckExpLedger(seq) - // only validate the ledger if it is present in both ingestion systems - if err == sql.ErrNoRows { - return nil - } - - if err != nil { - log.WithField("sequence", seq).WithError(err). - Error("Could not compare ledger") - return nil - } - - if !valid { - log.WithField("sequence", seq). - Error("row in exp_history_ledgers does not match ledger in history_ledgers") - } - return nil } diff --git a/services/horizon/internal/expingest/processors/effects_processor.go b/services/horizon/internal/expingest/processors/effects_processor.go index 6f2272ad7d..7f373e1098 100644 --- a/services/horizon/internal/expingest/processors/effects_processor.go +++ b/services/horizon/internal/expingest/processors/effects_processor.go @@ -30,7 +30,7 @@ func (p *EffectProcessor) loadAccountIDs(accountSet map[string]int64) error { addresses = append(addresses, address) } - addressToID, err := p.EffectsQ.CreateExpAccounts(addresses) + addressToID, err := p.EffectsQ.CreateAccounts(addresses) if err != nil { return errors.Wrap(err, "Could not create account ids") } @@ -75,7 +75,7 @@ func (p *EffectProcessor) insertDBOperationsEffects(effects []effect, accountSet accountID, found := accountSet[effect.address] if !found { - return errors.Errorf("Error finding exp_history_account_id for address %v", effect.address) + return errors.Errorf("Error finding history_account_id for address %v", effect.address) } var detailsJSON []byte @@ -167,25 +167,6 @@ func (p *EffectProcessor) ProcessLedger(ctx context.Context, store *pipeline.Sto } } - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - if sequence > 10 { - checkSequence := int32(sequence - 10) - - var valid bool - valid, err = p.EffectsQ.CheckExpOperationEffects(checkSequence) - if err != nil { - log.WithField("sequence", checkSequence).WithError(err). - Error("Could not compare effects for ledger") - return nil - } - - if !valid { - log.WithField("sequence", checkSequence). 
- Error("effects do not match") - } - } - return nil } diff --git a/services/horizon/internal/expingest/processors/effects_processor_test.go b/services/horizon/internal/expingest/processors/effects_processor_test.go index 78ff4315ec..1f1268e1dc 100644 --- a/services/horizon/internal/expingest/processors/effects_processor_test.go +++ b/services/horizon/internal/expingest/processors/effects_processor_test.go @@ -205,9 +205,9 @@ func (s *EffectsProcessorTestSuiteLedger) mockSuccessfulEffectBatchAdds() { ).Return(nil).Once() } -func (s *EffectsProcessorTestSuiteLedger) mockSuccessfulCreateExpAccounts() { +func (s *EffectsProcessorTestSuiteLedger) mockSuccessfulCreateAccounts() { s.mockQ.On( - "CreateExpAccounts", + "CreateAccounts", mock.AnythingOfType("[]string"), ).Run(func(args mock.Arguments) { arg := args.Get(0).([]string) @@ -228,62 +228,6 @@ func (s *EffectsProcessorTestSuiteLedger) TestNoIngestUpdateDatabase() { func (s *EffectsProcessorTestSuiteLedger) TestEmptyEffects() { s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockQ.On("CheckExpOperationEffects", int32(s.sequence-10)). - Return(true, nil).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} -func (s *EffectsProcessorTestSuiteLedger) TestCheckExpOperationEffectsError() { - s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() - - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockQ.On("CheckExpOperationEffects", int32(s.sequence-10)). - Return(false, errors.New("transient error")).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *EffectsProcessorTestSuiteLedger) TestCheckExpOperationEffectsDoesNotMatch() { - s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() - - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockQ.On("CheckExpOperationEffects", int32(s.sequence-10)). - Return(false, nil).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *EffectsProcessorTestSuiteLedger) TestCheckExpOperationEffectsMinLedger() { - // CheckExpOperationEffects should not be called - s.mockLedgerReader.On("GetSequence").Return(uint32(10)).Once() s.mockLedgerReader. On("Read"). Return(io.LedgerTransaction{}, stdio.EOF).Once() @@ -301,11 +245,9 @@ func (s *EffectsProcessorTestSuiteLedger) TestIngestEffectsSucceeds() { s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() s.mockLedgerReads() - s.mockSuccessfulCreateExpAccounts() + s.mockSuccessfulCreateAccounts() s.mockQ.On("NewEffectBatchInsertBuilder", maxBatchSize). Return(s.mockBatchInsertBuilder).Once() - s.mockQ.On("CheckExpOperationEffects", int32(s.sequence-10)). 
- Return(true, nil).Once() s.mockSuccessfulEffectBatchAdds() @@ -321,12 +263,12 @@ func (s *EffectsProcessorTestSuiteLedger) TestIngestEffectsSucceeds() { s.Assert().NoError(err) } -func (s *EffectsProcessorTestSuiteLedger) TestCreateExpAccountsFails() { +func (s *EffectsProcessorTestSuiteLedger) TestCreateAccountsFails() { s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Return(s.addressToID, errors.New("transient error")).Once() err := s.processor.ProcessLedger( @@ -343,7 +285,7 @@ func (s *EffectsProcessorTestSuiteLedger) TestBatchAddFails() { s.mockLedgerReads() - s.mockSuccessfulCreateExpAccounts() + s.mockSuccessfulCreateAccounts() s.mockQ.On("NewEffectBatchInsertBuilder", maxBatchSize). Return(s.mockBatchInsertBuilder).Once() diff --git a/services/horizon/internal/expingest/processors/ledgers_processor_test.go b/services/horizon/internal/expingest/processors/ledgers_processor_test.go index 0ee75bd541..2c0938268e 100644 --- a/services/horizon/internal/expingest/processors/ledgers_processor_test.go +++ b/services/horizon/internal/expingest/processors/ledgers_processor_test.go @@ -2,7 +2,6 @@ package processors import ( "context" - "database/sql" stdio "io" "testing" @@ -115,7 +114,7 @@ func (s *LedgersProcessorTestSuiteLedger) TearDownTest() { s.mockLedgerWriter.AssertExpectations(s.T()) } -func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerIgnoredWhenNotDatabaseIngestion() { +func (s *LedgersProcessorTestSuiteLedger) TestInsertLedgerIgnoredWhenNotDatabaseIngestion() { // Clear mockLedgerReader expectations s.mockLedgerReader = &io.MockLedgerReader{} @@ -136,16 +135,15 @@ func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerIgnoredWhenNotDatab s.Assert().NoError(err) } -func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerSucceeds() { +func (s *LedgersProcessorTestSuiteLedger) TestInsertLedgerSucceeds() { s.mockQ.On( - "InsertExpLedger", + "InsertLedger", s.header, s.successCount, s.failedCount, s.opCount, s.ingestVersion, ).Return(int64(1), nil) - s.mockQ.On("CheckExpLedger", int32(10)).Return(true, nil) err := s.processor.ProcessLedger( s.context, @@ -156,71 +154,9 @@ func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerSucceeds() { s.Assert().NoError(err) } -func (s *LedgersProcessorTestSuiteLedger) TestCheckExpLedgerNotFound() { +func (s *LedgersProcessorTestSuiteLedger) TestInsertLedgerReturnsError() { s.mockQ.On( - "InsertExpLedger", - s.header, - s.successCount, - s.failedCount, - s.opCount, - s.ingestVersion, - ).Return(int64(1), nil) - s.mockQ.On("CheckExpLedger", int32(10)).Return(false, sql.ErrNoRows) - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *LedgersProcessorTestSuiteLedger) TestCheckExpLedgerError() { - s.mockQ.On( - "InsertExpLedger", - s.header, - s.successCount, - s.failedCount, - s.opCount, - s.ingestVersion, - ).Return(int64(1), nil) - s.mockQ.On("CheckExpLedger", int32(10)). 
- Return(false, errors.New("transient check exp ledger error")) - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *LedgersProcessorTestSuiteLedger) TestCheckExpLedgerDoesNotMatch() { - s.mockQ.On( - "InsertExpLedger", - s.header, - s.successCount, - s.failedCount, - s.opCount, - s.ingestVersion, - ).Return(int64(1), nil) - s.mockQ.On("CheckExpLedger", int32(10)). - Return(false, nil) - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerReturnsError() { - s.mockQ.On( - "InsertExpLedger", + "InsertLedger", s.header, s.successCount, s.failedCount, @@ -238,9 +174,9 @@ func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerReturnsError() { s.Assert().EqualError(err, "Could not insert ledger: transient error") } -func (s *LedgersProcessorTestSuiteLedger) TestInsertExpLedgerNoRowsAffected() { +func (s *LedgersProcessorTestSuiteLedger) TestInsertLedgerNoRowsAffected() { s.mockQ.On( - "InsertExpLedger", + "InsertLedger", s.header, s.successCount, s.failedCount, diff --git a/services/horizon/internal/expingest/processors/main.go b/services/horizon/internal/expingest/processors/main.go index d2ef51ee65..05fe3589ab 100644 --- a/services/horizon/internal/expingest/processors/main.go +++ b/services/horizon/internal/expingest/processors/main.go @@ -36,7 +36,7 @@ type DatabaseProcessor struct { OffersQ history.QOffers TrustLinesQ history.QTrustLines AssetStatsQ history.QAssetStats - LedgersQ history.QExpLedgers + LedgersQ history.QLedgers Action DatabaseProcessorActionType IngestVersion int // AssetStatSet is used in TrustLines processor diff --git a/services/horizon/internal/expingest/processors/operations_processor.go b/services/horizon/internal/expingest/processors/operations_processor.go index 752096ab36..13f5aba221 100644 --- a/services/horizon/internal/expingest/processors/operations_processor.go +++ b/services/horizon/internal/expingest/processors/operations_processor.go @@ -93,25 +93,6 @@ func (p *OperationProcessor) ProcessLedger(ctx context.Context, store *pipeline. return errors.Wrap(err, "Error flushing operation batch") } - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - if sequence > 10 { - checkSequence := int32(sequence - 10) - var valid bool - valid, err = p.OperationsQ.CheckExpOperations(checkSequence) - if err != nil { - log.WithField("sequence", checkSequence).WithError(err). - Error("Could not compare operations for ledger") - return nil - } - - if !valid { - log.WithField("sequence", checkSequence). 
- Error("rows for ledger in exp_history_operations does not match " + - "operations in history_operations") - } - } - return nil } diff --git a/services/horizon/internal/expingest/processors/operations_processor_test.go b/services/horizon/internal/expingest/processors/operations_processor_test.go index fcee33fc61..e98fc79b89 100644 --- a/services/horizon/internal/expingest/processors/operations_processor_test.go +++ b/services/horizon/internal/expingest/processors/operations_processor_test.go @@ -85,7 +85,7 @@ func (s *OperationsProcessorTestSuiteLedger) mockBatchInsertAdds(txs []io.Ledger return nil } -func (s *OperationsProcessorTestSuiteLedger) TestInsertExpLedgerIgnoredWhenNotDatabaseIngestion() { +func (s *OperationsProcessorTestSuiteLedger) TestInsertOperationsIgnoredWhenNotDatabaseIngestion() { s.mockQ = &history.MockQOperations{} err := s.processor.ProcessLedger( context.Background(), @@ -123,10 +123,6 @@ func (s *OperationsProcessorTestSuiteLedger) TestAddOperationSucceeds() { On("Read"). Return(io.LedgerTransaction{}, stdio.EOF).Once() - s.mockQ. - On("CheckExpOperations", int32(sequence-10)). - Return(true, nil).Once() - var err error err = s.mockBatchInsertAdds(txs, sequence) @@ -194,61 +190,3 @@ func (s *OperationsProcessorTestSuiteLedger) TestExecFails() { s.Assert().Error(err) s.Assert().EqualError(err, "Error flushing operation batch: transient error") } - -func (s *OperationsProcessorTestSuiteLedger) TestCheckExpOperationsError() { - sequence := uint32(56) - s.mockLedgerReader.On("GetSequence").Return(sequence).Once() - - firstTx := createTransaction(true, 1) - - s.mockLedgerReader. - On("Read"). - Return(firstTx, nil).Once() - - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockBatchInsertAdds([]io.LedgerTransaction{firstTx}, sequence) - s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - - s.mockQ. - On("CheckExpOperations", int32(sequence-10)). - Return(false, errors.New("transient check exp ledger error")).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *OperationsProcessorTestSuiteLedger) TestCheckExpOperationsDoesNotMatch() { - sequence := uint32(56) - s.mockLedgerReader.On("GetSequence").Return(sequence).Once() - - firstTx := createTransaction(true, 1) - - s.mockLedgerReader. - On("Read"). - Return(firstTx, nil).Once() - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockBatchInsertAdds([]io.LedgerTransaction{firstTx}, sequence) - s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - - s.mockQ.On("CheckExpOperations", int32(sequence-10)). 
- Return(false, nil).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} diff --git a/services/horizon/internal/expingest/processors/participants_processor.go b/services/horizon/internal/expingest/processors/participants_processor.go index 56702cc1da..003ae6e0c5 100644 --- a/services/horizon/internal/expingest/processors/participants_processor.go +++ b/services/horizon/internal/expingest/processors/participants_processor.go @@ -45,7 +45,7 @@ func (p *ParticipantsProcessor) loadAccountIDs(participantSet map[string]partici addresses = append(addresses, address) } - addressToID, err := p.ParticipantsQ.CreateExpAccounts(addresses) + addressToID, err := p.ParticipantsQ.CreateAccounts(addresses) if err != nil { return errors.Wrap(err, "Could not create account ids") } @@ -209,24 +209,6 @@ func (p *ParticipantsProcessor) ProcessLedger(ctx context.Context, store *pipeli } } - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - if sequence > 10 { - checkSequence := int32(sequence - 10) - var valid bool - valid, err = p.ParticipantsQ.CheckExpParticipants(checkSequence) - if err != nil { - log.WithField("sequence", checkSequence).WithError(err). - Error("Could not compare participants for ledger") - return nil - } - - if !valid { - log.WithField("sequence", checkSequence). - Error("participants do not match") - } - } - return nil } diff --git a/services/horizon/internal/expingest/processors/participants_processor_test.go b/services/horizon/internal/expingest/processors/participants_processor_test.go index 1a16aef995..8ed72bc0dd 100644 --- a/services/horizon/internal/expingest/processors/participants_processor_test.go +++ b/services/horizon/internal/expingest/processors/participants_processor_test.go @@ -168,47 +168,6 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestEmptyParticipants() { On("Read"). Return(io.LedgerTransaction{}, stdio.EOF).Once() - s.mockQ.On("CheckExpParticipants", int32(s.sequence-10)). - Return(true, nil).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *ParticipantsProcessorTestSuiteLedger) TestCheckExpParticipantsError() { - s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() - - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockQ.On("CheckExpParticipants", int32(s.sequence-10)). - Return(false, errors.New("transient error")).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *ParticipantsProcessorTestSuiteLedger) TestParticipantsCheckDoesNotMatch() { - s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() - - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockQ.On("CheckExpParticipants", int32(s.sequence-10)). - Return(false, nil).Once() - err := s.processor.ProcessLedger( s.context, &supportPipeline.Store{}, @@ -223,7 +182,7 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestIngestParticipantsSucceeds() s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). 
Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -242,9 +201,6 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestIngestParticipantsSucceeds() s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() s.mockOperationsBatchInsertBuilder.On("Exec").Return(nil).Once() - s.mockQ.On("CheckExpParticipants", int32(s.sequence-10)). - Return(true, nil).Once() - err := s.processor.ProcessLedger( s.context, &supportPipeline.Store{}, @@ -254,12 +210,12 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestIngestParticipantsSucceeds() s.Assert().NoError(err) } -func (s *ParticipantsProcessorTestSuiteLedger) TestCreateExpAccountsFails() { +func (s *ParticipantsProcessorTestSuiteLedger) TestCreateAccountsFails() { s.mockLedgerReader.On("GetSequence").Return(s.sequence).Once() s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Return(s.addressToID, errors.New("transient error")).Once() err := s.processor.ProcessLedger( @@ -276,7 +232,7 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestBatchAddFails() { s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -316,7 +272,7 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestOperationParticipantsBatchAdd s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -360,7 +316,7 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestBatchAddExecFails() { s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -389,7 +345,7 @@ func (s *ParticipantsProcessorTestSuiteLedger) TestOpeartionBatchAddExecFails() s.mockLedgerReads() - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). 
Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( diff --git a/services/horizon/internal/expingest/processors/trades_processor.go b/services/horizon/internal/expingest/processors/trades_processor.go index 5a917bac6f..3aadd98974 100644 --- a/services/horizon/internal/expingest/processors/trades_processor.go +++ b/services/horizon/internal/expingest/processors/trades_processor.go @@ -87,13 +87,13 @@ func (p *TradeProcessor) ProcessLedger(ctx context.Context, store *pipeline.Stor if len(inserts) > 0 { batch := p.TradesQ.NewTradeBatchInsertBuilder(maxBatchSize) - accountSet, err = p.TradesQ.CreateExpAccounts(mapKeysToList(accountSet)) + accountSet, err = p.TradesQ.CreateAccounts(mapKeysToList(accountSet)) if err != nil { return errors.Wrap(err, "Error creating account ids") } var assetMap map[string]history.Asset - assetMap, err = p.TradesQ.CreateExpAssets(assets) + assetMap, err = p.TradesQ.CreateAssets(assets) if err != nil { return errors.Wrap(err, "Error creating asset ids") } @@ -120,32 +120,9 @@ func (p *TradeProcessor) ProcessLedger(ctx context.Context, store *pipeline.Stor } } - p.checkTrades(ledger) - return nil } -func (p *TradeProcessor) checkTrades(ledger xdr.LedgerHeaderHistoryEntry) { - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - if sequence := ledger.Header.LedgerSeq; sequence > 10 { - checkSequence := int32(sequence - 10) - var valid bool - valid, err := p.TradesQ.CheckExpTrades(checkSequence) - if err != nil { - log.WithField("sequence", checkSequence).WithError(err). - Error("Could not compare trades for ledger") - return - } - - if !valid { - log.WithField("sequence", checkSequence). - Error("rows for ledger in exp_history_trades does not match " + - "trades in history_trades") - } - } -} - func (p *TradeProcessor) findTradeSellPrice( transaction io.LedgerTransaction, opidx int, diff --git a/services/horizon/internal/expingest/processors/trades_processor_test.go b/services/horizon/internal/expingest/processors/trades_processor_test.go index 9d2b11348f..07d9e6f891 100644 --- a/services/horizon/internal/expingest/processors/trades_processor_test.go +++ b/services/horizon/internal/expingest/processors/trades_processor_test.go @@ -533,7 +533,7 @@ func (s *TradeProcessorTestSuiteLedger) TestIngestTradesSucceeds() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 100}} inserts := s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -542,7 +542,7 @@ func (s *TradeProcessorTestSuiteLedger) TestIngestTradesSucceeds() { ) }).Return(s.accountToID, nil).Once() - s.mockQ.On("CreateExpAssets", mock.AnythingOfType("[]xdr.Asset")). + s.mockQ.On("CreateAssets", mock.AnythingOfType("[]xdr.Asset")). Run(func(args mock.Arguments) { arg := args.Get(0).([]xdr.Asset) s.Assert().ElementsMatch( @@ -558,8 +558,6 @@ func (s *TradeProcessorTestSuiteLedger) TestIngestTradesSucceeds() { } s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - s.mockQ.On("CheckExpTrades", int32(ledger.Header.LedgerSeq)-10). 
- Return(true, nil).Once() err := s.processor.ProcessLedger( s.context, @@ -571,11 +569,11 @@ func (s *TradeProcessorTestSuiteLedger) TestIngestTradesSucceeds() { s.Assert().NoError(err) } -func (s *TradeProcessorTestSuiteLedger) TestCreateExpAccountsError() { +func (s *TradeProcessorTestSuiteLedger) TestCreateAccountsError() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 100}} s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -594,11 +592,11 @@ func (s *TradeProcessorTestSuiteLedger) TestCreateExpAccountsError() { s.Assert().EqualError(err, "Error creating account ids: create accounts error") } -func (s *TradeProcessorTestSuiteLedger) TestCreateExpAssetsError() { +func (s *TradeProcessorTestSuiteLedger) TestCreateAssetsError() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 100}} s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -607,7 +605,7 @@ func (s *TradeProcessorTestSuiteLedger) TestCreateExpAssetsError() { ) }).Return(s.accountToID, nil).Once() - s.mockQ.On("CreateExpAssets", mock.AnythingOfType("[]xdr.Asset")). + s.mockQ.On("CreateAssets", mock.AnythingOfType("[]xdr.Asset")). Run(func(args mock.Arguments) { arg := args.Get(0).([]xdr.Asset) s.Assert().ElementsMatch( @@ -630,7 +628,7 @@ func (s *TradeProcessorTestSuiteLedger) TestBatchAddError() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 100}} s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -639,7 +637,7 @@ func (s *TradeProcessorTestSuiteLedger) TestBatchAddError() { ) }).Return(s.accountToID, nil).Once() - s.mockQ.On("CreateExpAssets", mock.AnythingOfType("[]xdr.Asset")). + s.mockQ.On("CreateAssets", mock.AnythingOfType("[]xdr.Asset")). Run(func(args mock.Arguments) { arg := args.Get(0).([]xdr.Asset) s.Assert().ElementsMatch( @@ -665,7 +663,7 @@ func (s *TradeProcessorTestSuiteLedger) TestBatchExecError() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 100}} insert := s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). + s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -674,7 +672,7 @@ func (s *TradeProcessorTestSuiteLedger) TestBatchExecError() { ) }).Return(s.accountToID, nil).Once() - s.mockQ.On("CreateExpAssets", mock.AnythingOfType("[]xdr.Asset")). + s.mockQ.On("CreateAssets", mock.AnythingOfType("[]xdr.Asset")). Run(func(args mock.Arguments) { arg := args.Get(0).([]xdr.Asset) s.Assert().ElementsMatch( @@ -701,7 +699,7 @@ func (s *TradeProcessorTestSuiteLedger) TestIgnoreCheckIfSmallLedger() { ledger := xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerSeq: 10}} insert := s.mockReadTradeTransactions(ledger) - s.mockQ.On("CreateExpAccounts", mock.AnythingOfType("[]string")). 
+ s.mockQ.On("CreateAccounts", mock.AnythingOfType("[]string")). Run(func(args mock.Arguments) { arg := args.Get(0).([]string) s.Assert().ElementsMatch( @@ -710,7 +708,7 @@ func (s *TradeProcessorTestSuiteLedger) TestIgnoreCheckIfSmallLedger() { ) }).Return(s.accountToID, nil).Once() - s.mockQ.On("CreateExpAssets", mock.AnythingOfType("[]xdr.Asset")). + s.mockQ.On("CreateAssets", mock.AnythingOfType("[]xdr.Asset")). Run(func(args mock.Arguments) { arg := args.Get(0).([]xdr.Asset) s.Assert().ElementsMatch( diff --git a/services/horizon/internal/expingest/processors/transactions_processor.go b/services/horizon/internal/expingest/processors/transactions_processor.go index 2040c36895..16c1177b97 100644 --- a/services/horizon/internal/expingest/processors/transactions_processor.go +++ b/services/horizon/internal/expingest/processors/transactions_processor.go @@ -63,25 +63,6 @@ func (p *TransactionProcessor) ProcessLedger(ctx context.Context, store *pipelin return errors.Wrap(err, "Error flushing transaction batch") } - // use an older lookup sequence because the experimental ingestion system and the - // legacy ingestion system might not be in sync - if sequence > 10 { - checkSequence := int32(sequence - 10) - var valid bool - valid, err = p.TransactionsQ.CheckExpTransactions(checkSequence) - if err != nil { - log.WithField("sequence", checkSequence).WithError(err). - Error("Could not compare transactions for ledger") - return nil - } - - if !valid { - log.WithField("sequence", checkSequence). - Error("rows for ledger in exp_history_transactions does not match " + - "transactions in history_transactions") - } - } - return nil } diff --git a/services/horizon/internal/expingest/processors/transactions_processor_test.go b/services/horizon/internal/expingest/processors/transactions_processor_test.go index 5a0b393e06..64b153b35c 100644 --- a/services/horizon/internal/expingest/processors/transactions_processor_test.go +++ b/services/horizon/internal/expingest/processors/transactions_processor_test.go @@ -49,7 +49,7 @@ func (s *TransactionsProcessorTestSuiteLedger) TearDownTest() { s.mockLedgerWriter.AssertExpectations(s.T()) } -func (s *TransactionsProcessorTestSuiteLedger) TestInsertExpLedgerIgnoredWhenNotDatabaseIngestion() { +func (s *TransactionsProcessorTestSuiteLedger) TestInsertLedgerLedgerIgnoredWhenNotDatabaseIngestion() { s.mockLedgerReader.On("IgnoreUpgradeChanges").Once() s.mockLedgerReader. On("Close"). @@ -100,8 +100,6 @@ func (s *TransactionsProcessorTestSuiteLedger) TestAddTransactionsSucceeds() { s.mockBatchInsertBuilder.On("Add", thirdTx, sequence).Return(nil).Once() s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - s.mockQ.On("CheckExpTransactions", int32(sequence-10)).Return(true, nil).Once() - err := s.processor.ProcessLedger( s.context, &supportPipeline.Store{}, @@ -177,79 +175,3 @@ func (s *TransactionsProcessorTestSuiteLedger) TestExecFails() { s.Assert().Error(err) s.Assert().EqualError(err, "Error flushing transaction batch: transient error") } - -func (s *TransactionsProcessorTestSuiteLedger) TestCheckExpTransactionsError() { - s.mockLedgerReader.On("IgnoreUpgradeChanges").Once() - - s.mockQ. - On("NewTransactionBatchInsertBuilder", maxBatchSize). - Return(s.mockBatchInsertBuilder).Once() - - sequence := uint32(20) - s.mockLedgerReader.On("GetSequence").Return(sequence).Once() - - firstTx := createTransaction(true, 1) - - s.mockLedgerReader. - On("Read"). - Return(firstTx, nil).Once() - s.mockLedgerReader. - On("Read"). 
- Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockLedgerReader. - On("Close"). - Return(nil).Once() - - s.mockBatchInsertBuilder.On("Add", firstTx, sequence).Return(nil).Once() - s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - - s.mockQ.On("CheckExpTransactions", int32(sequence-10)). - Return(false, errors.New("transient check exp ledger error")).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} - -func (s *TransactionsProcessorTestSuiteLedger) TestCheckExpTransactionsDoesNotMatch() { - s.mockLedgerReader.On("IgnoreUpgradeChanges").Once() - - s.mockQ. - On("NewTransactionBatchInsertBuilder", maxBatchSize). - Return(s.mockBatchInsertBuilder).Once() - - sequence := uint32(20) - s.mockLedgerReader.On("GetSequence").Return(sequence).Once() - - firstTx := createTransaction(true, 1) - - s.mockLedgerReader. - On("Read"). - Return(firstTx, nil).Once() - s.mockLedgerReader. - On("Read"). - Return(io.LedgerTransaction{}, stdio.EOF).Once() - - s.mockLedgerReader. - On("Close"). - Return(nil).Once() - - s.mockBatchInsertBuilder.On("Add", firstTx, sequence).Return(nil).Once() - s.mockBatchInsertBuilder.On("Exec").Return(nil).Once() - - s.mockQ.On("CheckExpTransactions", int32(sequence-10)). - Return(false, nil).Once() - - err := s.processor.ProcessLedger( - s.context, - &supportPipeline.Store{}, - s.mockLedgerReader, - s.mockLedgerWriter, - ) - s.Assert().NoError(err) -} diff --git a/services/horizon/internal/expingest/run_ingestion_test.go b/services/horizon/internal/expingest/run_ingestion_test.go index ed077dbbf4..8bd3205c43 100644 --- a/services/horizon/internal/expingest/run_ingestion_test.go +++ b/services/horizon/internal/expingest/run_ingestion_test.go @@ -78,9 +78,9 @@ func (m *mockDBQ) GetAllOffers() ([]history.Offer, error) { return args.Get(0).([]history.Offer), args.Error(1) } -func (m *mockDBQ) RemoveExpIngestHistory(newerThanSequence uint32) (history.ExpIngestRemovalSummary, error) { +func (m *mockDBQ) RemoveIngestHistory(newerThanSequence uint32) (history.IngestHistoryRemovalSummary, error) { args := m.Called(newerThanSequence) - return args.Get(0).(history.ExpIngestRemovalSummary), args.Error(1) + return args.Get(0).(history.IngestHistoryRemovalSummary), args.Error(1) } func (m *mockDBQ) TruncateExpingestStateTables() error { diff --git a/services/horizon/internal/ingest/ingestion.go b/services/horizon/internal/ingest/ingestion.go index ab9535b39d..684e92b412 100644 --- a/services/horizon/internal/ingest/ingestion.go +++ b/services/horizon/internal/ingest/ingestion.go @@ -160,14 +160,14 @@ func (ingest *Ingestion) UpdateAccountIDs(tables []TableName) error { if len(addresses) > 0 { // TODO we should probably batch this too - dbAccounts = make([]history.Account, 0, len(addresses)) - err = q.CreateAccounts(&dbAccounts, addresses) + var accountMap map[string]int64 + accountMap, err = q.CreateAccounts(addresses) if err != nil { return errors.Wrap(err, "q.CreateAccounts error") } - for _, row := range dbAccounts { - accounts[Address(row.Address)] = row.ID + for address, rowID := range accountMap { + accounts[Address(address)] = rowID } }
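
Note (not part of the patch): the hunks above consistently replace the Create*Exp*Accounts call sites with the new CreateAccounts signature, which returns the address-to-id map directly instead of filling a []history.Account slice. The sketch below is an illustrative, hedged example of how a caller might wrap that call after this change; the helper name resolveAccountIDs is an assumption for illustration only, and the import paths simply follow the repo layout shown in the diff headers.

package example

import (
	"github.com/stellar/go/services/horizon/internal/db2/history"
	"github.com/stellar/go/support/errors"
)

// resolveAccountIDs maps Stellar account addresses to their ids in the
// history_accounts table. It relies on CreateAccounts' upsert behaviour
// ("ON CONFLICT (address) DO UPDATE ... RETURNING *"), so calling it again
// with already-known addresses returns the same ids, as exercised by the
// duplicate-address case in the tests above.
func resolveAccountIDs(q *history.Q, addresses []string) (map[string]int64, error) {
	addressToID, err := q.CreateAccounts(addresses)
	if err != nil {
		return nil, errors.Wrap(err, "q.CreateAccounts error")
	}
	return addressToID, nil
}

Usage follows the UpdateAccountIDs hunk directly above: iterate the returned map and assign accounts[Address(address)] = id, with no intermediate slice of history.Account rows.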