From 285e2e4369210ba86e69daba0283b779065a6d4f Mon Sep 17 00:00:00 2001 From: sydneynotthecity Date: Tue, 5 Mar 2024 00:09:01 -0600 Subject: [PATCH 01/49] Add tx and operation error codes --- internal/transform/operation.go | 94 +++++- internal/transform/operation_test.go | 426 +++++++++++++++++++++---- internal/transform/schema.go | 19 +- internal/transform/transaction.go | 3 + internal/transform/transaction_test.go | 7 +- 5 files changed, 465 insertions(+), 84 deletions(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index 9229419b..cb22c942 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -66,15 +66,28 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti return OperationOutput{}, err } + outputOperationResults, ok := transaction.Result.Result.OperationResults() + if !ok { + return OperationOutput{}, err + } + outputOperationResultCode := outputOperationResults[operationIndex].Code.String() + // TODO: add trace code details + outputOperationTraceCode, err := mapOperationTrace(*outputOperationResults[operationIndex].Tr) + if err != nil { + return OperationOutput{}, err + } + transformedOperation := OperationOutput{ - SourceAccount: outputSourceAccount, - SourceAccountMuxed: outputSourceAccountMuxed.String, - Type: outputOperationType, - TypeString: outputOperationTypeString, - TransactionID: outputTransactionID, - OperationID: outputOperationID, - OperationDetails: outputDetails, - ClosedAt: outputCloseTime, + SourceAccount: outputSourceAccount, + SourceAccountMuxed: outputSourceAccountMuxed.String, + Type: outputOperationType, + TypeString: outputOperationTypeString, + TransactionID: outputTransactionID, + OperationID: outputOperationID, + OperationDetails: outputDetails, + ClosedAt: outputCloseTime, + OperationResultCode: outputOperationResultCode, + OperationTraceCode: outputOperationTraceCode, } return transformedOperation, nil @@ -145,6 +158,71 @@ func mapOperationType(operation xdr.Operation) (string, error) { return op_string_type, nil } +func mapOperationTrace(operationTrace xdr.OperationResultTr) (string, error) { + var operationTraceDescription string + operationType := operationTrace.Type + + switch operationType { + case xdr.OperationTypeCreateAccount: + operationTraceDescription = operationTrace.CreateAccountResult.Code.String() + case xdr.OperationTypePayment: + operationTraceDescription = operationTrace.PaymentResult.Code.String() + case xdr.OperationTypePathPaymentStrictReceive: + operationTraceDescription = operationTrace.PathPaymentStrictReceiveResult.Code.String() + case xdr.OperationTypePathPaymentStrictSend: + operationTraceDescription = operationTrace.PathPaymentStrictSendResult.Code.String() + case xdr.OperationTypeManageBuyOffer: + operationTraceDescription = operationTrace.ManageBuyOfferResult.Code.String() + case xdr.OperationTypeManageSellOffer: + operationTraceDescription = operationTrace.ManageSellOfferResult.Code.String() + case xdr.OperationTypeCreatePassiveSellOffer: + operationTraceDescription = operationTrace.CreatePassiveSellOfferResult.Code.String() + case xdr.OperationTypeSetOptions: + operationTraceDescription = operationTrace.SetOptionsResult.Code.String() + case xdr.OperationTypeChangeTrust: + operationTraceDescription = operationTrace.ChangeTrustResult.Code.String() + case xdr.OperationTypeAllowTrust: + operationTraceDescription = operationTrace.AllowTrustResult.Code.String() + case xdr.OperationTypeAccountMerge: + operationTraceDescription = 
operationTrace.AccountMergeResult.Code.String() + case xdr.OperationTypeInflation: + operationTraceDescription = operationTrace.InflationResult.Code.String() + case xdr.OperationTypeManageData: + operationTraceDescription = operationTrace.ManageDataResult.Code.String() + case xdr.OperationTypeBumpSequence: + operationTraceDescription = operationTrace.BumpSeqResult.Code.String() + case xdr.OperationTypeCreateClaimableBalance: + operationTraceDescription = operationTrace.CreateClaimableBalanceResult.Code.String() + case xdr.OperationTypeClaimClaimableBalance: + operationTraceDescription = operationTrace.ClaimClaimableBalanceResult.Code.String() + case xdr.OperationTypeBeginSponsoringFutureReserves: + operationTraceDescription = operationTrace.BeginSponsoringFutureReservesResult.Code.String() + case xdr.OperationTypeEndSponsoringFutureReserves: + operationTraceDescription = operationTrace.EndSponsoringFutureReservesResult.Code.String() + case xdr.OperationTypeRevokeSponsorship: + operationTraceDescription = operationTrace.RevokeSponsorshipResult.Code.String() + case xdr.OperationTypeClawback: + operationTraceDescription = operationTrace.ClawbackResult.Code.String() + case xdr.OperationTypeClawbackClaimableBalance: + operationTraceDescription = operationTrace.ClawbackClaimableBalanceResult.Code.String() + case xdr.OperationTypeSetTrustLineFlags: + operationTraceDescription = operationTrace.SetTrustLineFlagsResult.Code.String() + case xdr.OperationTypeLiquidityPoolDeposit: + operationTraceDescription = operationTrace.LiquidityPoolDepositResult.Code.String() + case xdr.OperationTypeLiquidityPoolWithdraw: + operationTraceDescription = operationTrace.LiquidityPoolWithdrawResult.Code.String() + case xdr.OperationTypeInvokeHostFunction: + operationTraceDescription = operationTrace.InvokeHostFunctionResult.Code.String() + case xdr.OperationTypeExtendFootprintTtl: + operationTraceDescription = operationTrace.ExtendFootprintTtlResult.Code.String() + case xdr.OperationTypeRestoreFootprint: + operationTraceDescription = operationTrace.RestoreFootprintResult.Code.String() + default: + return operationTraceDescription, fmt.Errorf("Unknown operation type: %s", operationTrace.Type.String()) + } + return operationTraceDescription, nil +} + func PoolIDToString(id xdr.PoolId) string { return xdr.Hash(id).HexString() } diff --git a/internal/transform/operation_test.go b/internal/transform/operation_test.go index 7ae5b341..86702448 100644 --- a/internal/transform/operation_test.go +++ b/internal/transform/operation_test.go @@ -622,9 +622,33 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er } inputEnvelope.Tx.Operations = inputOperations results := []xdr.OperationResult{ - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeCreateAccount, + CreateAccountResult: &xdr.CreateAccountResult{ + Code: xdr.CreateAccountResultCodeCreateAccountSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypePayment, + PaymentResult: &xdr.PaymentResult{ + Code: xdr.PaymentResultCodePaymentSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypePayment, + PaymentResult: &xdr.PaymentResult{ + Code: xdr.PaymentResultCodePaymentSuccess, + }, + }, + }, // There needs to be a true result for path 
payment receive and send xdr.OperationResult{ Code: xdr.OperationResultCodeOpInner, @@ -638,17 +662,105 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeManageSellOffer, + ManageSellOfferResult: &xdr.ManageSellOfferResult{ + Code: xdr.ManageSellOfferResultCodeManageSellOfferSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeManageSellOffer, + ManageSellOfferResult: &xdr.ManageSellOfferResult{ + Code: xdr.ManageSellOfferResultCodeManageSellOfferSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeSetOptions, + SetOptionsResult: &xdr.SetOptionsResult{ + Code: xdr.SetOptionsResultCodeSetOptionsSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeChangeTrust, + ChangeTrustResult: &xdr.ChangeTrustResult{ + Code: xdr.ChangeTrustResultCodeChangeTrustSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeChangeTrust, + ChangeTrustResult: &xdr.ChangeTrustResult{ + Code: xdr.ChangeTrustResultCodeChangeTrustSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeAllowTrust, + AllowTrustResult: &xdr.AllowTrustResult{ + Code: xdr.AllowTrustResultCodeAllowTrustSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeAccountMerge, + AccountMergeResult: &xdr.AccountMergeResult{ + Code: xdr.AccountMergeResultCodeAccountMergeSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeInflation, + InflationResult: &xdr.InflationResult{ + Code: xdr.InflationResultCodeInflationSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeManageData, + ManageDataResult: &xdr.ManageDataResult{ + Code: xdr.ManageDataResultCodeManageDataSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeBumpSequence, + BumpSeqResult: &xdr.BumpSequenceResult{ + Code: xdr.BumpSequenceResultCodeBumpSequenceSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeManageBuyOffer, + ManageBuyOfferResult: &xdr.ManageBuyOfferResult{ + Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess, + }, + }, + }, xdr.OperationResult{ Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ @@ -661,21 +773,141 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - 
xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, - xdr.OperationResult{}, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeCreateClaimableBalance, + CreateClaimableBalanceResult: &xdr.CreateClaimableBalanceResult{ + Code: xdr.CreateClaimableBalanceResultCodeCreateClaimableBalanceSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeClaimClaimableBalance, + ClaimClaimableBalanceResult: &xdr.ClaimClaimableBalanceResult{ + Code: xdr.ClaimClaimableBalanceResultCodeClaimClaimableBalanceSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeBeginSponsoringFutureReserves, + BeginSponsoringFutureReservesResult: &xdr.BeginSponsoringFutureReservesResult{ + Code: xdr.BeginSponsoringFutureReservesResultCodeBeginSponsoringFutureReservesSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeRevokeSponsorship, + RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{ + Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeClawback, + ClawbackResult: &xdr.ClawbackResult{ + Code: xdr.ClawbackResultCodeClawbackSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeClawbackClaimableBalance, + ClawbackClaimableBalanceResult: &xdr.ClawbackClaimableBalanceResult{ + Code: 
xdr.ClawbackClaimableBalanceResultCodeClawbackClaimableBalanceSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeSetTrustLineFlags, + SetTrustLineFlagsResult: &xdr.SetTrustLineFlagsResult{ + Code: xdr.SetTrustLineFlagsResultCodeSetTrustLineFlagsSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeLiquidityPoolDeposit, + LiquidityPoolDepositResult: &xdr.LiquidityPoolDepositResult{ + Code: xdr.LiquidityPoolDepositResultCodeLiquidityPoolDepositSuccess, + }, + }, + }, + xdr.OperationResult{ + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeLiquidityPoolWithdraw, + LiquidityPoolWithdrawResult: &xdr.LiquidityPoolWithdrawResult{ + Code: xdr.LiquidityPoolWithdrawResultCodeLiquidityPoolWithdrawSuccess, + }, + }, + }, //xdr.OperationResult{}, //xdr.OperationResult{}, //xdr.OperationResult{}, @@ -704,7 +936,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "funder": hardCodedSourceAccountAddress, "starting_balance": 2.5, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "CreateAccountResultCodeCreateAccountSuccess", }, OperationOutput{ Type: 1, @@ -721,7 +955,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_issuer": hardCodedDestAccountAddress, "asset_id": int64(-8205667356306085451), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "PaymentResultCodePaymentSuccess", }, OperationOutput{ Type: 1, @@ -736,7 +972,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_type": "native", "asset_id": int64(-5706705804583548011), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "PaymentResultCodePaymentSuccess", }, OperationOutput{ Type: 2, @@ -756,7 +994,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_id": int64(-5706705804583548011), "path": []Path{usdtAssetPath}, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "PathPaymentStrictReceiveResultCodePathPaymentStrictReceiveSuccess", }, OperationOutput{ Type: 3, @@ -779,7 +1019,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "buying_asset_type": "native", "buying_asset_id": int64(-5706705804583548011), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess", }, OperationOutput{ Type: 4, @@ -801,7 +1043,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "selling_asset_type": "native", "selling_asset_id": int64(-5706705804583548011), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess", }, OperationOutput{ Type: 5, @@ -823,7 +1067,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "signer_key": "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF", 
"signer_weight": uint32(1), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "SetOptionsResultCodeSetOptionsSuccess", }, OperationOutput{ Type: 6, @@ -840,7 +1086,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_issuer": hardCodedDestAccountAddress, "asset_id": int64(6690054458235693884), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess", }, OperationOutput{ Type: 6, @@ -854,7 +1102,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_type": "liquidity_pool_shares", "liquidity_pool_id": "185a6b384c651552ba09b32851b79f5f6ab61e80883d303f52bea1406a4923f0", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess", }, OperationOutput{ Type: 7, @@ -871,7 +1121,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_issuer": hardCodedSourceAccountAddress, "asset_id": int64(8485542065083974675), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "AllowTrustResultCodeAllowTrustSuccess", }, OperationOutput{ Type: 8, @@ -883,16 +1135,20 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "account": hardCodedSourceAccountAddress, "into": hardCodedDestAccountAddress, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "AccountMergeResultCodeAccountMergeSuccess", }, OperationOutput{ - Type: 9, - TypeString: "inflation", - SourceAccount: hardCodedSourceAccountAddress, - TransactionID: 4096, - OperationID: 4108, - OperationDetails: map[string]interface{}{}, - ClosedAt: hardCodedLedgerClose, + Type: 9, + TypeString: "inflation", + SourceAccount: hardCodedSourceAccountAddress, + TransactionID: 4096, + OperationID: 4108, + OperationDetails: map[string]interface{}{}, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "InflationResultCodeInflationSuccess", }, OperationOutput{ Type: 10, @@ -904,7 +1160,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "name": "test", "value": base64.StdEncoding.EncodeToString([]byte{0x76, 0x61, 0x6c, 0x75, 0x65}), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ManageDataResultCodeManageDataSuccess", }, OperationOutput{ Type: 11, @@ -915,7 +1173,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "bump_to": "100", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "BumpSequenceResultCodeBumpSequenceSuccess", }, OperationOutput{ Type: 12, @@ -938,7 +1198,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "buying_asset_id": int64(-5706705804583548011), "offer_id": int64(100), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: 
"ManageBuyOfferResultCodeManageBuyOfferSuccess", }, OperationOutput{ Type: 13, @@ -958,7 +1220,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_type": "native", "asset_id": int64(-5706705804583548011), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "PathPaymentStrictSendResultCodePathPaymentStrictSendSuccess", }, OperationOutput{ Type: 14, @@ -971,7 +1235,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "amount": 123456.789, "claimants": []Claimant{testClaimantDetails}, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "CreateClaimableBalanceResultCodeCreateClaimableBalanceSuccess", }, OperationOutput{ Type: 15, @@ -983,7 +1249,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "claimant": hardCodedSourceAccountAddress, "balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ClaimClaimableBalanceResultCodeClaimClaimableBalanceSuccess", }, OperationOutput{ Type: 16, @@ -994,7 +1262,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "sponsored_id": hardCodedDestAccountAddress, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "BeginSponsoringFutureReservesResultCodeBeginSponsoringFutureReservesSuccess", }, OperationOutput{ Type: 18, @@ -1006,7 +1276,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "signer_account_id": hardCodedDestAccountAddress, "signer_key": "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1017,7 +1289,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "account_id": hardCodedDestAccountAddress, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1028,7 +1302,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "claimable_balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1040,7 +1316,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "data_account_id": hardCodedDestAccountAddress, "data_name": "test", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1051,7 +1329,9 @@ 
func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "offer_id": int64(100), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1063,7 +1343,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "trustline_account_id": testAccount3Address, "trustline_asset": "USTT:GBT4YAEGJQ5YSFUMNKX6BPBUOCPNAIOFAVZOF6MIME2CECBMEIUXFZZN", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 18, @@ -1074,7 +1356,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "liquidity_pool_id": "0102030405060708090000000000000000000000000000000000000000000000", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, OperationOutput{ Type: 19, @@ -1090,7 +1374,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "asset_type": "credit_alphanum4", "asset_id": int64(-8205667356306085451), }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ClawbackResultCodeClawbackSuccess", }, OperationOutput{ Type: 20, @@ -1101,7 +1387,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationDetails: map[string]interface{}{ "balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000", }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "ClawbackClaimableBalanceResultCodeClawbackClaimableBalanceSuccess", }, OperationOutput{ Type: 21, @@ -1120,7 +1408,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "set_flags": []int32{4}, "set_flags_s": []string{"clawback_enabled"}, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "SetTrustLineFlagsResultCodeSetTrustLineFlagsSuccess", }, OperationOutput{ Type: 22, @@ -1152,7 +1442,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { }, "shares_received": 0.0000002, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "LiquidityPoolDepositResultCodeLiquidityPoolDepositSuccess", }, OperationOutput{ Type: 23, @@ -1174,7 +1466,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { "reserve_b_min_amount": 0.0000001, "shares": 0.0000004, }, - ClosedAt: hardCodedLedgerClose, + ClosedAt: hardCodedLedgerClose, + OperationResultCode: "OperationResultCodeOpInner", + OperationTraceCode: "LiquidityPoolWithdrawResultCodeLiquidityPoolWithdrawSuccess", }, //OperationOutput{ // Type: 24, diff --git a/internal/transform/schema.go b/internal/transform/schema.go index 8321855c..601ec5b3 100644 --- a/internal/transform/schema.go +++ b/internal/transform/schema.go @@ -64,6 +64,7 @@ type TransactionOutput struct { 
SorobanResourcesInstructions uint32 `json:"soroban_resources_instructions"` SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"` SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"` + TransactionResultCode string `json:"transaction_result_code"` } type LedgerTransactionOutput struct { @@ -118,14 +119,16 @@ type AccountSignerOutput struct { // OperationOutput is a representation of an operation that aligns with the BigQuery table history_operations type OperationOutput struct { - SourceAccount string `json:"source_account"` - SourceAccountMuxed string `json:"source_account_muxed,omitempty"` - Type int32 `json:"type"` - TypeString string `json:"type_string"` - OperationDetails map[string]interface{} `json:"details"` //Details is a JSON object that varies based on operation type - TransactionID int64 `json:"transaction_id"` - OperationID int64 `json:"id"` - ClosedAt time.Time `json:"closed_at"` + SourceAccount string `json:"source_account"` + SourceAccountMuxed string `json:"source_account_muxed,omitempty"` + Type int32 `json:"type"` + TypeString string `json:"type_string"` + OperationDetails map[string]interface{} `json:"details"` //Details is a JSON object that varies based on operation type + TransactionID int64 `json:"transaction_id"` + OperationID int64 `json:"id"` + ClosedAt time.Time `json:"closed_at"` + OperationResultCode string `json:"operation_result_code"` + OperationTraceCode string `json:"operation_trace_code"` } // ClaimableBalanceOutput is a representation of a claimable balances that aligns with the BigQuery table claimable_balances diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go index baad3770..a5e6d79f 100644 --- a/internal/transform/transaction.go +++ b/internal/transform/transaction.go @@ -155,6 +155,8 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe return TransactionOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err) } + outputTxResultCode := transaction.Result.Result.Result.Code.String() + outputSuccessful := transaction.Result.Successful() transformedTransaction := TransactionOutput{ TransactionHash: outputTransactionHash, @@ -184,6 +186,7 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe SorobanResourcesInstructions: outputSorobanResourcesInstructions, SorobanResourcesReadBytes: outputSorobanResourcesReadBytes, SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes, + TransactionResultCode: outputTxResultCode, } // Add Muxed Account Details, if exists diff --git a/internal/transform/transaction_test.go b/internal/transform/transaction_test.go index bf21e463..9ca8708b 100644 --- a/internal/transform/transaction_test.go +++ b/internal/transform/transaction_test.go @@ -105,6 +105,7 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) { SorobanResourcesInstructions: 0, SorobanResourcesReadBytes: 0, SorobanResourcesWriteBytes: 0, + TransactionResultCode: "TransactionResultCodeTxFailed", }, TransactionOutput{ TxEnvelope: "AAAABQAAAABnzACGTDuJFoxqr+C8NHCe0CHFBXLi+YhhNCIILCIpcgAAAAAAABwgAAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAAAAAACFPY2AAAAfQAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==", @@ -132,10 +133,11 @@ 
func makeTransactionTestOutput() (output []TransactionOutput, err error) { SorobanResourcesInstructions: 0, SorobanResourcesReadBytes: 0, SorobanResourcesWriteBytes: 0, + TransactionResultCode: "TransactionResultCodeTxFeeBumpInnerSuccess", //inner fee bump success }, TransactionOutput{ TxEnvelope: "AAAAAgAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAGQBpLyvsiV6gwAAAAIAAAABAAAAAAAAAAAAAAAAXwardAAAAAEAAAAFAAAACgAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAMCAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", - TxResult: "AAAAAAAAAGT/////AAAAAQAAAAAAAAAAAAAAAAAAAAA=", + TxResult: "AAAAAAAAAGT////5AAAAAA==", TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", TxFeeMeta: "AAAAAA==", TransactionHash: "a87fef5eeb260269c380f2de456aad72b59bb315aaac777860456e09dac0bafb", @@ -160,6 +162,7 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) { SorobanResourcesInstructions: 0, SorobanResourcesReadBytes: 0, SorobanResourcesWriteBytes: 0, + TransactionResultCode: "TransactionResultCodeTxInsufficientBalance", }, } return @@ -360,7 +363,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history Result: xdr.TransactionResult{ FeeCharged: 100, Result: xdr.TransactionResultResult{ - Code: xdr.TransactionResultCodeTxFailed, + Code: xdr.TransactionResultCodeTxInsufficientBalance, Results: genericResultResults, }, }, From 190606ac426e013f9283b4f12b53efd137edaad3 Mon Sep 17 00:00:00 2001 From: sydneynotthecity Date: Tue, 5 Mar 2024 00:12:21 -0600 Subject: [PATCH 02/49] Remove todo --- internal/transform/operation.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index cb22c942..5fe3e8af 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -71,7 +71,6 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti return OperationOutput{}, err } outputOperationResultCode := outputOperationResults[operationIndex].Code.String() - // TODO: add trace code details outputOperationTraceCode, err := mapOperationTrace(*outputOperationResults[operationIndex].Tr) if err != nil { return OperationOutput{}, err From 6fb76872a24b96f9f6bc49c89cf899910fcb7383 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Wed, 6 Mar 2024 11:02:44 -0500 Subject: [PATCH 03/49] Enable diagnostic events --- docker/stellar-core_testnet.cfg | 2 ++ internal/transform/diagnostic_events.go | 20 +++++--------------- 2 files changed, 7 insertions(+), 
15 deletions(-) diff --git a/docker/stellar-core_testnet.cfg b/docker/stellar-core_testnet.cfg index e021da5e..94502b2c 100644 --- a/docker/stellar-core_testnet.cfg +++ b/docker/stellar-core_testnet.cfg @@ -5,6 +5,8 @@ NETWORK_PASSPHRASE="Test SDF Network ; September 2015" # DATABASE="sqlite3://stellar.db" +ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true + # Stellar Testnet validators [[HOME_DOMAINS]] HOME_DOMAIN="testnet.stellar.org" diff --git a/internal/transform/diagnostic_events.go b/internal/transform/diagnostic_events.go index 46161c07..b7ade6b3 100644 --- a/internal/transform/diagnostic_events.go +++ b/internal/transform/diagnostic_events.go @@ -26,18 +26,14 @@ func TransformDiagnosticEvent(transaction ingest.LedgerTransaction, lhe xdr.Ledg return []DiagnosticEventOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err), false } - transactionMeta, ok := transaction.UnsafeMeta.GetV3() - if !ok { - return []DiagnosticEventOutput{}, nil, false - } - - if transactionMeta.SorobanMeta == nil { + diagnosticEvents, err := transaction.GetDiagnosticEvents() + if err != nil { return []DiagnosticEventOutput{}, nil, false } var transformedDiagnosticEvents []DiagnosticEventOutput - for _, diagnoticEvent := range transactionMeta.SorobanMeta.DiagnosticEvents { + for _, diagnoticEvent := range diagnosticEvents { var outputContractId string outputInSuccessfulContractCall := diagnoticEvent.InSuccessfulContractCall @@ -45,15 +41,9 @@ func TransformDiagnosticEvent(transaction ingest.LedgerTransaction, lhe xdr.Ledg outputExtV := event.Ext.V outputType := event.Type.String() outputBodyV := event.Body.V - body, ok := event.Body.GetV0() - if !ok { - continue - } + body, _ := event.Body.GetV0() - outputBody, err := xdr.MarshalBase64(body) - if err != nil { - continue - } + outputBody, _ := xdr.MarshalBase64(body) if event.ContractId != nil { contractId := *event.ContractId From 168a61683a1621a3926f9223c64c751022c91ce4 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Wed, 6 Mar 2024 11:03:49 -0500 Subject: [PATCH 04/49] Add flags to other core cfg files --- docker/stellar-core.cfg | 2 ++ docker/stellar-core_futurenet.cfg | 2 ++ 2 files changed, 4 insertions(+) diff --git a/docker/stellar-core.cfg b/docker/stellar-core.cfg index 449e0806..753d16c7 100644 --- a/docker/stellar-core.cfg +++ b/docker/stellar-core.cfg @@ -2,6 +2,8 @@ # see https://developers.stellar.org/docs/run-core-node/ # for how to properly configure your environment +ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true + #FAILURE_SAFETY is minimum number of nodes that are allowed to fail before you no longer have quorum FAILURE_SAFETY=1 diff --git a/docker/stellar-core_futurenet.cfg b/docker/stellar-core_futurenet.cfg index ecd4873a..8ab94fb0 100644 --- a/docker/stellar-core_futurenet.cfg +++ b/docker/stellar-core_futurenet.cfg @@ -6,6 +6,8 @@ PUBLIC_HTTP_PORT=false NETWORK_PASSPHRASE="Test SDF Future Network ; October 2022" +ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true + # DATABASE="sqlite3://stellar.db" PEER_PORT=11725 From aa830edffd54d1ad9194490d492beaa839eff352 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Wed, 6 Mar 2024 22:11:16 -0500 Subject: [PATCH 05/49] Add case for null operationResultTr for operation_trace_code --- internal/transform/operation.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index 5fe3e8af..5a6d1463 100644 --- a/internal/transform/operation.go +++ 
b/internal/transform/operation.go @@ -70,10 +70,15 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti if !ok { return OperationOutput{}, err } + outputOperationResultCode := outputOperationResults[operationIndex].Code.String() - outputOperationTraceCode, err := mapOperationTrace(*outputOperationResults[operationIndex].Tr) - if err != nil { - return OperationOutput{}, err + var outputOperationTraceCode string + operationResultTr, ok := outputOperationResults[operationIndex].GetTr() + if ok { + outputOperationTraceCode, err = mapOperationTrace(operationResultTr) + if err != nil { + return OperationOutput{}, err + } } transformedOperation := OperationOutput{ From 213cb28a5564ad5b0d02f48d2aae09ffef3b043b Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 11 Mar 2024 15:02:54 -0400 Subject: [PATCH 06/49] Update soroban fees --- internal/transform/schema.go | 3 ++ internal/transform/transaction.go | 50 +++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/internal/transform/schema.go b/internal/transform/schema.go index 601ec5b3..f3f64aee 100644 --- a/internal/transform/schema.go +++ b/internal/transform/schema.go @@ -65,6 +65,9 @@ type TransactionOutput struct { SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"` SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"` TransactionResultCode string `json:"transaction_result_code"` + InclusionFeeBid int64 `json:"inclusion_fee_bid"` + InclusionFeeCharged int64 `json:"inclusion_fee_charged"` + ResourceFeeRefund int64 `json:"resource_fee_refund"` } type LedgerTransactionOutput struct { diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go index a5e6d79f..0191ff1f 100644 --- a/internal/transform/transaction.go +++ b/internal/transform/transaction.go @@ -138,6 +138,9 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe var outputSorobanResourcesInstructions uint32 var outputSorobanResourcesReadBytes uint32 var outputSorobanResourcesWriteBytes uint32 + var outputInclusionFeeBid int64 + var outputInclusionFeeCharged int64 + var outputResourceFeeRefund int64 transactionEnvelopeV1, ok := transaction.Envelope.GetV1() if ok { @@ -147,6 +150,20 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe outputSorobanResourcesInstructions = uint32(sorobanData.Resources.Instructions) outputSorobanResourcesReadBytes = uint32(sorobanData.Resources.ReadBytes) outputSorobanResourcesWriteBytes = uint32(sorobanData.Resources.WriteBytes) + outputInclusionFeeBid = int64(transactionEnvelopeV1.Tx.Fee) - outputResourceFee + + accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(transaction.FeeChanges, sourceAccount.Address()) + initialFeeCharged := accountBalanceStart - accountBalanceEnd + outputInclusionFeeCharged = initialFeeCharged - outputResourceFee + + meta, ok := transaction.UnsafeMeta.GetV3() + if ok { + accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(meta.TxChangesAfter, sourceAccount.Address()) + outputResourceFeeRefund = accountBalanceEnd - accountBalanceStart + } + + // TODO: FeeCharged is calculated incorrectly in protocol 20. 
Remove when protocol is updated and the bug is fixed + outputFeeCharged = outputFeeCharged - outputResourceFeeRefund } } @@ -187,6 +204,9 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe SorobanResourcesReadBytes: outputSorobanResourcesReadBytes, SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes, TransactionResultCode: outputTxResultCode, + InclusionFeeBid: outputInclusionFeeBid, + InclusionFeeCharged: outputInclusionFeeCharged, + ResourceFeeRefund: outputResourceFeeRefund, } // Add Muxed Account Details, if exists @@ -216,6 +236,36 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe return transformedTransaction, nil } +func getAccountBalanceFromLedgerEntryChanges(changes xdr.LedgerEntryChanges, sourceAccountAddress string) (int64, int64) { + var accountBalanceStart int64 + var accountBalanceEnd int64 + + for _, change := range changes { + switch change.Type { + case xdr.LedgerEntryChangeTypeLedgerEntryUpdated: + accountEntry, ok := change.Updated.Data.GetAccount() + if !ok { + continue + } + + if accountEntry.AccountId.Address() == sourceAccountAddress { + accountBalanceEnd = int64(accountEntry.Balance) + } + case xdr.LedgerEntryChangeTypeLedgerEntryState: + accountEntry, ok := change.State.Data.GetAccount() + if !ok { + continue + } + + if accountEntry.AccountId.Address() == sourceAccountAddress { + accountBalanceStart = int64(accountEntry.Balance) + } + } + } + + return accountBalanceStart, accountBalanceEnd +} + func formatSigners(s []xdr.SignerKey) pq.StringArray { if s == nil { return nil From 03b814a6d4b20c0df40e3fbf21fc9d8392ff7de2 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 11 Mar 2024 20:16:27 -0400 Subject: [PATCH 07/49] Handle fee bump --- internal/transform/transaction.go | 48 +++++++++++++++++-------------- 1 file changed, 27 insertions(+), 21 deletions(-) diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go index 0191ff1f..60a1a2b2 100644 --- a/internal/transform/transaction.go +++ b/internal/transform/transaction.go @@ -134,6 +134,8 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe // Note: MaxFee and FeeCharged is the sum of base transaction fees + Soroban fees // Breakdown of Soroban fees can be calculated by the config_setting resource pricing * the resources used + var sorobanData xdr.SorobanTransactionData + var hasSorobanData bool var outputResourceFee int64 var outputSorobanResourcesInstructions uint32 var outputSorobanResourcesReadBytes uint32 @@ -142,29 +144,33 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe var outputInclusionFeeCharged int64 var outputResourceFeeRefund int64 - transactionEnvelopeV1, ok := transaction.Envelope.GetV1() - if ok { - sorobanData, ok := transactionEnvelopeV1.Tx.Ext.GetSorobanData() - if ok { - outputResourceFee = int64(sorobanData.ResourceFee) - outputSorobanResourcesInstructions = uint32(sorobanData.Resources.Instructions) - outputSorobanResourcesReadBytes = uint32(sorobanData.Resources.ReadBytes) - outputSorobanResourcesWriteBytes = uint32(sorobanData.Resources.WriteBytes) - outputInclusionFeeBid = int64(transactionEnvelopeV1.Tx.Fee) - outputResourceFee - - accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(transaction.FeeChanges, sourceAccount.Address()) - initialFeeCharged := accountBalanceStart - accountBalanceEnd - outputInclusionFeeCharged = initialFeeCharged - outputResourceFee - - meta, ok := 
transaction.UnsafeMeta.GetV3() - if ok { - accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(meta.TxChangesAfter, sourceAccount.Address()) - outputResourceFeeRefund = accountBalanceEnd - accountBalanceStart - } + // Soroban data can exist in V1 and FeeBump transactionEnvelopes + switch transaction.Envelope.Type { + case xdr.EnvelopeTypeEnvelopeTypeTx: + sorobanData, hasSorobanData = transaction.Envelope.V1.Tx.Ext.GetSorobanData() + case xdr.EnvelopeTypeEnvelopeTypeTxFeeBump: + sorobanData, hasSorobanData = transaction.Envelope.FeeBump.Tx.InnerTx.V1.Tx.Ext.GetSorobanData() + } + + if hasSorobanData { + outputResourceFee = int64(sorobanData.ResourceFee) + outputSorobanResourcesInstructions = uint32(sorobanData.Resources.Instructions) + outputSorobanResourcesReadBytes = uint32(sorobanData.Resources.ReadBytes) + outputSorobanResourcesWriteBytes = uint32(sorobanData.Resources.WriteBytes) + outputInclusionFeeBid = int64(transaction.Envelope.Fee()) - outputResourceFee - // TODO: FeeCharged is calculated incorrectly in protocol 20. Remove when protocol is updated and the bug is fixed - outputFeeCharged = outputFeeCharged - outputResourceFeeRefund + accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(transaction.FeeChanges, sourceAccount.Address()) + initialFeeCharged := accountBalanceStart - accountBalanceEnd + outputInclusionFeeCharged = initialFeeCharged - outputResourceFee + + meta, ok := transaction.UnsafeMeta.GetV3() + if ok { + accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(meta.TxChangesAfter, sourceAccount.Address()) + outputResourceFeeRefund = accountBalanceEnd - accountBalanceStart } + + // TODO: FeeCharged is calculated incorrectly in protocol 20. Remove when protocol is updated and the bug is fixed + outputFeeCharged = outputFeeCharged - outputResourceFeeRefund } outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime) From 6408d337fa8d79fc460df22e2868192cf104d024 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 23:30:54 +0000 Subject: [PATCH 08/49] Bump google.golang.org/protobuf from 1.32.0 to 1.33.0 Bumps google.golang.org/protobuf from 1.32.0 to 1.33.0. --- updated-dependencies: - dependency-name: google.golang.org/protobuf dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8c390d07..aa4d197e 100644 --- a/go.mod +++ b/go.mod @@ -85,7 +85,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 // indirect google.golang.org/grpc v1.60.1 // indirect - google.golang.org/protobuf v1.32.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index b2038cdc..9817f544 100644 --- a/go.sum +++ b/go.sum @@ -649,8 +649,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= From d59fe3cba10b45ccee6b6132602cc1eea5de27b3 Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Tue, 19 Mar 2024 11:44:19 -0300 Subject: [PATCH 09/49] Add files via upload --- .github/workflows/release-drafter.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .github/workflows/release-drafter.yml diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 00000000..f627cd0e --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,5 @@ +template: | + ## What's Changed + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION \ No newline at end of file From 9371f866be7b73d2234c9774a638dc6080445873 Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Tue, 19 Mar 2024 11:44:53 -0300 Subject: [PATCH 10/49] Delete .github/workflows/release-drafter.yml --- .github/workflows/release-drafter.yml | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 .github/workflows/release-drafter.yml diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml deleted file mode 100644 index f627cd0e..00000000 --- a/.github/workflows/release-drafter.yml +++ /dev/null @@ -1,5 +0,0 @@ -template: | - ## What's Changed - $CHANGES - - **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION \ No newline at end of file From b7b6318437aff9564cfc9df9c812382b1abb67e6 Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Tue, 19 Mar 2024 11:45:09 -0300 Subject: [PATCH 11/49] Add files via upload --- .github/release-drafter.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .github/release-drafter.yml diff --git a/.github/release-drafter.yml 
b/.github/release-drafter.yml new file mode 100644 index 00000000..f627cd0e --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,5 @@ +template: | + ## What's Changed + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION \ No newline at end of file From 95389f91e52cbf43f1b80c0ed892f2ab1b1a7fd8 Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Tue, 19 Mar 2024 11:45:56 -0300 Subject: [PATCH 12/49] Add files via upload --- .github/workflows/release.yml | 37 +++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..15581c5d --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,37 @@ +name: Release Drafter and Publisher + +on: + push: + tags: + - v* + +permissions: + contents: read + +jobs: + new_release: + permissions: + # write permission is required to create a github release + contents: write + # write permission is required for autolabeler + # otherwise, read permission is required at least + pull-requests: write + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so I had to abbreviate it + - name: Get latest abbreviated tag + id: gettag + run: echo ::set-output name=TAG::$(git describe --tags --abbrev=7) + + - uses: release-drafter/release-drafter@v5 + with: + commitish: master + name: "stellar-dbt ${{ steps.gettag.outputs.TAG }}" + tag: ${{ github.ref }} + publish: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 63acee845ece4ac8c9c66af37c9c84f218ec0b1a Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Wed, 20 Mar 2024 13:27:38 -0300 Subject: [PATCH 13/49] Update release.yml --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 15581c5d..e84156a5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -30,8 +30,8 @@ jobs: - uses: release-drafter/release-drafter@v5 with: commitish: master - name: "stellar-dbt ${{ steps.gettag.outputs.TAG }}" + name: "stellar-etl ${{ steps.gettag.outputs.TAG }}" tag: ${{ github.ref }} publish: true env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From c2e5a1cf15944a12ca4ed01824a5467079b73bca Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Thu, 21 Mar 2024 16:53:28 -0400 Subject: [PATCH 14/49] Fix fee charged calculation --- internal/transform/transaction.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go index 60a1a2b2..9d105bf4 100644 --- a/internal/transform/transaction.go +++ b/internal/transform/transaction.go @@ -170,7 +170,7 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe } // TODO: FeeCharged is calculated incorrectly in protocol 20. 
Remove when protocol is updated and the bug is fixed - outputFeeCharged = outputFeeCharged - outputResourceFeeRefund + outputFeeCharged = outputResourceFee - outputResourceFeeRefund + outputInclusionFeeCharged } outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime) From 585db84c5f87232104951058e5f8794098abbc6c Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Sun, 24 Mar 2024 15:12:11 -0400 Subject: [PATCH 15/49] adding ledgerbackend datastore txmeta as a data source --- cmd/export_account_signers.go | 4 +- cmd/export_accounts.go | 4 +- cmd/export_all_history.go | 6 +- cmd/export_assets.go | 12 ++- cmd/export_claimable_balances.go | 4 +- cmd/export_config_setting.go | 6 +- cmd/export_contract_code.go | 6 +- cmd/export_contract_data.go | 6 +- cmd/export_diagnostic_events.go | 6 +- cmd/export_effects.go | 6 +- cmd/export_ledger_entry_changes.go | 29 +++-- cmd/export_ledger_transaction.go | 6 +- cmd/export_ledgers.go | 13 ++- cmd/export_liquidity_pools.go | 4 +- cmd/export_offers.go | 4 +- cmd/export_operations.go | 6 +- cmd/export_orderbooks.go | 4 +- cmd/export_trades.go | 6 +- cmd/export_transactions.go | 6 +- cmd/export_trustlines.go | 4 +- cmd/export_ttl.go | 6 +- docker/Dockerfile | 4 +- go.mod | 44 +++++--- go.sum | 125 ++++++++++++++++------ internal/input/all_history.go | 5 +- internal/input/assets.go | 15 +-- internal/input/assets_history_archive.go | 51 +++++++++ internal/input/changes.go | 15 +-- internal/input/changes_test.go | 2 +- internal/input/ledger_range.go | 2 +- internal/input/ledgers.go | 54 +++++++++- internal/input/ledgers_history_archive.go | 34 ++++++ internal/input/operations.go | 14 ++- internal/input/trades.go | 17 +-- internal/input/transactions.go | 7 +- internal/utils/main.go | 54 +++++++++- 36 files changed, 423 insertions(+), 168 deletions(-) create mode 100644 internal/input/assets_history_archive.go create mode 100644 internal/input/ledgers_history_archive.go diff --git a/cmd/export_account_signers.go b/cmd/export_account_signers.go index 73d01517..3c32cbc4 100644 --- a/cmd/export_account_signers.go +++ b/cmd/export_account_signers.go @@ -22,9 +22,9 @@ should be used in an initial data dump. In order to get account information with the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_accounts.go b/cmd/export_accounts.go index 9c6d7f76..85d9bcc1 100644 --- a/cmd/export_accounts.go +++ b/cmd/export_accounts.go @@ -22,9 +22,9 @@ should be used in an initial data dump. 
In order to get account information with the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_all_history.go b/cmd/export_all_history.go index 2d4f12f7..d2d86299 100644 --- a/cmd/export_all_history.go +++ b/cmd/export_all_history.go @@ -20,13 +20,13 @@ This is a temporary command used to reduce the amount of requests to history arc in order to mitigate egress costs for the entity hosting history archives.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - allHistory, err := input.GetAllHistory(startNum, endNum, limit, env) + allHistory, err := input.GetAllHistory(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read all history: ", err) } diff --git a/cmd/export_assets.go b/cmd/export_assets.go index a20fc2da..3c1b3773 100644 --- a/cmd/export_assets.go +++ b/cmd/export_assets.go @@ -16,14 +16,22 @@ var assetsCmd = &cobra.Command{ Long: `Exports the assets that are created from payment operations over a specified ledger range`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) outFile := mustOutFile(path) - paymentOps, err := input.GetPaymentOperations(startNum, endNum, limit, isTest, isFuture) + var paymentOps []input.AssetTransformInput + var err error + + if useCaptiveCore { + paymentOps, err = input.GetPaymentOperationsHistoryArchive(startNum, endNum, limit, isTest, isFuture) + } else { + paymentOps, err = input.GetPaymentOperations(startNum, endNum, limit, env, useCaptiveCore) + } if err != nil { cmdLogger.Fatal("could not read asset: ", err) } diff --git a/cmd/export_claimable_balances.go b/cmd/export_claimable_balances.go index 89af046c..f552c913 100644 --- a/cmd/export_claimable_balances.go +++ b/cmd/export_claimable_balances.go @@ -22,9 +22,9 @@ var claimableBalancesCmd = &cobra.Command{ the 
export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_config_setting.go b/cmd/export_config_setting.go index 4de5d087..892d96eb 100644 --- a/cmd/export_config_setting.go +++ b/cmd/export_config_setting.go @@ -16,15 +16,15 @@ import ( var configSettingCmd = &cobra.Command{ Use: "export_config_setting", Short: "Exports the config setting information.", - Long: `Exports historical config settings data from the genesis ledger to the provided end-ledger to an output file. + Long: `Exports historical config settings data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_contract_code.go b/cmd/export_contract_code.go index 07e2ffee..4ff1d037 100644 --- a/cmd/export_contract_code.go +++ b/cmd/export_contract_code.go @@ -16,15 +16,15 @@ import ( var codeCmd = &cobra.Command{ Use: "export_contract_code", Short: "Exports the contract code information.", - Long: `Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file. + Long: `Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. 
In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_contract_data.go b/cmd/export_contract_data.go index fcd759d9..46a427fb 100644 --- a/cmd/export_contract_data.go +++ b/cmd/export_contract_data.go @@ -16,15 +16,15 @@ import ( var dataCmd = &cobra.Command{ Use: "export_contract_data", Short: "Exports the contract data information made from the genesis ledger to a specified endpoint.", - Long: `Exports historical contract data from the genesis ledger to the provided end-ledger to an output file. + Long: `Exports historical contract data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_diagnostic_events.go b/cmd/export_diagnostic_events.go index 17655bd9..1a888ff9 100644 --- a/cmd/export_diagnostic_events.go +++ b/cmd/export_diagnostic_events.go @@ -16,13 +16,13 @@ var diagnosticEventsCmd = &cobra.Command{ Long: `Exports the diagnostic events over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - transactions, err := input.GetTransactions(startNum, endNum, limit, env) + transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read transactions: ", err) } diff --git a/cmd/export_effects.go b/cmd/export_effects.go index 3fff5009..3ed1df75 100644 --- a/cmd/export_effects.go +++ 
b/cmd/export_effects.go @@ -14,13 +14,13 @@ var effectsCmd = &cobra.Command{ Long: "Exports the effects data over a specified range to an output file.", Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - transactions, err := input.GetTransactions(startNum, endNum, limit, env) + transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatalf("could not read transactions in [%d, %d] (limit=%d): %v", startNum, endNum, limit, err) } diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index 21489aff..bd37e29a 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -1,12 +1,14 @@ package cmd import ( + "context" "fmt" "math" "os" "path/filepath" "github.com/spf13/cobra" + "github.com/stellar/go/ingest/ledgerbackend" "github.com/stellar/go/xdr" "github.com/stellar/stellar-etl/internal/input" "github.com/stellar/stellar-etl/internal/transform" @@ -26,14 +28,16 @@ confirmed by the Stellar network. If no data type flags are set, then by default all of them are exported. If any are set, it is assumed that the others should not be exported.`, Run: func(cmd *cobra.Command, args []string) { - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) + _, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) exports := utils.MustExportTypeFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) + cmd.Flags() + err := os.MkdirAll(outputFolder, os.ModePerm) if err != nil { cmdLogger.Fatalf("unable to mkdir %s: %v", outputFolder, err) @@ -46,7 +50,7 @@ be exported.`, // If none of the export flags are set, then we assume that everything should be exported allFalse := true for _, value := range exports { - if true == value { + if value { allFalse = false break } @@ -62,19 +66,15 @@ be exported.`, cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)") } - execPath, err = filepath.Abs(execPath) - if err != nil { - cmdLogger.Fatal("could not get absolute filepath for stellar-core executable: ", err) - } - - configPath, err = filepath.Abs(configPath) + ctx := context.Background() + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { - cmdLogger.Fatal("could not get absolute filepath for the config file: ", err) + cmdLogger.Fatal("error creating a cloud storage backend: ", err) } 
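The export_ledger_entry_changes hunk below swaps the prepared captive core instance for a generic ledger backend returned by the new utils.CreateLedgerBackend helper. A minimal sketch, assuming that helper returns a ledgerbackend.LedgerBackend from the stellar/go SDK, of how a bounded range is prepared and walked; the export/transform step is elided:

    package example

    import (
        "context"

        "github.com/stellar/go/ingest/ledgerbackend"
    )

    // readLedgers prepares a bounded range on any LedgerBackend (captive core or
    // datastore-backed) and reads it sequentially.
    func readLedgers(ctx context.Context, backend ledgerbackend.LedgerBackend, start, end uint32) error {
        if err := backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)); err != nil {
            return err
        }
        defer backend.Close()

        for seq := start; seq <= end; seq++ {
            lcm, err := backend.GetLedger(ctx, seq)
            if err != nil {
                return err
            }
            _ = lcm // hand the xdr.LedgerCloseMeta to the transform/export stage
        }
        return nil
    }
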
- core, err := input.PrepareCaptiveCore(execPath, configPath, startNum, endNum, env) + err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(startNum, endNum)) if err != nil { - cmdLogger.Fatal("error creating a prepared captive core instance: ", err) + cmdLogger.Fatal("error preparing ledger range for cloud storage backend: ", err) } if endNum == 0 { @@ -83,7 +83,7 @@ be exported.`, changeChan := make(chan input.ChangeBatch) closeChan := make(chan int) - go input.StreamChanges(core, startNum, endNum, batchSize, changeChan, closeChan, env, cmdLogger) + go input.StreamChanges(&backend, startNum, endNum, batchSize, changeChan, closeChan, env, cmdLogger) for { select { @@ -295,7 +295,6 @@ func init() { utils.AddCloudStorageFlags(exportLedgerEntryChangesCmd.Flags()) exportLedgerEntryChangesCmd.MarkFlagRequired("start-ledger") - exportLedgerEntryChangesCmd.MarkFlagRequired("core-executable") /* Current flags: start-ledger: the ledger sequence number for the beginning of the export period diff --git a/cmd/export_ledger_transaction.go b/cmd/export_ledger_transaction.go index b8dca36e..c08a2017 100644 --- a/cmd/export_ledger_transaction.go +++ b/cmd/export_ledger_transaction.go @@ -16,13 +16,13 @@ var ledgerTransactionCmd = &cobra.Command{ Long: `Exports the ledger_transaction transaction data over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - ledgerTransaction, err := input.GetTransactions(startNum, endNum, limit, env) + ledgerTransaction, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read ledger_transaction: ", err) } diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go index 0bec34d2..727d9ae5 100644 --- a/cmd/export_ledgers.go +++ b/cmd/export_ledgers.go @@ -5,6 +5,7 @@ import ( "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/stellar/go/historyarchive" "github.com/stellar/stellar-etl/internal/input" "github.com/stellar/stellar-etl/internal/transform" "github.com/stellar/stellar-etl/internal/utils" @@ -16,12 +17,20 @@ var ledgersCmd = &cobra.Command{ Long: `Exports ledger data within the specified range to an output file. 
Encodes ledgers as JSON objects and exports them to the output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - ledgers, err := input.GetLedgers(startNum, endNum, limit, isTest, isFuture) + var ledgers []historyarchive.Ledger + var err error + + if useCaptiveCore { + ledgers, err = input.GetLedgersHistoryArchive(startNum, endNum, limit, isTest, isFuture) + } else { + ledgers, err = input.GetLedgers(startNum, endNum, limit, env, useCaptiveCore) + } if err != nil { cmdLogger.Fatal("could not read ledgers: ", err) } diff --git a/cmd/export_liquidity_pools.go b/cmd/export_liquidity_pools.go index 7130b3e1..f05cab03 100644 --- a/cmd/export_liquidity_pools.go +++ b/cmd/export_liquidity_pools.go @@ -22,9 +22,9 @@ should be used in an initial data dump. In order to get liqudity pools informati the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_offers.go b/cmd/export_offers.go index c1827c96..8f0ea5c6 100644 --- a/cmd/export_offers.go +++ b/cmd/export_offers.go @@ -23,9 +23,9 @@ var offersCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_operations.go b/cmd/export_operations.go index e82a2942..9b84265a 100644 --- a/cmd/export_operations.go +++ b/cmd/export_operations.go @@ -16,13 +16,13 @@ var operationsCmd = &cobra.Command{ Long: `Exports the operations data over a specified range. 
Each operation is an individual command that mutates the Stellar ledger.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - operations, err := input.GetOperations(startNum, endNum, limit, env) + operations, err := input.GetOperations(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read operations: ", err) } diff --git a/cmd/export_orderbooks.go b/cmd/export_orderbooks.go index 2c39e8d6..91e924ab 100644 --- a/cmd/export_orderbooks.go +++ b/cmd/export_orderbooks.go @@ -27,9 +27,9 @@ var exportOrderbooksCmd = &cobra.Command{ If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are confirmed by the Stellar network. In this unbounded case, a stellar-core config path is required to utilize the Captive Core toml.`, Run: func(cmd *cobra.Command, args []string) { - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_trades.go b/cmd/export_trades.go index 99ff880e..551263e7 100644 --- a/cmd/export_trades.go +++ b/cmd/export_trades.go @@ -19,13 +19,13 @@ var tradesCmd = &cobra.Command{ Long: `Exports trade data within the specified range to an output file`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - trades, err := input.GetTrades(startNum, endNum, limit, env) + trades, err := input.GetTrades(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read trades ", err) } diff --git a/cmd/export_transactions.go b/cmd/export_transactions.go index 18717bd0..cd37e247 100644 --- a/cmd/export_transactions.go +++ b/cmd/export_transactions.go @@ -16,13 +16,13 @@ var transactionsCmd = &cobra.Command{ Long: `Exports the transaction data over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { 
cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - transactions, err := input.GetTransactions(startNum, endNum, limit, env) + transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) if err != nil { cmdLogger.Fatal("could not read transactions: ", err) } diff --git a/cmd/export_trustlines.go b/cmd/export_trustlines.go index 68ab838d..8b1315b5 100644 --- a/cmd/export_trustlines.go +++ b/cmd/export_trustlines.go @@ -23,9 +23,9 @@ var trustlinesCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_ttl.go b/cmd/export_ttl.go index c55ef571..ce689fda 100644 --- a/cmd/export_ttl.go +++ b/cmd/export_ttl.go @@ -16,15 +16,15 @@ import ( var ttlCmd = &cobra.Command{ Use: "export_ttl", Short: "Exports the ttl information.", - Long: `Exports historical ttl data from the genesis ledger to the provided end-ledger to an output file. + Long: `Exports historical ttl data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. 
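The transactions and trustlines hunks above only thread the new useCaptiveCore flag through to input.GetTransactions. For context, a hedged sketch of how a ledger obtained from either backend is typically unpacked into LedgerTransaction values with the stellar/go ingest package; the helper name and wiring here are assumptions for illustration, not code from this patch:

    package example

    import (
        "io"

        "github.com/stellar/go/ingest"
        "github.com/stellar/go/xdr"
    )

    // transactionsFromLedger unpacks every transaction in one LedgerCloseMeta,
    // independent of whether the meta came from captive core or a datastore.
    func transactionsFromLedger(passphrase string, lcm xdr.LedgerCloseMeta) ([]ingest.LedgerTransaction, error) {
        reader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(passphrase, lcm)
        if err != nil {
            return nil, err
        }
        defer reader.Close()

        var txs []ingest.LedgerTransaction
        for {
            tx, err := reader.Read()
            if err == io.EOF {
                break
            }
            if err != nil {
                return nil, err
            }
            txs = append(txs, tx)
        }
        return txs, nil
    }
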
In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) diff --git a/docker/Dockerfile b/docker/Dockerfile index 2fc7d358..9e36253e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,7 +1,5 @@ # stage 1: build stellar-etl app -# golang 1.19, pinned by sha digest -#FROM golang@sha256:04f76f956e51797a44847e066bde1341c01e09054d3878ae88c7f77f09897c4d AS build -FROM golang:1.20.5-buster AS build +FROM golang:1.22.1-alpine AS build WORKDIR /usr/src/etl diff --git a/go.mod b/go.mod index 8c390d07..566b157d 100644 --- a/go.mod +++ b/go.mod @@ -1,9 +1,11 @@ module github.com/stellar/stellar-etl -go 1.19 +go 1.22 + +toolchain go1.22.1 require ( - cloud.google.com/go/storage v1.32.0 + cloud.google.com/go/storage v1.37.0 github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 github.com/guregu/null v4.0.0+incompatible github.com/lib/pq v1.10.9 @@ -13,12 +15,12 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546 + github.com/stellar/go v0.0.0-20240317052942-f34a84277137 github.com/stretchr/testify v1.8.4 ) require ( - cloud.google.com/go v0.111.0 // indirect + cloud.google.com/go v0.112.0 // indirect cloud.google.com/go/compute v1.23.3 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect cloud.google.com/go/iam v1.1.5 // indirect @@ -26,16 +28,19 @@ require ( github.com/Microsoft/go-winio v0.6.1 // indirect github.com/aws/aws-sdk-go v1.45.26 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/djherbis/fscache v0.10.1 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/go-errors/errors v1.5.1 // indirect - github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/logr v1.3.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/s2a-go v0.1.7 // indirect - github.com/google/uuid v1.4.0 // indirect + github.com/google/uuid v1.5.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect @@ -66,26 +71,31 @@ require ( github.com/stretchr/objx v0.5.1 // indirect github.com/subosito/gotenv v1.6.0 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.19.0 // indirect - go.opentelemetry.io/otel/metric v1.19.0 // indirect - go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // 
indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect + go.opentelemetry.io/otel v1.21.0 // indirect + go.opentelemetry.io/otel/metric v1.21.0 // indirect + go.opentelemetry.io/otel/trace v1.21.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.17.0 // indirect + golang.org/x/crypto v0.18.0 // indirect golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect golang.org/x/mod v0.13.0 // indirect - golang.org/x/net v0.19.0 // indirect - golang.org/x/oauth2 v0.13.0 // indirect - golang.org/x/sync v0.4.0 // indirect + golang.org/x/net v0.20.0 // indirect + golang.org/x/oauth2 v0.16.0 // indirect + golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect + golang.org/x/time v0.5.0 // indirect golang.org/x/tools v0.14.0 // indirect - google.golang.org/api v0.149.0 // indirect + google.golang.org/api v0.157.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20231212172506-995d672761c0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 // indirect + google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac // indirect google.golang.org/grpc v1.60.1 // indirect google.golang.org/protobuf v1.32.0 // indirect + gopkg.in/djherbis/atime.v1 v1.0.0 // indirect + gopkg.in/djherbis/stream.v1 v1.3.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index b2038cdc..17d768c2 100644 --- a/go.sum +++ b/go.sum @@ -17,8 +17,8 @@ cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHOb cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM= -cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU= +cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= +cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -43,8 +43,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.32.0 h1:5w6DxEGOnktmJHarxAOUywxVW9lbNWIzlzzUltG/3+o= -cloud.google.com/go/storage v1.32.0/go.mod h1:Hhh/dogNRGca7IWv1RC2YqEn0c0G77ctA/OxflYkiD8= +cloud.google.com/go/storage v1.37.0 h1:WI8CsaFO8Q9KjPVtsZ5Cmi0dXV25zMoX0FklT7c3Jm4= +cloud.google.com/go/storage v1.37.0/go.mod h1:i34TiT2IhiNDmcj65PqwCjcoUX7Z5pLzS8DEmoiFq1k= 
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -53,11 +53,15 @@ github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA4 github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f h1:zvClvFQwU++UpIUBGC8YmDlfhUrweEy1R1Fj1gu5iIM= +github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= +github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/aws/aws-sdk-go v1.45.26 h1:PJ2NJNY5N/yeobLYe1Y+xLdavBi67ZI8gvph6ftwVCg= github.com/aws/aws-sdk-go v1.45.26/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -68,6 +72,8 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k= +github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -75,26 +81,36 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/djherbis/fscache v0.10.1 h1:hDv+RGyvD+UDKyRYuLoVNbuRTnf2SrA2K3VyR1br9lk= +github.com/djherbis/fscache v0.10.1/go.mod h1:yyPYtkNnnPXsW+81lAcQS6yab3G2CRfnPLotBvtbf0c= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod 
h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= +github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= github.com/fatih/structs v1.0.0 h1:BrX964Rv5uQ3wwS+KRUAJCBBw5PQmgJfJ6v4yly5QwU= +github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955 h1:gmtGRvSexPU4B1T/yYo0sLOKzER1YT+b4kPxPpm0Ty4= +github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955/go.mod h1:vmp8DIyckQMXOPl0AQVHt+7n5h7Gb7hS6CUydiV8QeA= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= +github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= -github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= +github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= @@ -144,12 +160,15 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5 
h1:oERTZ1buOUYlpmKaqlO5fYmz8cZ1rYu5DieJzF4ZVmU= +github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= +github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -164,8 +183,8 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4 github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= -github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= @@ -174,6 +193,7 @@ github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56 github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= +github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw= github.com/guregu/null v4.0.0+incompatible/go.mod h1:ePGpQaN9cw0tj45IR5E5ehMvsFlLlQZAkkOXZurJ3NM= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -187,9 +207,11 @@ github.com/holiman/uint256 v1.2.3/go.mod h1:SC8Ryt4n+UBbPbIBKaG9zbbDlp4jOru9xFZm github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk= +github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod 
h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31 h1:Aw95BEvxJ3K6o9GGv5ppCd1P8hkeIeEJ30FO+OhOJpM= +github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31/go.mod h1:ks+b9deReOc7jgqp+e7LuFiCBH6Rm5hL32cLcEAArb4= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= @@ -200,12 +222,15 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= +github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= @@ -216,6 +241,7 @@ github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739 h1:ykXz+pRRTibcSjG1yRhpdSHInF8yZY/mfn+Rz2Nd1rE= +github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739/go.mod h1:zUx1mhth20V3VKgL5jbd1BSQcW4Fy6Qs4PZvQwRFwzM= github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= @@ -225,9 +251,13 @@ github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db h1:eZgFHVkk9uOTaOQLC6tgjkzdp7Ays8eEVecBcfHZlJQ= +github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= 
+github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= @@ -249,6 +279,7 @@ github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ= github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U= @@ -257,6 +288,7 @@ github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWR github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 h1:S4OC0+OBKz6mJnzuHioeEat74PuQ4Sgvbf8eus695sc= github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2/go.mod h1:8zLRYR5npGjaOXgPSKat5+oOh+UHd8OdbS18iqX9F6Y= github.com/sergi/go-diff v0.0.0-20161205080420-83532ca1c1ca h1:oR/RycYTFTVXzND5r4FdsvbnBn0HJXSVeNAnwaTXRwk= +github.com/sergi/go-diff v0.0.0-20161205080420-83532ca1c1ca/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= @@ -271,8 +303,8 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= -github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546 h1:FCLk33pNq5q/A5DfaBMFvyv4V2V0rxIgDurOf2sQHlw= -github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546/go.mod h1:Ka4piwZT4Q9799f+BZeaKkAiYo4UpIWXyu0oSUbCVfM= +github.com/stellar/go v0.0.0-20240317052942-f34a84277137 h1:s/RK1BOa+KRzgl32f86F7iO3jk+XQu59vo1bli6vRHo= +github.com/stellar/go v0.0.0-20240317052942-f34a84277137/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -293,14 +325,23 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= 
+github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.34.0 h1:d3AAQJ2DRcxJYHm7OXNXtXt2as1vMDfxeIcFvhmGGm4= +github.com/valyala/fasthttp v1.34.0/go.mod h1:epZA5N+7pY6ZaEKRmstzOuYJx9HI8DI1oaCGZpdH4h0= github.com/xdrpp/goxdr v0.1.1 h1:E1B2c6E8eYhOVyd7yEpOyopzTPirUeF6mVOfXfGyJyc= +github.com/xdrpp/goxdr v0.1.1/go.mod h1:dXo1scL/l6s7iME1gxHWo2XCppbHEKZS7m/KyYWkNzA= github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076 h1:KM4T3G70MiR+JtqplcYkNVoNz7pDwYaBxWBXQK804So= +github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c h1:XZWnr3bsDQWAZg4Ne+cPoXRPILrNlPNQfxBuwLl43is= +github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce h1:cVSRGH8cOveJNwFEEZLXtB+XMnRqKLjUP6V/ZFYQCXI= +github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb h1:06WAhQa+mYv7BiOk13B/ywyTlkoE/S7uu6TBKU6FHnE= +github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d h1:yJIizrfO599ot2kQ6Af1enICnwBD3XoxgX3MrMwot2M= +github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce h1:888GrqRxabUce7lj4OaoShPxodm3kXOMpSa85wdYzfY= +github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -314,13 +355,18 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= -go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= -go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= -go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= -go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= -go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= -go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 h1:SpGay3w+nEwMpfVnbqOLH5gY52/foP8RE8UzTZ1pdSE= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1/go.mod h1:4UoMYEZOC0yN/sPGH76KPkkU7zgiEWYWL9vwmbnTJPE= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 h1:aFJWCqJMNjENlcleuuOkGAPH82y0yULBScfXcIEdS24= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1/go.mod 
h1:sEGXWArGqc3tVa+ekntsN65DmVbVeW+7lTKTjZF3/Fo= +go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= +go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo= +go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= +go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= +go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= +go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E= +go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc= +go.opentelemetry.io/otel/trace v1.21.0/go.mod h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -331,8 +377,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -406,8 +452,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -417,8 +463,8 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY= -golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0= +golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= +golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -430,8 +476,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= -golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -492,6 +538,8 @@ golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -546,7 +594,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod 
h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -566,8 +615,8 @@ google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz513 google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.149.0 h1:b2CqT6kG+zqJIVKRQ3ELJVLN1PwHZ6DJ3dW8yl82rgY= -google.golang.org/api v0.149.0/go.mod h1:Mwn1B7JTXrzXtnvmzQE2BD6bYZQ8DShKZDZbeN9I7qI= +google.golang.org/api v0.157.0 h1:ORAeqmbrrozeyw5NjnMxh7peHO0UzV4wWYSwZeCUb20= +google.golang.org/api v0.157.0/go.mod h1:+z4v4ufbZ1WEpld6yMGHyggs+PmAHiaLNj5ytP3N01g= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -613,12 +662,12 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20231212172506-995d672761c0 h1:YJ5pD9rF8o9Qtta0Cmy9rdBwkSjrTCT6XTiUQVOtIos= -google.golang.org/genproto v0.0.0-20231212172506-995d672761c0/go.mod h1:l/k7rMz0vFTBPy+tFSGvXEd3z+BcoG1k7EHbqm+YBsY= -google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3 h1:EWIeHfGuUf00zrVZGEgYFxok7plSAXBGcH7NNdMAWvA= -google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3/go.mod h1:k2dtGpRrbsSyKcNPKKI5sstZkrNCZwpU/ns96JoHbGg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 h1:6G8oQ016D88m1xAKljMlBOOGWDZkes4kMhgGFlf8WcQ= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917/go.mod h1:xtjpI3tXFPP051KaWnhvxkiubL/6dJ18vLVf7q2pTOU= +google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac h1:ZL/Teoy/ZGnzyrqK/Optxxp2pmVh+fmJ97slxSRyzUg= +google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:+Rvu7ElI+aLzyDQhpHMFMMltsD6m7nqpuWDd2CwJw3k= +google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457 h1:KHBtwE+eQc3+NxpjmRFlQ3pJQ2FNnhhgB9xOV8kyBuU= +google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac h1:nUQEQmH/csSvFECKYRv6HWEyypysidKl2I6Qpsglq/0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -654,14 +703,22 @@ google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHh gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 
v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/djherbis/atime.v1 v1.0.0 h1:eMRqB/JrLKocla2PBPKgQYg/p5UG4L6AUAs92aP7F60= +gopkg.in/djherbis/atime.v1 v1.0.0/go.mod h1:hQIUStKmJfvf7xdh/wtK84qe+DsTV5LnA9lzxxtPpJ8= +gopkg.in/djherbis/stream.v1 v1.3.1 h1:uGfmsOY1qqMjQQphhRBSGLyA9qumJ56exkRu9ASTjCw= +gopkg.in/djherbis/stream.v1 v1.3.1/go.mod h1:aEV8CBVRmSpLamVJfM903Npic1IKmb2qS30VAZ+sssg= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0 h1:r5ptJ1tBxVAeqw4CrYWhXIMr0SybY3CDHuIbCg5CFVw= +gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0/go.mod h1:WtiW9ZA1LdaWqtQRo1VbIL/v4XZ8NDta+O/kSpGgVek= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/input/all_history.go b/internal/input/all_history.go index 35fab98f..f38f5596 100644 --- a/internal/input/all_history.go +++ b/internal/input/all_history.go @@ -20,11 +20,10 @@ type AllHistoryTransformInput struct { // GetAllHistory returns a slice of operations, trades, effects, transactions, diagnostic events // for the ledgers in the provided range (inclusive on both ends) -func GetAllHistory(start, end uint32, limit int64, env utils.EnvironmentDetails) (AllHistoryTransformInput, error) { +func GetAllHistory(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) (AllHistoryTransformInput, error) { ctx := context.Background() - backend, err := env.CreateCaptiveCoreBackend() - + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { return AllHistoryTransformInput{}, err } diff --git a/internal/input/assets.go b/internal/input/assets.go index 00e83984..0f00708f 100644 --- a/internal/input/assets.go +++ b/internal/input/assets.go @@ -3,9 +3,9 @@ package input import ( "context" - "github.com/stellar/stellar-etl/internal/transform" "github.com/stellar/stellar-etl/internal/utils" + "github.com/stellar/go/ingest/ledgerbackend" "github.com/stellar/go/xdr" ) @@ -17,23 +17,24 @@ type AssetTransformInput struct { } // GetPaymentOperations returns a slice of payment operations that can include new assets from the ledgers in the provided range (inclusive on both ends) -func GetPaymentOperations(start, end uint32, limit int64, isTest bool, isFuture bool) ([]AssetTransformInput, error) { - env := utils.GetEnvironmentDetails(isTest, isFuture) - backend, err := 
utils.CreateBackend(start, end, env.ArchiveURLs) +func GetPaymentOperations(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]AssetTransformInput, error) { + ctx := context.Background() + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { return []AssetTransformInput{}, err } assetSlice := []AssetTransformInput{} - ctx := context.Background() + err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) + panicIf(err) for seq := start; seq <= end; seq++ { // Get ledger from sequence number - ledger, err := backend.GetLedgerArchive(ctx, seq) + ledger, err := backend.GetLedger(ctx, seq) if err != nil { return []AssetTransformInput{}, err } - transactionSet := transform.GetTransactionSet(ledger) + transactionSet := ledger.TransactionEnvelopes() for txIndex, transaction := range transactionSet { for opIndex, op := range transaction.Operations() { diff --git a/internal/input/assets_history_archive.go b/internal/input/assets_history_archive.go new file mode 100644 index 00000000..539f3e89 --- /dev/null +++ b/internal/input/assets_history_archive.go @@ -0,0 +1,51 @@ +package input + +import ( + "context" + + "github.com/stellar/stellar-etl/internal/transform" + "github.com/stellar/stellar-etl/internal/utils" + + "github.com/stellar/go/xdr" +) + +// GetPaymentOperations returns a slice of payment operations that can include new assets from the ledgers in the provided range (inclusive on both ends) +func GetPaymentOperationsHistoryArchive(start, end uint32, limit int64, isTest bool, isFuture bool) ([]AssetTransformInput, error) { + env := utils.GetEnvironmentDetails(isTest, isFuture, "") + backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) + if err != nil { + return []AssetTransformInput{}, err + } + + assetSlice := []AssetTransformInput{} + ctx := context.Background() + for seq := start; seq <= end; seq++ { + // Get ledger from sequence number + ledger, err := backend.GetLedgerArchive(ctx, seq) + if err != nil { + return []AssetTransformInput{}, err + } + + transactionSet := transform.GetTransactionSet(ledger) + + for txIndex, transaction := range transactionSet { + for opIndex, op := range transaction.Operations() { + if op.Body.Type == xdr.OperationTypePayment || op.Body.Type == xdr.OperationTypeManageSellOffer { + assetSlice = append(assetSlice, AssetTransformInput{ + Operation: op, + OperationIndex: int32(opIndex), + TransactionIndex: int32(txIndex), + LedgerSeqNum: int32(seq), + }) + } + + } + + } + if int64(len(assetSlice)) >= limit && limit >= 0 { + break + } + } + + return assetSlice, nil +} diff --git a/internal/input/changes.go b/internal/input/changes.go index 7dc7dfaa..d3011e29 100644 --- a/internal/input/changes.go +++ b/internal/input/changes.go @@ -84,7 +84,7 @@ func PrepareCaptiveCore(execPath string, tomlPath string, start, end uint32, env // extractBatch gets the changes from the ledgers in the range [batchStart, batchEnd] and compacts them func extractBatch( batchStart, batchEnd uint32, - core *ledgerbackend.CaptiveStellarCore, + backend *ledgerbackend.LedgerBackend, env utils.EnvironmentDetails, logger *utils.EtlLogger) ChangeBatch { dataTypes := []xdr.LedgerEntryType{ @@ -106,16 +106,11 @@ func extractBatch( changeCompactors[dt] = ingest.NewChangeCompactor() } - latestLedger, err := core.GetLatestLedgerSequence(ctx) - if err != nil { - logger.Fatal("unable to get the latest ledger sequence: ", err) - } - // if this ledger is available, we process its changes and move on to the next 
ledger by incrementing seq. // Otherwise, nothing is incremented, and we try again on the next iteration of the loop var header xdr.LedgerHeaderHistoryEntry - if seq <= latestLedger { - changeReader, err := ingest.NewLedgerChangeReader(ctx, core, env.NetworkPassphrase, seq) + if seq <= batchEnd { + changeReader, err := ingest.NewLedgerChangeReader(ctx, *backend, env.NetworkPassphrase, seq) if err != nil { logger.Fatal(fmt.Sprintf("unable to create change reader for ledger %d: ", seq), err) } @@ -163,14 +158,14 @@ func extractBatch( // StreamChanges reads in ledgers, processes the changes, and send the changes to the channel matching their type // Ledgers are processed in batches of size . -func StreamChanges(core *ledgerbackend.CaptiveStellarCore, start, end, batchSize uint32, changeChannel chan ChangeBatch, closeChan chan int, env utils.EnvironmentDetails, logger *utils.EtlLogger) { +func StreamChanges(backend *ledgerbackend.LedgerBackend, start, end, batchSize uint32, changeChannel chan ChangeBatch, closeChan chan int, env utils.EnvironmentDetails, logger *utils.EtlLogger) { batchStart := start batchEnd := uint32(math.Min(float64(batchStart+batchSize), float64(end))) for batchStart < batchEnd { if batchEnd < end { batchEnd = uint32(batchEnd - 1) } - batch := ExtractBatch(batchStart, batchEnd, core, env, logger) + batch := ExtractBatch(batchStart, batchEnd, backend, env, logger) changeChannel <- batch // batchStart and batchEnd should not overlap // overlapping batches causes duplicate record loads diff --git a/internal/input/changes_test.go b/internal/input/changes_test.go index cabed9ca..f5bce40d 100644 --- a/internal/input/changes_test.go +++ b/internal/input/changes_test.go @@ -129,7 +129,7 @@ func wrapLedgerEntry(entryType xdr.LedgerEntryType, entry xdr.LedgerEntry) Chang func mockExtractBatch( batchStart, batchEnd uint32, - core *ledgerbackend.CaptiveStellarCore, + backend *ledgerbackend.LedgerBackend, env utils.EnvironmentDetails, logger *utils.EtlLogger) ChangeBatch { log.Errorf("mock called") return ChangeBatch{ diff --git a/internal/input/ledger_range.go b/internal/input/ledger_range.go index 81eb9630..f4ec07ee 100644 --- a/internal/input/ledger_range.go +++ b/internal/input/ledger_range.go @@ -32,7 +32,7 @@ const avgCloseTime = time.Second * 5 // average time to close a stellar ledger func GetLedgerRange(startTime, endTime time.Time, isTest bool, isFuture bool) (int64, int64, error) { startTime = startTime.UTC() endTime = endTime.UTC() - env := utils.GetEnvironmentDetails(isTest, isFuture) + env := utils.GetEnvironmentDetails(isTest, isFuture, "") if startTime.After(endTime) { return 0, 0, fmt.Errorf("start time must be less than or equal to the end time") diff --git a/internal/input/ledgers.go b/internal/input/ledgers.go index 7c3865b9..c83b3b55 100644 --- a/internal/input/ledgers.go +++ b/internal/input/ledgers.go @@ -6,24 +6,68 @@ import ( "github.com/stellar/stellar-etl/internal/utils" "github.com/stellar/go/historyarchive" + "github.com/stellar/go/ingest/ledgerbackend" + "github.com/stellar/go/xdr" ) // GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends) -func GetLedgers(start, end uint32, limit int64, isTest bool, isFuturenet bool) ([]historyarchive.Ledger, error) { - env := utils.GetEnvironmentDetails(isTest, isFuturenet) - backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) +func GetLedgers(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]historyarchive.Ledger, 
error) { + ctx := context.Background() + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { return []historyarchive.Ledger{}, err } ledgerSlice := []historyarchive.Ledger{} - ctx := context.Background() + err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) + panicIf(err) for seq := start; seq <= end; seq++ { - ledger, err := backend.GetLedgerArchive(ctx, seq) + lcm, err := backend.GetLedger(ctx, seq) if err != nil { return []historyarchive.Ledger{}, err } + var ext xdr.TransactionHistoryEntryExt + var transactionResultPair []xdr.TransactionResultPair + + switch lcm.V { + case 0: + ext = xdr.TransactionHistoryEntryExt{ + V: 0, + GeneralizedTxSet: nil, + } + for _, transactionResultMeta := range lcm.V0.TxProcessing { + transactionResultPair = append(transactionResultPair, transactionResultMeta.Result) + } + case 1: + ext = xdr.TransactionHistoryEntryExt{ + V: 1, + GeneralizedTxSet: &lcm.V1.TxSet, + } + for _, transactionResultMeta := range lcm.V1.TxProcessing { + transactionResultPair = append(transactionResultPair, transactionResultMeta.Result) + } + } + + ledger := historyarchive.Ledger{ + Header: lcm.LedgerHeaderHistoryEntry(), + Transaction: xdr.TransactionHistoryEntry{ + LedgerSeq: lcm.LedgerHeaderHistoryEntry().Header.LedgerSeq, + TxSet: xdr.TransactionSet{ + PreviousLedgerHash: lcm.LedgerHeaderHistoryEntry().Header.PreviousLedgerHash, + Txs: lcm.TransactionEnvelopes(), + }, + Ext: ext, + }, + TransactionResult: xdr.TransactionHistoryResultEntry{ + LedgerSeq: lcm.LedgerHeaderHistoryEntry().Header.LedgerSeq, + TxResultSet: xdr.TransactionResultSet{ + Results: transactionResultPair, + }, + Ext: xdr.TransactionHistoryResultEntryExt{}, + }, + } + ledgerSlice = append(ledgerSlice, ledger) if int64(len(ledgerSlice)) >= limit && limit >= 0 { break diff --git a/internal/input/ledgers_history_archive.go b/internal/input/ledgers_history_archive.go new file mode 100644 index 00000000..9c91f40c --- /dev/null +++ b/internal/input/ledgers_history_archive.go @@ -0,0 +1,34 @@ +package input + +import ( + "context" + + "github.com/stellar/stellar-etl/internal/utils" + + "github.com/stellar/go/historyarchive" +) + +// GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends) +func GetLedgersHistoryArchive(start, end uint32, limit int64, isTest bool, isFuturenet bool) ([]historyarchive.Ledger, error) { + env := utils.GetEnvironmentDetails(isTest, isFuturenet, "") + backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) + if err != nil { + return []historyarchive.Ledger{}, err + } + + ledgerSlice := []historyarchive.Ledger{} + ctx := context.Background() + for seq := start; seq <= end; seq++ { + ledger, err := backend.GetLedgerArchive(ctx, seq) + if err != nil { + return []historyarchive.Ledger{}, err + } + + ledgerSlice = append(ledgerSlice, ledger) + if int64(len(ledgerSlice)) >= limit && limit >= 0 { + break + } + } + + return ledgerSlice, nil +} diff --git a/internal/input/operations.go b/internal/input/operations.go index e53437ae..5290691c 100644 --- a/internal/input/operations.go +++ b/internal/input/operations.go @@ -27,11 +27,10 @@ func panicIf(err error) { } // GetOperations returns a slice of operations for the ledgers in the provided range (inclusive on both ends) -func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails) ([]OperationTransformInput, error) { +func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore 
bool) ([]OperationTransformInput, error) { ctx := context.Background() - backend, err := env.CreateCaptiveCoreBackend() - + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { return []OperationTransformInput{}, err } @@ -40,15 +39,14 @@ func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails) err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) panicIf(err) for seq := start; seq <= end; seq++ { - changeReader, err := ingest.NewLedgerChangeReader(ctx, backend, env.NetworkPassphrase, seq) + ledgerCloseMeta, err := backend.GetLedger(ctx, seq) if err != nil { - return []OperationTransformInput{}, err + return []OperationTransformInput{}, fmt.Errorf("error getting ledger seq %d from the backend: %v", seq, err) } - txReader := changeReader.LedgerTransactionReader - ledgerCloseMeta, err := backend.GetLedger(ctx, seq) + txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta) if err != nil { - return nil, fmt.Errorf("error getting ledger seq %d from the backend: %v", seq, err) + return []OperationTransformInput{}, err } for int64(len(opSlice)) < limit || limit < 0 { diff --git a/internal/input/trades.go b/internal/input/trades.go index 3d69a02b..9c26cdf4 100644 --- a/internal/input/trades.go +++ b/internal/input/trades.go @@ -10,6 +10,7 @@ import ( "github.com/stellar/go/ingest" "github.com/stellar/go/ingest/ledgerbackend" + "github.com/stellar/go/support/errors" "github.com/stellar/go/xdr" ) @@ -22,26 +23,30 @@ type TradeTransformInput struct { } // GetTrades returns a slice of trades for the ledgers in the provided range (inclusive on both ends) -func GetTrades(start, end uint32, limit int64, env utils.EnvironmentDetails) ([]TradeTransformInput, error) { +func GetTrades(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]TradeTransformInput, error) { ctx := context.Background() - backend, err := env.CreateCaptiveCoreBackend() + backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) + if err != nil { + return []TradeTransformInput{}, err + } tradeSlice := []TradeTransformInput{} err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) panicIf(err) for seq := start; seq <= end; seq++ { - changeReader, err := ingest.NewLedgerChangeReader(ctx, backend, env.NetworkPassphrase, seq) + ledgerCloseMeta, err := backend.GetLedger(ctx, seq) if err != nil { - return []TradeTransformInput{}, err + return []TradeTransformInput{}, errors.Wrap(err, "error getting ledger from the backend") } - txReader := changeReader.LedgerTransactionReader - closeTime, err := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime) + txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta) if err != nil { return []TradeTransformInput{}, err } + closeTime, err := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime) + for int64(len(tradeSlice)) < limit || limit < 0 { tx, err := txReader.Read() if err == io.EOF { diff --git a/internal/input/transactions.go b/internal/input/transactions.go index 8bc79fcd..b82e1829 100644 --- a/internal/input/transactions.go +++ b/internal/input/transactions.go @@ -20,11 +20,10 @@ type LedgerTransformInput struct { } // GetTransactions returns a slice of transactions for the ledgers in the provided range (inclusive on both ends) -func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetails) 
([]LedgerTransformInput, error) {
+func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]LedgerTransformInput, error) {
 	ctx := context.Background()
-	backend, err := env.CreateCaptiveCoreBackend()
-
+	backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
 	if err != nil {
 		return []LedgerTransformInput{}, err
 	}
@@ -35,7 +34,7 @@ func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetail
 	for seq := start; seq <= end; seq++ {
 		ledgerCloseMeta, err := backend.GetLedger(ctx, seq)
 		if err != nil {
-			return nil, errors.Wrap(err, "error getting ledger from the backend")
+			return []LedgerTransformInput{}, errors.Wrap(err, "error getting ledger from the backend")
 		}
 
 		txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta)
diff --git a/internal/utils/main.go b/internal/utils/main.go
index 1a36eabb..eaf7c611 100644
--- a/internal/utils/main.go
+++ b/internal/utils/main.go
@@ -233,6 +233,8 @@ func AddCommonFlags(flags *pflag.FlagSet) {
 	flags.Bool("testnet", false, "If set, will connect to Testnet instead of Mainnet.")
 	flags.Bool("futurenet", false, "If set, will connect to Futurenet instead of Mainnet.")
 	flags.StringToStringP("extra-fields", "u", map[string]string{}, "Additional fields to append to output jsons. Used for appending metadata")
+	flags.Bool("captive-core", false, "If set, run captive core to retrieve data. Otherwise use TxMeta file datastore.")
+	flags.String("datastore-url", "", "Datastore url to read txmeta files from.")
 }
 
 // AddArchiveFlags adds the history archive specific flags: start-ledger, output, and limit
@@ -279,8 +281,20 @@ func AddExportTypeFlags(flags *pflag.FlagSet) {
 	flags.BoolP("export-ttl", "", false, "set in order to export ttl changes")
 }
 
-// MustCommonFlags gets the values of the the flags common to all commands: end-ledger and strict-export. If any do not exist, it stops the program fatally using the logger
-func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) (endNum uint32, strictExport, isTest bool, isFuture bool, extra map[string]string) {
+// MustCommonFlags gets the values of the flags common to all commands: end-ledger and strict-export.
+// If any do not exist, it stops the program fatally using the logger +func MustCommonFlags( + flags *pflag.FlagSet, + logger *EtlLogger, +) ( + endNum uint32, + strictExport, + isTest bool, + isFuture bool, + extra map[string]string, + useCaptiveCore bool, + datastoreUrl string, +) { endNum, err := flags.GetUint32("end-ledger") if err != nil { logger.Fatal("could not get end sequence number: ", err) @@ -305,6 +319,17 @@ func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) (endNum uint32, st if err != nil { logger.Fatal("could not get extra fields string: ", err) } + + useCaptiveCore, err = flags.GetBool("captive-core") + if err != nil { + logger.Fatal("could not get captive-core flag: ", err) + } + + datastoreUrl, err = flags.GetString("datastore-url") + if err != nil { + logger.Fatal("could not get datastore-url string: ", err) + } + return } @@ -622,16 +647,19 @@ type EnvironmentDetails struct { ArchiveURLs []string BinaryPath string CoreConfig string + StorageURL string } // GetPassphrase returns the correct Network Passphrase based on env preference -func GetEnvironmentDetails(isTest bool, isFuture bool) (details EnvironmentDetails) { +func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (details EnvironmentDetails) { if isTest { // testnet passphrase to be used for testing details.NetworkPassphrase = network.TestNetworkPassphrase details.ArchiveURLs = testArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "docker/stellar-core_testnet.cfg" + // TODO: change exporter-test to the real bucket whatever that is + details.StorageURL = datastoreUrl return details } else if isFuture { // details.NetworkPassphrase = network.FutureNetworkPassphrase @@ -639,6 +667,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool) (details EnvironmentDetai details.ArchiveURLs = futureArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "docker/stellar-core_futurenet.cfg" + details.StorageURL = datastoreUrl return details } else { // default: mainnet @@ -646,6 +675,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool) (details EnvironmentDetai details.ArchiveURLs = mainArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "docker/stellar-core.cfg" + details.StorageURL = datastoreUrl return details } } @@ -713,3 +743,21 @@ func LedgerEntryToLedgerKeyHash(ledgerEntry xdr.LedgerEntry) string { return ledgerKeyHash } + +// CreateLedgerBackend creates a ledger backend using captive core or datastore +// Defaults to using datastore +func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env EnvironmentDetails) (ledgerbackend.LedgerBackend, error) { + if useCaptiveCore { + backend, err := env.CreateCaptiveCoreBackend() + if err != nil { + return nil, err + } + return backend, nil + } + + backend, err := ledgerbackend.NewCloudStorageBackend(ctx, env.StorageURL) + if err != nil { + return nil, err + } + return backend, nil +} From 05b37a9a3bc2133d8944d6e70c0f35fa0e1c57d9 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 26 Mar 2024 11:21:58 -0400 Subject: [PATCH 16/49] Fix operation trace code bug --- internal/transform/operation.go | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index 5a6d1463..f42bbf06 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -66,18 +66,17 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, 
transacti return OperationOutput{}, err } - outputOperationResults, ok := transaction.Result.Result.OperationResults() - if !ok { - return OperationOutput{}, err - } - - outputOperationResultCode := outputOperationResults[operationIndex].Code.String() + var outputOperationResultCode string var outputOperationTraceCode string - operationResultTr, ok := outputOperationResults[operationIndex].GetTr() + outputOperationResults, ok := transaction.Result.Result.OperationResults() if ok { - outputOperationTraceCode, err = mapOperationTrace(operationResultTr) - if err != nil { - return OperationOutput{}, err + outputOperationResultCode = outputOperationResults[operationIndex].Code.String() + operationResultTr, ok := outputOperationResults[operationIndex].GetTr() + if ok { + outputOperationTraceCode, err = mapOperationTrace(operationResultTr) + if err != nil { + return OperationOutput{}, err + } } } From 5e8f662b0af05cbcfe1acb50cded13a8d7f005c7 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 26 Mar 2024 14:54:00 -0400 Subject: [PATCH 17/49] Fix operations trace code --- internal/transform/operation.go | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index 5a6d1463..d2f64689 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -48,7 +48,7 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti outputOperationType := int32(operation.Body.Type) if outputOperationType < 0 { - return OperationOutput{}, fmt.Errorf("The operation type (%d) is negative for operation %d (operation id=%d)", outputOperationType, operationIndex, outputOperationID) + return OperationOutput{}, fmt.Errorf("the operation type (%d) is negative for operation %d (operation id=%d)", outputOperationType, operationIndex, outputOperationID) } outputDetails, err := extractOperationDetails(operation, transaction, operationIndex, network) @@ -66,18 +66,17 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti return OperationOutput{}, err } - outputOperationResults, ok := transaction.Result.Result.OperationResults() - if !ok { - return OperationOutput{}, err - } - - outputOperationResultCode := outputOperationResults[operationIndex].Code.String() + var outputOperationResultCode string var outputOperationTraceCode string - operationResultTr, ok := outputOperationResults[operationIndex].GetTr() + outputOperationResults, ok := transaction.Result.Result.OperationResults() if ok { - outputOperationTraceCode, err = mapOperationTrace(operationResultTr) - if err != nil { - return OperationOutput{}, err + outputOperationResultCode = outputOperationResults[operationIndex].Code.String() + operationResultTr, ok := outputOperationResults[operationIndex].GetTr() + if ok { + outputOperationTraceCode, err = mapOperationTrace(operationResultTr) + if err != nil { + return OperationOutput{}, err + } } } From 395f1f8f11a21c197b0a34f47aff98256cdf5de9 Mon Sep 17 00:00:00 2001 From: lucas zanotelli Date: Mon, 15 Apr 2024 09:43:10 -0300 Subject: [PATCH 18/49] Add `ledger_key_hash` to `history_operations` (#237) * add `ledgerKeyHashFromTxEnvelope` function * add `LedgerKeyToLedgerKeyHash` function to `utils` package * add `ledger_key_hash` to `OperationDetails` * store `ledger_key_hash` in an array --- internal/transform/operation.go | 27 +++++++++++++++++++++++++++ internal/utils/main.go | 8 ++++++++ 2 files changed, 35 insertions(+) diff --git 
a/internal/transform/operation.go b/internal/transform/operation.go index f42bbf06..3dc7fec8 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -1031,6 +1031,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT details["type"] = "invoke_contract" transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) @@ -1068,6 +1069,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT details["type"] = "create_contract" transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) @@ -1089,6 +1091,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.HostFunctionTypeHostFunctionTypeUploadContractWasm: details["type"] = "upload_wasm" transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: panic(fmt.Errorf("unknown host function type: %s", op.HostFunction.Type)) @@ -1099,12 +1102,14 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT details["extend_to"] = op.ExtendTo transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) case xdr.OperationTypeRestoreFootprint: details["type"] = "restore_footprint" transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: @@ -1629,6 +1634,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, details["type"] = "invoke_contract" transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) @@ -1666,6 +1672,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, details["type"] = "create_contract" transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) @@ -1687,6 +1694,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, case xdr.HostFunctionTypeHostFunctionTypeUploadContractWasm: details["type"] = "upload_wasm" transactionEnvelope := 
getTransactionV1Envelope(operation.transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: panic(fmt.Errorf("unknown host function type: %s", op.HostFunction.Type)) @@ -1697,12 +1705,14 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, details["extend_to"] = op.ExtendTo transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) case xdr.OperationTypeRestoreFootprint: details["type"] = "restore_footprint" transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope) + details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: @@ -1782,6 +1792,23 @@ func contractCodeHashFromTxEnvelope(transactionEnvelope xdr.TransactionV1Envelop return "" } +func ledgerKeyHashFromTxEnvelope(transactionEnvelope xdr.TransactionV1Envelope) []string { + var ledgerKeyHash []string + for _, ledgerKey := range transactionEnvelope.Tx.Ext.SorobanData.Resources.Footprint.ReadOnly { + if utils.LedgerKeyToLedgerKeyHash(ledgerKey) != "" { + ledgerKeyHash = append(ledgerKeyHash, utils.LedgerKeyToLedgerKeyHash(ledgerKey)) + } + } + + for _, ledgerKey := range transactionEnvelope.Tx.Ext.SorobanData.Resources.Footprint.ReadWrite { + if utils.LedgerKeyToLedgerKeyHash(ledgerKey) != "" { + ledgerKeyHash = append(ledgerKeyHash, utils.LedgerKeyToLedgerKeyHash(ledgerKey)) + } + } + + return ledgerKeyHash +} + func contractCodeFromContractData(ledgerKey xdr.LedgerKey) string { contractCode, ok := ledgerKey.GetContractCode() if !ok { diff --git a/internal/utils/main.go b/internal/utils/main.go index 1a36eabb..bd2be6ce 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -713,3 +713,11 @@ func LedgerEntryToLedgerKeyHash(ledgerEntry xdr.LedgerEntry) string { return ledgerKeyHash } + +func LedgerKeyToLedgerKeyHash(ledgerKey xdr.LedgerKey) string { + ledgerKeyByte, _ := ledgerKey.MarshalBinary() + hashedLedgerKeyByte := hash.Hash(ledgerKeyByte) + ledgerKeyHash := hex.EncodeToString(hashedLedgerKeyByte[:]) + + return ledgerKeyHash +} From e50d7f0dc06476e2010b05b52fbc37c4ad7a316c Mon Sep 17 00:00:00 2001 From: Eduardo Alves Date: Tue, 16 Apr 2024 19:51:44 -0300 Subject: [PATCH 19/49] Changed image workdir --- docker/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/Dockerfile b/docker/Dockerfile index 2fc7d358..b4d94368 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -20,6 +20,9 @@ WORKDIR /etl COPY --from=build /usr/local/bin/stellar-etl /usr/local/bin/stellar-etl COPY --from=build /usr/src/etl/docker docker +# changing workdir to a new path in order to use mounted empty ephemeral volumes as storage +WORKDIR /etl/data + # clear entrypoint from stellar-core image ENTRYPOINT [] From b30535ebc8816c8032559065f8613afe84b390e4 Mon Sep 17 00:00:00 2001 From: Eduardo Alves Date: Tue, 16 Apr 2024 21:23:58 -0300 Subject: [PATCH 20/49] Set CoreConfig environment detail value to a fixed reference as defined on the stellar-etl image --- internal/utils/main.go | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/internal/utils/main.go b/internal/utils/main.go index bd2be6ce..129aeb92 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -631,21 +631,21 @@ func GetEnvironmentDetails(isTest bool, isFuture bool) (details EnvironmentDetai details.NetworkPassphrase = network.TestNetworkPassphrase details.ArchiveURLs = testArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" - details.CoreConfig = "docker/stellar-core_testnet.cfg" + details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" return details } else if isFuture { // details.NetworkPassphrase = network.FutureNetworkPassphrase details.NetworkPassphrase = "Test SDF Future Network ; October 2022" details.ArchiveURLs = futureArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" - details.CoreConfig = "docker/stellar-core_futurenet.cfg" + details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" return details } else { // default: mainnet details.NetworkPassphrase = network.PublicNetworkPassphrase details.ArchiveURLs = mainArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" - details.CoreConfig = "docker/stellar-core.cfg" + details.CoreConfig = "/etl/docker/stellar-core.cfg" return details } } From 43cdcf2f1cc199ca0338de5f2e57a076106b73d7 Mon Sep 17 00:00:00 2001 From: Eduardo Alves Date: Thu, 18 Apr 2024 16:39:10 -0300 Subject: [PATCH 21/49] Set absolute reference for stellar-core config path --- cmd/export_ledger_entry_changes_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/export_ledger_entry_changes_test.go b/cmd/export_ledger_entry_changes_test.go index 8a518e1d..364c3fde 100644 --- a/cmd/export_ledger_entry_changes_test.go +++ b/cmd/export_ledger_entry_changes_test.go @@ -6,7 +6,7 @@ import ( ) const coreExecutablePath = "../stellar-core/src/stellar-core" -const coreConfigPath = "./docker/stellar-core.cfg" +const coreConfigPath = "/etl/docker/stellar-core.cfg" func TestExportChanges(t *testing.T) { From 0e3bf80d8bd17afc84a95427350b5bf0f0871a31 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Fri, 19 Apr 2024 00:53:50 -0400 Subject: [PATCH 22/49] Address comments and updates --- cmd/export_account_signers.go | 2 +- cmd/export_assets.go | 2 +- cmd/export_ledger_entry_changes.go | 2 +- go.mod | 79 +++++----- go.sum | 181 ++++++++++------------ internal/input/assets_history_archive.go | 3 +- internal/input/ledgers_history_archive.go | 3 +- internal/input/transactions.go | 3 + internal/transform/contract_code_test.go | 2 +- internal/utils/main.go | 71 +++++++++ 10 files changed, 197 insertions(+), 151 deletions(-) diff --git a/cmd/export_account_signers.go b/cmd/export_account_signers.go index 3c32cbc4..9e4b9876 100644 --- a/cmd/export_account_signers.go +++ b/cmd/export_account_signers.go @@ -39,7 +39,7 @@ the export_ledger_entry_changes command.`, numSigners := 0 var header xdr.LedgerHeaderHistoryEntry for _, acc := range accounts { - if acc.AccountSignersChanged() { + if utils.AccountSignersChanged(acc) { transformed, err := transform.TransformSigners(acc, header) if err != nil { cmdLogger.LogError(fmt.Errorf("could not json transform account signer: %v", err)) diff --git a/cmd/export_assets.go b/cmd/export_assets.go index 3c1b3773..5de3aa4d 100644 --- a/cmd/export_assets.go +++ b/cmd/export_assets.go @@ -28,7 +28,7 @@ var assetsCmd = &cobra.Command{ var err error if useCaptiveCore { - paymentOps, err = input.GetPaymentOperationsHistoryArchive(startNum, endNum, limit, isTest, isFuture) + paymentOps, err = 
input.GetPaymentOperationsHistoryArchive(startNum, endNum, limit, env, useCaptiveCore) } else { paymentOps, err = input.GetPaymentOperations(startNum, endNum, limit, env, useCaptiveCore) } diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index bd37e29a..726f5cee 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -126,7 +126,7 @@ be exported.`, } transformedOutputs["accounts"] = append(transformedOutputs["accounts"], acc) } - if change.AccountSignersChanged() { + if utils.AccountSignersChanged(change) { signers, err := transform.TransformSigners(change, changes.LedgerHeaders[i]) if err != nil { entry, _, _, _ := utils.ExtractEntryFromChange(change) diff --git a/go.mod b/go.mod index 12727594..c5c9c51e 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.22 toolchain go1.22.1 require ( - cloud.google.com/go/storage v1.37.0 + cloud.google.com/go/storage v1.40.0 github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 github.com/guregu/null v4.0.0+incompatible github.com/lib/pq v1.10.9 @@ -15,34 +15,35 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240317052942-f34a84277137 - github.com/stretchr/testify v1.8.4 + github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07 + github.com/stretchr/testify v1.9.0 ) require ( - cloud.google.com/go v0.112.0 // indirect - cloud.google.com/go/compute v1.23.3 // indirect - cloud.google.com/go/compute/metadata v0.2.3 // indirect - cloud.google.com/go/iam v1.1.5 // indirect + cloud.google.com/go v0.112.2 // indirect + cloud.google.com/go/auth v0.2.1 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.1 // indirect + cloud.google.com/go/compute/metadata v0.3.0 // indirect + cloud.google.com/go/iam v1.1.7 // indirect github.com/Masterminds/squirrel v1.5.4 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/aws/aws-sdk-go v1.45.26 // indirect + github.com/aws/aws-sdk-go v1.51.24 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/cenkalti/backoff/v4 v4.2.1 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/djherbis/fscache v0.10.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/go-errors/errors v1.5.1 // indirect - github.com/go-logr/logr v1.3.0 // indirect + github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/protobuf v1.5.4 // indirect github.com/google/s2a-go v0.1.7 // indirect - github.com/google/uuid v1.5.0 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/googleapis/gax-go/v2 v2.12.3 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/holiman/uint256 v1.2.3 // indirect @@ -52,15 +53,14 @@ require ( github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/magiconair/properties v1.8.7 // indirect - 
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/prometheus/client_golang v1.17.0 // indirect - github.com/prometheus/client_model v0.5.0 // indirect - github.com/prometheus/common v0.44.0 // indirect - github.com/prometheus/procfs v0.12.0 // indirect + github.com/prometheus/client_golang v1.19.0 // indirect + github.com/prometheus/client_model v0.6.1 // indirect + github.com/prometheus/common v0.53.0 // indirect + github.com/prometheus/procfs v0.13.0 // indirect github.com/sagikazarmark/locafero v0.3.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 // indirect @@ -68,34 +68,33 @@ require ( github.com/spf13/afero v1.10.0 // indirect github.com/spf13/cast v1.5.1 // indirect github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 // indirect - github.com/stretchr/objx v0.5.1 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect - go.opentelemetry.io/otel v1.21.0 // indirect - go.opentelemetry.io/otel/metric v1.21.0 // indirect - go.opentelemetry.io/otel/trace v1.21.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0 // indirect + go.opentelemetry.io/otel v1.25.0 // indirect + go.opentelemetry.io/otel/metric v1.25.0 // indirect + go.opentelemetry.io/otel/trace v1.25.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.18.0 // indirect - golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect - golang.org/x/mod v0.13.0 // indirect - golang.org/x/net v0.20.0 // indirect - golang.org/x/oauth2 v0.16.0 // indirect - golang.org/x/sync v0.6.0 // indirect - golang.org/x/sys v0.16.0 // indirect + golang.org/x/crypto v0.22.0 // indirect + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect + golang.org/x/mod v0.17.0 // indirect + golang.org/x/net v0.24.0 // indirect + golang.org/x/oauth2 v0.19.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/sys v0.19.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.14.0 // indirect - google.golang.org/api v0.157.0 // indirect - google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac // indirect - google.golang.org/grpc v1.60.1 // indirect + golang.org/x/tools v0.20.0 // indirect + google.golang.org/api v0.174.0 // indirect + google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect + google.golang.org/grpc v1.63.2 // indirect + google.golang.org/protobuf v1.33.0 // 
indirect gopkg.in/djherbis/atime.v1 v1.0.0 // indirect gopkg.in/djherbis/stream.v1 v1.3.1 // indirect - google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index e64687f6..b247522d 100644 --- a/go.sum +++ b/go.sum @@ -17,22 +17,24 @@ cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHOb cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= -cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= +cloud.google.com/go v0.112.2 h1:ZaGT6LiG7dBzi6zNOvVZwacaXlmf3lRqnC4DQzqyRQw= +cloud.google.com/go v0.112.2/go.mod h1:iEqjp//KquGIJV/m+Pk3xecgKNhV+ry+vVTsy4TbDms= +cloud.google.com/go/auth v0.2.1 h1:RMl6PI2MH1Qc3CM7XNJJHGwbC4WHQppSAjL0Cvu/M/g= +cloud.google.com/go/auth v0.2.1/go.mod h1:khQRBNrvNoHiHhV1iu2x8fSnlNbCaVHilznW5MAI5GY= +cloud.google.com/go/auth/oauth2adapt v0.2.1 h1:VSPmMmUlT8CkIZ2PzD9AlLN+R3+D1clXMWHHa6vG/Ag= +cloud.google.com/go/auth/oauth2adapt v0.2.1/go.mod h1:tOdK/k+D2e4GEwfBRA48dKNQiDsqIXxLh7VU319eV0g= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= -cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= -cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= -cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc= +cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= -cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= +cloud.google.com/go/iam v1.1.7 h1:z4VHOhwKLF/+UYXAJDFwGtNF0b6gjsW1Pk9Ml0U/IoM= +cloud.google.com/go/iam v1.1.7/go.mod h1:J4PMPg8TtyurAUvSmPj8FF3EDgY1SPRZxcUGrn7WXGA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -43,8 +45,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage 
v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.37.0 h1:WI8CsaFO8Q9KjPVtsZ5Cmi0dXV25zMoX0FklT7c3Jm4= -cloud.google.com/go/storage v1.37.0/go.mod h1:i34TiT2IhiNDmcj65PqwCjcoUX7Z5pLzS8DEmoiFq1k= +cloud.google.com/go/storage v1.40.0 h1:VEpDQV5CJxFmJ6ueWNsKxcr1QAYOXEgxDa+sBbJahPw= +cloud.google.com/go/storage v1.40.0/go.mod h1:Rrj7/hKlG87BLqDJYtwR0fbPld8uJPbQ2ucUMY7Ir0g= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -56,15 +58,15 @@ github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f h1:zvClvFQwU++UpIUBGC8YmD github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/aws/aws-sdk-go v1.45.26 h1:PJ2NJNY5N/yeobLYe1Y+xLdavBi67ZI8gvph6ftwVCg= -github.com/aws/aws-sdk-go v1.45.26/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.51.24 h1:nwL5MaommPkwb7Ixk24eWkdx5HY4of1gD10kFFVAl6A= +github.com/aws/aws-sdk-go v1.51.24/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= -github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -72,8 +74,6 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= 
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -89,8 +89,6 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= -github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= github.com/fatih/structs v1.0.0 h1:BrX964Rv5uQ3wwS+KRUAJCBBw5PQmgJfJ6v4yly5QwU= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= @@ -109,8 +107,8 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= -github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= @@ -142,10 +140,8 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -158,7 +154,6 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= 
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5 h1:oERTZ1buOUYlpmKaqlO5fYmz8cZ1rYu5DieJzF4ZVmU= @@ -183,14 +178,14 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4 github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= -github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= -github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/gax-go/v2 v2.12.3 h1:5/zPPDvw8Q1SuXjrqrZslrqT7dL/uJT2CQii/cLCKqA= +github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= @@ -244,8 +239,6 @@ github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739 h1:ykXz+pRRTibcSj github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739/go.mod h1:zUx1mhth20V3VKgL5jbd1BSQcW4Fy6Qs4PZvQwRFwzM= github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= -github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= @@ -268,15 +261,15 @@ github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
-github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= -github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= +github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= -github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= -github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= -github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= -github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= -github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.53.0 h1:U2pL9w9nmJwJDa4qqLQ3ZaePJ6ZTwt7cMD3AG3+aLCE= +github.com/prometheus/common v0.53.0/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U= +github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o= +github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= @@ -303,15 +296,15 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= -github.com/stellar/go v0.0.0-20240317052942-f34a84277137 h1:s/RK1BOa+KRzgl32f86F7iO3jk+XQu59vo1bli6vRHo= -github.com/stellar/go v0.0.0-20240317052942-f34a84277137/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= +github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07 h1:AgxlbRmsFAc9VaOLj29K9RpsSmlmtJ0KCVnGxV7bUwo= +github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0= -github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= @@ -319,9 +312,9 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= @@ -346,7 +339,6 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -355,18 +347,18 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 h1:SpGay3w+nEwMpfVnbqOLH5gY52/foP8RE8UzTZ1pdSE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1/go.mod h1:4UoMYEZOC0yN/sPGH76KPkkU7zgiEWYWL9vwmbnTJPE= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 h1:aFJWCqJMNjENlcleuuOkGAPH82y0yULBScfXcIEdS24= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1/go.mod h1:sEGXWArGqc3tVa+ekntsN65DmVbVeW+7lTKTjZF3/Fo= -go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= -go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo= -go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= -go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= -go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= -go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E= -go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc= -go.opentelemetry.io/otel/trace v1.21.0/go.mod 
h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0 h1:zvpPXY7RfYAGSdYQLjp6zxdJNSYD/+FFoCTQN9IPxBs= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0/go.mod h1:BMn8NB1vsxTljvuorms2hyOs8IBuuBEq0pl7ltOfy30= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0 h1:cEPbyTSEHlQR89XVlyo78gqluF8Y3oMeBkXGWzQsfXY= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0/go.mod h1:DKdbWcT4GH1D0Y3Sqt/PFXt2naRKDWtU+eE6oLdFNA8= +go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k= +go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg= +go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA= +go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s= +go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw= +go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc= +go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM= +go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -375,10 +367,9 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -389,8 +380,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f 
h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -414,9 +405,8 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= -golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -450,10 +440,8 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= -golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= +golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -463,8 +451,8 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= -golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= +golang.org/x/oauth2 v0.19.0 h1:9+E/EZBCbTLNrbN35fHv/a/d/mOBatymz1zbtQrXpIg= +golang.org/x/oauth2 v0.19.0/go.mod 
h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -475,9 +463,8 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= -golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -513,16 +500,11 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -531,8 +513,6 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -587,9 +567,8 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= -golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= +golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY= +golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -615,8 +594,8 @@ google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz513 google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.157.0 h1:ORAeqmbrrozeyw5NjnMxh7peHO0UzV4wWYSwZeCUb20= -google.golang.org/api v0.157.0/go.mod h1:+z4v4ufbZ1WEpld6yMGHyggs+PmAHiaLNj5ytP3N01g= +google.golang.org/api v0.174.0 h1:zB1BWl7ocxfTea2aQ9mgdzXjnfPySllpPOskdnO+q34= +google.golang.org/api v0.174.0/go.mod h1:aC7tB6j0HR1Nl0ni5ghpx6iLasmAX78Zkh/wgxAAjLg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -624,8 +603,6 @@ google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= -google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto 
v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -662,12 +639,12 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac h1:ZL/Teoy/ZGnzyrqK/Optxxp2pmVh+fmJ97slxSRyzUg= -google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:+Rvu7ElI+aLzyDQhpHMFMMltsD6m7nqpuWDd2CwJw3k= -google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457 h1:KHBtwE+eQc3+NxpjmRFlQ3pJQ2FNnhhgB9xOV8kyBuU= -google.golang.org/genproto/googleapis/api v0.0.0-20240122161410-6c6643bf1457/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac h1:nUQEQmH/csSvFECKYRv6HWEyypysidKl2I6Qpsglq/0= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA= +google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be h1:g4aX8SUFA8V5F4LrSY5EclyGYw1OZN4HS1jTyjB9ZDc= +google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be/go.mod h1:FeSdT5fk+lkxatqJP38MsUicGqHax5cLtmy/6TAuxO4= +google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be h1:Zz7rLWqp0ApfsR/l7+zSHhY3PMiH2xqgxlfYfAfNpoU= +google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be/go.mod h1:dvdCTIoAGbkWbcIKBniID56/7XHTt6WfxXNMxuziJ+w= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -684,8 +661,8 @@ google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU= -google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= +google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= +google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -696,8 +673,6 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internal/input/assets_history_archive.go b/internal/input/assets_history_archive.go index 539f3e89..da7d5f83 100644 --- a/internal/input/assets_history_archive.go +++ b/internal/input/assets_history_archive.go @@ -10,8 +10,7 @@ import ( ) // GetPaymentOperations returns a slice of payment operations that can include new assets from the ledgers in the provided range (inclusive on both ends) -func GetPaymentOperationsHistoryArchive(start, end uint32, limit int64, isTest bool, isFuture bool) ([]AssetTransformInput, error) { - env := utils.GetEnvironmentDetails(isTest, isFuture, "") +func GetPaymentOperationsHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptivere bool) ([]AssetTransformInput, error) { backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) if err != nil { return []AssetTransformInput{}, err diff --git a/internal/input/ledgers_history_archive.go b/internal/input/ledgers_history_archive.go index 9c91f40c..613efd1e 100644 --- a/internal/input/ledgers_history_archive.go +++ b/internal/input/ledgers_history_archive.go @@ -9,8 +9,7 @@ import ( ) // GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends) -func GetLedgersHistoryArchive(start, end uint32, limit int64, isTest bool, isFuturenet bool) ([]historyarchive.Ledger, error) { - env := utils.GetEnvironmentDetails(isTest, isFuturenet, "") +func GetLedgersHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]historyarchive.Ledger, error) { backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) if err != nil { return []historyarchive.Ledger{}, err diff --git a/internal/input/transactions.go b/internal/input/transactions.go index b82e1829..b5e4019c 100644 --- a/internal/input/transactions.go +++ b/internal/input/transactions.go @@ -30,6 +30,9 @@ func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetail txSlice := []LedgerTransformInput{} err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) + if err != nil { + return []LedgerTransformInput{}, err + } panicIf(err) for seq := start; seq <= end; seq++ { ledgerCloseMeta, err := backend.GetLedger(ctx, seq) diff --git a/internal/transform/contract_code_test.go b/internal/transform/contract_code_test.go index 812bd446..e55e13ae 100644 --- a/internal/transform/contract_code_test.go +++ b/internal/transform/contract_code_test.go @@ -67,7 +67,7 @@ func makeContractCodeTestInput() []ingest.Change { Type: xdr.LedgerEntryTypeContractCode, ContractCode: &xdr.ContractCodeEntry{ Hash: hash, - Ext: xdr.ExtensionPoint{ + Ext: xdr.ContractCodeEntryExt{ V: 1, }, }, diff --git a/internal/utils/main.go b/internal/utils/main.go index 3d464146..977a6197 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go 
@@ -769,3 +769,74 @@ func LedgerKeyToLedgerKeyHash(ledgerKey xdr.LedgerKey) string { return ledgerKeyHash } + +// AccountSignersChanged returns true if account signers have changed. +// Notice: this will return true on master key changes too! +func AccountSignersChanged(c ingest.Change) bool { + if c.Type != xdr.LedgerEntryTypeAccount { + panic("This should not be called on changes other than Account changes") + } + + // New account so new master key (which is also a signer) + if c.Pre == nil { + return true + } + + // Account merged. The account being merged can still have signers. + // c.Pre != nil at this point. + if c.Post == nil { + return true + } + + // c.Pre != nil && c.Post != nil at this point. + preAccountEntry := c.Pre.Data.MustAccount() + postAccountEntry := c.Post.Data.MustAccount() + + preSigners := preAccountEntry.SignerSummary() + postSigners := postAccountEntry.SignerSummary() + + if len(preSigners) != len(postSigners) { + return true + } + + for postSigner, postWeight := range postSigners { + preWeight, exist := preSigners[postSigner] + if !exist { + return true + } + + if preWeight != postWeight { + return true + } + } + + preSignerSponsors := preAccountEntry.SignerSponsoringIDs() + postSignerSponsors := postAccountEntry.SignerSponsoringIDs() + + if len(preSignerSponsors) != len(postSignerSponsors) { + return true + } + + for i := 0; i < len(preSignerSponsors); i++ { + preSponsor := preSignerSponsors[i] + postSponsor := postSignerSponsors[i] + + if preSponsor == nil && postSponsor != nil { + return true + } else if preSponsor != nil && postSponsor == nil { + return true + } else if preSponsor != nil && postSponsor != nil { + preSponsorAccountID := xdr.AccountId(*preSponsor) + preSponsorAddress := preSponsorAccountID.Address() + + postSponsorAccountID := xdr.AccountId(*postSponsor) + postSponsorAddress := postSponsorAccountID.Address() + + if preSponsorAddress != postSponsorAddress { + return true + } + } + } + + return false +} From e8b009dce22118f5c842232a9b4adddf00b1efe3 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Fri, 19 Apr 2024 01:12:36 -0400 Subject: [PATCH 23/49] fix params --- cmd/export_ledgers.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go index 727d9ae5..de3baa13 100644 --- a/cmd/export_ledgers.go +++ b/cmd/export_ledgers.go @@ -27,7 +27,7 @@ var ledgersCmd = &cobra.Command{ var err error if useCaptiveCore { - ledgers, err = input.GetLedgersHistoryArchive(startNum, endNum, limit, isTest, isFuture) + ledgers, err = input.GetLedgersHistoryArchive(startNum, endNum, limit, env, useCaptiveCore) } else { ledgers, err = input.GetLedgers(startNum, endNum, limit, env, useCaptiveCore) } From 29f8147b27fb5a07a0f4d7b8309deb0bed1c4821 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Fri, 19 Apr 2024 11:07:52 -0400 Subject: [PATCH 24/49] Fix test --- internal/transform/operation_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/transform/operation_test.go b/internal/transform/operation_test.go index 86702448..de6ff439 100644 --- a/internal/transform/operation_test.go +++ b/internal/transform/operation_test.go @@ -45,7 +45,7 @@ func TestTransformOperation(t *testing.T) { { negativeOpTypeInput, OperationOutput{}, - fmt.Errorf("The operation type (-1) is negative for operation 1 (operation id=4098)"), + fmt.Errorf("the operation type (-1) is negative for operation 1 (operation id=4098)"), }, { unknownOpTypeInput, From dd99736f74ad03e988702ccb062751528fbd3537 Mon Sep 17 00:00:00
2001 From: Simon Chow Date: Mon, 22 Apr 2024 13:50:13 -0400 Subject: [PATCH 25/49] Use new ledgerbackend changes --- go.mod | 2 +- go.sum | 2 ++ internal/utils/main.go | 29 ++++++++++++++++++++++++----- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index c5c9c51e..e87fbc1a 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07 + github.com/stellar/go v0.0.0-20240419222646-3a79646669ab github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index b247522d..3d6f1fa8 100644 --- a/go.sum +++ b/go.sum @@ -298,6 +298,8 @@ github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07 h1:AgxlbRmsFAc9VaOLj29K9RpsSmlmtJ0KCVnGxV7bUwo= github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= +github.com/stellar/go v0.0.0-20240419222646-3a79646669ab h1:+uTCn/DrOc1cXugQ8PKZPAkZS3KWeHk5f2aKk9jdrDs= +github.com/stellar/go v0.0.0-20240419222646-3a79646669ab/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= diff --git a/internal/utils/main.go b/internal/utils/main.go index b6bb39c1..a91e6ef2 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "math/big" + "net/url" "time" "github.com/spf13/pflag" @@ -657,8 +658,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.NetworkPassphrase = network.TestNetworkPassphrase details.ArchiveURLs = testArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" - details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" - // TODO: change exporter-test to the real bucket whatever that is + details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" details.StorageURL = datastoreUrl return details } else if isFuture { @@ -666,7 +666,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.NetworkPassphrase = "Test SDF Future Network ; October 2022" details.ArchiveURLs = futureArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" - details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" + details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" details.StorageURL = datastoreUrl return details } else { @@ -747,6 +747,7 @@ func LedgerEntryToLedgerKeyHash(ledgerEntry xdr.LedgerEntry) string { // CreateLedgerBackend creates a ledger backend using captive core or datastore // Defaults to using datastore func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env EnvironmentDetails) (ledgerbackend.LedgerBackend, error) { + // Create ledger backend from captive core if useCaptiveCore { backend, err := env.CreateCaptiveCoreBackend() if err != nil { @@ -755,11 +756,29 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme return backend, nil } - backend, err := ledgerbackend.NewCloudStorageBackend(ctx, env.StorageURL) + // Create ledger backend from datastore + fileConfig := 
ledgerbackend.LCMFileConfig{ + StorageURL: env.StorageURL, + FileSuffix: ".xdr.gz", + LedgersPerFile: 1, + FilesPerPartition: 64000, + } + + parsed, err := url.Parse(env.StorageURL) if err != nil { return nil, err } - return backend, nil + + // Using the GCS datastore backend + if parsed.Scheme == "gcs" { + backend, err := ledgerbackend.NewGCSBackend(ctx, fileConfig) + if err != nil { + return nil, err + } + return backend, nil + } + + return nil, errors.New("no valid ledgerbackend selected") } func LedgerKeyToLedgerKeyHash(ledgerKey xdr.LedgerKey) string { From 3438a5bd435f4cb1d8a6ea339e9c977d00c9608c Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 14:24:06 -0400 Subject: [PATCH 26/49] Add p21 fee fields; formatting for warnings --- cmd/export_ledger_entry_changes.go | 2 +- cmd/export_ledgers.go | 7 +- cmd/export_ledgers_test.go | 20 +- go.mod | 2 +- go.sum | 6 +- internal/input/ledgers.go | 15 +- internal/input/ledgers_history_archive.go | 16 +- internal/transform/account.go | 10 +- internal/transform/asset_test.go | 8 +- internal/transform/claimable_balance.go | 2 +- internal/transform/config_setting.go | 8 +- internal/transform/config_setting_test.go | 6 +- internal/transform/contract_code.go | 2 +- internal/transform/contract_data.go | 36 +-- internal/transform/diagnostic_events_test.go | 8 +- internal/transform/effects.go | 8 +- internal/transform/effects_test.go | 81 +++---- internal/transform/ledger.go | 13 +- internal/transform/ledger_test.go | 68 +++++- internal/transform/ledger_transaction_test.go | 32 ++- internal/transform/liquidity_pool.go | 9 +- internal/transform/liquidity_pool_test.go | 24 +- internal/transform/offer.go | 12 +- internal/transform/offer_normalized.go | 6 +- internal/transform/operation.go | 100 ++++----- internal/transform/operation_test.go | 210 +++++++++--------- internal/transform/schema.go | 76 ++++--- internal/transform/test_variables_test.go | 133 +++++------ internal/transform/trade.go | 36 +-- internal/transform/trade_test.go | 70 +++--- internal/transform/transaction.go | 83 ++++--- internal/transform/transaction_test.go | 34 ++- internal/transform/trustline.go | 6 +- internal/transform/ttl.go | 2 +- internal/utils/main.go | 5 + 35 files changed, 609 insertions(+), 547 deletions(-) diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index 726f5cee..7b727680 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -57,7 +57,7 @@ be exported.`, } if allFalse { - for export_name, _ := range exports { + for export_name := range exports { exports[export_name] = true } } diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go index de3baa13..f5000289 100644 --- a/cmd/export_ledgers.go +++ b/cmd/export_ledgers.go @@ -5,7 +5,6 @@ import ( "github.com/sirupsen/logrus" "github.com/spf13/cobra" - "github.com/stellar/go/historyarchive" "github.com/stellar/stellar-etl/internal/input" "github.com/stellar/stellar-etl/internal/transform" "github.com/stellar/stellar-etl/internal/utils" @@ -23,7 +22,7 @@ var ledgersCmd = &cobra.Command{ cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) - var ledgers []historyarchive.Ledger + var ledgers []utils.HistoryArchiveLedgerAndLCM var err error if useCaptiveCore { @@ -39,8 +38,8 @@ var ledgersCmd = &cobra.Command{ numFailures := 0 totalNumBytes := 0 - for i, lcm := range ledgers { - transformed, err := 
transform.TransformLedger(lcm) + for i, ledger := range ledgers { + transformed, err := transform.TransformLedger(ledger.Ledger, ledger.LCM) if err != nil { cmdLogger.LogError(fmt.Errorf("could not json transform ledger %d: %s", startNum+uint32(i), err)) numFailures += 1 diff --git a/cmd/export_ledgers_test.go b/cmd/export_ledgers_test.go index 68d3f0f5..8006ea6d 100644 --- a/cmd/export_ledgers_test.go +++ b/cmd/export_ledgers_test.go @@ -121,7 +121,7 @@ func indexOf(l []string, s string) int { return -1 } -func sortByName(files []os.FileInfo) { +func sortByName(files []os.DirEntry) { sort.Slice(files, func(i, j int) bool { return files[i].Name() < files[j].Name() }) @@ -144,14 +144,14 @@ func runCLITest(t *testing.T, test cliTest, goldenFolder string) { // If the output arg specified is a directory, concat the contents for comparison. if stat.IsDir() { - files, err := ioutil.ReadDir(outLocation) + files, err := os.ReadDir(outLocation) if err != nil { log.Fatal(err) } var buf bytes.Buffer sortByName(files) for _, f := range files { - b, err := ioutil.ReadFile(filepath.Join(outLocation, f.Name())) + b, err := os.ReadFile(filepath.Join(outLocation, f.Name())) if err != nil { log.Fatal(err) } @@ -160,7 +160,7 @@ func runCLITest(t *testing.T, test cliTest, goldenFolder string) { testOutput = buf.Bytes() } else { // If the output is written to a file, read the contents of the file for comparison. - testOutput, err = ioutil.ReadFile(outLocation) + testOutput, err = os.ReadFile(outLocation) if err != nil { log.Fatal(err) } @@ -197,16 +197,6 @@ func extractErrorMsg(loggerOutput string) string { return loggerOutput[errIndex : errIndex+endIndex] } -func removeCoreLogging(loggerOutput string) string { - endIndex := strings.Index(loggerOutput, "{\"") - // if there is no bracket, then nothing was exported except logs - if endIndex == -1 { - return "" - } - - return loggerOutput[endIndex:] -} - func getLastSeqNum(archiveURLs []string) uint32 { num, err := utils.GetLatestLedgerSequence(archiveURLs) if err != nil { @@ -218,10 +208,10 @@ func getLastSeqNum(archiveURLs []string) uint32 { func getGolden(t *testing.T, goldenFile string, actual string, update bool) (string, error) { t.Helper() f, err := os.OpenFile(goldenFile, os.O_RDWR, 0644) - defer f.Close() if err != nil { return "", err } + defer f.Close() // If the update flag is true, clear the current contents of the golden file and write the actual output // This is useful for when new tests or added or functionality changes that breaks current tests diff --git a/go.mod b/go.mod index e87fbc1a..56f483d1 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240419222646-3a79646669ab + github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 3d6f1fa8..92a52cf7 100644 --- a/go.sum +++ b/go.sum @@ -296,10 +296,8 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= -github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07 h1:AgxlbRmsFAc9VaOLj29K9RpsSmlmtJ0KCVnGxV7bUwo= -github.com/stellar/go v0.0.0-20240419044405-2d7308b67c07/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= 
-github.com/stellar/go v0.0.0-20240419222646-3a79646669ab h1:+uTCn/DrOc1cXugQ8PKZPAkZS3KWeHk5f2aKk9jdrDs= -github.com/stellar/go v0.0.0-20240419222646-3a79646669ab/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= +github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba h1:2UPb78V6mL07B0nJ6/89nJ2cimVD3xPMCFxawwRvpJ0= +github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= diff --git a/internal/input/ledgers.go b/internal/input/ledgers.go index c83b3b55..d70012de 100644 --- a/internal/input/ledgers.go +++ b/internal/input/ledgers.go @@ -11,20 +11,20 @@ import ( ) // GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends) -func GetLedgers(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]historyarchive.Ledger, error) { +func GetLedgers(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]utils.HistoryArchiveLedgerAndLCM, error) { ctx := context.Background() backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) if err != nil { - return []historyarchive.Ledger{}, err + return []utils.HistoryArchiveLedgerAndLCM{}, err } - ledgerSlice := []historyarchive.Ledger{} + ledgerSlice := []utils.HistoryArchiveLedgerAndLCM{} err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)) panicIf(err) for seq := start; seq <= end; seq++ { lcm, err := backend.GetLedger(ctx, seq) if err != nil { - return []historyarchive.Ledger{}, err + return []utils.HistoryArchiveLedgerAndLCM{}, err } var ext xdr.TransactionHistoryEntryExt @@ -68,7 +68,12 @@ func GetLedgers(start, end uint32, limit int64, env utils.EnvironmentDetails, us }, } - ledgerSlice = append(ledgerSlice, ledger) + ledgerLCM := utils.HistoryArchiveLedgerAndLCM{ + Ledger: ledger, + LCM: lcm, + } + + ledgerSlice = append(ledgerSlice, ledgerLCM) if int64(len(ledgerSlice)) >= limit && limit >= 0 { break } diff --git a/internal/input/ledgers_history_archive.go b/internal/input/ledgers_history_archive.go index 613efd1e..5b42ba5c 100644 --- a/internal/input/ledgers_history_archive.go +++ b/internal/input/ledgers_history_archive.go @@ -4,26 +4,28 @@ import ( "context" "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/historyarchive" ) // GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends) -func GetLedgersHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]historyarchive.Ledger, error) { +func GetLedgersHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]utils.HistoryArchiveLedgerAndLCM, error) { backend, err := utils.CreateBackend(start, end, env.ArchiveURLs) if err != nil { - return []historyarchive.Ledger{}, err + return []utils.HistoryArchiveLedgerAndLCM{}, err } - ledgerSlice := []historyarchive.Ledger{} + ledgerSlice := []utils.HistoryArchiveLedgerAndLCM{} ctx := context.Background() for seq := start; seq <= end; seq++ { ledger, err := backend.GetLedgerArchive(ctx, seq) if err != nil { - return []historyarchive.Ledger{}, err + return 
[]utils.HistoryArchiveLedgerAndLCM{}, err + } + + ledgerLCM := utils.HistoryArchiveLedgerAndLCM{ + Ledger: ledger, } - ledgerSlice = append(ledgerSlice, ledger) + ledgerSlice = append(ledgerSlice, ledgerLCM) if int64(len(ledgerSlice)) >= limit && limit >= 0 { break } diff --git a/internal/transform/account.go b/internal/transform/account.go index 1a605ac6..84f4c706 100644 --- a/internal/transform/account.go +++ b/internal/transform/account.go @@ -18,7 +18,7 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory accountEntry, accountFound := ledgerEntry.Data.GetAccount() if !accountFound { - return AccountOutput{}, fmt.Errorf("Could not extract account data from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return AccountOutput{}, fmt.Errorf("could not extract account data from ledger entry; actual type is %s", ledgerEntry.Data.Type) } outputID, err := accountEntry.AccountId.GetAddress() @@ -28,7 +28,7 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory outputBalance := accountEntry.Balance if outputBalance < 0 { - return AccountOutput{}, fmt.Errorf("Balance is negative (%d) for account: %s", outputBalance, outputID) + return AccountOutput{}, fmt.Errorf("balance is negative (%d) for account: %s", outputBalance, outputID) } //The V1 struct is the first version of the extender from accountEntry. It contains information on liabilities, and in the future @@ -39,17 +39,17 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory liabilities := accountExtensionInfo.Liabilities outputBuyingLiabilities, outputSellingLiabilities = liabilities.Buying, liabilities.Selling if outputBuyingLiabilities < 0 { - return AccountOutput{}, fmt.Errorf("The buying liabilities count is negative (%d) for account: %s", outputBuyingLiabilities, outputID) + return AccountOutput{}, fmt.Errorf("the buying liabilities count is negative (%d) for account: %s", outputBuyingLiabilities, outputID) } if outputSellingLiabilities < 0 { - return AccountOutput{}, fmt.Errorf("The selling liabilities count is negative (%d) for account: %s", outputSellingLiabilities, outputID) + return AccountOutput{}, fmt.Errorf("the selling liabilities count is negative (%d) for account: %s", outputSellingLiabilities, outputID) } } outputSequenceNumber := int64(accountEntry.SeqNum) if outputSequenceNumber < 0 { - return AccountOutput{}, fmt.Errorf("Account sequence number is negative (%d) for account: %s", outputSequenceNumber, outputID) + return AccountOutput{}, fmt.Errorf("account sequence number is negative (%d) for account: %s", outputSequenceNumber, outputID) } outputSequenceLedger := accountEntry.SeqLedger() outputSequenceTime := accountEntry.SeqTime() diff --git a/internal/transform/asset_test.go b/internal/transform/asset_test.go index deedbe49..cc5f813b 100644 --- a/internal/transform/asset_test.go +++ b/internal/transform/asset_test.go @@ -68,7 +68,7 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error) inputEnvelope.Tx.SourceAccount = testAccount1 inputOperations := []xdr.Operation{ - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePayment, @@ -79,7 +79,7 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error) }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePayment, @@ -99,7 +99,7 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error) func makeAssetTestOutput() 
(transformedAssets []AssetOutput) { transformedAssets = []AssetOutput{ - AssetOutput{ + { AssetCode: "USDT", AssetIssuer: "GBVVRXLMNCJQW3IDDXC3X6XCH35B5Q7QXNMMFPENSOGUPQO7WO7HGZPA", AssetType: "credit_alphanum4", @@ -107,7 +107,7 @@ func makeAssetTestOutput() (transformedAssets []AssetOutput) { ID: -8205667356306085451, }, - AssetOutput{ + { AssetCode: "", AssetIssuer: "", AssetType: "native", diff --git a/internal/transform/claimable_balance.go b/internal/transform/claimable_balance.go index 73cfc0a0..ca1cf6fb 100644 --- a/internal/transform/claimable_balance.go +++ b/internal/transform/claimable_balance.go @@ -33,7 +33,7 @@ func TransformClaimableBalance(ledgerChange ingest.Change, header xdr.LedgerHead } balanceID, err := xdr.MarshalHex(balanceEntry.BalanceId) if err != nil { - return ClaimableBalanceOutput{}, fmt.Errorf("Invalid balanceId in op: %d", uint32(ledgerEntry.LastModifiedLedgerSeq)) + return ClaimableBalanceOutput{}, fmt.Errorf("invalid balanceId in op: %d", uint32(ledgerEntry.LastModifiedLedgerSeq)) } outputFlags := uint32(balanceEntry.Flags()) outputAsset, err := transformSingleAsset(balanceEntry.Asset) diff --git a/internal/transform/config_setting.go b/internal/transform/config_setting.go index c98d17c1..f110b2ab 100644 --- a/internal/transform/config_setting.go +++ b/internal/transform/config_setting.go @@ -18,7 +18,7 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH configSetting, ok := ledgerEntry.Data.GetConfigSetting() if !ok { - return ConfigSettingOutput{}, fmt.Errorf("Could not extract config setting from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return ConfigSettingOutput{}, fmt.Errorf("could not extract config setting from ledger entry; actual type is %s", ledgerEntry.Data.Type) } configSettingId := configSetting.ConfigSettingId @@ -48,7 +48,7 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH writeFee1KbBucketListHigh := contractLedgerCost.WriteFee1KbBucketListHigh bucketListWriteFeeGrowthFactor := contractLedgerCost.BucketListWriteFeeGrowthFactor - contractHistoricalData, ok := configSetting.GetContractHistoricalData() + contractHistoricalData, _ := configSetting.GetContractHistoricalData() feeHistorical1Kb := contractHistoricalData.FeeHistorical1Kb contractMetaData, _ := configSetting.GetContractEvents() @@ -66,9 +66,9 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH paramsMemBytes, _ := configSetting.GetContractCostParamsMemBytes() contractCostParamsMemBytes := serializeParams(paramsMemBytes) - contractDataKeySizeBytes, ok := configSetting.GetContractDataKeySizeBytes() + contractDataKeySizeBytes, _ := configSetting.GetContractDataKeySizeBytes() - contractDataEntrySizeBytes, ok := configSetting.GetContractDataEntrySizeBytes() + contractDataEntrySizeBytes, _ := configSetting.GetContractDataEntrySizeBytes() stateArchivalSettings, _ := configSetting.GetStateArchivalSettings() maxEntryTtl := stateArchivalSettings.MaxEntryTtl diff --git a/internal/transform/config_setting_test.go b/internal/transform/config_setting_test.go index b780c3fe..59163e88 100644 --- a/internal/transform/config_setting_test.go +++ b/internal/transform/config_setting_test.go @@ -31,7 +31,7 @@ func TestTransformConfigSetting(t *testing.T) { }, }, }, - ConfigSettingOutput{}, fmt.Errorf("Could not extract config setting from ledger entry; actual type is LedgerEntryTypeOffer"), + ConfigSettingOutput{}, fmt.Errorf("could not extract config setting from ledger entry; actual type 
is LedgerEntryTypeOffer"), }, } @@ -82,8 +82,8 @@ func makeConfigSettingTestInput() []ingest.Change { } func makeConfigSettingTestOutput() []ConfigSettingOutput { - contractMapType := make([]map[string]string, 0, 0) - bucket := make([]uint64, 0, 0) + contractMapType := make([]map[string]string, 0) + bucket := make([]uint64, 0) return []ConfigSettingOutput{ { diff --git a/internal/transform/contract_code.go b/internal/transform/contract_code.go index 8bfcf574..bea834a3 100644 --- a/internal/transform/contract_code.go +++ b/internal/transform/contract_code.go @@ -17,7 +17,7 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi contractCode, ok := ledgerEntry.Data.GetContractCode() if !ok { - return ContractCodeOutput{}, fmt.Errorf("Could not extract contract code from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return ContractCodeOutput{}, fmt.Errorf("could not extract contract code from ledger entry; actual type is %s", ledgerEntry.Data.Type) } // LedgerEntryChange must contain a contract code change to be parsed, otherwise skip diff --git a/internal/transform/contract_data.go b/internal/transform/contract_data.go index 30fec49b..75c248f4 100644 --- a/internal/transform/contract_data.go +++ b/internal/transform/contract_data.go @@ -10,28 +10,28 @@ import ( "github.com/stellar/stellar-etl/internal/utils" ) -const ( - scDecimalPrecision = 7 -) +//const ( +// scDecimalPrecision = 7 +//) var ( // https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/public_types.rs#L22 - nativeAssetSym = xdr.ScSymbol("Native") + //nativeAssetSym = xdr.ScSymbol("Native") // these are storage DataKey enum // https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/storage_types.rs#L23 balanceMetadataSym = xdr.ScSymbol("Balance") - metadataSym = xdr.ScSymbol("METADATA") - metadataNameSym = xdr.ScSymbol("name") - metadataSymbolSym = xdr.ScSymbol("symbol") - adminSym = xdr.ScSymbol("Admin") - issuerSym = xdr.ScSymbol("issuer") - assetCodeSym = xdr.ScSymbol("asset_code") - alphaNum4Sym = xdr.ScSymbol("AlphaNum4") - alphaNum12Sym = xdr.ScSymbol("AlphaNum12") - decimalSym = xdr.ScSymbol("decimal") - assetInfoSym = xdr.ScSymbol("AssetInfo") - decimalVal = xdr.Uint32(scDecimalPrecision) - assetInfoVec = &xdr.ScVec{ + //metadataSym = xdr.ScSymbol("METADATA") + //metadataNameSym = xdr.ScSymbol("name") + //metadataSymbolSym = xdr.ScSymbol("symbol") + //adminSym = xdr.ScSymbol("Admin") + issuerSym = xdr.ScSymbol("issuer") + assetCodeSym = xdr.ScSymbol("asset_code") + //alphaNum4Sym = xdr.ScSymbol("AlphaNum4") + //alphaNum12Sym = xdr.ScSymbol("AlphaNum12") + //decimalSym = xdr.ScSymbol("decimal") + assetInfoSym = xdr.ScSymbol("AssetInfo") + //decimalVal = xdr.Uint32(scDecimalPrecision) + assetInfoVec = &xdr.ScVec{ xdr.ScVal{ Type: xdr.ScValTypeScvSymbol, Sym: &assetInfoSym, @@ -67,7 +67,7 @@ func (t *TransformContractDataStruct) TransformContractData(ledgerChange ingest. contractData, ok := ledgerEntry.Data.GetContractData() if !ok { - return ContractDataOutput{}, fmt.Errorf("Could not extract contract data from ledger entry; actual type is %s", ledgerEntry.Data.Type), false + return ContractDataOutput{}, fmt.Errorf("could not extract contract data from ledger entry; actual type is %s", ledgerEntry.Data.Type), false } if contractData.Key.Type.String() == "ScValTypeScvLedgerKeyNonce" { @@ -100,7 +100,7 @@ func (t *TransformContractDataStruct) TransformContractData(ledgerChange ingest. 
contractDataContractId, ok := contractData.Contract.GetContractId() if !ok { - return ContractDataOutput{}, fmt.Errorf("Could not extract contractId data information from contractData"), false + return ContractDataOutput{}, fmt.Errorf("could not extract contractId data information from contractData"), false } contractDataKeyType := contractData.Key.Type.String() diff --git a/internal/transform/diagnostic_events_test.go b/internal/transform/diagnostic_events_test.go index 39a71de1..ed6f0c47 100644 --- a/internal/transform/diagnostic_events_test.go +++ b/internal/transform/diagnostic_events_test.go @@ -98,7 +98,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his } genericResultResults := &[]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeCreateAccount, CreateAccountResult: &xdr.CreateAccountResult{ @@ -118,7 +118,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his } transaction = []ingest.LedgerTransaction{ - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -140,7 +140,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount2, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -166,7 +166,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his }, } historyHeader = []xdr.LedgerHeaderHistoryEntry{ - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521816, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, diff --git a/internal/transform/effects.go b/internal/transform/effects.go index f1867347..711ed60e 100644 --- a/internal/transform/effects.go +++ b/internal/transform/effects.go @@ -131,7 +131,7 @@ func (operation *transactionOperationWrapper) effects() ([]EffectOutput, error) case xdr.OperationTypeRestoreFootprint: err = wrapper.addRestoreFootprintExpirationEffect() default: - return nil, fmt.Errorf("Unknown operation type: %s", op.Body.Type) + return nil, fmt.Errorf("unknown operation type: %s", op.Body.Type) } if err != nil { return nil, err @@ -905,7 +905,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change balanceID, err := xdr.MarshalHex(op.BalanceId) if err != nil { - return fmt.Errorf("Invalid balanceId in op: %d", e.operation.index) + return fmt.Errorf("invalid balanceId in op: %d", e.operation.index) } var cBalance xdr.ClaimableBalanceEntry @@ -919,7 +919,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change cBalance = change.Pre.Data.MustClaimableBalance() preBalanceID, err := xdr.MarshalHex(cBalance.BalanceId) if err != nil { - return fmt.Errorf("Invalid balanceId in meta changes for op: %d", e.operation.index) + return fmt.Errorf("invalid balanceId in meta changes for op: %d", e.operation.index) } if preBalanceID == balanceID { @@ -930,7 +930,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change } if !found { - return fmt.Errorf("Change not found for balanceId : %s", balanceID) + return fmt.Errorf("change not found for balanceId : %s", balanceID) } details := map[string]interface{}{ diff --git a/internal/transform/effects_test.go b/internal/transform/effects_test.go index d99cfb4c..df5e75e1 100644 --- a/internal/transform/effects_test.go +++ b/internal/transform/effects_test.go @@ -349,6 +349,7 @@ func 
TestOperationEffects(t *testing.T) { harCodedCloseMetaInput := makeLedgerCloseMeta() LedgerClosed, err := utils.GetCloseTime(harCodedCloseMetaInput) + assert.NoError(t, err) revokeSponsorshipMeta, revokeSponsorshipEffects := getRevokeSponsorshipMeta(t) @@ -2565,12 +2566,12 @@ func TestLiquidityPoolEffects(t *testing.T) { "id": poolIDStr, "reserves": []base.AssetAmount{ { - "native", - "0.0000200", + Asset: "native", + Amount: "0.0000200", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000100", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000100", }, }, "total_shares": "0.0001000", @@ -2623,12 +2624,12 @@ func TestLiquidityPoolEffects(t *testing.T) { "id": poolIDStr, "reserves": []base.AssetAmount{ { - "native", - "0.0000250", + Asset: "native", + Amount: "0.0000250", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000160", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000160", }, }, "total_shares": "0.0001010", @@ -2637,12 +2638,12 @@ func TestLiquidityPoolEffects(t *testing.T) { }, "reserves_deposited": []base.AssetAmount{ { - "native", - "0.0000050", + Asset: "native", + Amount: "0.0000050", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000060", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000060", }, }, "shares_received": "0.0000010", @@ -2685,12 +2686,12 @@ func TestLiquidityPoolEffects(t *testing.T) { "id": poolIDStr, "reserves": []base.AssetAmount{ { - "native", - "0.0000189", + Asset: "native", + Amount: "0.0000189", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000094", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000094", }, }, "total_shares": "0.0000990", @@ -2699,12 +2700,12 @@ func TestLiquidityPoolEffects(t *testing.T) { }, "reserves_received": []base.AssetAmount{ { - "native", - "0.0000011", + Asset: "native", + Amount: "0.0000011", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000006", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000006", }, }, "shares_redeemed": "0.0000010", @@ -2805,12 +2806,12 @@ func TestLiquidityPoolEffects(t *testing.T) { "id": poolIDStr, "reserves": []base.AssetAmount{ { - "native", - "0.0000189", + Asset: "native", + Amount: "0.0000189", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000094", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000094", }, }, "total_shares": "0.0000990", @@ -3002,12 +3003,12 @@ func TestLiquidityPoolEffects(t *testing.T) { "id": poolIDStr, "reserves": []base.AssetAmount{ { - "native", - "0.0000200", + Asset: "native", + Amount: "0.0000200", }, { - "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", - "0.0000100", + Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY", + Amount: "0.0000100", }, }, "total_shares": "0.0001000", @@ -3326,28 +3327,28 @@ func getRevokeSponsorshipMeta(t *testing.T) (string, []EffectOutput) { type ClaimClaimableBalanceEffectsTestSuite struct { suite.Suite - ops []xdr.Operation - tx ingest.LedgerTransaction + //ops []xdr.Operation + //tx ingest.LedgerTransaction } type CreateClaimableBalanceEffectsTestSuite struct { suite.Suite - ops []xdr.Operation - tx ingest.LedgerTransaction + //ops []xdr.Operation + //tx ingest.LedgerTransaction } const ( 
networkPassphrase = "Arbitrary Testing Passphrase" ) -type effect struct { - address string - addressMuxed null.String - operationID int64 - details map[string]interface{} - effectType EffectType - order uint32 -} +//type effect struct { +// address string +// addressMuxed null.String +// operationID int64 +// details map[string]interface{} +// effectType EffectType +// order uint32 +//} func TestInvokeHostFunctionEffects(t *testing.T) { randAddr := func() string { diff --git a/internal/transform/ledger.go b/internal/transform/ledger.go index 2239eaae..924a0d89 100644 --- a/internal/transform/ledger.go +++ b/internal/transform/ledger.go @@ -12,7 +12,7 @@ import ( ) // TransformLedger converts a ledger from the history archive ingestion system into a form suitable for BigQuery -func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) { +func TransformLedger(inputLedger historyarchive.Ledger, lcm xdr.LedgerCloseMeta) (LedgerOutput, error) { ledgerHeader := inputLedger.Header.Header outputSequence := uint32(ledgerHeader.LedgerSeq) @@ -55,6 +55,12 @@ func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) { outputProtocolVersion := uint32(ledgerHeader.LedgerVersion) + var outputSorobanFeeWrite1Kb int64 + lcmV1, ok := lcm.GetV1() + if ok { + outputSorobanFeeWrite1Kb = int64(lcmV1.Ext.V1.SorobanFeeWrite1Kb) + } + transformedLedger := LedgerOutput{ Sequence: outputSequence, LedgerID: outputLedgerID, @@ -73,6 +79,7 @@ func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) { BaseReserve: outputBaseReserve, MaxTxSetSize: outputMaxTxSetSize, ProtocolVersion: outputProtocolVersion, + SorobanFeeWrite1Kb: outputSorobanFeeWrite1Kb, } return transformedLedger, nil } @@ -93,7 +100,7 @@ func extractCounts(ledger historyarchive.Ledger) (transactionCount int32, operat results := ledger.TransactionResult.TxResultSet.Results txCount := len(transactions) if txCount != len(results) { - err = fmt.Errorf("The number of transactions and results are different (%d != %d)", txCount, len(results)) + err = fmt.Errorf("the number of transactions and results are different (%d != %d)", txCount, len(results)) return } @@ -107,7 +114,7 @@ func extractCounts(ledger historyarchive.Ledger) (transactionCount int32, operat if results[i].Result.Successful() { operationResults, ok := results[i].Result.OperationResults() if !ok { - err = fmt.Errorf("Could not access operation results for result %d", i) + err = fmt.Errorf("could not access operation results for result %d", i) return } diff --git a/internal/transform/ledger_test.go b/internal/transform/ledger_test.go index ba40e28d..962175eb 100644 --- a/internal/transform/ledger_test.go +++ b/internal/transform/ledger_test.go @@ -14,7 +14,7 @@ import ( func TestTransformLedger(t *testing.T) { type transformTest struct { - input historyarchive.Ledger + input utils.HistoryArchiveLedgerAndLCM wantOutput LedgerOutput wantErr error } @@ -26,10 +26,23 @@ func TestTransformLedger(t *testing.T) { tests := []transformTest{ { - historyarchive.Ledger{ - Header: xdr.LedgerHeaderHistoryEntry{ - Header: xdr.LedgerHeader{ - TotalCoins: -1, + utils.HistoryArchiveLedgerAndLCM{ + Ledger: historyarchive.Ledger{ + Header: xdr.LedgerHeaderHistoryEntry{ + Header: xdr.LedgerHeader{ + TotalCoins: -1, + }, + }, + }, + LCM: xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + Ext: xdr.LedgerCloseMetaExt{ + V: 1, + V1: &xdr.LedgerCloseMetaExtV1{ + SorobanFeeWrite1Kb: xdr.Int64(1234), + }, + }, }, }, }, @@ -37,10 +50,23 @@ func 
TestTransformLedger(t *testing.T) { fmt.Errorf("the total number of coins (-1) is negative for ledger 0 (ledger id=0)"), }, { - historyarchive.Ledger{ - Header: xdr.LedgerHeaderHistoryEntry{ - Header: xdr.LedgerHeader{ - FeePool: -1, + utils.HistoryArchiveLedgerAndLCM{ + Ledger: historyarchive.Ledger{ + Header: xdr.LedgerHeaderHistoryEntry{ + Header: xdr.LedgerHeader{ + FeePool: -1, + }, + }, + }, + LCM: xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + Ext: xdr.LedgerCloseMetaExt{ + V: 1, + V1: &xdr.LedgerCloseMetaExtV1{ + SorobanFeeWrite1Kb: xdr.Int64(1234), + }, + }, }, }, }, @@ -55,7 +81,7 @@ func TestTransformLedger(t *testing.T) { } for _, test := range tests { - actualOutput, actualError := TransformLedger(test.input) + actualOutput, actualError := TransformLedger(test.input.Ledger, test.input.LCM) assert.Equal(t, test.wantErr, actualError) assert.Equal(t, test.wantOutput, actualOutput) } @@ -88,11 +114,13 @@ func makeLedgerTestOutput() (output LedgerOutput, err error) { SuccessfulTransactionCount: 1, FailedTransactionCount: 1, TxSetOperationCount: "13", + + SorobanFeeWrite1Kb: 1234, } return } -func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) { +func makeLedgerTestInput() (lcm utils.HistoryArchiveLedgerAndLCM, err error) { hardCodedTxSet := xdr.TransactionSet{ Txs: []xdr.TransactionEnvelope{ utils.CreateSampleTx(0, 3), @@ -103,7 +131,7 @@ func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) { utils.CreateSampleResultPair(false, 3), utils.CreateSampleResultPair(true, 10), } - lcm = historyarchive.Ledger{ + ledger := historyarchive.Ledger{ Header: xdr.LedgerHeaderHistoryEntry{ Header: xdr.LedgerHeader{ LedgerSeq: 30578981, @@ -130,5 +158,21 @@ func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) { Ext: xdr.TransactionHistoryResultEntryExt{}, }, } + + lcm = utils.HistoryArchiveLedgerAndLCM{ + Ledger: ledger, + LCM: xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + Ext: xdr.LedgerCloseMetaExt{ + V: 1, + V1: &xdr.LedgerCloseMetaExtV1{ + SorobanFeeWrite1Kb: xdr.Int64(1234), + }, + }, + }, + }, + } + return lcm, nil } diff --git a/internal/transform/ledger_transaction_test.go b/internal/transform/ledger_transaction_test.go index 7e53e63d..2c471744 100644 --- a/internal/transform/ledger_transaction_test.go +++ b/internal/transform/ledger_transaction_test.go @@ -45,7 +45,7 @@ func TestTransformTx(t *testing.T) { func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err error) { output = []LedgerTransactionOutput{ - LedgerTransactionOutput{ + { TxEnvelope: "AAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAABX5ABjydzAABBtwAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAABLP////8AAAABAAAAAAAAAAAAAAAAAAAAAA==", TxMeta: 
"AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -54,7 +54,7 @@ func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err er LedgerSequence: 30521816, ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC), }, - LedgerTransactionOutput{ + { TxEnvelope: "AAAABQAAAABnzACGTDuJFoxqr+C8NHCe0CHFBXLi+YhhNCIILCIpcgAAAAAAABwgAAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAAAAAACFPY2AAAAfQAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==", TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAABLAAAAAGof+9e6yYCacOA8t5Faq1ytZuzFaqsd3hgRW4J2sC6+wAAAAAAAABkAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAA==", TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -63,7 +63,7 @@ func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err er LedgerSequence: 30521817, ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC), }, - LedgerTransactionOutput{ + { TxEnvelope: "AAAAAgAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAGQBpLyvsiV6gwAAAAIAAAABAAAAAAAAAAAAAAAAXwardAAAAAEAAAAFAAAACgAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAMCAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAAAZP////8AAAABAAAAAAAAAAAAAAAAAAAAAA==", TxMeta: 
"AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -79,7 +79,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR" hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb}) genericResultResults := &[]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeCreateAccount, CreateAccountResult: &xdr.CreateAccountResult{ @@ -106,7 +106,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h Ed25519: source.Ed25519, } transaction = []ingest.LedgerTransaction{ - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -128,7 +128,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount2, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -152,7 +152,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, }, }, - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -179,7 +179,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount2, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -208,7 +208,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h Result: xdr.InnerTransactionResultResult{ Code: xdr.TransactionResultCodeTxSuccess, Results: &[]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ CreateAccountResult: &xdr.CreateAccountResult{}, }, @@ -217,14 +217,12 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, }, }, - Results: &[]xdr.OperationResult{ - xdr.OperationResult{}, - }, + Results: &[]xdr.OperationResult{{}}, }, }, }, }, - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -253,7 +251,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount4, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -279,19 +277,19 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h }, } historyHeader = 
[]xdr.LedgerHeaderHistoryEntry{ - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521816, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, }, }, - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521817, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, }, }, - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521818, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, diff --git a/internal/transform/liquidity_pool.go b/internal/transform/liquidity_pool.go index 30cbaa24..eacc6be3 100644 --- a/internal/transform/liquidity_pool.go +++ b/internal/transform/liquidity_pool.go @@ -22,17 +22,17 @@ func TransformPool(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEnt lp, ok := ledgerEntry.Data.GetLiquidityPool() if !ok { - return PoolOutput{}, fmt.Errorf("Could not extract liquidity pool data from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return PoolOutput{}, fmt.Errorf("could not extract liquidity pool data from ledger entry; actual type is %s", ledgerEntry.Data.Type) } cp, ok := lp.Body.GetConstantProduct() if !ok { - return PoolOutput{}, fmt.Errorf("Could not extract constant product information for liquidity pool %s", xdr.Hash(lp.LiquidityPoolId).HexString()) + return PoolOutput{}, fmt.Errorf("could not extract constant product information for liquidity pool %s", xdr.Hash(lp.LiquidityPoolId).HexString()) } poolType, ok := xdr.LiquidityPoolTypeToString[lp.Body.Type] if !ok { - return PoolOutput{}, fmt.Errorf("Unknown liquidity pool type: %d", lp.Body.Type) + return PoolOutput{}, fmt.Errorf("unknown liquidity pool type: %d", lp.Body.Type) } var assetAType, assetACode, assetAIssuer string @@ -44,6 +44,9 @@ func TransformPool(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEnt var assetBType, assetBCode, assetBIssuer string err = cp.Params.AssetB.Extract(&assetBType, &assetBCode, &assetBIssuer) + if err != nil { + return PoolOutput{}, err + } assetBID := FarmHashAsset(assetBCode, assetBIssuer, assetBType) closedAt, err := utils.TimePointToUTCTimeStamp(header.Header.ScpValue.CloseTime) diff --git a/internal/transform/liquidity_pool_test.go b/internal/transform/liquidity_pool_test.go index 8a60f59d..829aa7bb 100644 --- a/internal/transform/liquidity_pool_test.go +++ b/internal/transform/liquidity_pool_test.go @@ -61,18 +61,18 @@ func TestTransformPool(t *testing.T) { } } -func wrapPoolEntry(poolEntry xdr.LiquidityPoolEntry, lastModified int) ingest.Change { - return ingest.Change{ - Type: xdr.LedgerEntryTypeLiquidityPool, - Pre: &xdr.LedgerEntry{ - LastModifiedLedgerSeq: xdr.Uint32(lastModified), - Data: xdr.LedgerEntryData{ - Type: xdr.LedgerEntryTypeLiquidityPool, - LiquidityPool: &poolEntry, - }, - }, - } -} +//func wrapPoolEntry(poolEntry xdr.LiquidityPoolEntry, lastModified int) ingest.Change { +// return ingest.Change{ +// Type: xdr.LedgerEntryTypeLiquidityPool, +// Pre: &xdr.LedgerEntry{ +// LastModifiedLedgerSeq: xdr.Uint32(lastModified), +// Data: xdr.LedgerEntryData{ +// Type: xdr.LedgerEntryTypeLiquidityPool, +// LiquidityPool: &poolEntry, +// }, +// }, +// } +//} func makePoolTestInput() ingest.Change { ledgerEntry := xdr.LedgerEntry{ diff --git a/internal/transform/offer.go b/internal/transform/offer.go index 21e1c4b4..fd88846b 100644 --- a/internal/transform/offer.go +++ b/internal/transform/offer.go @@ -18,7 +18,7 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn offerEntry, offerFound := ledgerEntry.Data.GetOffer() if !offerFound { - 
return OfferOutput{}, fmt.Errorf("Could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return OfferOutput{}, fmt.Errorf("could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type) } outputSellerID, err := offerEntry.SellerId.GetAddress() @@ -28,7 +28,7 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn outputOfferID := int64(offerEntry.OfferId) if outputOfferID < 0 { - return OfferOutput{}, fmt.Errorf("OfferID is negative (%d) for offer from account: %s", outputOfferID, outputSellerID) + return OfferOutput{}, fmt.Errorf("offerID is negative (%d) for offer from account: %s", outputOfferID, outputSellerID) } outputSellingAsset, err := transformSingleAsset(offerEntry.Selling) @@ -43,21 +43,21 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn outputAmount := offerEntry.Amount if outputAmount < 0 { - return OfferOutput{}, fmt.Errorf("Amount is negative (%d) for offer %d", outputAmount, outputOfferID) + return OfferOutput{}, fmt.Errorf("amount is negative (%d) for offer %d", outputAmount, outputOfferID) } outputPriceN := int32(offerEntry.Price.N) if outputPriceN < 0 { - return OfferOutput{}, fmt.Errorf("Price numerator is negative (%d) for offer %d", outputPriceN, outputOfferID) + return OfferOutput{}, fmt.Errorf("price numerator is negative (%d) for offer %d", outputPriceN, outputOfferID) } outputPriceD := int32(offerEntry.Price.D) if outputPriceD == 0 { - return OfferOutput{}, fmt.Errorf("Price denominator is 0 for offer %d", outputOfferID) + return OfferOutput{}, fmt.Errorf("price denominator is 0 for offer %d", outputOfferID) } if outputPriceD < 0 { - return OfferOutput{}, fmt.Errorf("Price denominator is negative (%d) for offer %d", outputPriceD, outputOfferID) + return OfferOutput{}, fmt.Errorf("price denominator is negative (%d) for offer %d", outputPriceD, outputOfferID) } var outputPrice float64 diff --git a/internal/transform/offer_normalized.go b/internal/transform/offer_normalized.go index 83eced04..0276509e 100644 --- a/internal/transform/offer_normalized.go +++ b/internal/transform/offer_normalized.go @@ -25,7 +25,7 @@ func TransformOfferNormalized(ledgerChange ingest.Change, ledgerSeq uint32) (Nor return NormalizedOfferOutput{}, fmt.Errorf("offer %d is deleted", transformed.OfferID) } - buyingAsset, sellingAsset, err := extractAssets(ledgerChange, transformed) + buyingAsset, sellingAsset, err := extractAssets(ledgerChange) if err != nil { return NormalizedOfferOutput{}, err } @@ -57,7 +57,7 @@ func TransformOfferNormalized(ledgerChange ingest.Change, ledgerSeq uint32) (Nor } // extractAssets extracts the buying and selling assets as strings of the format code:issuer -func extractAssets(ledgerChange ingest.Change, transformed OfferOutput) (string, string, error) { +func extractAssets(ledgerChange ingest.Change) (string, string, error) { ledgerEntry, _, _, err := utils.ExtractEntryFromChange(ledgerChange) if err != nil { return "", "", err @@ -65,7 +65,7 @@ func extractAssets(ledgerChange ingest.Change, transformed OfferOutput) (string, offerEntry, offerFound := ledgerEntry.Data.GetOffer() if !offerFound { - return "", "", fmt.Errorf("Could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return "", "", fmt.Errorf("could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type) } var sellType, sellCode, sellIssuer string diff --git a/internal/transform/operation.go 
b/internal/transform/operation.go index b8badaba..91e4e8de 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -156,7 +156,7 @@ func mapOperationType(operation xdr.Operation) (string, error) { case xdr.OperationTypeRestoreFootprint: op_string_type = "restore_footprint" default: - return op_string_type, fmt.Errorf("Unknown operation type: %s", operation.Body.Type.String()) + return op_string_type, fmt.Errorf("unknown operation type: %s", operation.Body.Type.String()) } return op_string_type, nil } @@ -221,7 +221,7 @@ func mapOperationTrace(operationTrace xdr.OperationResultTr) (string, error) { case xdr.OperationTypeRestoreFootprint: operationTraceDescription = operationTrace.RestoreFootprintResult.Code.String() default: - return operationTraceDescription, fmt.Errorf("Unknown operation type: %s", operationTrace.Type.String()) + return operationTraceDescription, fmt.Errorf("unknown operation type: %s", operationTrace.Type.String()) } return operationTraceDescription, nil } @@ -277,7 +277,7 @@ func getLiquidityPoolAndProductDelta(operationIndex int32, transaction ingest.Le return lp, delta, nil } - return nil, nil, fmt.Errorf("Liquidity pool change not found") + return nil, nil, fmt.Errorf("liquidity pool change not found") } func getOperationSourceAccount(operation xdr.Operation, transaction ingest.LedgerTransaction) xdr.MuxedAccount { @@ -564,7 +564,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeCreateAccount: op, ok := operation.Body.GetCreateAccountOp() if !ok { - return details, fmt.Errorf("Could not access CreateAccount info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access CreateAccount info for this operation (index %d)", operationIndex) } if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "funder"); err != nil { @@ -576,7 +576,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypePayment: op, ok := operation.Body.GetPaymentOp() if !ok { - return details, fmt.Errorf("Could not access Payment info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access Payment info for this operation (index %d)", operationIndex) } if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil { @@ -593,7 +593,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypePathPaymentStrictReceive: op, ok := operation.Body.GetPathPaymentStrictReceiveOp() if !ok { - return details, fmt.Errorf("Could not access PathPaymentStrictReceive info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access PathPaymentStrictReceive info for this operation (index %d)", operationIndex) } if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil { @@ -615,16 +615,16 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT if transaction.Result.Successful() { allOperationResults, ok := transaction.Result.OperationResults() if !ok { - return details, fmt.Errorf("Could not access any results for this transaction") + return details, fmt.Errorf("could not access any results for this transaction") } currentOperationResult := allOperationResults[operationIndex] resultBody, ok := currentOperationResult.GetTr() if !ok { - return details, fmt.Errorf("Could not access result body for this operation (index %d)", operationIndex) 
+ return details, fmt.Errorf("could not access result body for this operation (index %d)", operationIndex) } result, ok := resultBody.GetPathPaymentStrictReceiveResult() if !ok { - return details, fmt.Errorf("Could not access PathPaymentStrictReceive result info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access PathPaymentStrictReceive result info for this operation (index %d)", operationIndex) } details["source_amount"] = utils.ConvertStroopValueToReal(result.SendAmount()) } @@ -634,7 +634,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypePathPaymentStrictSend: op, ok := operation.Body.GetPathPaymentStrictSendOp() if !ok { - return details, fmt.Errorf("Could not access PathPaymentStrictSend info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access PathPaymentStrictSend info for this operation (index %d)", operationIndex) } if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil { @@ -656,16 +656,16 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT if transaction.Result.Successful() { allOperationResults, ok := transaction.Result.OperationResults() if !ok { - return details, fmt.Errorf("Could not access any results for this transaction") + return details, fmt.Errorf("could not access any results for this transaction") } currentOperationResult := allOperationResults[operationIndex] resultBody, ok := currentOperationResult.GetTr() if !ok { - return details, fmt.Errorf("Could not access result body for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access result body for this operation (index %d)", operationIndex) } result, ok := resultBody.GetPathPaymentStrictSendResult() if !ok { - return details, fmt.Errorf("Could not access GetPathPaymentStrictSendResult result info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access GetPathPaymentStrictSendResult result info for this operation (index %d)", operationIndex) } details["amount"] = utils.ConvertStroopValueToReal(result.DestAmount()) } @@ -675,7 +675,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeManageBuyOffer: op, ok := operation.Body.GetManageBuyOfferOp() if !ok { - return details, fmt.Errorf("Could not access ManageBuyOffer info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access ManageBuyOffer info for this operation (index %d)", operationIndex) } details["offer_id"] = int64(op.OfferId) @@ -694,7 +694,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeManageSellOffer: op, ok := operation.Body.GetManageSellOfferOp() if !ok { - return details, fmt.Errorf("Could not access ManageSellOffer info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access ManageSellOffer info for this operation (index %d)", operationIndex) } details["offer_id"] = int64(op.OfferId) @@ -713,7 +713,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeCreatePassiveSellOffer: op, ok := operation.Body.GetCreatePassiveSellOfferOp() if !ok { - return details, fmt.Errorf("Could not access CreatePassiveSellOffer info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access CreatePassiveSellOffer info 
for this operation (index %d)", operationIndex) } details["amount"] = utils.ConvertStroopValueToReal(op.Amount) @@ -731,7 +731,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeSetOptions: op, ok := operation.Body.GetSetOptionsOp() if !ok { - return details, fmt.Errorf("Could not access GetSetOptions info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access GetSetOptions info for this operation (index %d)", operationIndex) } if op.InflationDest != nil { @@ -774,7 +774,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeChangeTrust: op, ok := operation.Body.GetChangeTrustOp() if !ok { - return details, fmt.Errorf("Could not access GetChangeTrust info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access GetChangeTrust info for this operation (index %d)", operationIndex) } if op.Line.Type == xdr.AssetTypeAssetTypePoolShare { @@ -796,7 +796,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeAllowTrust: op, ok := operation.Body.GetAllowTrustOp() if !ok { - return details, fmt.Errorf("Could not access AllowTrust info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access AllowTrust info for this operation (index %d)", operationIndex) } if err := addAssetDetailsToOperationDetails(details, op.Asset.ToAsset(sourceAccount.ToAccountId()), ""); err != nil { @@ -820,7 +820,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeAccountMerge: destinationAccount, ok := operation.Body.GetDestination() if !ok { - return details, fmt.Errorf("Could not access Destination info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access Destination info for this operation (index %d)", operationIndex) } if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "account"); err != nil { @@ -835,7 +835,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeManageData: op, ok := operation.Body.GetManageDataOp() if !ok { - return details, fmt.Errorf("Could not access GetManageData info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access GetManageData info for this operation (index %d)", operationIndex) } details["name"] = string(op.DataName) @@ -848,7 +848,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT case xdr.OperationTypeBumpSequence: op, ok := operation.Body.GetBumpSequenceOp() if !ok { - return details, fmt.Errorf("Could not access BumpSequence info for this operation (index %d)", operationIndex) + return details, fmt.Errorf("could not access BumpSequence info for this operation (index %d)", operationIndex) } details["bump_to"] = fmt.Sprintf("%d", op.BumpTo) @@ -862,7 +862,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT op := operation.Body.MustClaimClaimableBalanceOp() balanceID, err := xdr.MarshalHex(op.BalanceId) if err != nil { - return details, fmt.Errorf("Invalid balanceId in op: %d", operationIndex) + return details, fmt.Errorf("invalid balanceId in op: %d", operationIndex) } details["balance_id"] = balanceID if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "claimant"); err != nil { @@ -908,7 +908,7 @@ func 
extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT op := operation.Body.MustClawbackClaimableBalanceOp() balanceID, err := xdr.MarshalHex(op.BalanceId) if err != nil { - return details, fmt.Errorf("Invalid balanceId in op: %d", operationIndex) + return details, fmt.Errorf("invalid balanceId in op: %d", operationIndex) } details["balance_id"] = balanceID @@ -1113,7 +1113,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: - return details, fmt.Errorf("Unknown operation type: %s", operation.Body.Type.String()) + return details, fmt.Errorf("unknown operation type: %s", operation.Body.Type.String()) } sponsor, err := getSponsor(operation, transaction, operationIndex) @@ -1506,7 +1506,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, op := operation.operation.Body.MustClaimClaimableBalanceOp() balanceID, err := xdr.MarshalHex(op.BalanceId) if err != nil { - panic(fmt.Errorf("Invalid balanceId in op: %d", operation.index)) + panic(fmt.Errorf("invalid balanceId in op: %d", operation.index)) } details["balance_id"] = balanceID addAccountAndMuxedAccountDetails(details, *source, "claimant") @@ -1539,7 +1539,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, op := operation.operation.Body.MustClawbackClaimableBalanceOp() balanceID, err := xdr.MarshalHex(op.BalanceId) if err != nil { - panic(fmt.Errorf("Invalid balanceId in op: %d", operation.index)) + panic(fmt.Errorf("invalid balanceId in op: %d", operation.index)) } details["balance_id"] = balanceID case xdr.OperationTypeSetTrustLineFlags: @@ -1716,7 +1716,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) default: - panic(fmt.Errorf("Unknown operation type: %s", operation.OperationType())) + panic(fmt.Errorf("unknown operation type: %s", operation.OperationType())) } sponsor, err := operation.getSponsor() @@ -2135,7 +2135,7 @@ func (operation *transactionOperationWrapper) Participants() ([]xdr.AccountId, e case xdr.OperationTypeRestoreFootprint: // the only direct participant is the source_account default: - return participants, fmt.Errorf("Unknown operation type: %s", op.Body.Type) + return participants, fmt.Errorf("unknown operation type: %s", op.Body.Type) } sponsor, err := operation.getSponsor() @@ -2162,24 +2162,24 @@ func dedupeParticipants(in []xdr.AccountId) (out []xdr.AccountId) { return } -// OperationsParticipants returns a map with all participants per operation -func operationsParticipants(transaction ingest.LedgerTransaction, sequence uint32) (map[int64][]xdr.AccountId, error) { - participants := map[int64][]xdr.AccountId{} - - for opi, op := range transaction.Envelope.Operations() { - operation := transactionOperationWrapper{ - index: uint32(opi), - transaction: transaction, - operation: op, - ledgerSequence: sequence, - } - - p, err := operation.Participants() - if err != nil { - return participants, errors.Wrapf(err, "reading operation %v participants", operation.ID()) - } - participants[operation.ID()] = p - } - - return participants, nil -} +//// OperationsParticipants returns a map with all participants per operation +//func 
operationsParticipants(transaction ingest.LedgerTransaction, sequence uint32) (map[int64][]xdr.AccountId, error) { +// participants := map[int64][]xdr.AccountId{} +// +// for opi, op := range transaction.Envelope.Operations() { +// operation := transactionOperationWrapper{ +// index: uint32(opi), +// transaction: transaction, +// operation: op, +// ledgerSequence: sequence, +// } +// +// p, err := operation.Participants() +// if err != nil { +// return participants, errors.Wrapf(err, "reading operation %v participants", operation.ID()) +// } +// participants[operation.ID()] = p +// } +// +// return participants, nil +//} diff --git a/internal/transform/operation_test.go b/internal/transform/operation_test.go index de6ff439..c2d2c433 100644 --- a/internal/transform/operation_test.go +++ b/internal/transform/operation_test.go @@ -129,7 +129,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er hardCodedDataValue := xdr.DataValue([]byte{0x76, 0x61, 0x6c, 0x75, 0x65}) hardCodedSequenceNumber := xdr.SequenceNumber(100) inputOperations := []xdr.Operation{ - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeCreateAccount, @@ -139,7 +139,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePayment, @@ -150,7 +150,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePayment, @@ -161,7 +161,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: &testAccount3, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -175,7 +175,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeManageSellOffer, @@ -191,7 +191,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeCreatePassiveSellOffer, @@ -206,7 +206,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeSetOptions, @@ -223,7 +223,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeChangeTrust, @@ -233,7 +233,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeChangeTrust, @@ -243,7 +243,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeAllowTrust, @@ -254,20 +254,20 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeAccountMerge, Destination: &testAccount4, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeInflation, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: 
xdr.OperationBody{ Type: xdr.OperationTypeManageData, @@ -277,7 +277,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeBumpSequence, @@ -286,7 +286,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeManageBuyOffer, @@ -302,7 +302,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictSend, @@ -316,7 +316,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeCreateClaimableBalance, @@ -327,7 +327,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: &testAccount3, Body: xdr.OperationBody{ Type: xdr.OperationTypeClaimClaimableBalance, @@ -336,7 +336,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeBeginSponsoringFutureReserves, @@ -345,7 +345,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -358,7 +358,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -373,7 +373,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -388,7 +388,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -404,7 +404,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -420,7 +420,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -436,7 +436,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeRevokeSponsorship, @@ -451,7 +451,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeClawback, @@ -462,7 +462,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeClawbackClaimableBalance, @@ -471,7 +471,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: 
xdr.OperationTypeSetTrustLineFlags, @@ -483,7 +483,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeLiquidityPoolDeposit, @@ -502,7 +502,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeLiquidityPoolWithdraw, @@ -514,7 +514,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeInvokeHostFunction, @@ -533,7 +533,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er // }, // }, //}, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeInvokeHostFunction, @@ -557,7 +557,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er // }, // }, //}, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeInvokeHostFunction, @@ -584,7 +584,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er // }, // }, //}, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeInvokeHostFunction, @@ -596,7 +596,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er // }, // }, //}, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeBumpFootprintExpiration, @@ -608,7 +608,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er // }, // }, //}, - //xdr.Operation{ + //{ // SourceAccount: nil, // Body: xdr.OperationBody{ // Type: xdr.OperationTypeRestoreFootprint, @@ -622,7 +622,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er } inputEnvelope.Tx.Operations = inputOperations results := []xdr.OperationResult{ - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeCreateAccount, @@ -631,7 +631,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePayment, @@ -640,7 +640,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePayment, @@ -650,7 +650,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, // There needs to be a true result for path payment receive and send - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -662,7 +662,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageSellOffer, @@ -671,7 +671,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageSellOffer, @@ -680,7 +680,7 @@ func makeOperationTestInput() 
(inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeSetOptions, @@ -689,7 +689,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeChangeTrust, @@ -698,7 +698,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeChangeTrust, @@ -707,7 +707,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeAllowTrust, @@ -716,7 +716,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeAccountMerge, @@ -725,7 +725,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeInflation, @@ -734,7 +734,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageData, @@ -743,7 +743,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeBumpSequence, @@ -752,7 +752,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageBuyOffer, @@ -761,7 +761,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictSend, @@ -773,7 +773,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeCreateClaimableBalance, @@ -782,7 +782,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeClaimClaimableBalance, @@ -791,7 +791,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeBeginSponsoringFutureReserves, @@ -800,7 +800,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -809,7 +809,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ 
Type: xdr.OperationTypeRevokeSponsorship, @@ -818,7 +818,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -827,7 +827,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -836,7 +836,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -845,7 +845,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -854,7 +854,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeRevokeSponsorship, @@ -863,7 +863,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeClawback, @@ -872,7 +872,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeClawbackClaimableBalance, @@ -881,7 +881,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeSetTrustLineFlags, @@ -890,7 +890,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeLiquidityPoolDeposit, @@ -899,7 +899,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeLiquidityPoolWithdraw, @@ -908,12 +908,12 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er }, }, }, - //xdr.OperationResult{}, - //xdr.OperationResult{}, - //xdr.OperationResult{}, - //xdr.OperationResult{}, - //xdr.OperationResult{}, - //xdr.OperationResult{}, + //{}, + //{}, + //{}, + //{}, + //{}, + //{}, } inputTransaction.Result.Result.Result.Results = &results inputTransaction.Envelope.V1 = &inputEnvelope @@ -925,7 +925,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { hardCodedDestAccountAddress := testAccount4Address hardCodedLedgerClose := genericCloseTime.UTC() transformedOperations = []OperationOutput{ - OperationOutput{ + { SourceAccount: hardCodedSourceAccountAddress, Type: 0, TypeString: "create_account", @@ -940,7 +940,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "CreateAccountResultCodeCreateAccountSuccess", }, - OperationOutput{ + { Type: 1, TypeString: "payment", 
SourceAccount: hardCodedSourceAccountAddress, @@ -959,7 +959,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "PaymentResultCodePaymentSuccess", }, - OperationOutput{ + { Type: 1, TypeString: "payment", SourceAccount: hardCodedSourceAccountAddress, @@ -976,7 +976,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "PaymentResultCodePaymentSuccess", }, - OperationOutput{ + { Type: 2, TypeString: "path_payment_strict_receive", SourceAccount: hardCodedSourceAccountAddress, @@ -998,7 +998,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "PathPaymentStrictReceiveResultCodePathPaymentStrictReceiveSuccess", }, - OperationOutput{ + { Type: 3, TypeString: "manage_sell_offer", SourceAccount: hardCodedSourceAccountAddress, @@ -1023,7 +1023,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess", }, - OperationOutput{ + { Type: 4, TypeString: "create_passive_sell_offer", SourceAccount: hardCodedSourceAccountAddress, @@ -1047,7 +1047,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess", }, - OperationOutput{ + { Type: 5, TypeString: "set_options", SourceAccount: hardCodedSourceAccountAddress, @@ -1071,7 +1071,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "SetOptionsResultCodeSetOptionsSuccess", }, - OperationOutput{ + { Type: 6, TypeString: "change_trust", SourceAccount: hardCodedSourceAccountAddress, @@ -1090,7 +1090,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess", }, - OperationOutput{ + { Type: 6, TypeString: "change_trust", SourceAccount: hardCodedSourceAccountAddress, @@ -1106,7 +1106,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess", }, - OperationOutput{ + { Type: 7, TypeString: "allow_trust", SourceAccount: hardCodedSourceAccountAddress, @@ -1125,7 +1125,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "AllowTrustResultCodeAllowTrustSuccess", }, - OperationOutput{ + { Type: 8, TypeString: "account_merge", SourceAccount: hardCodedSourceAccountAddress, @@ -1139,7 +1139,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "AccountMergeResultCodeAccountMergeSuccess", }, - OperationOutput{ + { Type: 9, TypeString: "inflation", SourceAccount: hardCodedSourceAccountAddress, @@ -1150,7 +1150,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "InflationResultCodeInflationSuccess", }, - OperationOutput{ + { Type: 10, TypeString: 
"manage_data", SourceAccount: hardCodedSourceAccountAddress, @@ -1164,7 +1164,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ManageDataResultCodeManageDataSuccess", }, - OperationOutput{ + { Type: 11, TypeString: "bump_sequence", SourceAccount: hardCodedSourceAccountAddress, @@ -1177,7 +1177,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "BumpSequenceResultCodeBumpSequenceSuccess", }, - OperationOutput{ + { Type: 12, TypeString: "manage_buy_offer", SourceAccount: hardCodedSourceAccountAddress, @@ -1202,7 +1202,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ManageBuyOfferResultCodeManageBuyOfferSuccess", }, - OperationOutput{ + { Type: 13, TypeString: "path_payment_strict_send", SourceAccount: hardCodedSourceAccountAddress, @@ -1224,7 +1224,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "PathPaymentStrictSendResultCodePathPaymentStrictSendSuccess", }, - OperationOutput{ + { Type: 14, TypeString: "create_claimable_balance", SourceAccount: hardCodedSourceAccountAddress, @@ -1239,7 +1239,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "CreateClaimableBalanceResultCodeCreateClaimableBalanceSuccess", }, - OperationOutput{ + { Type: 15, TypeString: "claim_claimable_balance", SourceAccount: testAccount3Address, @@ -1253,7 +1253,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ClaimClaimableBalanceResultCodeClaimClaimableBalanceSuccess", }, - OperationOutput{ + { Type: 16, TypeString: "begin_sponsoring_future_reserves", SourceAccount: hardCodedSourceAccountAddress, @@ -1266,7 +1266,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "BeginSponsoringFutureReservesResultCodeBeginSponsoringFutureReservesSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1280,7 +1280,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1293,7 +1293,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1306,7 +1306,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1320,7 +1320,7 @@ func makeOperationTestOutputs() 
(transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1333,7 +1333,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1347,7 +1347,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 18, TypeString: "revoke_sponsorship", SourceAccount: hardCodedSourceAccountAddress, @@ -1360,7 +1360,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess", }, - OperationOutput{ + { Type: 19, TypeString: "clawback", SourceAccount: hardCodedSourceAccountAddress, @@ -1378,7 +1378,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ClawbackResultCodeClawbackSuccess", }, - OperationOutput{ + { Type: 20, TypeString: "clawback_claimable_balance", SourceAccount: hardCodedSourceAccountAddress, @@ -1391,7 +1391,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "ClawbackClaimableBalanceResultCodeClawbackClaimableBalanceSuccess", }, - OperationOutput{ + { Type: 21, TypeString: "set_trust_line_flags", SourceAccount: hardCodedSourceAccountAddress, @@ -1412,7 +1412,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "SetTrustLineFlagsResultCodeSetTrustLineFlagsSuccess", }, - OperationOutput{ + { Type: 22, TypeString: "liquidity_pool_deposit", SourceAccount: hardCodedSourceAccountAddress, @@ -1446,7 +1446,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) { OperationResultCode: "OperationResultCodeOpInner", OperationTraceCode: "LiquidityPoolDepositResultCodeLiquidityPoolDepositSuccess", }, - OperationOutput{ + { Type: 23, TypeString: "liquidity_pool_withdraw", SourceAccount: hardCodedSourceAccountAddress, diff --git a/internal/transform/schema.go b/internal/transform/schema.go index f3f64aee..5f78dba3 100644 --- a/internal/transform/schema.go +++ b/internal/transform/schema.go @@ -28,46 +28,50 @@ type LedgerOutput struct { MaxTxSetSize uint32 `json:"max_tx_set_size"` ProtocolVersion uint32 `json:"protocol_version"` LedgerID int64 `json:"id"` + SorobanFeeWrite1Kb int64 `json:"soroban_fee_write_1kb"` } // TransactionOutput is a representation of a transaction that aligns with the BigQuery table history_transactions type TransactionOutput struct { - TransactionHash string `json:"transaction_hash"` - LedgerSequence uint32 `json:"ledger_sequence"` - Account string `json:"account"` - AccountMuxed string `json:"account_muxed,omitempty"` - AccountSequence int64 `json:"account_sequence"` - MaxFee uint32 `json:"max_fee"` - FeeCharged int64 `json:"fee_charged"` - 
OperationCount int32 `json:"operation_count"` - TxEnvelope string `json:"tx_envelope"` - TxResult string `json:"tx_result"` - TxMeta string `json:"tx_meta"` - TxFeeMeta string `json:"tx_fee_meta"` - CreatedAt time.Time `json:"created_at"` - MemoType string `json:"memo_type"` - Memo string `json:"memo"` - TimeBounds string `json:"time_bounds"` - Successful bool `json:"successful"` - TransactionID int64 `json:"id"` - FeeAccount string `json:"fee_account,omitempty"` - FeeAccountMuxed string `json:"fee_account_muxed,omitempty"` - InnerTransactionHash string `json:"inner_transaction_hash,omitempty"` - NewMaxFee uint32 `json:"new_max_fee,omitempty"` - LedgerBounds string `json:"ledger_bounds"` - MinAccountSequence null.Int `json:"min_account_sequence"` - MinAccountSequenceAge null.Int `json:"min_account_sequence_age"` - MinAccountSequenceLedgerGap null.Int `json:"min_account_sequence_ledger_gap"` - ExtraSigners pq.StringArray `json:"extra_signers"` - ClosedAt time.Time `json:"closed_at"` - ResourceFee int64 `json:"resource_fee"` - SorobanResourcesInstructions uint32 `json:"soroban_resources_instructions"` - SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"` - SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"` - TransactionResultCode string `json:"transaction_result_code"` - InclusionFeeBid int64 `json:"inclusion_fee_bid"` - InclusionFeeCharged int64 `json:"inclusion_fee_charged"` - ResourceFeeRefund int64 `json:"resource_fee_refund"` + TransactionHash string `json:"transaction_hash"` + LedgerSequence uint32 `json:"ledger_sequence"` + Account string `json:"account"` + AccountMuxed string `json:"account_muxed,omitempty"` + AccountSequence int64 `json:"account_sequence"` + MaxFee uint32 `json:"max_fee"` + FeeCharged int64 `json:"fee_charged"` + OperationCount int32 `json:"operation_count"` + TxEnvelope string `json:"tx_envelope"` + TxResult string `json:"tx_result"` + TxMeta string `json:"tx_meta"` + TxFeeMeta string `json:"tx_fee_meta"` + CreatedAt time.Time `json:"created_at"` + MemoType string `json:"memo_type"` + Memo string `json:"memo"` + TimeBounds string `json:"time_bounds"` + Successful bool `json:"successful"` + TransactionID int64 `json:"id"` + FeeAccount string `json:"fee_account,omitempty"` + FeeAccountMuxed string `json:"fee_account_muxed,omitempty"` + InnerTransactionHash string `json:"inner_transaction_hash,omitempty"` + NewMaxFee uint32 `json:"new_max_fee,omitempty"` + LedgerBounds string `json:"ledger_bounds"` + MinAccountSequence null.Int `json:"min_account_sequence"` + MinAccountSequenceAge null.Int `json:"min_account_sequence_age"` + MinAccountSequenceLedgerGap null.Int `json:"min_account_sequence_ledger_gap"` + ExtraSigners pq.StringArray `json:"extra_signers"` + ClosedAt time.Time `json:"closed_at"` + ResourceFee int64 `json:"resource_fee"` + SorobanResourcesInstructions uint32 `json:"soroban_resources_instructions"` + SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"` + SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"` + TransactionResultCode string `json:"transaction_result_code"` + InclusionFeeBid int64 `json:"inclusion_fee_bid"` + InclusionFeeCharged int64 `json:"inclusion_fee_charged"` + ResourceFeeRefund int64 `json:"resource_fee_refund"` + TotalNonRefundableResourceFeeCharged int64 `json:"non_refundable_resource_fee_charged"` + TotalRefundableResourceFeeCharged int64 `json:"refundable_resource_fee_charged"` + RentFeeCharged int64 `json:"rent_fee_charged"` } type LedgerTransactionOutput 
struct { diff --git a/internal/transform/test_variables_test.go b/internal/transform/test_variables_test.go index 9621723b..be582641 100644 --- a/internal/transform/test_variables_test.go +++ b/internal/transform/test_variables_test.go @@ -95,63 +95,63 @@ var testAccount4Address = "GBVVRXLMNCJQW3IDDXC3X6XCH35B5Q7QXNMMFPENSOGUPQO7WO7HG var testAccount4ID, _ = xdr.AddressToAccountId(testAccount4Address) var testAccount4 = testAccount4ID.ToMuxedAccount() -// a selection of hardcoded Liquidity Pools -var lpDepositChanges = []xdr.OperationMeta{ - { - Changes: xdr.LedgerEntryChanges{ - xdr.LedgerEntryChange{ - Type: xdr.LedgerEntryChangeTypeLedgerEntryState, - State: &xdr.LedgerEntry{ - Data: xdr.LedgerEntryData{ - Type: xdr.LedgerEntryTypeLiquidityPool, - LiquidityPool: &xdr.LiquidityPoolEntry{ - LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, - Body: xdr.LiquidityPoolEntryBody{ - Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, - ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ - Params: xdr.LiquidityPoolConstantProductParameters{ - AssetA: lpAssetA, - AssetB: lpAssetB, - Fee: 30, - }, - ReserveA: 100000, - ReserveB: 1000, - TotalPoolShares: 500, - PoolSharesTrustLineCount: 25, - }, - }, - }, - }, - }, - }, - xdr.LedgerEntryChange{ - Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated, - Updated: &xdr.LedgerEntry{ - Data: xdr.LedgerEntryData{ - Type: xdr.LedgerEntryTypeLiquidityPool, - LiquidityPool: &xdr.LiquidityPoolEntry{ - LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, - Body: xdr.LiquidityPoolEntryBody{ - Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, - ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ - Params: xdr.LiquidityPoolConstantProductParameters{ - AssetA: lpAssetA, - AssetB: lpAssetB, - Fee: 30, - }, - ReserveA: 101000, - ReserveB: 1100, - TotalPoolShares: 502, - PoolSharesTrustLineCount: 26, - }, - }, - }, - }, - }, - }, - }, - }, -} +//// a selection of hardcoded Liquidity Pools +//var lpDepositChanges = []xdr.OperationMeta{ +// { +// Changes: xdr.LedgerEntryChanges{ +// xdr.LedgerEntryChange{ +// Type: xdr.LedgerEntryChangeTypeLedgerEntryState, +// State: &xdr.LedgerEntry{ +// Data: xdr.LedgerEntryData{ +// Type: xdr.LedgerEntryTypeLiquidityPool, +// LiquidityPool: &xdr.LiquidityPoolEntry{ +// LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, +// Body: xdr.LiquidityPoolEntryBody{ +// Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, +// ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ +// Params: xdr.LiquidityPoolConstantProductParameters{ +// AssetA: lpAssetA, +// AssetB: lpAssetB, +// Fee: 30, +// }, +// ReserveA: 100000, +// ReserveB: 1000, +// TotalPoolShares: 500, +// PoolSharesTrustLineCount: 25, +// }, +// }, +// }, +// }, +// }, +// }, +// xdr.LedgerEntryChange{ +// Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated, +// Updated: &xdr.LedgerEntry{ +// Data: xdr.LedgerEntryData{ +// Type: xdr.LedgerEntryTypeLiquidityPool, +// LiquidityPool: &xdr.LiquidityPoolEntry{ +// LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, +// Body: xdr.LiquidityPoolEntryBody{ +// Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, +// ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ +// Params: xdr.LiquidityPoolConstantProductParameters{ +// AssetA: lpAssetA, +// AssetB: lpAssetB, +// Fee: 30, +// }, +// ReserveA: 101000, +// ReserveB: 1100, +// TotalPoolShares: 502, +// PoolSharesTrustLineCount: 26, +// }, +// }, +// }, +// }, +// }, +// }, +// }, +// }, +//} // a selection of hardcoded assets and their 
AssetOutput representations @@ -225,11 +225,11 @@ var ethTrustLineAsset = xdr.TrustLineAsset{ }, } -var ethAssetPath = Path{ - AssetType: "credit_alphanum4", - AssetCode: "ETH", - AssetIssuer: testAccount1Address, -} +//var ethAssetPath = Path{ +// AssetType: "credit_alphanum4", +// AssetCode: "ETH", +// AssetIssuer: testAccount1Address, +//} var liquidityPoolAsset = xdr.TrustLineAsset{ Type: xdr.AssetTypeAssetTypePoolShare, @@ -237,11 +237,12 @@ var liquidityPoolAsset = xdr.TrustLineAsset{ } var nativeAsset = xdr.MustNewNativeAsset() -var nativeAssetPath = Path{ - AssetType: "native", -} -var nativeTrustLineAsset = xdr.MustNewNativeAsset().ToTrustLineAsset() +//var nativeAssetPath = Path{ +// AssetType: "native", +//} + +//var nativeTrustLineAsset = xdr.MustNewNativeAsset().ToTrustLineAsset() var genericClaimableBalance = xdr.ClaimableBalanceId{ Type: xdr.ClaimableBalanceIdTypeClaimableBalanceIdTypeV0, diff --git a/internal/transform/trade.go b/internal/transform/trade.go index 27dfc0da..06ac05f0 100644 --- a/internal/transform/trade.go +++ b/internal/transform/trade.go @@ -20,11 +20,11 @@ import ( func TransformTrade(operationIndex int32, operationID int64, transaction ingest.LedgerTransaction, ledgerCloseTime time.Time) ([]TradeOutput, error) { operationResults, ok := transaction.Result.OperationResults() if !ok { - return []TradeOutput{}, fmt.Errorf("Could not get any results from this transaction") + return []TradeOutput{}, fmt.Errorf("could not get any results from this transaction") } if !transaction.Result.Successful() { - return []TradeOutput{}, fmt.Errorf("Transaction failed; no trades") + return []TradeOutput{}, fmt.Errorf("transaction failed; no trades") } operation := transaction.Envelope.Operations()[operationIndex] @@ -50,7 +50,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest. outputSellingAmount := claimOffer.AmountSold() if outputSellingAmount < 0 { - return []TradeOutput{}, fmt.Errorf("Amount sold is negative (%d) for operation at index %d", outputSellingAmount, operationIndex) + return []TradeOutput{}, fmt.Errorf("amount sold is negative (%d) for operation at index %d", outputSellingAmount, operationIndex) } var outputBuyingAssetType, outputBuyingAssetCode, outputBuyingAssetIssuer string @@ -62,7 +62,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest. outputBuyingAmount := int64(claimOffer.AmountBought()) if outputBuyingAmount < 0 { - return []TradeOutput{}, fmt.Errorf("Amount bought is negative (%d) for operation at index %d", outputBuyingAmount, operationIndex) + return []TradeOutput{}, fmt.Errorf("amount bought is negative (%d) for operation at index %d", outputBuyingAmount, operationIndex) } if outputSellingAmount == 0 && outputBuyingAmount == 0 { @@ -87,7 +87,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest. tradeType = int32(2) var fee uint32 if fee, err = findPoolFee(transaction, operationIndex, id); err != nil { - return []TradeOutput{}, fmt.Errorf("Cannot parse fee for liquidity pool %v", liquidityPoolID) + return []TradeOutput{}, fmt.Errorf("cannot parse fee for liquidity pool %v", liquidityPoolID) } outputPoolFee = null.IntFrom(int64(fee)) @@ -156,25 +156,25 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest. 
func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex int32, operationType xdr.OperationType) (claimedOffers []xdr.ClaimAtom, BuyingOffer *xdr.OfferEntry, sellerIsExact null.Bool, err error) { if operationIndex >= int32(len(operationResults)) { - err = fmt.Errorf("Operation index of %d is out of bounds in result slice (len = %d)", operationIndex, len(operationResults)) + err = fmt.Errorf("operation index of %d is out of bounds in result slice (len = %d)", operationIndex, len(operationResults)) return } if operationResults[operationIndex].Tr == nil { - err = fmt.Errorf("Could not get result Tr for operation at index %d", operationIndex) + err = fmt.Errorf("could not get result Tr for operation at index %d", operationIndex) return } operationTr, ok := operationResults[operationIndex].GetTr() if !ok { - err = fmt.Errorf("Could not get result Tr for operation at index %d", operationIndex) + err = fmt.Errorf("could not get result Tr for operation at index %d", operationIndex) return } switch operationType { case xdr.OperationTypeManageBuyOffer: var buyOfferResult xdr.ManageBuyOfferResult if buyOfferResult, ok = operationTr.GetManageBuyOfferResult(); !ok { - err = fmt.Errorf("Could not get ManageBuyOfferResult for operation at index %d", operationIndex) + err = fmt.Errorf("could not get ManageBuyOfferResult for operation at index %d", operationIndex) return } if success, ok := buyOfferResult.GetSuccess(); ok { @@ -183,12 +183,12 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex return } - err = fmt.Errorf("Could not get ManageOfferSuccess for operation at index %d", operationIndex) + err = fmt.Errorf("could not get ManageOfferSuccess for operation at index %d", operationIndex) case xdr.OperationTypeManageSellOffer: var sellOfferResult xdr.ManageSellOfferResult if sellOfferResult, ok = operationTr.GetManageSellOfferResult(); !ok { - err = fmt.Errorf("Could not get ManageSellOfferResult for operation at index %d", operationIndex) + err = fmt.Errorf("could not get ManageSellOfferResult for operation at index %d", operationIndex) return } @@ -198,7 +198,7 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex return } - err = fmt.Errorf("Could not get ManageOfferSuccess for operation at index %d", operationIndex) + err = fmt.Errorf("could not get ManageOfferSuccess for operation at index %d", operationIndex) case xdr.OperationTypeCreatePassiveSellOffer: // KNOWN ISSUE: stellar-core creates results for CreatePassiveOffer operations @@ -219,7 +219,7 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex var pathSendResult xdr.PathPaymentStrictSendResult sellerIsExact = null.BoolFrom(false) if pathSendResult, ok = operationTr.GetPathPaymentStrictSendResult(); !ok { - err = fmt.Errorf("Could not get PathPaymentStrictSendResult for operation at index %d", operationIndex) + err = fmt.Errorf("could not get PathPaymentStrictSendResult for operation at index %d", operationIndex) return } @@ -229,13 +229,13 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex return } - err = fmt.Errorf("Could not get PathPaymentStrictSendSuccess for operation at index %d", operationIndex) + err = fmt.Errorf("could not get PathPaymentStrictSendSuccess for operation at index %d", operationIndex) case xdr.OperationTypePathPaymentStrictReceive: var pathReceiveResult xdr.PathPaymentStrictReceiveResult sellerIsExact = null.BoolFrom(true) if pathReceiveResult, ok = 
operationTr.GetPathPaymentStrictReceiveResult(); !ok { - err = fmt.Errorf("Could not get PathPaymentStrictReceiveResult for operation at index %d", operationIndex) + err = fmt.Errorf("could not get PathPaymentStrictReceiveResult for operation at index %d", operationIndex) return } @@ -244,10 +244,10 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex return } - err = fmt.Errorf("Could not get GetPathPaymentStrictReceiveSuccess for operation at index %d", operationIndex) + err = fmt.Errorf("could not get GetPathPaymentStrictReceiveSuccess for operation at index %d", operationIndex) default: - err = fmt.Errorf("Operation of type %s at index %d does not result in trades", operationType, operationIndex) + err = fmt.Errorf("operation of type %s at index %d does not result in trades", operationType, operationIndex) return } @@ -386,7 +386,7 @@ func roundingSlippage(t ingest.LedgerTransaction, operationIndex int32, trade xd } return null.IntFrom(int64(roundingSlippageBips)), nil default: - return null.Int{}, fmt.Errorf("Unexpected trade operation type: %v", op.Body.Type) + return null.Int{}, fmt.Errorf("unexpected trade operation type: %v", op.Body.Type) } } diff --git a/internal/transform/trade_test.go b/internal/transform/trade_test.go index 8e963366..a4a573f2 100644 --- a/internal/transform/trade_test.go +++ b/internal/transform/trade_test.go @@ -65,14 +65,14 @@ func TestTransformTrade(t *testing.T) { noTrEnvelope := genericManageBuyOfferEnvelope noTrInput.transaction.Envelope.V1 = &noTrEnvelope noTrInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{ - xdr.OperationResult{Tr: nil}, + {Tr: nil}, }, true) failedResultInput := genericInput failedResultEnvelope := genericManageBuyOfferEnvelope failedResultInput.transaction.Envelope.V1 = &failedResultEnvelope failedResultInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageBuyOffer, ManageBuyOfferResult: &xdr.ManageBuyOfferResult{ @@ -85,14 +85,14 @@ func TestTransformTrade(t *testing.T) { negBaseAmountEnvelope := genericManageBuyOfferEnvelope negBaseAmountInput.transaction.Envelope.V1 = &negBaseAmountEnvelope negBaseAmountInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageBuyOffer, ManageBuyOfferResult: &xdr.ManageBuyOfferResult{ Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess, Success: &xdr.ManageOfferSuccessResult{ OffersClaimed: []xdr.ClaimAtom{ - xdr.ClaimAtom{ + { Type: xdr.ClaimAtomTypeClaimAtomTypeOrderBook, OrderBook: &xdr.ClaimOfferAtom{ SellerId: genericAccountID, @@ -109,14 +109,14 @@ func TestTransformTrade(t *testing.T) { negCounterAmountEnvelope := genericManageBuyOfferEnvelope negCounterAmountInput.transaction.Envelope.V1 = &negCounterAmountEnvelope negCounterAmountInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageBuyOffer, ManageBuyOfferResult: &xdr.ManageBuyOfferResult{ Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess, Success: &xdr.ManageOfferSuccessResult{ OffersClaimed: []xdr.ClaimAtom{ - xdr.ClaimAtom{ + { Type: xdr.ClaimAtomTypeClaimAtomTypeOrderBook, OrderBook: &xdr.ClaimOfferAtom{ SellerId: genericAccountID, @@ -240,23 +240,21 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { } inputOperations := 
[]xdr.Operation{ - - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeManageSellOffer, ManageSellOfferOp: &xdr.ManageSellOfferOp{}, }, }, - - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeManageBuyOffer, ManageBuyOfferOp: &xdr.ManageBuyOfferOp{}, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictSend, @@ -265,7 +263,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.Operation{ + { SourceAccount: &testAccount3, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -274,21 +272,21 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.Operation{ + { SourceAccount: &testAccount3, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictSend, PathPaymentStrictSendOp: &xdr.PathPaymentStrictSendOp{}, }, }, - xdr.Operation{ + { SourceAccount: &testAccount3, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, PathPaymentStrictReceiveOp: &xdr.PathPaymentStrictReceiveOp{}, }, }, - xdr.Operation{ + { SourceAccount: nil, Body: xdr.OperationBody{ Type: xdr.OperationTypeCreatePassiveSellOffer, @@ -298,7 +296,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { } inputEnvelope.Tx.Operations = inputOperations results := []xdr.OperationResult{ - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageSellOffer, @@ -313,7 +311,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeManageBuyOffer, ManageBuyOfferResult: &xdr.ManageBuyOfferResult{ @@ -326,7 +324,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictSend, @@ -340,7 +338,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationResult{ + { Code: xdr.OperationResultCodeOpInner, Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -354,7 +352,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictSend, PathPaymentStrictSendResult: &xdr.PathPaymentStrictSendResult{ @@ -367,7 +365,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypePathPaymentStrictReceive, PathPaymentStrictReceiveResult: &xdr.PathPaymentStrictReceiveResult{ @@ -380,7 +378,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: xdr.OperationTypeCreatePassiveSellOffer, CreatePassiveSellOfferResult: &xdr.ManageSellOfferResult{ @@ -395,7 +393,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { unsafeMeta := xdr.TransactionMetaV1{ Operations: []xdr.OperationMeta{ - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -431,7 +429,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ 
xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -467,7 +465,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -535,7 +533,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -602,7 +600,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -656,7 +654,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{ + { Changes: xdr.LedgerEntryChanges{ xdr.LedgerEntryChange{ Type: xdr.LedgerEntryChangeTypeLedgerEntryState, @@ -710,7 +708,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) { }, }, }, - xdr.OperationMeta{}, + {}, }} inputTransaction.Result.Result.Result.Results = &results @@ -834,13 +832,13 @@ func makeTradeTestOutput() [][]TradeOutput { offerTwoOutputSecondPlace.SellerIsExact = null.BoolFrom(false) output := [][]TradeOutput{ - []TradeOutput{offerOneOutput}, - []TradeOutput{offerTwoOutput}, - []TradeOutput{onePriceIsAmount, offerTwoOutputSecondPlace}, - []TradeOutput{twoPriceIsAmount, offerOneOutputSecondPlace}, - []TradeOutput{lPOneOutput}, - []TradeOutput{lPTwoOutput}, - []TradeOutput{}, + {offerOneOutput}, + {offerTwoOutput}, + {onePriceIsAmount, offerTwoOutputSecondPlace}, + {twoPriceIsAmount, offerOneOutputSecondPlace}, + {lPOneOutput}, + {lPTwoOutput}, + {}, } return output } diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go index 9d105bf4..f4eb255d 100644 --- a/internal/transform/transaction.go +++ b/internal/transform/transaction.go @@ -33,17 +33,14 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe outputAccountSequence := transaction.Envelope.SeqNum() if outputAccountSequence < 0 { - return TransactionOutput{}, fmt.Errorf("The account's sequence number (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputAccountSequence, outputLedgerSequence, transactionIndex, outputTransactionID) + return TransactionOutput{}, fmt.Errorf("the account's sequence number (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputAccountSequence, outputLedgerSequence, transactionIndex, outputTransactionID) } outputMaxFee := transaction.Envelope.Fee() - if outputMaxFee < 0 { - return TransactionOutput{}, fmt.Errorf("The fee (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputMaxFee, outputLedgerSequence, transactionIndex, outputTransactionID) - } outputFeeCharged := int64(transaction.Result.Result.FeeCharged) if outputFeeCharged < 0 { - return TransactionOutput{}, fmt.Errorf("The fee charged (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputFeeCharged, outputLedgerSequence, transactionIndex, outputTransactionID) + return TransactionOutput{}, fmt.Errorf("the fee charged (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputFeeCharged, outputLedgerSequence, transactionIndex, outputTransactionID) } outputOperationCount := int32(len(transaction.Envelope.Operations())) @@ -94,7 +91,7 @@ func TransformTransaction(transaction 
ingest.LedgerTransaction, lhe xdr.LedgerHe if timeBound != nil { if timeBound.MaxTime < timeBound.MinTime && timeBound.MaxTime != 0 { - return TransactionOutput{}, fmt.Errorf("The max time is earlier than the min time (%d < %d) for ledger %d; transaction %d (transaction id=%d)", + return TransactionOutput{}, fmt.Errorf("the max time is earlier than the min time (%d < %d) for ledger %d; transaction %d (transaction id=%d)", timeBound.MaxTime, timeBound.MinTime, outputLedgerSequence, transactionIndex, outputTransactionID) } @@ -143,6 +140,9 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe var outputInclusionFeeBid int64 var outputInclusionFeeCharged int64 var outputResourceFeeRefund int64 + var outputTotalNonRefundableResourceFeeCharged int64 + var outputTotalRefundableResourceFeeCharged int64 + var outputRentFeeCharged int64 // Soroban data can exist in V1 and FeeBump transactionEnvelopes switch transaction.Envelope.Type { @@ -167,6 +167,12 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe if ok { accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(meta.TxChangesAfter, sourceAccount.Address()) outputResourceFeeRefund = accountBalanceEnd - accountBalanceStart + extV1, ok := meta.SorobanMeta.Ext.GetV1() + if ok { + outputTotalNonRefundableResourceFeeCharged = int64(extV1.TotalNonRefundableResourceFeeCharged) + outputTotalRefundableResourceFeeCharged = int64(extV1.TotalRefundableResourceFeeCharged) + outputRentFeeCharged = int64(extV1.RentFeeCharged) + } } // TODO: FeeCharged is calculated incorrectly in protocol 20. Remove when protocol is updated and the bug is fixed @@ -182,37 +188,40 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe outputSuccessful := transaction.Result.Successful() transformedTransaction := TransactionOutput{ - TransactionHash: outputTransactionHash, - LedgerSequence: outputLedgerSequence, - TransactionID: outputTransactionID, - Account: outputAccount, - AccountSequence: outputAccountSequence, - MaxFee: outputMaxFee, - FeeCharged: outputFeeCharged, - OperationCount: outputOperationCount, - TxEnvelope: outputTxEnvelope, - TxResult: outputTxResult, - TxMeta: outputTxMeta, - TxFeeMeta: outputTxFeeMeta, - CreatedAt: outputCreatedAt, - MemoType: outputMemoType, - Memo: outputMemoContents, - TimeBounds: outputTimeBounds, - Successful: outputSuccessful, - LedgerBounds: outputLedgerBound, - MinAccountSequence: outputMinSequence, - MinAccountSequenceAge: outputMinSequenceAge, - MinAccountSequenceLedgerGap: outputMinSequenceLedgerGap, - ExtraSigners: formatSigners(transaction.Envelope.ExtraSigners()), - ClosedAt: outputCloseTime, - ResourceFee: outputResourceFee, - SorobanResourcesInstructions: outputSorobanResourcesInstructions, - SorobanResourcesReadBytes: outputSorobanResourcesReadBytes, - SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes, - TransactionResultCode: outputTxResultCode, - InclusionFeeBid: outputInclusionFeeBid, - InclusionFeeCharged: outputInclusionFeeCharged, - ResourceFeeRefund: outputResourceFeeRefund, + TransactionHash: outputTransactionHash, + LedgerSequence: outputLedgerSequence, + TransactionID: outputTransactionID, + Account: outputAccount, + AccountSequence: outputAccountSequence, + MaxFee: outputMaxFee, + FeeCharged: outputFeeCharged, + OperationCount: outputOperationCount, + TxEnvelope: outputTxEnvelope, + TxResult: outputTxResult, + TxMeta: outputTxMeta, + TxFeeMeta: outputTxFeeMeta, + CreatedAt: 
outputCreatedAt, + MemoType: outputMemoType, + Memo: outputMemoContents, + TimeBounds: outputTimeBounds, + Successful: outputSuccessful, + LedgerBounds: outputLedgerBound, + MinAccountSequence: outputMinSequence, + MinAccountSequenceAge: outputMinSequenceAge, + MinAccountSequenceLedgerGap: outputMinSequenceLedgerGap, + ExtraSigners: formatSigners(transaction.Envelope.ExtraSigners()), + ClosedAt: outputCloseTime, + ResourceFee: outputResourceFee, + SorobanResourcesInstructions: outputSorobanResourcesInstructions, + SorobanResourcesReadBytes: outputSorobanResourcesReadBytes, + SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes, + TransactionResultCode: outputTxResultCode, + InclusionFeeBid: outputInclusionFeeBid, + InclusionFeeCharged: outputInclusionFeeCharged, + ResourceFeeRefund: outputResourceFeeRefund, + TotalNonRefundableResourceFeeCharged: outputTotalNonRefundableResourceFeeCharged, + TotalRefundableResourceFeeCharged: outputTotalRefundableResourceFeeCharged, + RentFeeCharged: outputRentFeeCharged, } // Add Muxed Account Details, if exists diff --git a/internal/transform/transaction_test.go b/internal/transform/transaction_test.go index 9ca8708b..875791ac 100644 --- a/internal/transform/transaction_test.go +++ b/internal/transform/transaction_test.go @@ -47,7 +47,7 @@ func TestTransformTransaction(t *testing.T) { assert.NoError(t, err) tests := []transformTest{ - transformTest{ + { negativeSeqInput, TransactionOutput{}, fmt.Errorf("The account's sequence number (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"), @@ -82,7 +82,7 @@ func TestTransformTransaction(t *testing.T) { func makeTransactionTestOutput() (output []TransactionOutput, err error) { correctTime, err := time.Parse("2006-1-2 15:04:05 MST", "2020-07-09 05:28:42 UTC") output = []TransactionOutput{ - TransactionOutput{ + { TxEnvelope: "AAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAABX5ABjydzAABBtwAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", TxResult: "AAAAAAAAASz/////AAAAAQAAAAAAAAAAAAAAAAAAAAA=", TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -107,7 +107,7 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) { SorobanResourcesWriteBytes: 0, TransactionResultCode: "TransactionResultCodeTxFailed", }, - TransactionOutput{ + { TxEnvelope: 
"AAAABQAAAABnzACGTDuJFoxqr+C8NHCe0CHFBXLi+YhhNCIILCIpcgAAAAAAABwgAAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAAAAAACFPY2AAAAfQAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==", TxResult: "AAAAAAAAASwAAAABqH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAAAZAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAA=", TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -135,7 +135,7 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) { SorobanResourcesWriteBytes: 0, TransactionResultCode: "TransactionResultCodeTxFeeBumpInnerSuccess", //inner fee bump success }, - TransactionOutput{ + { TxEnvelope: "AAAAAgAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAGQBpLyvsiV6gwAAAAIAAAABAAAAAAAAAAAAAAAAXwardAAAAAEAAAAFAAAACgAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAMCAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", TxResult: "AAAAAAAAAGT////5AAAAAA==", TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA", @@ -171,7 +171,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR" hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb}) genericResultResults := &[]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ Type: 
xdr.OperationTypeCreateAccount, CreateAccountResult: &xdr.CreateAccountResult{ @@ -198,7 +198,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history Ed25519: source.Ed25519, } transaction = []ingest.LedgerTransaction{ - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -220,7 +220,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount2, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -244,7 +244,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, }, }, - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -271,7 +271,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount2, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -300,7 +300,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history Result: xdr.InnerTransactionResultResult{ Code: xdr.TransactionResultCodeTxSuccess, Results: &[]xdr.OperationResult{ - xdr.OperationResult{ + { Tr: &xdr.OperationResultTr{ CreateAccountResult: &xdr.CreateAccountResult{}, }, @@ -309,14 +309,12 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, }, }, - Results: &[]xdr.OperationResult{ - xdr.OperationResult{}, - }, + Results: &[]xdr.OperationResult{{}}, }, }, }, }, - ingest.LedgerTransaction{ + { Index: 1, UnsafeMeta: hardCodedMeta, Envelope: xdr.TransactionEnvelope{ @@ -345,7 +343,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, }, Operations: []xdr.Operation{ - xdr.Operation{ + { SourceAccount: &testAccount4, Body: xdr.OperationBody{ Type: xdr.OperationTypePathPaymentStrictReceive, @@ -371,19 +369,19 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history }, } historyHeader = []xdr.LedgerHeaderHistoryEntry{ - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521816, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, }, }, - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521817, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, }, }, - xdr.LedgerHeaderHistoryEntry{ + { Header: xdr.LedgerHeader{ LedgerSeq: 30521818, ScpValue: xdr.StellarValue{CloseTime: 1594272522}, diff --git a/internal/transform/trustline.go b/internal/transform/trustline.go index 3099f306..dd0f3c26 100644 --- a/internal/transform/trustline.go +++ b/internal/transform/trustline.go @@ -22,7 +22,7 @@ func TransformTrustline(ledgerChange ingest.Change, header xdr.LedgerHeaderHisto trustEntry, ok := ledgerEntry.Data.GetTrustLine() if !ok { - return TrustlineOutput{}, fmt.Errorf("Could not extract trustline data from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return TrustlineOutput{}, fmt.Errorf("could not extract trustline data from ledger entry; actual type is %s", ledgerEntry.Data.Type) } outputAccountID, err := trustEntry.AccountId.GetAddress() @@ -86,12 +86,12 @@ func trustLineEntryToLedgerKeyString(trustLine xdr.TrustLineEntry) (string, erro ledgerKey := &xdr.LedgerKey{} err := ledgerKey.SetTrustline(trustLine.AccountId, trustLine.Asset) if err != nil { - return "", fmt.Errorf("Error running ledgerKey.SetTrustline when calculating ledger 
key") + return "", fmt.Errorf("error running ledgerKey.SetTrustline when calculating ledger key") } key, err := ledgerKey.MarshalBinary() if err != nil { - return "", fmt.Errorf("Error running MarshalBinaryCompress when calculating ledger key") + return "", fmt.Errorf("error running MarshalBinaryCompress when calculating ledger key") } return base64.StdEncoding.EncodeToString(key), nil diff --git a/internal/transform/ttl.go b/internal/transform/ttl.go index cb9218e1..c0e6fe9c 100644 --- a/internal/transform/ttl.go +++ b/internal/transform/ttl.go @@ -17,7 +17,7 @@ func TransformTtl(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEntr ttl, ok := ledgerEntry.Data.GetTtl() if !ok { - return TtlOutput{}, fmt.Errorf("Could not extract ttl from ledger entry; actual type is %s", ledgerEntry.Data.Type) + return TtlOutput{}, fmt.Errorf("could not extract ttl from ledger entry; actual type is %s", ledgerEntry.Data.Type) } // LedgerEntryChange must contain a ttl change to be parsed, otherwise skip diff --git a/internal/utils/main.go b/internal/utils/main.go index a91e6ef2..c1259a50 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -859,3 +859,8 @@ func AccountSignersChanged(c ingest.Change) bool { return false } + +type HistoryArchiveLedgerAndLCM struct { + Ledger historyarchive.Ledger + LCM xdr.LedgerCloseMeta +} From 574b855c801fff098f17e7a41a44675dd70143d3 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 14:34:13 -0400 Subject: [PATCH 27/49] Fix broken tests --- internal/transform/account_test.go | 10 +++++----- internal/transform/contract_code_test.go | 2 +- internal/transform/contract_data_test.go | 2 +- internal/transform/effects_test.go | 2 +- internal/transform/offer_test.go | 12 ++++++------ internal/transform/operation_test.go | 2 +- internal/transform/trade_test.go | 14 +++++++------- internal/transform/transaction_test.go | 6 +++--- internal/transform/trustline_test.go | 2 +- internal/transform/ttl_test.go | 2 +- 10 files changed, 27 insertions(+), 27 deletions(-) diff --git a/internal/transform/account_test.go b/internal/transform/account_test.go index c99b66fc..af050c03 100644 --- a/internal/transform/account_test.go +++ b/internal/transform/account_test.go @@ -38,7 +38,7 @@ func TestTransformAccount(t *testing.T) { }, }, }, - AccountOutput{}, fmt.Errorf("Could not extract account data from ledger entry; actual type is LedgerEntryTypeOffer"), + AccountOutput{}, fmt.Errorf("could not extract account data from ledger entry; actual type is LedgerEntryTypeOffer"), }, { inputStruct{wrapAccountEntry(xdr.AccountEntry{ @@ -46,7 +46,7 @@ func TestTransformAccount(t *testing.T) { Balance: -1, }, 0), }, - AccountOutput{}, fmt.Errorf("Balance is negative (-1) for account: %s", genericAccountAddress), + AccountOutput{}, fmt.Errorf("balance is negative (-1) for account: %s", genericAccountAddress), }, { inputStruct{wrapAccountEntry(xdr.AccountEntry{ @@ -61,7 +61,7 @@ func TestTransformAccount(t *testing.T) { }, }, 0), }, - AccountOutput{}, fmt.Errorf("The buying liabilities count is negative (-1) for account: %s", genericAccountAddress), + AccountOutput{}, fmt.Errorf("the buying liabilities count is negative (-1) for account: %s", genericAccountAddress), }, { inputStruct{wrapAccountEntry(xdr.AccountEntry{ @@ -76,7 +76,7 @@ func TestTransformAccount(t *testing.T) { }, }, 0), }, - AccountOutput{}, fmt.Errorf("The selling liabilities count is negative (-2) for account: %s", genericAccountAddress), + AccountOutput{}, fmt.Errorf("the selling liabilities count is 
negative (-2) for account: %s", genericAccountAddress), }, { inputStruct{wrapAccountEntry(xdr.AccountEntry{ @@ -84,7 +84,7 @@ func TestTransformAccount(t *testing.T) { SeqNum: -3, }, 0), }, - AccountOutput{}, fmt.Errorf("Account sequence number is negative (-3) for account: %s", genericAccountAddress), + AccountOutput{}, fmt.Errorf("account sequence number is negative (-3) for account: %s", genericAccountAddress), }, { inputStruct{ diff --git a/internal/transform/contract_code_test.go b/internal/transform/contract_code_test.go index e55e13ae..cc710081 100644 --- a/internal/transform/contract_code_test.go +++ b/internal/transform/contract_code_test.go @@ -31,7 +31,7 @@ func TestTransformContractCode(t *testing.T) { }, }, }, - ContractCodeOutput{}, fmt.Errorf("Could not extract contract code from ledger entry; actual type is LedgerEntryTypeOffer"), + ContractCodeOutput{}, fmt.Errorf("could not extract contract code from ledger entry; actual type is LedgerEntryTypeOffer"), }, } diff --git a/internal/transform/contract_data_test.go b/internal/transform/contract_data_test.go index d09ab588..2777fae9 100644 --- a/internal/transform/contract_data_test.go +++ b/internal/transform/contract_data_test.go @@ -34,7 +34,7 @@ func TestTransformContractData(t *testing.T) { }, }, "unit test", - ContractDataOutput{}, fmt.Errorf("Could not extract contract data from ledger entry; actual type is LedgerEntryTypeOffer"), + ContractDataOutput{}, fmt.Errorf("could not extract contract data from ledger entry; actual type is LedgerEntryTypeOffer"), }, } diff --git a/internal/transform/effects_test.go b/internal/transform/effects_test.go index df5e75e1..a6596dfd 100644 --- a/internal/transform/effects_test.go +++ b/internal/transform/effects_test.go @@ -78,7 +78,7 @@ func TestEffectsCoversAllOperationTypes(t *testing.T) { } // calling effects should error due to the unknown operation _, err := operation.effects() - assert.Contains(t, err.Error(), "Unknown operation type") + assert.Contains(t, err.Error(), "unknown operation type") } func TestOperationEffects(t *testing.T) { diff --git a/internal/transform/offer_test.go b/internal/transform/offer_test.go index 303693eb..8279d620 100644 --- a/internal/transform/offer_test.go +++ b/internal/transform/offer_test.go @@ -37,7 +37,7 @@ func TestTransformOffer(t *testing.T) { }, }, }, - OfferOutput{}, fmt.Errorf("Could not extract offer data from ledger entry; actual type is LedgerEntryTypeAccount"), + OfferOutput{}, fmt.Errorf("could not extract offer data from ledger entry; actual type is LedgerEntryTypeAccount"), }, { inputStruct{wrapOfferEntry(xdr.OfferEntry{ @@ -45,7 +45,7 @@ func TestTransformOffer(t *testing.T) { OfferId: -1, }, 0), }, - OfferOutput{}, fmt.Errorf("OfferID is negative (-1) for offer from account: %s", genericAccountAddress), + OfferOutput{}, fmt.Errorf("offerID is negative (-1) for offer from account: %s", genericAccountAddress), }, { inputStruct{wrapOfferEntry(xdr.OfferEntry{ @@ -53,7 +53,7 @@ func TestTransformOffer(t *testing.T) { Amount: -2, }, 0), }, - OfferOutput{}, fmt.Errorf("Amount is negative (-2) for offer 0"), + OfferOutput{}, fmt.Errorf("amount is negative (-2) for offer 0"), }, { inputStruct{wrapOfferEntry(xdr.OfferEntry{ @@ -64,7 +64,7 @@ func TestTransformOffer(t *testing.T) { }, }, 0), }, - OfferOutput{}, fmt.Errorf("Price numerator is negative (-3) for offer 0"), + OfferOutput{}, fmt.Errorf("price numerator is negative (-3) for offer 0"), }, { inputStruct{wrapOfferEntry(xdr.OfferEntry{ @@ -75,7 +75,7 @@ func TestTransformOffer(t 
*testing.T) { }, }, 0), }, - OfferOutput{}, fmt.Errorf("Price denominator is negative (-4) for offer 0"), + OfferOutput{}, fmt.Errorf("price denominator is negative (-4) for offer 0"), }, { inputStruct{wrapOfferEntry(xdr.OfferEntry{ @@ -86,7 +86,7 @@ func TestTransformOffer(t *testing.T) { }, }, 0), }, - OfferOutput{}, fmt.Errorf("Price denominator is 0 for offer 0"), + OfferOutput{}, fmt.Errorf("price denominator is 0 for offer 0"), }, { inputStruct{ diff --git a/internal/transform/operation_test.go b/internal/transform/operation_test.go index c2d2c433..97959107 100644 --- a/internal/transform/operation_test.go +++ b/internal/transform/operation_test.go @@ -50,7 +50,7 @@ func TestTransformOperation(t *testing.T) { { unknownOpTypeInput, OperationOutput{}, - fmt.Errorf("Unknown operation type: "), + fmt.Errorf("unknown operation type: "), }, } hardCodedInputTransaction, err := makeOperationTestInput() diff --git a/internal/transform/trade_test.go b/internal/transform/trade_test.go index a4a573f2..8cc46241 100644 --- a/internal/transform/trade_test.go +++ b/internal/transform/trade_test.go @@ -132,31 +132,31 @@ func TestTransformTrade(t *testing.T) { tests := []transformTest{ { wrongTypeInput, - []TradeOutput{}, fmt.Errorf("Operation of type OperationTypeBumpSequence at index 0 does not result in trades"), + []TradeOutput{}, fmt.Errorf("operation of type OperationTypeBumpSequence at index 0 does not result in trades"), }, { resultOutOfRangeInput, - []TradeOutput{}, fmt.Errorf("Operation index of 0 is out of bounds in result slice (len = 0)"), + []TradeOutput{}, fmt.Errorf("operation index of 0 is out of bounds in result slice (len = 0)"), }, { failedTxInput, - []TradeOutput{}, fmt.Errorf("Transaction failed; no trades"), + []TradeOutput{}, fmt.Errorf("transaction failed; no trades"), }, { noTrInput, - []TradeOutput{}, fmt.Errorf("Could not get result Tr for operation at index 0"), + []TradeOutput{}, fmt.Errorf("could not get result Tr for operation at index 0"), }, { failedResultInput, - []TradeOutput{}, fmt.Errorf("Could not get ManageOfferSuccess for operation at index 0"), + []TradeOutput{}, fmt.Errorf("could not get ManageOfferSuccess for operation at index 0"), }, { negBaseAmountInput, - []TradeOutput{}, fmt.Errorf("Amount sold is negative (-1) for operation at index 0"), + []TradeOutput{}, fmt.Errorf("amount sold is negative (-1) for operation at index 0"), }, { negCounterAmountInput, - []TradeOutput{}, fmt.Errorf("Amount bought is negative (-2) for operation at index 0"), + []TradeOutput{}, fmt.Errorf("amount bought is negative (-2) for operation at index 0"), }, } diff --git a/internal/transform/transaction_test.go b/internal/transform/transaction_test.go index 875791ac..fd1d8b47 100644 --- a/internal/transform/transaction_test.go +++ b/internal/transform/transaction_test.go @@ -50,17 +50,17 @@ func TestTransformTransaction(t *testing.T) { { negativeSeqInput, TransactionOutput{}, - fmt.Errorf("The account's sequence number (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"), + fmt.Errorf("the account's sequence number (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"), }, { badFeeChargedInput, TransactionOutput{}, - fmt.Errorf("The fee charged (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"), + fmt.Errorf("the fee charged (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"), }, { badTimeboundInput, TransactionOutput{}, - fmt.Errorf("The max time is earlier than the min time (100 < 1594586912) for ledger 0; 
transaction 1 (transaction id=4096)"), + fmt.Errorf("the max time is earlier than the min time (100 < 1594586912) for ledger 0; transaction 1 (transaction id=4096)"), }, } diff --git a/internal/transform/trustline_test.go b/internal/transform/trustline_test.go index 24efa2f5..437fd7dc 100644 --- a/internal/transform/trustline_test.go +++ b/internal/transform/trustline_test.go @@ -37,7 +37,7 @@ func TestTransformTrustline(t *testing.T) { }, }, }, - TrustlineOutput{}, fmt.Errorf("Could not extract trustline data from ledger entry; actual type is LedgerEntryTypeOffer"), + TrustlineOutput{}, fmt.Errorf("could not extract trustline data from ledger entry; actual type is LedgerEntryTypeOffer"), }, } diff --git a/internal/transform/ttl_test.go b/internal/transform/ttl_test.go index 4d49a54b..8f14e089 100644 --- a/internal/transform/ttl_test.go +++ b/internal/transform/ttl_test.go @@ -31,7 +31,7 @@ func TestTransformTtl(t *testing.T) { }, }, }, - TtlOutput{}, fmt.Errorf("Could not extract ttl from ledger entry; actual type is LedgerEntryTypeOffer"), + TtlOutput{}, fmt.Errorf("could not extract ttl from ledger entry; actual type is LedgerEntryTypeOffer"), }, } From b7ffb3ca7c0ed8e282891c8841c818f0060a7a38 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 16:08:46 -0400 Subject: [PATCH 28/49] Update core image --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index d85a126c..76842e8e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,7 +11,7 @@ COPY . . RUN go build -v -o /usr/local/bin ./... # stage 2: runtime enviroment -FROM stellar/stellar-core:20.2.0-1716.rc3.34d82fc00.focal +FROM stellar/unsafe-stellar-core:21.0.0-1812.rc1.a10329cca.focal WORKDIR /etl From 83d77734ab01b7e1698804a498b7d2ce6b7736dd Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 16:11:34 -0400 Subject: [PATCH 29/49] Revert "Update core image" This reverts commit b7ffb3ca7c0ed8e282891c8841c818f0060a7a38. --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 76842e8e..d85a126c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,7 +11,7 @@ COPY . . RUN go build -v -o /usr/local/bin ./... # stage 2: runtime enviroment -FROM stellar/unsafe-stellar-core:21.0.0-1812.rc1.a10329cca.focal +FROM stellar/stellar-core:20.2.0-1716.rc3.34d82fc00.focal WORKDIR /etl From e4fc0b182266abb2ea30a91994cd21ad405d0047 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 16:12:57 -0400 Subject: [PATCH 30/49] Revert "Revert "Update core image"" This reverts commit 83d77734ab01b7e1698804a498b7d2ce6b7736dd. --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index d85a126c..76842e8e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,7 +11,7 @@ COPY . . RUN go build -v -o /usr/local/bin ./... 
# stage 2: runtime enviroment -FROM stellar/stellar-core:20.2.0-1716.rc3.34d82fc00.focal +FROM stellar/unsafe-stellar-core:21.0.0-1812.rc1.a10329cca.focal WORKDIR /etl From d43e728fe285fd51bd2cd30fb18e5c6634bff6a6 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 16:58:41 -0400 Subject: [PATCH 31/49] Use getextv1 --- internal/transform/ledger.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/internal/transform/ledger.go b/internal/transform/ledger.go index 924a0d89..9324e21e 100644 --- a/internal/transform/ledger.go +++ b/internal/transform/ledger.go @@ -58,7 +58,10 @@ func TransformLedger(inputLedger historyarchive.Ledger, lcm xdr.LedgerCloseMeta) var outputSorobanFeeWrite1Kb int64 lcmV1, ok := lcm.GetV1() if ok { - outputSorobanFeeWrite1Kb = int64(lcmV1.Ext.V1.SorobanFeeWrite1Kb) + extV1, ok := lcmV1.Ext.GetV1() + if ok { + outputSorobanFeeWrite1Kb = int64(extV1.SorobanFeeWrite1Kb) + } } transformedLedger := LedgerOutput{ From 064de898775e314e449e1732cb9b57b7904af9d5 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Mon, 29 Apr 2024 23:54:09 -0400 Subject: [PATCH 32/49] Fixes from comments --- docker/stellar-core.cfg | 2 + docker/stellar-core_futurenet.cfg | 2 + docker/stellar-core_testnet.cfg | 2 + internal/transform/contract_data.go | 22 ++----- internal/transform/effects_test.go | 13 ----- internal/transform/ledger_test.go | 3 +- internal/transform/liquidity_pool_test.go | 13 ----- internal/transform/operation.go | 22 ------- internal/transform/test_variables_test.go | 70 ----------------------- 9 files changed, 11 insertions(+), 138 deletions(-) diff --git a/docker/stellar-core.cfg b/docker/stellar-core.cfg index 753d16c7..b7c710d8 100644 --- a/docker/stellar-core.cfg +++ b/docker/stellar-core.cfg @@ -3,6 +3,8 @@ # for how to properly configure your environment ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true +EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true +EMIT_LEDGER_CLOSE_META_EXT_V1=true #FAILURE_SAFETY is minimum number of nodes that are allowed to fail before you no longer have quorum FAILURE_SAFETY=1 diff --git a/docker/stellar-core_futurenet.cfg b/docker/stellar-core_futurenet.cfg index 8ab94fb0..832c75c4 100644 --- a/docker/stellar-core_futurenet.cfg +++ b/docker/stellar-core_futurenet.cfg @@ -7,6 +7,8 @@ PUBLIC_HTTP_PORT=false NETWORK_PASSPHRASE="Test SDF Future Network ; October 2022" ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true +EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true +EMIT_LEDGER_CLOSE_META_EXT_V1=true # DATABASE="sqlite3://stellar.db" PEER_PORT=11725 diff --git a/docker/stellar-core_testnet.cfg b/docker/stellar-core_testnet.cfg index 94502b2c..2d8a0426 100644 --- a/docker/stellar-core_testnet.cfg +++ b/docker/stellar-core_testnet.cfg @@ -6,6 +6,8 @@ NETWORK_PASSPHRASE="Test SDF Network ; September 2015" # DATABASE="sqlite3://stellar.db" ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true +EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true +EMIT_LEDGER_CLOSE_META_EXT_V1=true # Stellar Testnet validators [[HOME_DOMAINS]] diff --git a/internal/transform/contract_data.go b/internal/transform/contract_data.go index 75c248f4..614c8655 100644 --- a/internal/transform/contract_data.go +++ b/internal/transform/contract_data.go @@ -10,28 +10,14 @@ import ( "github.com/stellar/stellar-etl/internal/utils" ) -//const ( -// scDecimalPrecision = 7 -//) - var ( - // https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/public_types.rs#L22 - //nativeAssetSym = xdr.ScSymbol("Native") // these are storage DataKey enum // 
https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/storage_types.rs#L23 balanceMetadataSym = xdr.ScSymbol("Balance") - //metadataSym = xdr.ScSymbol("METADATA") - //metadataNameSym = xdr.ScSymbol("name") - //metadataSymbolSym = xdr.ScSymbol("symbol") - //adminSym = xdr.ScSymbol("Admin") - issuerSym = xdr.ScSymbol("issuer") - assetCodeSym = xdr.ScSymbol("asset_code") - //alphaNum4Sym = xdr.ScSymbol("AlphaNum4") - //alphaNum12Sym = xdr.ScSymbol("AlphaNum12") - //decimalSym = xdr.ScSymbol("decimal") - assetInfoSym = xdr.ScSymbol("AssetInfo") - //decimalVal = xdr.Uint32(scDecimalPrecision) - assetInfoVec = &xdr.ScVec{ + issuerSym = xdr.ScSymbol("issuer") + assetCodeSym = xdr.ScSymbol("asset_code") + assetInfoSym = xdr.ScSymbol("AssetInfo") + assetInfoVec = &xdr.ScVec{ xdr.ScVal{ Type: xdr.ScValTypeScvSymbol, Sym: &assetInfoSym, diff --git a/internal/transform/effects_test.go b/internal/transform/effects_test.go index a6596dfd..09fd2722 100644 --- a/internal/transform/effects_test.go +++ b/internal/transform/effects_test.go @@ -3327,29 +3327,16 @@ func getRevokeSponsorshipMeta(t *testing.T) (string, []EffectOutput) { type ClaimClaimableBalanceEffectsTestSuite struct { suite.Suite - //ops []xdr.Operation - //tx ingest.LedgerTransaction } type CreateClaimableBalanceEffectsTestSuite struct { suite.Suite - //ops []xdr.Operation - //tx ingest.LedgerTransaction } const ( networkPassphrase = "Arbitrary Testing Passphrase" ) -//type effect struct { -// address string -// addressMuxed null.String -// operationID int64 -// details map[string]interface{} -// effectType EffectType -// order uint32 -//} - func TestInvokeHostFunctionEffects(t *testing.T) { randAddr := func() string { return keypair.MustRandom().Address() diff --git a/internal/transform/ledger_test.go b/internal/transform/ledger_test.go index 962175eb..0e58e2e8 100644 --- a/internal/transform/ledger_test.go +++ b/internal/transform/ledger_test.go @@ -114,8 +114,7 @@ func makeLedgerTestOutput() (output LedgerOutput, err error) { SuccessfulTransactionCount: 1, FailedTransactionCount: 1, TxSetOperationCount: "13", - - SorobanFeeWrite1Kb: 1234, + SorobanFeeWrite1Kb: 1234, } return } diff --git a/internal/transform/liquidity_pool_test.go b/internal/transform/liquidity_pool_test.go index 829aa7bb..af97587a 100644 --- a/internal/transform/liquidity_pool_test.go +++ b/internal/transform/liquidity_pool_test.go @@ -61,19 +61,6 @@ func TestTransformPool(t *testing.T) { } } -//func wrapPoolEntry(poolEntry xdr.LiquidityPoolEntry, lastModified int) ingest.Change { -// return ingest.Change{ -// Type: xdr.LedgerEntryTypeLiquidityPool, -// Pre: &xdr.LedgerEntry{ -// LastModifiedLedgerSeq: xdr.Uint32(lastModified), -// Data: xdr.LedgerEntryData{ -// Type: xdr.LedgerEntryTypeLiquidityPool, -// LiquidityPool: &poolEntry, -// }, -// }, -// } -//} - func makePoolTestInput() ingest.Change { ledgerEntry := xdr.LedgerEntry{ LastModifiedLedgerSeq: 30705278, diff --git a/internal/transform/operation.go b/internal/transform/operation.go index 91e4e8de..b3a79def 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -2161,25 +2161,3 @@ func dedupeParticipants(in []xdr.AccountId) (out []xdr.AccountId) { } return } - -//// OperationsParticipants returns a map with all participants per operation -//func operationsParticipants(transaction ingest.LedgerTransaction, sequence uint32) (map[int64][]xdr.AccountId, error) { -// participants := map[int64][]xdr.AccountId{} -// -// for opi, op := range 
transaction.Envelope.Operations() { -// operation := transactionOperationWrapper{ -// index: uint32(opi), -// transaction: transaction, -// operation: op, -// ledgerSequence: sequence, -// } -// -// p, err := operation.Participants() -// if err != nil { -// return participants, errors.Wrapf(err, "reading operation %v participants", operation.ID()) -// } -// participants[operation.ID()] = p -// } -// -// return participants, nil -//} diff --git a/internal/transform/test_variables_test.go b/internal/transform/test_variables_test.go index be582641..e007aa19 100644 --- a/internal/transform/test_variables_test.go +++ b/internal/transform/test_variables_test.go @@ -95,64 +95,6 @@ var testAccount4Address = "GBVVRXLMNCJQW3IDDXC3X6XCH35B5Q7QXNMMFPENSOGUPQO7WO7HG var testAccount4ID, _ = xdr.AddressToAccountId(testAccount4Address) var testAccount4 = testAccount4ID.ToMuxedAccount() -//// a selection of hardcoded Liquidity Pools -//var lpDepositChanges = []xdr.OperationMeta{ -// { -// Changes: xdr.LedgerEntryChanges{ -// xdr.LedgerEntryChange{ -// Type: xdr.LedgerEntryChangeTypeLedgerEntryState, -// State: &xdr.LedgerEntry{ -// Data: xdr.LedgerEntryData{ -// Type: xdr.LedgerEntryTypeLiquidityPool, -// LiquidityPool: &xdr.LiquidityPoolEntry{ -// LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, -// Body: xdr.LiquidityPoolEntryBody{ -// Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, -// ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ -// Params: xdr.LiquidityPoolConstantProductParameters{ -// AssetA: lpAssetA, -// AssetB: lpAssetB, -// Fee: 30, -// }, -// ReserveA: 100000, -// ReserveB: 1000, -// TotalPoolShares: 500, -// PoolSharesTrustLineCount: 25, -// }, -// }, -// }, -// }, -// }, -// }, -// xdr.LedgerEntryChange{ -// Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated, -// Updated: &xdr.LedgerEntry{ -// Data: xdr.LedgerEntryData{ -// Type: xdr.LedgerEntryTypeLiquidityPool, -// LiquidityPool: &xdr.LiquidityPoolEntry{ -// LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9}, -// Body: xdr.LiquidityPoolEntryBody{ -// Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct, -// ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{ -// Params: xdr.LiquidityPoolConstantProductParameters{ -// AssetA: lpAssetA, -// AssetB: lpAssetB, -// Fee: 30, -// }, -// ReserveA: 101000, -// ReserveB: 1100, -// TotalPoolShares: 502, -// PoolSharesTrustLineCount: 26, -// }, -// }, -// }, -// }, -// }, -// }, -// }, -// }, -//} - // a selection of hardcoded assets and their AssetOutput representations var usdtAsset = xdr.Asset{ @@ -225,12 +167,6 @@ var ethTrustLineAsset = xdr.TrustLineAsset{ }, } -//var ethAssetPath = Path{ -// AssetType: "credit_alphanum4", -// AssetCode: "ETH", -// AssetIssuer: testAccount1Address, -//} - var liquidityPoolAsset = xdr.TrustLineAsset{ Type: xdr.AssetTypeAssetTypePoolShare, LiquidityPoolId: &xdr.PoolId{1, 3, 4, 5, 7, 9}, @@ -238,12 +174,6 @@ var liquidityPoolAsset = xdr.TrustLineAsset{ var nativeAsset = xdr.MustNewNativeAsset() -//var nativeAssetPath = Path{ -// AssetType: "native", -//} - -//var nativeTrustLineAsset = xdr.MustNewNativeAsset().ToTrustLineAsset() - var genericClaimableBalance = xdr.ClaimableBalanceId{ Type: xdr.ClaimableBalanceIdTypeClaimableBalanceIdTypeV0, V0: &xdr.Hash{1, 2, 3, 4, 5, 6, 7, 8, 9}, From a3aa552eceb166642a780bb4db38958c84f38730 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 30 Apr 2024 11:12:05 -0400 Subject: [PATCH 33/49] Add contract code fees --- internal/transform/contract_code.go | 35 ++++++++++++++++++++++++ 
internal/transform/contract_code_test.go | 24 ++++++++++++++++ internal/transform/schema.go | 10 +++++++ 3 files changed, 69 insertions(+) diff --git a/internal/transform/contract_code.go b/internal/transform/contract_code.go index bea834a3..b026d11d 100644 --- a/internal/transform/contract_code.go +++ b/internal/transform/contract_code.go @@ -38,6 +38,31 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi ledgerSequence := header.Header.LedgerSeq + var outputNInstructions uint32 + var outputNFunctions uint32 + var outputNGlobals uint32 + var outputNTableEntries uint32 + var outputNTypes uint32 + var outputNDataSegments uint32 + var outputNElemSegments uint32 + var outputNImports uint32 + var outputNExports uint32 + var outputNDataSegmentBytes uint32 + + extV1, ok := contractCode.Ext.GetV1() + if ok { + outputNInstructions = uint32(extV1.CostInputs.NInstructions) + outputNFunctions = uint32(extV1.CostInputs.NFunctions) + outputNGlobals = uint32(extV1.CostInputs.NGlobals) + outputNTableEntries = uint32(extV1.CostInputs.NTableEntries) + outputNTypes = uint32(extV1.CostInputs.NTypes) + outputNDataSegments = uint32(extV1.CostInputs.NDataSegments) + outputNElemSegments = uint32(extV1.CostInputs.NElemSegments) + outputNImports = uint32(extV1.CostInputs.NImports) + outputNExports = uint32(extV1.CostInputs.NExports) + outputNDataSegmentBytes = uint32(extV1.CostInputs.NDataSegmentBytes) + } + transformedCode := ContractCodeOutput{ ContractCodeHash: contractCodeHash, ContractCodeExtV: int32(contractCodeExtV), @@ -47,6 +72,16 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi ClosedAt: closedAt, LedgerSequence: uint32(ledgerSequence), LedgerKeyHash: ledgerKeyHash, + NInstructions: outputNInstructions, + NFunctions: outputNFunctions, + NGlobals: outputNGlobals, + NTableEntries: outputNTableEntries, + NTypes: outputNTypes, + NDataSegments: outputNDataSegments, + NElemSegments: outputNElemSegments, + NImports: outputNImports, + NExports: outputNExports, + NDataSegmentBytes: outputNDataSegmentBytes, } return transformedCode, nil } diff --git a/internal/transform/contract_code_test.go b/internal/transform/contract_code_test.go index cc710081..c15467a6 100644 --- a/internal/transform/contract_code_test.go +++ b/internal/transform/contract_code_test.go @@ -69,6 +69,20 @@ func makeContractCodeTestInput() []ingest.Change { Hash: hash, Ext: xdr.ContractCodeEntryExt{ V: 1, + V1: &xdr.ContractCodeEntryV1{ + CostInputs: xdr.ContractCodeCostInputs{ + NInstructions: 1, + NFunctions: 2, + NGlobals: 3, + NTableEntries: 4, + NTypes: 5, + NDataSegments: 6, + NElemSegments: 7, + NImports: 8, + NExports: 9, + NDataSegmentBytes: 10, + }, + }, }, }, }, @@ -94,6 +108,16 @@ func makeContractCodeTestOutput() []ContractCodeOutput { LedgerSequence: 10, ClosedAt: time.Date(1970, time.January, 1, 0, 16, 40, 0, time.UTC), LedgerKeyHash: "dfed061dbe464e0ff320744fcd604ac08b39daa74fa24110936654cbcb915ccc", + NInstructions: 1, + NFunctions: 2, + NGlobals: 3, + NTableEntries: 4, + NTypes: 5, + NDataSegments: 6, + NElemSegments: 7, + NImports: 8, + NExports: 9, + NDataSegmentBytes: 10, }, } } diff --git a/internal/transform/schema.go b/internal/transform/schema.go index 5f78dba3..59fec01a 100644 --- a/internal/transform/schema.go +++ b/internal/transform/schema.go @@ -522,6 +522,16 @@ type ContractCodeOutput struct { LedgerSequence uint32 `json:"ledger_sequence"` LedgerKeyHash string `json:"ledger_key_hash"` //ContractCodeCode string `json:"contract_code"` + NInstructions 
uint32 `json:"n_instructions"` + NFunctions uint32 `json:"n_functions"` + NGlobals uint32 `json:"n_globals"` + NTableEntries uint32 `json:"n_table_entries"` + NTypes uint32 `json:"n_types"` + NDataSegments uint32 `json:"n_data_segments"` + NElemSegments uint32 `json:"n_elem_segments"` + NImports uint32 `json:"n_imports"` + NExports uint32 `json:"n_exports"` + NDataSegmentBytes uint32 `json:"n_data_segment_bytes"` } // ConfigSettingOutput is a representation of soroban config settings that aligns with the Bigquery table config_settings From fdc8857c2d1297c487c7057be781571cb822f2e8 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 11:10:24 -0400 Subject: [PATCH 34/49] Update to use BufferedStorageBackend to read txmeta files --- cmd/export_account_signers.go | 10 +-- cmd/export_accounts.go | 10 +-- cmd/export_all_history.go | 18 ++-- cmd/export_assets.go | 14 ++-- cmd/export_claimable_balances.go | 10 +-- cmd/export_config_setting.go | 10 +-- cmd/export_contract_code.go | 10 +-- cmd/export_contract_data.go | 10 +-- cmd/export_diagnostic_events.go | 10 +-- cmd/export_effects.go | 12 +-- cmd/export_ledger_entry_changes.go | 29 ++++--- cmd/export_ledger_transaction.go | 10 +-- cmd/export_ledgers.go | 14 ++-- cmd/export_liquidity_pools.go | 10 +-- cmd/export_offers.go | 10 +-- cmd/export_operations.go | 10 +-- cmd/export_orderbooks.go | 24 +++--- cmd/export_trades.go | 10 +-- cmd/export_transactions.go | 10 +-- cmd/export_trustlines.go | 10 +-- cmd/export_ttl.go | 10 +-- go.mod | 2 +- go.sum | 4 +- internal/input/operations.go | 1 + internal/utils/main.go | 127 ++++++++++++++++++++--------- 25 files changed, 229 insertions(+), 166 deletions(-) diff --git a/cmd/export_account_signers.go b/cmd/export_account_signers.go index 9e4b9876..f5d7eb47 100644 --- a/cmd/export_account_signers.go +++ b/cmd/export_account_signers.go @@ -22,13 +22,13 @@ should be used in an initial data dump. In order to get account information with the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - accounts, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) + accounts, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read accounts: ", err) } @@ -48,7 +48,7 @@ the export_ledger_entry_changes command.`, } for _, entry := range transformed { - numBytes, err := exportEntry(entry, outFile, extra) + numBytes, err := exportEntry(entry, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err)) numFailures += 1 diff --git a/cmd/export_accounts.go b/cmd/export_accounts.go index 85d9bcc1..257a68f7 100644 --- a/cmd/export_accounts.go +++ b/cmd/export_accounts.go @@ -22,13 +22,13 @@ should be used in an initial data dump. 
In order to get account information with the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - accounts, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) + accounts, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read accounts: ", err) } @@ -45,7 +45,7 @@ the export_ledger_entry_changes command.`, continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err)) numFailures += 1 diff --git a/cmd/export_all_history.go b/cmd/export_all_history.go index d2d86299..0b197157 100644 --- a/cmd/export_all_history.go +++ b/cmd/export_all_history.go @@ -20,23 +20,23 @@ This is a temporary command used to reduce the amount of requests to history arc in order to mitigate egress costs for the entity hosting history archives.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - allHistory, err := input.GetAllHistory(startNum, endNum, limit, env, useCaptiveCore) + allHistory, err := input.GetAllHistory(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read all history: ", err) } cmdLogger.Info("start doing other exports") - getOperations(allHistory.Operations, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_operations.txt", env) - getTrades(allHistory.Trades, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_trades.txt") - getEffects(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_effects.txt", env) - getTransactions(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_transactions.txt") - getDiagnosticEvents(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_diagnostic_events.txt") + getOperations(allHistory.Operations, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_operations.txt", env) + getTrades(allHistory.Trades, 
commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_trades.txt") + getEffects(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_effects.txt", env) + getTransactions(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_transactions.txt") + getDiagnosticEvents(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_diagnostic_events.txt") cmdLogger.Info("done doing other exports") }, } diff --git a/cmd/export_assets.go b/cmd/export_assets.go index 5de3aa4d..7ea33a69 100644 --- a/cmd/export_assets.go +++ b/cmd/export_assets.go @@ -16,21 +16,21 @@ var assetsCmd = &cobra.Command{ Long: `Exports the assets that are created from payment operations over a specified ledger range`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) outFile := mustOutFile(path) var paymentOps []input.AssetTransformInput var err error - if useCaptiveCore { - paymentOps, err = input.GetPaymentOperationsHistoryArchive(startNum, endNum, limit, env, useCaptiveCore) + if commonArgs.UseCaptiveCore { + paymentOps, err = input.GetPaymentOperationsHistoryArchive(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) } else { - paymentOps, err = input.GetPaymentOperations(startNum, endNum, limit, env, useCaptiveCore) + paymentOps, err = input.GetPaymentOperations(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) } if err != nil { cmdLogger.Fatal("could not read asset: ", err) @@ -55,7 +55,7 @@ var assetsCmd = &cobra.Command{ } seenIDs[transformed.AssetID] = true - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.Error(err) numFailures += 1 diff --git a/cmd/export_claimable_balances.go b/cmd/export_claimable_balances.go index f552c913..684c5113 100644 --- a/cmd/export_claimable_balances.go +++ b/cmd/export_claimable_balances.go @@ -22,13 +22,13 @@ var claimableBalancesCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - balances, err := 
input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeClaimableBalance, env.ArchiveURLs) + balances, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeClaimableBalance, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read balances: ", err) } @@ -45,7 +45,7 @@ var claimableBalancesCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export balance %+v: %v", balance, err)) numFailures += 1 diff --git a/cmd/export_config_setting.go b/cmd/export_config_setting.go index 892d96eb..58c944c2 100644 --- a/cmd/export_config_setting.go +++ b/cmd/export_config_setting.go @@ -22,13 +22,13 @@ var configSettingCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - settings, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeConfigSetting, env.ArchiveURLs) + settings, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeConfigSetting, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("Error getting ledger entries: ", err) } @@ -45,7 +45,7 @@ var configSettingCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export config setting %+v: %v", setting, err)) numFailures += 1 diff --git a/cmd/export_contract_code.go b/cmd/export_contract_code.go index 4ff1d037..c9b5978b 100644 --- a/cmd/export_contract_code.go +++ b/cmd/export_contract_code.go @@ -22,13 +22,13 @@ var codeCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - codes, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeContractCode, env.ArchiveURLs) + codes, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeContractCode, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("Error getting ledger entries: ", err) } @@ -45,7 +45,7 @@ var codeCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := 
exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export contract code %+v: %v", code, err)) numFailures += 1 diff --git a/cmd/export_contract_data.go b/cmd/export_contract_data.go index 46a427fb..dc7f8c97 100644 --- a/cmd/export_contract_data.go +++ b/cmd/export_contract_data.go @@ -22,13 +22,13 @@ var dataCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - datas, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeContractData, env.ArchiveURLs) + datas, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeContractData, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("Error getting ledger entries: ", err) } @@ -50,7 +50,7 @@ var dataCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export contract data %+v: %v", data, err)) numFailures += 1 diff --git a/cmd/export_diagnostic_events.go b/cmd/export_diagnostic_events.go index 1a888ff9..3a5e9ad2 100644 --- a/cmd/export_diagnostic_events.go +++ b/cmd/export_diagnostic_events.go @@ -16,13 +16,13 @@ var diagnosticEventsCmd = &cobra.Command{ Long: `Exports the diagnostic events over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) + transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read transactions: ", err) } @@ -42,7 +42,7 @@ var diagnosticEventsCmd = &cobra.Command{ continue } for _, diagnosticEvent := range transformed { - _, err := exportEntry(diagnosticEvent, outFile, extra) + _, err := exportEntry(diagnosticEvent, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export diagnostic event: %v", err)) numFailures += 1 diff --git a/cmd/export_effects.go b/cmd/export_effects.go index 3ed1df75..03e0f4d4 100644 --- a/cmd/export_effects.go +++ b/cmd/export_effects.go @@ -14,15 +14,15 @@ 
var effectsCmd = &cobra.Command{ Long: "Exports the effects data over a specified range to an output file.", Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) + transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { - cmdLogger.Fatalf("could not read transactions in [%d, %d] (limit=%d): %v", startNum, endNum, limit, err) + cmdLogger.Fatalf("could not read transactions in [%d, %d] (limit=%d): %v", startNum, commonArgs.EndNum, limit, err) } outFile := mustOutFile(path) @@ -39,7 +39,7 @@ var effectsCmd = &cobra.Command{ } for _, transformed := range effects { - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(err) numFailures += 1 diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index 7b727680..aec74de1 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -28,9 +28,9 @@ confirmed by the Stellar network. If no data type flags are set, then by default all of them are exported. 
If any are set, it is assumed that the others should not be exported.`, Run: func(cmd *cobra.Command, args []string) { - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) _, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) exports := utils.MustExportTypeFlags(cmd.Flags(), cmdLogger) @@ -62,28 +62,28 @@ be exported.`, } } - if configPath == "" && endNum == 0 { + if configPath == "" && commonArgs.EndNum == 0 { cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)") } ctx := context.Background() - backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env) + backend, err := utils.CreateLedgerBackend(ctx, commonArgs.UseCaptiveCore, env) if err != nil { cmdLogger.Fatal("error creating a cloud storage backend: ", err) } - err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(startNum, endNum)) + err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(startNum, commonArgs.EndNum)) if err != nil { cmdLogger.Fatal("error preparing ledger range for cloud storage backend: ", err) } - if endNum == 0 { - endNum = math.MaxInt32 + if commonArgs.EndNum == 0 { + commonArgs.EndNum = math.MaxInt32 } changeChan := make(chan input.ChangeBatch) closeChan := make(chan int) - go input.StreamChanges(&backend, startNum, endNum, batchSize, changeChan, closeChan, env, cmdLogger) + go input.StreamChanges(&backend, startNum, commonArgs.EndNum, batchSize, changeChan, closeChan, env, cmdLogger) for { select { @@ -252,7 +252,16 @@ be exported.`, } } - err := exportTransformedData(batch.BatchStart, batch.BatchEnd, outputFolder, transformedOutputs, cloudCredentials, cloudStorageBucket, cloudProvider, extra) + err := exportTransformedData( + batch.BatchStart, + batch.BatchEnd, + outputFolder, + transformedOutputs, + cloudCredentials, + cloudStorageBucket, + cloudProvider, + commonArgs.Extra, + ) if err != nil { cmdLogger.LogError(err) continue diff --git a/cmd/export_ledger_transaction.go b/cmd/export_ledger_transaction.go index c08a2017..4054b63f 100644 --- a/cmd/export_ledger_transaction.go +++ b/cmd/export_ledger_transaction.go @@ -16,13 +16,13 @@ var ledgerTransactionCmd = &cobra.Command{ Long: `Exports the ledger_transaction transaction data over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - ledgerTransaction, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) + ledgerTransaction, err := 
input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read ledger_transaction: ", err) } @@ -39,7 +39,7 @@ var ledgerTransactionCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err)) numFailures += 1 diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go index f5000289..501e649b 100644 --- a/cmd/export_ledgers.go +++ b/cmd/export_ledgers.go @@ -16,19 +16,19 @@ var ledgersCmd = &cobra.Command{ Long: `Exports ledger data within the specified range to an output file. Encodes ledgers as JSON objects and exports them to the output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) var ledgers []utils.HistoryArchiveLedgerAndLCM var err error - if useCaptiveCore { - ledgers, err = input.GetLedgersHistoryArchive(startNum, endNum, limit, env, useCaptiveCore) + if commonArgs.UseCaptiveCore { + ledgers, err = input.GetLedgersHistoryArchive(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) } else { - ledgers, err = input.GetLedgers(startNum, endNum, limit, env, useCaptiveCore) + ledgers, err = input.GetLedgers(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) } if err != nil { cmdLogger.Fatal("could not read ledgers: ", err) @@ -46,7 +46,7 @@ var ledgersCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export ledger %d: %s", startNum+uint32(i), err)) numFailures += 1 diff --git a/cmd/export_liquidity_pools.go b/cmd/export_liquidity_pools.go index f05cab03..d5b3de65 100644 --- a/cmd/export_liquidity_pools.go +++ b/cmd/export_liquidity_pools.go @@ -22,13 +22,13 @@ should be used in an initial data dump. 
In order to get liquidity pools information with the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - pools, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeLiquidityPool, env.ArchiveURLs) + pools, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeLiquidityPool, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read accounts: ", err) } @@ -45,7 +45,7 @@ the export_ledger_entry_changes command.`, continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export pool %+v: %v", pool, err)) numFailures += 1 diff --git a/cmd/export_offers.go b/cmd/export_offers.go index 8f0ea5c6..cf7a425f 100644 --- a/cmd/export_offers.go +++ b/cmd/export_offers.go @@ -23,13 +23,13 @@ var offersCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - offers, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeOffer, env.ArchiveURLs) + offers, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeOffer, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read offers: ", err) } @@ -46,7 +46,7 @@ var offersCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export offer %+v: %v", offer, err)) numFailures += 1 diff --git a/cmd/export_operations.go b/cmd/export_operations.go index 9b84265a..e8418e6d 100644 --- a/cmd/export_operations.go +++ b/cmd/export_operations.go @@ -16,13 +16,13 @@ var operationsCmd = &cobra.Command{ Long: `Exports the operations data over a specified range. 
Each operation is an individual command that mutates the Stellar ledger.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - operations, err := input.GetOperations(startNum, endNum, limit, env, useCaptiveCore) + operations, err := input.GetOperations(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read operations: ", err) } @@ -39,7 +39,7 @@ var operationsCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export operation: %v", err)) numFailures += 1 diff --git a/cmd/export_orderbooks.go b/cmd/export_orderbooks.go index 91e924ab..58816fae 100644 --- a/cmd/export_orderbooks.go +++ b/cmd/export_orderbooks.go @@ -27,9 +27,9 @@ var exportOrderbooksCmd = &cobra.Command{ If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are confirmed by the Stellar network. In this unbounded case, a stellar-core config path is required to utilize the Captive Core toml.`, Run: func(cmd *cobra.Command, args []string) { - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) @@ -38,7 +38,7 @@ var exportOrderbooksCmd = &cobra.Command{ cmdLogger.Fatalf("batch-size (%d) must be greater than 0", batchSize) } - if configPath == "" && endNum == 0 { + if configPath == "" && commonArgs.EndNum == 0 { cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)") } @@ -54,7 +54,7 @@ var exportOrderbooksCmd = &cobra.Command{ } checkpointSeq := utils.GetMostRecentCheckpoint(startNum) - core, err := input.PrepareCaptiveCore(execPath, configPath, checkpointSeq, endNum, env) + core, err := input.PrepareCaptiveCore(execPath, configPath, checkpointSeq, commonArgs.EndNum, env) if err != nil { cmdLogger.Fatal("error creating a prepared captive core instance: ", err) } @@ -66,21 +66,21 @@ var exportOrderbooksCmd = &cobra.Command{ orderbookChannel := make(chan input.OrderbookBatch) - go input.StreamOrderbooks(core, startNum, endNum, batchSize, orderbookChannel, orderbook, env, cmdLogger) + go input.StreamOrderbooks(core, startNum, commonArgs.EndNum, batchSize, 
orderbookChannel, orderbook, env, cmdLogger) // If the end sequence number is defined, we work in a closed range and export a finite number of batches - if endNum != 0 { - batchCount := uint32(math.Ceil(float64(endNum-startNum+1) / float64(batchSize))) + if commonArgs.EndNum != 0 { + batchCount := uint32(math.Ceil(float64(commonArgs.EndNum-startNum+1) / float64(batchSize))) for i := uint32(0); i < batchCount; i++ { batchStart := startNum + i*batchSize // Subtract 1 from the end batch number because batches do not include the last batch in the range batchEnd := batchStart + batchSize - 1 - if batchEnd > endNum { - batchEnd = endNum + if batchEnd > commonArgs.EndNum { + batchEnd = commonArgs.EndNum } parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger) - exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, extra) + exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, commonArgs.Extra) } } else { // otherwise, we export in an unbounded manner where batches are constantly exported @@ -89,7 +89,7 @@ var exportOrderbooksCmd = &cobra.Command{ batchStart := startNum + batchNum*batchSize batchEnd := batchStart + batchSize - 1 parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger) - exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, extra) + exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, commonArgs.Extra) batchNum++ } } diff --git a/cmd/export_trades.go b/cmd/export_trades.go index 551263e7..94441860 100644 --- a/cmd/export_trades.go +++ b/cmd/export_trades.go @@ -19,13 +19,13 @@ var tradesCmd = &cobra.Command{ Long: `Exports trade data within the specified range to an output file`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - trades, err := input.GetTrades(startNum, endNum, limit, env, useCaptiveCore) + trades, err := input.GetTrades(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read trades ", err) } @@ -43,7 +43,7 @@ var tradesCmd = &cobra.Command{ } for _, transformed := range trades { - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(err) numFailures += 1 diff --git a/cmd/export_transactions.go b/cmd/export_transactions.go index cd37e247..966cd0fa 100644 --- a/cmd/export_transactions.go +++ b/cmd/export_transactions.go @@ -16,13 +16,13 @@ var transactionsCmd = &cobra.Command{ Long: `Exports the transaction data over a specified range to an output file.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, 
useCaptiveCore, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - transactions, err := input.GetTransactions(startNum, endNum, limit, env, useCaptiveCore) + transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { cmdLogger.Fatal("could not read transactions: ", err) } @@ -39,7 +39,7 @@ var transactionsCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err)) numFailures += 1 diff --git a/cmd/export_trustlines.go b/cmd/export_trustlines.go index 8b1315b5..01434be9 100644 --- a/cmd/export_trustlines.go +++ b/cmd/export_trustlines.go @@ -23,13 +23,13 @@ var trustlinesCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - trustlines, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeTrustline, env.ArchiveURLs) + trustlines, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeTrustline, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("could not read trustlines: ", err) } @@ -46,7 +46,7 @@ var trustlinesCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export trustline %+v: %v", trust, err)) numFailures += 1 diff --git a/cmd/export_ttl.go b/cmd/export_ttl.go index ce689fda..89adba3b 100644 --- a/cmd/export_ttl.go +++ b/cmd/export_ttl.go @@ -22,13 +22,13 @@ var ttlCmd = &cobra.Command{ the export_ledger_entry_changes command.`, Run: func(cmd *cobra.Command, args []string) { cmdLogger.SetLevel(logrus.InfoLevel) - endNum, strictExport, isTest, isFuture, extra, _, datastoreUrl := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = strictExport - env := utils.GetEnvironmentDetails(isTest, isFuture, datastoreUrl) + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + cmdLogger.StrictExport = commonArgs.StrictExport + env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider 
:= utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - ttls, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeTtl, env.ArchiveURLs) + ttls, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeTtl, env.ArchiveURLs) if err != nil { cmdLogger.Fatal("Error getting ledger entries: ", err) } @@ -45,7 +45,7 @@ var ttlCmd = &cobra.Command{ continue } - numBytes, err := exportEntry(transformed, outFile, extra) + numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) if err != nil { cmdLogger.LogError(fmt.Errorf("could not export ttl %+v: %v", ttl, err)) numFailures += 1 diff --git a/go.mod b/go.mod index 56f483d1..fc76db68 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba + github.com/stellar/go v0.0.0-20240507142223-735600adb2d4 github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 92a52cf7..568dfe39 100644 --- a/go.sum +++ b/go.sum @@ -296,8 +296,8 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= -github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba h1:2UPb78V6mL07B0nJ6/89nJ2cimVD3xPMCFxawwRvpJ0= -github.com/stellar/go v0.0.0-20240423031611-e1c5206ad1ba/go.mod h1:ckzsX0B0qfTMVZQJtPELJLs7cJ6xXMYHPVLyIsReGsU= +github.com/stellar/go v0.0.0-20240507142223-735600adb2d4 h1:4dmEOaVcttNCZTIXE8y5VwNvduqVwE+D7oFLAu2nn/k= +github.com/stellar/go v0.0.0-20240507142223-735600adb2d4/go.mod h1:kxiz7GJ94uVORlLZ/q7BrEQZAvBgkNXly7I19axD3EA= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= diff --git a/internal/input/operations.go b/internal/input/operations.go index 5290691c..0fca21ed 100644 --- a/internal/input/operations.go +++ b/internal/input/operations.go @@ -71,6 +71,7 @@ func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails, } txReader.Close() + if int64(len(opSlice)) >= limit && limit >= 0 { break } diff --git a/internal/utils/main.go b/internal/utils/main.go index c1259a50..b68ca190 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "math/big" - "net/url" "time" "github.com/spf13/pflag" @@ -17,6 +16,7 @@ import ( "github.com/stellar/go/ingest/ledgerbackend" "github.com/stellar/go/keypair" "github.com/stellar/go/network" + "github.com/stellar/go/support/datastore" "github.com/stellar/go/support/storage" "github.com/stellar/go/txnbuild" "github.com/stellar/go/xdr" @@ -235,7 +235,10 @@ func AddCommonFlags(flags *pflag.FlagSet) { flags.Bool("futurenet", false, "If set, will connect to Futurenet instead of Mainnet.") flags.StringToStringP("extra-fields", "u", map[string]string{}, "Additional fields to append to output jsons. Used for appending metadata") flags.Bool("captive-core", false, "If set, run captive core to retrieve data. 
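The --extra-fields flag registered above (flags.StringToStringP("extra-fields", "u", ...)) lets a caller append arbitrary key/value metadata to every exported JSON line. Below is a minimal, self-contained sketch of that merge step; appendExtraFields is a hypothetical name used only for illustration, and the decode/merge/re-encode approach is modeled on the writeSlice helper that appears later in this series, not on the actual exportEntry implementation, which this patch does not show.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// appendExtraFields merges --extra-fields key/value pairs into one
// JSON-encoded record before it is written out. Hypothetical helper;
// stellar-etl's exportEntry is assumed to do something equivalent.
func appendExtraFields(record []byte, extra map[string]string) ([]byte, error) {
	if len(extra) == 0 {
		return record, nil
	}
	fields := map[string]interface{}{}
	decoder := json.NewDecoder(bytes.NewReader(record))
	decoder.UseNumber() // keep numeric fields as json.Number instead of float64
	if err := decoder.Decode(&fields); err != nil {
		return nil, err
	}
	for k, v := range extra {
		fields[k] = v
	}
	return json.Marshal(fields)
}

func main() {
	out, err := appendExtraFields(
		[]byte(`{"ledger_sequence":52000000}`),
		map[string]string{"batch_run_date": "2024-05-07"},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}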
Otherwise use TxMeta file datastore.") - flags.String("datastore-url", "", "Datastore url to read txmeta files from.") + flags.String("datastore-path", "ledger-exporter/ledgers", "Datastore bucket path to read txmeta files from.") + flags.Uint32("num-workers", 5, "Number of workers to spawn that read txmeta files from the datastore.") + flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.") + flags.Uint32("retry-wait", 5, "Time in seconds to wait for GetLedger retry.") } // AddArchiveFlags adds the history archive specific flags: start-ledger, output, and limit @@ -282,56 +285,84 @@ func AddExportTypeFlags(flags *pflag.FlagSet) { flags.BoolP("export-ttl", "", false, "set in order to export ttl changes") } +type CommonFlagValues struct { + EndNum uint32 + StrictExport bool + IsTest bool + IsFuture bool + Extra map[string]string + UseCaptiveCore bool + DatastorePath string + NumWorkers uint32 + RetryLimit uint32 + RetryWait uint32 +} + // MustCommonFlags gets the values of the the flags common to all commands: end-ledger and strict-export. // If any do not exist, it stops the program fatally using the logger -func MustCommonFlags( - flags *pflag.FlagSet, - logger *EtlLogger, -) ( - endNum uint32, - strictExport, - isTest bool, - isFuture bool, - extra map[string]string, - useCaptiveCore bool, - datastoreUrl string, -) { +func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) CommonFlagValues { endNum, err := flags.GetUint32("end-ledger") if err != nil { logger.Fatal("could not get end sequence number: ", err) } - strictExport, err = flags.GetBool("strict-export") + strictExport, err := flags.GetBool("strict-export") if err != nil { logger.Fatal("could not get strict-export boolean: ", err) } - isTest, err = flags.GetBool("testnet") + isTest, err := flags.GetBool("testnet") if err != nil { logger.Fatal("could not get testnet boolean: ", err) } - isFuture, err = flags.GetBool("futurenet") + isFuture, err := flags.GetBool("futurenet") if err != nil { logger.Fatal("could not get futurenet boolean: ", err) } - extra, err = flags.GetStringToString("extra-fields") + extra, err := flags.GetStringToString("extra-fields") if err != nil { logger.Fatal("could not get extra fields string: ", err) } - useCaptiveCore, err = flags.GetBool("captive-core") + useCaptiveCore, err := flags.GetBool("captive-core") if err != nil { logger.Fatal("could not get captive-core flag: ", err) } - datastoreUrl, err = flags.GetString("datastore-url") + datastorePath, err := flags.GetString("datastore-path") if err != nil { - logger.Fatal("could not get datastore-url string: ", err) + logger.Fatal("could not get datastore-bucket-path string: ", err) } - return + numWorkers, err := flags.GetUint32("num-workers") + if err != nil { + logger.Fatal("could not get num-workers uint32: ", err) + } + + retryLimit, err := flags.GetUint32("retry-limit") + if err != nil { + logger.Fatal("could not get retry-limit uint32: ", err) + } + + retryWait, err := flags.GetUint32("retry-wait") + if err != nil { + logger.Fatal("could not get retry-wait uint32: ", err) + } + + return CommonFlagValues{ + EndNum: endNum, + StrictExport: strictExport, + IsTest: isTest, + IsFuture: isFuture, + Extra: extra, + UseCaptiveCore: useCaptiveCore, + DatastorePath: datastorePath, + NumWorkers: numWorkers, + RetryLimit: retryLimit, + RetryWait: retryWait, + } } // MustArchiveFlags gets the values of the the history archive specific flags: start-ledger, output, and limit @@ -430,7 +461,7 @@ func MustExportTypeFlags(flags *pflag.FlagSet, logger 
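The hunk above replaces the long multi-value return of MustCommonFlags with a single CommonFlagValues struct and adds four datastore-related flags (datastore-path, num-workers, retry-limit, retry-wait). The sketch below is a minimal standalone illustration of the same register-then-read pattern using only github.com/spf13/pflag; the flag names and defaults match the patch, while the stellar-etl logger and cobra wiring are left out, and commonFlagValues is a trimmed stand-in for the real struct rather than the repository's type.

package main

import (
	"fmt"
	"log"

	"github.com/spf13/pflag"
)

// commonFlagValues is a trimmed stand-in for the CommonFlagValues struct
// introduced in this patch, limited to the new datastore-related fields.
type commonFlagValues struct {
	DatastorePath string
	NumWorkers    uint32
	RetryLimit    uint32
	RetryWait     uint32
}

func main() {
	flags := pflag.NewFlagSet("stellar-etl", pflag.ContinueOnError)

	// Same flag names and defaults as utils.AddCommonFlags in this patch.
	flags.String("datastore-path", "ledger-exporter/ledgers", "Datastore bucket path to read txmeta files from.")
	flags.Uint32("num-workers", 5, "Number of workers to spawn that read txmeta files from the datastore.")
	flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.")
	flags.Uint32("retry-wait", 5, "Time in seconds to wait for GetLedger retry.")

	if err := flags.Parse([]string{"--datastore-path", "my-bucket/ledgers", "--num-workers", "8"}); err != nil {
		log.Fatal(err)
	}

	datastorePath, err := flags.GetString("datastore-path")
	if err != nil {
		log.Fatal("could not get datastore-path string: ", err)
	}
	numWorkers, err := flags.GetUint32("num-workers")
	if err != nil {
		log.Fatal("could not get num-workers uint32: ", err)
	}
	retryLimit, err := flags.GetUint32("retry-limit")
	if err != nil {
		log.Fatal("could not get retry-limit uint32: ", err)
	}
	retryWait, err := flags.GetUint32("retry-wait")
	if err != nil {
		log.Fatal("could not get retry-wait uint32: ", err)
	}

	values := commonFlagValues{
		DatastorePath: datastorePath,
		NumWorkers:    numWorkers,
		RetryLimit:    retryLimit,
		RetryWait:     retryWait,
	}
	fmt.Printf("%+v\n", values)
}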
*EtlLogger) map[string]boo "export-ttl": false, } - for export_name, _ := range exports { + for export_name := range exports { exports[export_name], err = flags.GetBool(export_name) if err != nil { logger.Fatalf("could not get %s flag: %v", export_name, err) @@ -649,6 +680,7 @@ type EnvironmentDetails struct { BinaryPath string CoreConfig string StorageURL string + Network string } // GetPassphrase returns the correct Network Passphrase based on env preference @@ -660,6 +692,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" details.StorageURL = datastoreUrl + details.Network = "testnet" return details } else if isFuture { // details.NetworkPassphrase = network.FutureNetworkPassphrase @@ -668,6 +701,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" details.StorageURL = datastoreUrl + details.Network = "futurenet" return details } else { // default: mainnet @@ -676,6 +710,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core.cfg" details.StorageURL = datastoreUrl + details.Network = "pubnet" return details } } @@ -714,6 +749,9 @@ func (e EnvironmentDetails) GetUnboundedLedgerCloseMeta(end uint32) (xdr.LedgerC ctx := context.Background() backend, err := e.CreateCaptiveCoreBackend() + if err != nil { + return xdr.LedgerCloseMeta{}, err + } ledgerRange := ledgerbackend.UnboundedRange(end) @@ -757,28 +795,43 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme } // Create ledger backend from datastore - fileConfig := ledgerbackend.LCMFileConfig{ - StorageURL: env.StorageURL, - FileSuffix: ".xdr.gz", - LedgersPerFile: 1, - FilesPerPartition: 64000, + params := make(map[string]string) + //params["destination_bucket_path"] = "ledger-exporter/ledgers" + params["destination_bucket_path"] = env.StorageURL + dataStoreConfig := datastore.DataStoreConfig{ + Type: "GCS", + Params: params, } - parsed, err := url.Parse(env.StorageURL) + dataStore, err := datastore.NewDataStore(ctx, dataStoreConfig, env.Network) if err != nil { return nil, err } - // Using the GCS datastore backend - if parsed.Scheme == "gcs" { - backend, err := ledgerbackend.NewGCSBackend(ctx, fileConfig) - if err != nil { - return nil, err - } - return backend, nil + // TODO: In the future these will come from a config file written by ledgerexporter + // Hard code ledger batch values for now + ledgerBatchConfig := datastore.LedgerBatchConfig{ + LedgersPerFile: 1, + FilesPerPartition: 64000, + FileSuffix: ".xdr.gz", + } + + // TODO: In the future CompressionType should be removed as it won't be configurable + BSBackendConfig := ledgerbackend.BufferedStorageBackendConfig{ + LedgerBatchConfig: ledgerBatchConfig, + CompressionType: "gzip", + DataStore: dataStore, + BufferSize: 1000, + NumWorkers: 5, + RetryLimit: 3, + RetryWait: 5, } - return nil, errors.New("no valid ledgerbackend selected") + backend, err := ledgerbackend.NewBufferedStorageBackend(ctx, BSBackendConfig) + if err != nil { + return nil, err + } + return backend, nil } func LedgerKeyToLedgerKeyHash(ledgerKey xdr.LedgerKey) string { From f527b68baa87d321fd4495710a89d16521bc89bf Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 11:15:59 
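For context on the CreateLedgerBackend rewrite just above: the GCS-backed BufferedStorageBackend it constructs is consumed through the generic ledgerbackend interface, and at this point in the series the new num-workers, retry-limit and retry-wait flag values are parsed but not yet threaded into BufferedStorageBackendConfig, which still hard-codes NumWorkers: 5, RetryLimit: 3 and RetryWait: 5. The sketch below shows one possible call pattern; it assumes CreateLedgerBackend returns a ledgerbackend.LedgerBackend (the diff does not show the return type), assumes the file lives inside the stellar-etl module so it can import internal/utils, and would need real GCS credentials plus an existing txmeta bucket to actually run.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/stellar/go/ingest/ledgerbackend"

	"github.com/stellar/stellar-etl/internal/utils"
)

func main() {
	ctx := context.Background()

	// useCaptiveCore=false selects the datastore-backed BufferedStorageBackend
	// built in CreateLedgerBackend above; the path is the new flag default.
	env := utils.GetEnvironmentDetails(false, false, "ledger-exporter/ledgers")
	backend, err := utils.CreateLedgerBackend(ctx, false, env)
	if err != nil {
		log.Fatal("could not create ledger backend: ", err)
	}
	defer backend.Close()

	// Read a small bounded range through the generic LedgerBackend interface.
	start, end := uint32(52000000), uint32(52000010)
	if err := backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end)); err != nil {
		log.Fatal("could not prepare range: ", err)
	}
	for seq := start; seq <= end; seq++ {
		lcm, err := backend.GetLedger(ctx, seq)
		if err != nil {
			log.Fatal("could not get ledger: ", err)
		}
		fmt.Println("read ledger", lcm.LedgerSequence())
	}
}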
-0400 Subject: [PATCH 35/49] Rename storageurl --- internal/utils/main.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/internal/utils/main.go b/internal/utils/main.go index b68ca190..1bf01ed0 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -679,19 +679,19 @@ type EnvironmentDetails struct { ArchiveURLs []string BinaryPath string CoreConfig string - StorageURL string + DatastorePath string Network string } // GetPassphrase returns the correct Network Passphrase based on env preference -func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (details EnvironmentDetails) { +func GetEnvironmentDetails(isTest bool, isFuture bool, datastorePath string) (details EnvironmentDetails) { if isTest { // testnet passphrase to be used for testing details.NetworkPassphrase = network.TestNetworkPassphrase details.ArchiveURLs = testArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" - details.StorageURL = datastoreUrl + details.DatastorePath = datastorePath details.Network = "testnet" return details } else if isFuture { @@ -700,7 +700,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.ArchiveURLs = futureArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" - details.StorageURL = datastoreUrl + details.DatastorePath = datastorePath details.Network = "futurenet" return details } else { @@ -709,7 +709,7 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastoreUrl string) (det details.ArchiveURLs = mainArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core.cfg" - details.StorageURL = datastoreUrl + details.DatastorePath = datastorePath details.Network = "pubnet" return details } @@ -797,7 +797,7 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme // Create ledger backend from datastore params := make(map[string]string) //params["destination_bucket_path"] = "ledger-exporter/ledgers" - params["destination_bucket_path"] = env.StorageURL + params["destination_bucket_path"] = env.DatastorePath dataStoreConfig := datastore.DataStoreConfig{ Type: "GCS", Params: params, From d74a721ecbc17e0e71695471784efda4ef69e6b3 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 16:58:38 -0400 Subject: [PATCH 36/49] Remove unused export commands --- cmd/export_account_signers.go | 78 ---------- cmd/export_account_signers_test.go | 29 ---- cmd/export_accounts.go | 81 ---------- cmd/export_accounts_test.go | 26 ---- cmd/export_all_history.go | 207 -------------------------- cmd/export_claimable_balances.go | 82 ---------- cmd/export_claimable_balances_test.go | 20 --- cmd/export_config_setting.go | 81 ---------- cmd/export_config_setting_test.go | 22 --- cmd/export_contract_code.go | 81 ---------- cmd/export_contract_code_test.go | 22 --- cmd/export_contract_data.go | 86 ----------- cmd/export_contract_data_test.go | 22 --- cmd/export_liquidity_pools.go | 81 ---------- cmd/export_offers.go | 82 ---------- cmd/export_offers_test.go | 26 ---- cmd/export_orderbooks.go | 185 ----------------------- cmd/export_orderbooks_test.go | 41 ----- cmd/export_trustlines.go | 85 ----------- cmd/export_trustlines_test.go | 26 ---- cmd/export_ttl.go | 81 ---------- cmd/export_ttl_test.go | 22 --- 22 files changed, 1466 deletions(-) delete mode 100644 cmd/export_account_signers.go delete mode 100644 
cmd/export_account_signers_test.go delete mode 100644 cmd/export_accounts.go delete mode 100644 cmd/export_accounts_test.go delete mode 100644 cmd/export_all_history.go delete mode 100644 cmd/export_claimable_balances.go delete mode 100644 cmd/export_claimable_balances_test.go delete mode 100644 cmd/export_config_setting.go delete mode 100644 cmd/export_config_setting_test.go delete mode 100644 cmd/export_contract_code.go delete mode 100644 cmd/export_contract_code_test.go delete mode 100644 cmd/export_contract_data.go delete mode 100644 cmd/export_contract_data_test.go delete mode 100644 cmd/export_liquidity_pools.go delete mode 100644 cmd/export_offers.go delete mode 100644 cmd/export_offers_test.go delete mode 100644 cmd/export_orderbooks.go delete mode 100644 cmd/export_orderbooks_test.go delete mode 100644 cmd/export_trustlines.go delete mode 100644 cmd/export_trustlines_test.go delete mode 100644 cmd/export_ttl.go delete mode 100644 cmd/export_ttl_test.go diff --git a/cmd/export_account_signers.go b/cmd/export_account_signers.go deleted file mode 100644 index f5d7eb47..00000000 --- a/cmd/export_account_signers.go +++ /dev/null @@ -1,78 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var accountSignersCmd = &cobra.Command{ - Use: "export_signers", - Short: "Exports the account signers data.", - Long: `Exports historical account signers data from the genesis ledger to the provided end-ledger to an output file. -The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it -should be used in an initial data dump. 
In order to get account information within a specified ledger range, see -the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - accounts, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read accounts: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - numSigners := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, acc := range accounts { - if utils.AccountSignersChanged(acc) { - transformed, err := transform.TransformSigners(acc, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not json transform account signer: %v", err)) - numFailures += 1 - continue - } - - for _, entry := range transformed { - numBytes, err := exportEntry(entry, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err)) - numFailures += 1 - continue - } - numSigners += 1 - totalNumBytes += numBytes - } - } - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(numSigners, numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(accountSignersCmd) - utils.AddCommonFlags(accountSignersCmd.Flags()) - utils.AddBucketFlags("signers", accountSignersCmd.Flags()) - utils.AddCloudStorageFlags(accountSignersCmd.Flags()) - accountSignersCmd.MarkFlagRequired("end-ledger") -} diff --git a/cmd/export_account_signers_test.go b/cmd/export_account_signers_test.go deleted file mode 100644 index 33193444..00000000 --- a/cmd/export_account_signers_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package cmd - -import ( -"testing" -) - -func TestExportSigners(t *testing.T) { - tests := []cliTest{ - { - name: "signers: bucket list with exact checkpoint", - args: []string{"export_signers", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")}, - golden: "bucket_read_exact.golden", - wantErr: nil, - sortForComparison: true, - }, - { - name: "signers: bucket list with end not on checkpoint", - args: []string{"export_signers", "-e", "80210", "-o", gotTestDir(t, "bucket_read_off.txt")}, - golden: "bucket_read_off.golden", - wantErr: nil, - sortForComparison: true, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/signers/") - } -} - diff --git a/cmd/export_accounts.go b/cmd/export_accounts.go deleted file mode 100644 index 257a68f7..00000000 --- a/cmd/export_accounts.go +++ /dev/null @@ -1,81 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var accountsCmd = &cobra.Command{ - Use: "export_accounts", - Short: "Exports the account data.", - Long: `Exports historical account data from the genesis ledger to the provided end-ledger to an output file. 
-The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it -should be used in an initial data dump. In order to get account information within a specified ledger range, see -the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - accounts, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read accounts: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, acc := range accounts { - transformed, err := transform.TransformAccount(acc, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not json transform account: %v", err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(accounts), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(accountsCmd) - utils.AddCommonFlags(accountsCmd.Flags()) - utils.AddBucketFlags("accounts", accountsCmd.Flags()) - utils.AddCloudStorageFlags(accountsCmd.Flags()) - accountsCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_accounts_test.go b/cmd/export_accounts_test.go deleted file mode 100644 index c1981fbf..00000000 --- a/cmd/export_accounts_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportAccounts(t *testing.T) { - tests := []cliTest{ - { - name: "accounts: bucket list with exact checkpoint", - args: []string{"export_accounts", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")}, - golden: "bucket_read_exact.golden", - wantErr: nil, - }, - { - name: "accounts: bucket list with end not on checkpoint", - args: []string{"export_accounts", "-e", "80210", "-o", gotTestDir(t, "bucket_read_off.txt")}, - golden: "bucket_read_off.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/accounts/") - } -} diff --git a/cmd/export_all_history.go b/cmd/export_all_history.go deleted file mode 100644 index 0b197157..00000000 --- a/cmd/export_all_history.go +++ /dev/null @@ -1,207 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/toid" - "github.com/stellar/stellar-etl/internal/transform" - 
"github.com/stellar/stellar-etl/internal/utils" -) - -var allHistoryCmd = &cobra.Command{ - Use: "export_all_history", - Short: "Exports all stellar network history.", - Long: `Exports historical stellar network data between provided start-ledger/end-ledger to output files. -This is a temporary command used to reduce the amount of requests to history archives -in order to mitigate egress costs for the entity hosting history archives.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - - allHistory, err := input.GetAllHistory(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) - if err != nil { - cmdLogger.Fatal("could not read all history: ", err) - } - - cmdLogger.Info("start doing other exports") - getOperations(allHistory.Operations, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_operations.txt", env) - getTrades(allHistory.Trades, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_trades.txt") - getEffects(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_effects.txt", env) - getTransactions(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_transactions.txt") - getDiagnosticEvents(allHistory.Ledgers, commonArgs.Extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_diagnostic_events.txt") - cmdLogger.Info("done doing other exports") - }, -} - -func getOperations(operations []input.OperationTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string, env utils.EnvironmentDetails) { - outFileOperations := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - for _, transformInput := range operations { - transformed, err := transform.TransformOperation(transformInput.Operation, transformInput.OperationIndex, transformInput.Transaction, transformInput.LedgerSeqNum, transformInput.LedgerCloseMeta, env.NetworkPassphrase) - if err != nil { - txIndex := transformInput.Transaction.Index - cmdLogger.LogError(fmt.Errorf("could not transform operation %d in transaction %d in ledger %d: %v", transformInput.OperationIndex, txIndex, transformInput.LedgerSeqNum, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFileOperations, extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export operation: %v", err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFileOperations.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(operations), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) -} - -func getTrades(trades []input.TradeTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) { - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - for _, tradeInput := range trades { - trades, err := 
transform.TransformTrade(tradeInput.OperationIndex, tradeInput.OperationHistoryID, tradeInput.Transaction, tradeInput.CloseTime) - if err != nil { - parsedID := toid.Parse(tradeInput.OperationHistoryID) - cmdLogger.LogError(fmt.Errorf("from ledger %d, transaction %d, operation %d: %v", parsedID.LedgerSequence, parsedID.TransactionOrder, parsedID.OperationOrder, err)) - numFailures += 1 - continue - } - - for _, transformed := range trades { - numBytes, err := exportEntry(transformed, outFile, extra) - if err != nil { - cmdLogger.LogError(err) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(trades), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) -} - -func getEffects(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string, env utils.EnvironmentDetails) { - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - for _, transformInput := range transactions { - LedgerSeq := uint32(transformInput.LedgerHistory.Header.LedgerSeq) - effects, err := transform.TransformEffect(transformInput.Transaction, LedgerSeq, transformInput.LedgerCloseMeta, env.NetworkPassphrase) - if err != nil { - txIndex := transformInput.Transaction.Index - cmdLogger.Errorf("could not transform transaction %d in ledger %d: %v", txIndex, LedgerSeq, err) - numFailures += 1 - continue - } - - for _, transformed := range effects { - numBytes, err := exportEntry(transformed, outFile, extra) - if err != nil { - cmdLogger.LogError(err) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(transactions), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) -} - -func getTransactions(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) { - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - for _, transformInput := range transactions { - transformed, err := transform.TransformTransaction(transformInput.Transaction, transformInput.LedgerHistory) - if err != nil { - ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq - cmdLogger.LogError(fmt.Errorf("could not transform transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(transactions), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) -} - -func getDiagnosticEvents(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) { - outFile := mustOutFile(path) - numFailures := 0 - for _, transformInput := range transactions { - transformed, err, ok := transform.TransformDiagnosticEvent(transformInput.Transaction, transformInput.LedgerHistory) - if err != nil { - ledgerSeq := 
transformInput.LedgerHistory.Header.LedgerSeq - cmdLogger.LogError(fmt.Errorf("could not transform diagnostic events in transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq)) - numFailures += 1 - continue - } - - if !ok { - continue - } - for _, diagnosticEvent := range transformed { - _, err := exportEntry(diagnosticEvent, outFile, extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export diagnostic event: %v", err)) - numFailures += 1 - continue - } - } - } - - outFile.Close() - - printTransformStats(len(transactions), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) -} - -func init() { - rootCmd.AddCommand(allHistoryCmd) - utils.AddCommonFlags(allHistoryCmd.Flags()) - utils.AddArchiveFlags("", allHistoryCmd.Flags()) - utils.AddCloudStorageFlags(allHistoryCmd.Flags()) - allHistoryCmd.MarkFlagRequired("end-ledger") -} diff --git a/cmd/export_claimable_balances.go b/cmd/export_claimable_balances.go deleted file mode 100644 index 684c5113..00000000 --- a/cmd/export_claimable_balances.go +++ /dev/null @@ -1,82 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var claimableBalancesCmd = &cobra.Command{ - Use: "export_claimable_balances", - Short: "Exports the data on claimable balances made from the genesis ledger to a specified endpoint.", - Long: `Exports historical offer data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. 
In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - balances, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeClaimableBalance, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read balances: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, balance := range balances { - transformed, err := transform.TransformClaimableBalance(balance, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform balance %+v: %v", balance, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export balance %+v: %v", balance, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(balances), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(claimableBalancesCmd) - utils.AddCommonFlags(claimableBalancesCmd.Flags()) - utils.AddBucketFlags("claimable_balances", claimableBalancesCmd.Flags()) - utils.AddCloudStorageFlags(claimableBalancesCmd.Flags()) - claimableBalancesCmd.MarkFlagRequired("end-ledger") - - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_claimable_balances_test.go b/cmd/export_claimable_balances_test.go deleted file mode 100644 index fafcaa64..00000000 --- a/cmd/export_claimable_balances_test.go +++ /dev/null @@ -1,20 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportClaimableBalances(t *testing.T) { - tests := []cliTest{ - { - name: "claimable balances", - args: []string{"export_claimable_balances", "-e", "32878607", "-o", gotTestDir(t, "bucket_read.txt")}, - golden: "bucket_read.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/claimable_balances/") - } -} diff --git a/cmd/export_config_setting.go b/cmd/export_config_setting.go deleted file mode 100644 index 58c944c2..00000000 --- a/cmd/export_config_setting.go +++ /dev/null @@ -1,81 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var configSettingCmd = &cobra.Command{ - Use: "export_config_setting", - Short: "Exports the config setting information.", - Long: `Exports historical config settings data from the genesis ledger to the provided 
end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - settings, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeConfigSetting, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("Error getting ledger entries: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, setting := range settings { - transformed, err := transform.TransformConfigSetting(setting, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform config setting %+v: %v", setting, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export config setting %+v: %v", setting, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(settings), numFailures) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - - }, -} - -func init() { - rootCmd.AddCommand(configSettingCmd) - utils.AddCommonFlags(configSettingCmd.Flags()) - utils.AddBucketFlags("config_settings", configSettingCmd.Flags()) - utils.AddCloudStorageFlags(configSettingCmd.Flags()) - configSettingCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_config_setting_test.go b/cmd/export_config_setting_test.go deleted file mode 100644 index a8114429..00000000 --- a/cmd/export_config_setting_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportConfigSetting(t *testing.T) { - t.Skip("Skipping due to unstable data in Futurenet") - // TODO: find ledger with data and create testdata - tests := []cliTest{ - { - name: "config setting", - args: []string{"export_config_setting", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")}, - golden: "bucket_read.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/config_setting/") - } -} diff --git a/cmd/export_contract_code.go b/cmd/export_contract_code.go deleted file mode 100644 index c9b5978b..00000000 --- a/cmd/export_contract_code.go +++ /dev/null @@ -1,81 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - 
"github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var codeCmd = &cobra.Command{ - Use: "export_contract_code", - Short: "Exports the contract code information.", - Long: `Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - codes, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeContractCode, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("Error getting ledger entries: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, code := range codes { - transformed, err := transform.TransformContractCode(code, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform contract code %+v: %v", code, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export contract code %+v: %v", code, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(codes), numFailures) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - - }, -} - -func init() { - rootCmd.AddCommand(codeCmd) - utils.AddCommonFlags(codeCmd.Flags()) - utils.AddBucketFlags("contract_code", codeCmd.Flags()) - utils.AddCloudStorageFlags(codeCmd.Flags()) - codeCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_contract_code_test.go b/cmd/export_contract_code_test.go deleted file mode 100644 index 74f51285..00000000 --- a/cmd/export_contract_code_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportContractCode(t *testing.T) { - t.Skip("Skipping due to unstable data in Futurenet") - // TODO: find ledger with data and create testdata - tests := []cliTest{ - { - name: "contract code", - args: []string{"export_contract_code", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")}, - golden: "bucket_read.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/contract_code/") - } -} diff --git a/cmd/export_contract_data.go b/cmd/export_contract_data.go deleted file mode 100644 index dc7f8c97..00000000 --- a/cmd/export_contract_data.go +++ /dev/null @@ -1,86 +0,0 @@ 
-package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var dataCmd = &cobra.Command{ - Use: "export_contract_data", - Short: "Exports the contract data information made from the genesis ledger to a specified endpoint.", - Long: `Exports historical contract data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - datas, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeContractData, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("Error getting ledger entries: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, data := range datas { - TransformContractData := transform.NewTransformContractDataStruct(transform.AssetFromContractData, transform.ContractBalanceFromContractData) - transformed, err, ok := TransformContractData.TransformContractData(data, env.NetworkPassphrase, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform contract data %+v: %v", data, err)) - numFailures += 1 - continue - } - - if !ok { - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export contract data %+v: %v", data, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(datas), numFailures) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - - }, -} - -func init() { - rootCmd.AddCommand(dataCmd) - utils.AddCommonFlags(dataCmd.Flags()) - utils.AddBucketFlags("contract_data", dataCmd.Flags()) - utils.AddCloudStorageFlags(dataCmd.Flags()) - dataCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_contract_data_test.go b/cmd/export_contract_data_test.go deleted file mode 100644 index 2e511e8e..00000000 --- a/cmd/export_contract_data_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportContractData(t *testing.T) { - t.Skip("Skipping due to unstable data in Futurenet") - // TODO: find ledger with data and create testdata - tests := []cliTest{ - 
{ - name: "contract data", - args: []string{"export_contract_data", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")}, - golden: "bucket_read.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/contract_data/") - } -} diff --git a/cmd/export_liquidity_pools.go b/cmd/export_liquidity_pools.go deleted file mode 100644 index d5b3de65..00000000 --- a/cmd/export_liquidity_pools.go +++ /dev/null @@ -1,81 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var poolsCmd = &cobra.Command{ - Use: "export_pools", - Short: "Exports the liquidity pools data.", - Long: `Exports historical liquidity pools data from the genesis ledger to the provided end-ledger to an output file. -The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it -should be used in an initial data dump. In order to get liqudity pools information within a specified ledger range, see -the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - pools, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeLiquidityPool, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read accounts: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, pool := range pools { - transformed, err := transform.TransformPool(pool, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform pool %+v: %v", pool, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export pool %+v: %v", pool, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(pools), numFailures) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - - }, -} - -func init() { - rootCmd.AddCommand(poolsCmd) - utils.AddCommonFlags(poolsCmd.Flags()) - utils.AddBucketFlags("pools", poolsCmd.Flags()) - utils.AddCloudStorageFlags(poolsCmd.Flags()) - poolsCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_offers.go b/cmd/export_offers.go deleted file mode 100644 index cf7a425f..00000000 --- a/cmd/export_offers.go +++ /dev/null @@ -1,82 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - 
"github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -// offersCmd represents the offers command -var offersCmd = &cobra.Command{ - Use: "export_offers", - Short: "Exports the data on offers made from the genesis ledger to a specified endpoint.", - Long: `Exports historical offer data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - offers, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeOffer, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read offers: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, offer := range offers { - transformed, err := transform.TransformOffer(offer, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform offer %+v: %v", offer, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export offer %+v: %v", offer, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(offers), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(offersCmd) - utils.AddCommonFlags(offersCmd.Flags()) - utils.AddBucketFlags("offers", offersCmd.Flags()) - utils.AddCloudStorageFlags(offersCmd.Flags()) - offersCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_offers_test.go b/cmd/export_offers_test.go deleted file mode 100644 index a2ab9c40..00000000 --- a/cmd/export_offers_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportOffers(t *testing.T) { - tests := []cliTest{ - { - name: "offers: bucket list with exact checkpoint", - args: []string{"export_offers", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")}, - golden: "bucket_read_exact.golden", - wantErr: nil, - }, - { - name: "offers: bucket list with end not on checkpoint", - args: []string{"export_offers", "-e", "80210", "-o", gotTestDir(t, "bucket_read_offset.txt")}, - golden: "bucket_read_offset.golden", - wantErr: nil, - }, - } - - for _, test := range 
tests { - runCLITest(t, test, "testdata/offers/") - } -} diff --git a/cmd/export_orderbooks.go b/cmd/export_orderbooks.go deleted file mode 100644 index 58816fae..00000000 --- a/cmd/export_orderbooks.go +++ /dev/null @@ -1,185 +0,0 @@ -package cmd - -import ( - "bytes" - "encoding/json" - "math" - "os" - "path/filepath" - - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -// exportOrderbooksCmd represents the exportOrderbooks command -var exportOrderbooksCmd = &cobra.Command{ - Use: "export_orderbooks", - Short: "This command exports the historical orderbooks", - Long: `This command instantiates a stellar-core instance and uses it to export normalized orderbooks. - The information is exported in batches determined by the batch-size flag. The normalized data is exported in multiple - different files within the exported data folder. These files are dimAccounts.txt, dimOffers.txt, dimMarkets.txt, and factEvents.txt. - These files contain normalized data that helps save storage space. - - If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are - confirmed by the Stellar network. In this unbounded case, a stellar-core config path is required to utilize the Captive Core toml.`, - Run: func(cmd *cobra.Command, args []string) { - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - - execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - if batchSize <= 0 { - cmdLogger.Fatalf("batch-size (%d) must be greater than 0", batchSize) - } - - if configPath == "" && commonArgs.EndNum == 0 { - cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)") - } - - var err error - execPath, err = filepath.Abs(execPath) - if err != nil { - cmdLogger.Fatal("could not get absolute filepath for stellar-core executable: ", err) - } - - configPath, err = filepath.Abs(configPath) - if err != nil { - cmdLogger.Fatal("could not get absolute filepath for the config file: ", err) - } - - checkpointSeq := utils.GetMostRecentCheckpoint(startNum) - core, err := input.PrepareCaptiveCore(execPath, configPath, checkpointSeq, commonArgs.EndNum, env) - if err != nil { - cmdLogger.Fatal("error creating a prepared captive core instance: ", err) - } - - orderbook, err := input.GetEntriesFromGenesis(checkpointSeq, xdr.LedgerEntryTypeOffer, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read initial orderbook: ", err) - } - - orderbookChannel := make(chan input.OrderbookBatch) - - go input.StreamOrderbooks(core, startNum, commonArgs.EndNum, batchSize, orderbookChannel, orderbook, env, cmdLogger) - - // If the end sequence number is defined, we work in a closed range and export a finite number of batches - if commonArgs.EndNum != 0 { - batchCount := uint32(math.Ceil(float64(commonArgs.EndNum-startNum+1) / float64(batchSize))) - for i := uint32(0); i < batchCount; i++ { - batchStart := startNum + i*batchSize - // Subtract 1 from the end batch number because batches do not include the last batch in the range - batchEnd := batchStart + batchSize - 1 - if batchEnd > 
commonArgs.EndNum { - batchEnd = commonArgs.EndNum - } - - parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger) - exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, commonArgs.Extra) - } - } else { - // otherwise, we export in an unbounded manner where batches are constantly exported - var batchNum uint32 = 0 - for { - batchStart := startNum + batchNum*batchSize - batchEnd := batchStart + batchSize - 1 - parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger) - exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, commonArgs.Extra) - batchNum++ - } - } - }, -} - -// writeSlice writes the slice either to a file. -func writeSlice(file *os.File, slice [][]byte, extra map[string]string) error { - - for _, data := range slice { - bytesToWrite := data - if len(extra) > 0 { - i := map[string]interface{}{} - decoder := json.NewDecoder(bytes.NewReader(data)) - decoder.UseNumber() - err := decoder.Decode(&i) - if err != nil { - return err - } - for k, v := range extra { - i[k] = v - } - bytesToWrite, err = json.Marshal(i) - if err != nil { - return err - } - } - file.WriteString(string(bytesToWrite) + "\n") - } - - file.Close() - return nil -} - -func exportOrderbook( - start, end uint32, - folderPath string, - parser *input.OrderbookParser, - cloudCredentials, cloudStorageBucket, cloudProvider string, - extra map[string]string) { - marketsFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimMarkets")) - offersFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimOffers")) - accountsFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimAccounts")) - eventsFilePath := filepath.Join(folderPath, exportFilename(start, end, "factEvents")) - - marketsFile := mustOutFile(marketsFilePath) - offersFile := mustOutFile(offersFilePath) - accountsFile := mustOutFile(accountsFilePath) - eventsFile := mustOutFile(eventsFilePath) - - err := writeSlice(marketsFile, parser.Markets, extra) - if err != nil { - cmdLogger.LogError(err) - } - err = writeSlice(offersFile, parser.Offers, extra) - if err != nil { - cmdLogger.LogError(err) - } - err = writeSlice(accountsFile, parser.Accounts, extra) - if err != nil { - cmdLogger.LogError(err) - } - err = writeSlice(eventsFile, parser.Events, extra) - if err != nil { - cmdLogger.LogError(err) - } - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, marketsFilePath) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, offersFilePath) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, accountsFilePath) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, eventsFilePath) -} - -func init() { - rootCmd.AddCommand(exportOrderbooksCmd) - utils.AddCommonFlags(exportOrderbooksCmd.Flags()) - utils.AddCoreFlags(exportOrderbooksCmd.Flags(), "orderbooks_output/") - utils.AddCloudStorageFlags(exportOrderbooksCmd.Flags()) - - exportOrderbooksCmd.MarkFlagRequired("start-ledger") - /* - Current flags: - start-ledger: the ledger sequence number for the beginning of the export period - end-ledger: the ledger sequence number for the end of the export range - - output-folder: folder that will contain the output files - limit: maximum number of changes to export in a given batch; if negative then everything gets exported - batch-size: size of the export batches - - core-executable: path to stellar-core executable - core-config: path to 
stellar-core config file - */ -} diff --git a/cmd/export_orderbooks_test.go b/cmd/export_orderbooks_test.go deleted file mode 100644 index bf1c043b..00000000 --- a/cmd/export_orderbooks_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package cmd - -import ( - "fmt" - "testing" -) - -func TestExportOrderbooks(t *testing.T) { - tests := []cliTest{ - { - name: "unbounded range with no config", - args: []string{"export_orderbooks", "-x", coreExecutablePath, "-s", "100000"}, - golden: "", - wantErr: fmt.Errorf("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)"), - }, - { - name: "0 batch size", - args: []string{"export_orderbooks", "-b", "0", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "100000", "-e", "164000"}, - golden: "", - wantErr: fmt.Errorf("batch-size (0) must be greater than 0"), - }, - { - name: "orderbook from single ledger", - args: []string{"export_orderbooks", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "5000000", "-e", "5000000", "-o", gotTestDir(t, "single/")}, - golden: "single_ledger.golden", - sortForComparison: true, - wantErr: nil, - }, - { - name: "orderbooks from large range", - args: []string{"export_orderbooks", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "6000000", "-e", "6001000", "-o", gotTestDir(t, "range/")}, - golden: "large_range_orderbooks.golden", - sortForComparison: true, - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/orderbooks/") - } -} diff --git a/cmd/export_trustlines.go b/cmd/export_trustlines.go deleted file mode 100644 index 01434be9..00000000 --- a/cmd/export_trustlines.go +++ /dev/null @@ -1,85 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -// trustlinesCmd represents the trustlines command -var trustlinesCmd = &cobra.Command{ - Use: "export_trustlines", - Short: "Exports the trustline data over a specified range.", - Long: `Exports historical trustline data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. 
In order to get trustline information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - trustlines, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeTrustline, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("could not read trustlines: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, trust := range trustlines { - transformed, err := transform.TransformTrustline(trust, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not json transform trustline %+v: %v", trust, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export trustline %+v: %v", trust, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - - outFile.Close() - - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(trustlines), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(trustlinesCmd) - utils.AddCommonFlags(trustlinesCmd.Flags()) - utils.AddBucketFlags("trustlines", trustlinesCmd.Flags()) - utils.AddCloudStorageFlags(trustlinesCmd.Flags()) - trustlinesCmd.MarkFlagRequired("end-ledger") - - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_trustlines_test.go b/cmd/export_trustlines_test.go deleted file mode 100644 index 61a69281..00000000 --- a/cmd/export_trustlines_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportTrustlines(t *testing.T) { - tests := []cliTest{ - { - name: "trustlines: bucket list with exact checkpoint", - args: []string{"export_trustlines", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.golden")}, - golden: "bucket_read_exact.golden", - wantErr: nil, - }, - { - name: "trustlines: bucket list with end not on checkpoint", - args: []string{"export_trustlines", "-e", "139672", "-o", gotTestDir(t, "bucket_read_off.golden")}, - golden: "bucket_read_off.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/trustlines/") - } -} diff --git a/cmd/export_ttl.go b/cmd/export_ttl.go deleted file mode 100644 index 89adba3b..00000000 --- a/cmd/export_ttl.go +++ /dev/null @@ -1,81 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/xdr" -) - -var ttlCmd = 
&cobra.Command{ - Use: "export_ttl", - Short: "Exports the ttl information.", - Long: `Exports historical ttl data from the genesis ledger to the provided end-ledger to an output file. - The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it - should be used in an initial data dump. In order to get offer information within a specified ledger range, see - the export_ledger_entry_changes command.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) - path := utils.MustBucketFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - - ttls, err := input.GetEntriesFromGenesis(commonArgs.EndNum, xdr.LedgerEntryTypeTtl, env.ArchiveURLs) - if err != nil { - cmdLogger.Fatal("Error getting ledger entries: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - totalNumBytes := 0 - var header xdr.LedgerHeaderHistoryEntry - for _, ttl := range ttls { - transformed, err := transform.TransformTtl(ttl, header) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not transform ttl %+v: %v", ttl, err)) - numFailures += 1 - continue - } - - numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export ttl %+v: %v", ttl, err)) - numFailures += 1 - continue - } - totalNumBytes += numBytes - } - outFile.Close() - cmdLogger.Info("Number of bytes written: ", totalNumBytes) - - printTransformStats(len(ttls), numFailures) - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - - }, -} - -func init() { - rootCmd.AddCommand(ttlCmd) - utils.AddCommonFlags(ttlCmd.Flags()) - utils.AddBucketFlags("ttl", ttlCmd.Flags()) - utils.AddCloudStorageFlags(ttlCmd.Flags()) - ttlCmd.MarkFlagRequired("end-ledger") - /* - Current flags: - end-ledger: the ledger sequence number for the end of the export range (required) - output-file: filename of the output file - stdout: if set, output is printed to stdout - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - end time as a replacement for end sequence numbers - */ -} diff --git a/cmd/export_ttl_test.go b/cmd/export_ttl_test.go deleted file mode 100644 index 3dc5d762..00000000 --- a/cmd/export_ttl_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package cmd - -import ( - "testing" -) - -func TestExportttl(t *testing.T) { - t.Skip("Skipping due to unstable data in Futurenet") - // TODO: find ledger with data and create testdata - tests := []cliTest{ - { - name: "ttl", - args: []string{"export_ttl", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")}, - golden: "bucket_read.golden", - wantErr: nil, - }, - } - - for _, test := range tests { - runCLITest(t, test, "testdata/ttl/") - } -} From 86a7f88bd76b4597f8d7eb21d73a86235dfa7f0b Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 17:00:19 -0400 Subject: [PATCH 37/49] Remove unused bucketlist entries --- internal/input/bucketlist_entries.go | 63 ---------------------------- 1 file changed, 63 deletions(-) delete mode 100644 internal/input/bucketlist_entries.go diff --git a/internal/input/bucketlist_entries.go b/internal/input/bucketlist_entries.go deleted file 
mode 100644 index 517641d5..00000000 --- a/internal/input/bucketlist_entries.go +++ /dev/null @@ -1,63 +0,0 @@ -package input - -import ( - "context" - "io" - - "github.com/stellar/go/historyarchive" - "github.com/stellar/go/ingest" - "github.com/stellar/go/xdr" - - "github.com/stellar/stellar-etl/internal/utils" -) - -// GetEntriesFromGenesis returns a slice of ledger entries of the specified type for the ledgers starting from the genesis ledger and ending at end (inclusive) -func GetEntriesFromGenesis(end uint32, entryType xdr.LedgerEntryType, archiveURLs []string) ([]ingest.Change, error) { - archive, err := utils.CreateHistoryArchiveClient(archiveURLs) - if err != nil { - return []ingest.Change{}, err - } - - latestNum, err := utils.GetLatestLedgerSequence(archiveURLs) - if err != nil { - return []ingest.Change{}, err - } - - if err = utils.ValidateLedgerRange(2, end, latestNum); err != nil { - return []ingest.Change{}, err - } - - checkpointSeq, err := utils.GetCheckpointNum(end, latestNum) - if err != nil { - return []ingest.Change{}, err - } - - return readBucketList(archive, checkpointSeq, entryType) -} - -// readBucketList reads the bucket list for the specified checkpoint sequence number and returns a slice of ledger entries of the specified type -func readBucketList(archive historyarchive.ArchiveInterface, checkpointSeq uint32, entryType xdr.LedgerEntryType) ([]ingest.Change, error) { - changeReader, err := ingest.NewCheckpointChangeReader(context.Background(), archive, checkpointSeq) - defer changeReader.Close() - if err != nil { - return []ingest.Change{}, err - } - - entrySlice := []ingest.Change{} - for { - change, err := changeReader.Read() - if err == io.EOF { - break - } - - if err != nil { - return []ingest.Change{}, err - } - - if change.Type == entryType { - entrySlice = append(entrySlice, change) - } - } - - return entrySlice, nil -} From ed5a07f552df0a0e8f1bc2d5cd0c5680b1588466 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 17:03:09 -0400 Subject: [PATCH 38/49] Remove debug comment --- internal/utils/main.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/utils/main.go b/internal/utils/main.go index 1bf01ed0..069ac0f8 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -796,7 +796,6 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme // Create ledger backend from datastore params := make(map[string]string) - //params["destination_bucket_path"] = "ledger-exporter/ledgers" params["destination_bucket_path"] = env.DatastorePath dataStoreConfig := datastore.DataStoreConfig{ Type: "GCS", From f8250609dd16e1aeeabf33ff3328fb204eae59c0 Mon Sep 17 00:00:00 2001 From: Simon Chow Date: Tue, 7 May 2024 17:57:20 -0400 Subject: [PATCH 39/49] Pass new params correctly --- cmd/export_assets.go | 2 +- cmd/export_diagnostic_events.go | 2 +- cmd/export_effects.go | 2 +- cmd/export_ledger_entry_changes.go | 2 +- cmd/export_ledger_transaction.go | 2 +- cmd/export_ledgers.go | 2 +- cmd/export_operations.go | 2 +- cmd/export_trades.go | 2 +- cmd/export_transactions.go | 2 +- internal/input/ledger_range.go | 6 +++++- internal/utils/main.go | 32 +++++++++++++++++++----------- 11 files changed, 34 insertions(+), 22 deletions(-) diff --git a/cmd/export_assets.go b/cmd/export_assets.go index 7ea33a69..2160d6eb 100644 --- a/cmd/export_assets.go +++ b/cmd/export_assets.go @@ -20,7 +20,7 @@ var assetsCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := 
utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) outFile := mustOutFile(path) diff --git a/cmd/export_diagnostic_events.go b/cmd/export_diagnostic_events.go index 3a5e9ad2..66ed6438 100644 --- a/cmd/export_diagnostic_events.go +++ b/cmd/export_diagnostic_events.go @@ -20,7 +20,7 @@ var diagnosticEventsCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { diff --git a/cmd/export_effects.go b/cmd/export_effects.go index 03e0f4d4..b93aaf1c 100644 --- a/cmd/export_effects.go +++ b/cmd/export_effects.go @@ -18,7 +18,7 @@ var effectsCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index aec74de1..b227defe 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -30,7 +30,7 @@ be exported.`, Run: func(cmd *cobra.Command, args []string) { commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = commonArgs.StrictExport - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) _, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger) exports := utils.MustExportTypeFlags(cmd.Flags(), cmdLogger) diff --git a/cmd/export_ledger_transaction.go b/cmd/export_ledger_transaction.go index 4054b63f..7d07b9ec 100644 --- a/cmd/export_ledger_transaction.go +++ b/cmd/export_ledger_transaction.go @@ -20,7 +20,7 @@ var ledgerTransactionCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) ledgerTransaction, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go index 501e649b..e1dce45b 100644 --- a/cmd/export_ledgers.go +++ b/cmd/export_ledgers.go @@ -20,7 +20,7 @@ var ledgersCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := 
utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) var ledgers []utils.HistoryArchiveLedgerAndLCM var err error diff --git a/cmd/export_operations.go b/cmd/export_operations.go index e8418e6d..cbfb8d84 100644 --- a/cmd/export_operations.go +++ b/cmd/export_operations.go @@ -20,7 +20,7 @@ var operationsCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) operations, err := input.GetOperations(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { diff --git a/cmd/export_trades.go b/cmd/export_trades.go index 94441860..748cdb66 100644 --- a/cmd/export_trades.go +++ b/cmd/export_trades.go @@ -22,7 +22,7 @@ var tradesCmd = &cobra.Command{ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) trades, err := input.GetTrades(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) diff --git a/cmd/export_transactions.go b/cmd/export_transactions.go index 966cd0fa..35f82bd2 100644 --- a/cmd/export_transactions.go +++ b/cmd/export_transactions.go @@ -20,7 +20,7 @@ var transactionsCmd = &cobra.Command{ cmdLogger.StrictExport = commonArgs.StrictExport startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs.IsTest, commonArgs.IsFuture, commonArgs.DatastorePath) + env := utils.GetEnvironmentDetails(commonArgs) transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) if err != nil { diff --git a/internal/input/ledger_range.go b/internal/input/ledger_range.go index f4ec07ee..2f778095 100644 --- a/internal/input/ledger_range.go +++ b/internal/input/ledger_range.go @@ -32,7 +32,11 @@ const avgCloseTime = time.Second * 5 // average time to close a stellar ledger func GetLedgerRange(startTime, endTime time.Time, isTest bool, isFuture bool) (int64, int64, error) { startTime = startTime.UTC() endTime = endTime.UTC() - env := utils.GetEnvironmentDetails(isTest, isFuture, "") + commonFlagValues := utils.CommonFlagValues{ + IsTest: isTest, + IsFuture: isFuture, + } + env := utils.GetEnvironmentDetails(commonFlagValues) if startTime.After(endTime) { return 0, 0, fmt.Errorf("start time must be less than or equal to the end time") diff --git a/internal/utils/main.go b/internal/utils/main.go index 069ac0f8..e54fe920 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -236,6 +236,7 @@ func AddCommonFlags(flags *pflag.FlagSet) { flags.StringToStringP("extra-fields", "u", 
map[string]string{}, "Additional fields to append to output jsons. Used for appending metadata") flags.Bool("captive-core", false, "If set, run captive core to retrieve data. Otherwise use TxMeta file datastore.") flags.String("datastore-path", "ledger-exporter/ledgers", "Datastore bucket path to read txmeta files from.") + flags.Uint32("buffer-size", 5, "Buffer size sets the max limit for the number of txmeta files that can be held in memory.") flags.Uint32("num-workers", 5, "Number of workers to spawn that read txmeta files from the datastore.") flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.") flags.Uint32("retry-wait", 5, "Time in seconds to wait for GetLedger retry.") @@ -293,6 +294,7 @@ type CommonFlagValues struct { Extra map[string]string UseCaptiveCore bool DatastorePath string + BufferSize uint32 NumWorkers uint32 RetryLimit uint32 RetryWait uint32 @@ -336,6 +338,11 @@ func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) CommonFlagValues { logger.Fatal("could not get datastore-bucket-path string: ", err) } + bufferSize, err := flags.GetUint32("buffer-size") + if err != nil { + logger.Fatal("could not get buffer-size uint32: ", err) + } + numWorkers, err := flags.GetUint32("num-workers") if err != nil { logger.Fatal("could not get num-workers uint32: ", err) @@ -359,6 +366,7 @@ func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) CommonFlagValues { Extra: extra, UseCaptiveCore: useCaptiveCore, DatastorePath: datastorePath, + BufferSize: bufferSize, NumWorkers: numWorkers, RetryLimit: retryLimit, RetryWait: retryWait, @@ -679,29 +687,29 @@ type EnvironmentDetails struct { ArchiveURLs []string BinaryPath string CoreConfig string - DatastorePath string Network string + CommonFlagValues CommonFlagValues } // GetPassphrase returns the correct Network Passphrase based on env preference -func GetEnvironmentDetails(isTest bool, isFuture bool, datastorePath string) (details EnvironmentDetails) { - if isTest { +func GetEnvironmentDetails(commonFlags CommonFlagValues) (details EnvironmentDetails) { + if commonFlags.IsTest { // testnet passphrase to be used for testing details.NetworkPassphrase = network.TestNetworkPassphrase details.ArchiveURLs = testArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg" - details.DatastorePath = datastorePath details.Network = "testnet" + details.CommonFlagValues = commonFlags return details - } else if isFuture { + } else if commonFlags.IsFuture { // details.NetworkPassphrase = network.FutureNetworkPassphrase details.NetworkPassphrase = "Test SDF Future Network ; October 2022" details.ArchiveURLs = futureArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg" - details.DatastorePath = datastorePath details.Network = "futurenet" + details.CommonFlagValues = commonFlags return details } else { // default: mainnet @@ -709,8 +717,8 @@ func GetEnvironmentDetails(isTest bool, isFuture bool, datastorePath string) (de details.ArchiveURLs = mainArchiveURLs details.BinaryPath = "/usr/bin/stellar-core" details.CoreConfig = "/etl/docker/stellar-core.cfg" - details.DatastorePath = datastorePath details.Network = "pubnet" + details.CommonFlagValues = commonFlags return details } } @@ -796,7 +804,7 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme // Create ledger backend from datastore params := make(map[string]string) - params["destination_bucket_path"] = env.DatastorePath + 
params["destination_bucket_path"] = env.CommonFlagValues.DatastorePath dataStoreConfig := datastore.DataStoreConfig{ Type: "GCS", Params: params, @@ -820,10 +828,10 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme LedgerBatchConfig: ledgerBatchConfig, CompressionType: "gzip", DataStore: dataStore, - BufferSize: 1000, - NumWorkers: 5, - RetryLimit: 3, - RetryWait: 5, + BufferSize: env.CommonFlagValues.BufferSize, + NumWorkers: env.CommonFlagValues.NumWorkers, + RetryLimit: env.CommonFlagValues.RetryLimit, + RetryWait: time.Duration(env.CommonFlagValues.RetryWait) * time.Second, } backend, err := ledgerbackend.NewBufferedStorageBackend(ctx, BSBackendConfig) From a14b3224a8647a4064a3c7b6275aaaf2aacac18c Mon Sep 17 00:00:00 2001 From: chowbao Date: Mon, 13 May 2024 10:46:38 -0400 Subject: [PATCH 40/49] Update to latest stellar/go change with updated ledgerexporter zstd (#244) * Update to latest stellar/go change with updated ledgerexporter zstd --- go.mod | 3 ++- go.sum | 4 ++-- internal/utils/main.go | 5 +---- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index fc76db68..750af302 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.17.0 - github.com/stellar/go v0.0.0-20240507142223-735600adb2d4 + github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44 github.com/stretchr/testify v1.9.0 ) @@ -50,6 +50,7 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jmoiron/sqlx v1.3.5 // indirect + github.com/klauspost/compress v1.17.0 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/magiconair/properties v1.8.7 // indirect diff --git a/go.sum b/go.sum index 568dfe39..7ba7615d 100644 --- a/go.sum +++ b/go.sum @@ -296,8 +296,8 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= -github.com/stellar/go v0.0.0-20240507142223-735600adb2d4 h1:4dmEOaVcttNCZTIXE8y5VwNvduqVwE+D7oFLAu2nn/k= -github.com/stellar/go v0.0.0-20240507142223-735600adb2d4/go.mod h1:kxiz7GJ94uVORlLZ/q7BrEQZAvBgkNXly7I19axD3EA= +github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44 h1:a53z95H5rfJ871NWopW1s57lmXvsC76KTIcYrWfvkrA= +github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44/go.mod h1:cHNYV5oK7fp9ZkRIC9fjQxvIArxGPk42q/f3YQBQll0= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= diff --git a/internal/utils/main.go b/internal/utils/main.go index e54fe920..bcbd8bc8 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -235,7 +235,7 @@ func AddCommonFlags(flags *pflag.FlagSet) { flags.Bool("futurenet", false, "If set, will connect to Futurenet instead of Mainnet.") flags.StringToStringP("extra-fields", "u", map[string]string{}, "Additional fields to append to output jsons. 
Used for appending metadata") flags.Bool("captive-core", false, "If set, run captive core to retrieve data. Otherwise use TxMeta file datastore.") - flags.String("datastore-path", "ledger-exporter/ledgers", "Datastore bucket path to read txmeta files from.") + flags.String("datastore-path", "sdf-ledger-close-metas/ledgers", "Datastore bucket path to read txmeta files from.") flags.Uint32("buffer-size", 5, "Buffer size sets the max limit for the number of txmeta files that can be held in memory.") flags.Uint32("num-workers", 5, "Number of workers to spawn that read txmeta files from the datastore.") flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.") @@ -820,13 +820,10 @@ func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env Environme ledgerBatchConfig := datastore.LedgerBatchConfig{ LedgersPerFile: 1, FilesPerPartition: 64000, - FileSuffix: ".xdr.gz", } - // TODO: In the future CompressionType should be removed as it won't be configurable BSBackendConfig := ledgerbackend.BufferedStorageBackendConfig{ LedgerBatchConfig: ledgerBatchConfig, - CompressionType: "gzip", DataStore: dataStore, BufferSize: env.CommonFlagValues.BufferSize, NumWorkers: env.CommonFlagValues.NumWorkers, From 2f8c44a2ccc1da3cf0185827220187b087b20927 Mon Sep 17 00:00:00 2001 From: Kanwalpreet Dhindsa Date: Tue, 14 May 2024 09:51:08 -0700 Subject: [PATCH 41/49] Create codeql.yml (#243) --- .github/workflows/codeql.yml | 41 ++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/codeql.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000..5907fe11 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + schedule: + - cron: '42 15 * * 6' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + runs-on: ubuntu-latest + timeout-minutes: 360 + permissions: + # required for all workflows + security-events: write + + strategy: + fail-fast: false + matrix: + include: + - language: go + build-mode: autobuild + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" From d741725fcc505e08ab8ee996926ab2e9dd758d89 Mon Sep 17 00:00:00 2001 From: chowbao Date: Thu, 16 May 2024 17:54:09 -0400 Subject: [PATCH 42/49] Update readme (#245) Update repo readme --- README.md | 335 ++++++++++++++++++++---------------------------------- 1 file changed, 126 insertions(+), 209 deletions(-) diff --git a/README.md b/README.md index 1501fa3c..544608ff 100644 --- a/README.md +++ b/README.md @@ -1,210 +1,123 @@ +# **Stellar ETL** -# Stellar ETL The Stellar-ETL is a data pipeline that allows users to extract data from the history of the Stellar network. 
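A minimal invocation, for orientation only (an editorial sketch, not part of the original README — it simply combines the Docker image and the `export_ledgers` command documented in the sections below; the ledger range is arbitrary, and the usual network/datastore flags from the Command Reference still apply):

```bash
> docker run stellar/stellar-etl:latest stellar-etl export_ledgers \
--start-ledger 1000 \
--end-ledger 500000 --output exported_ledgers.txt
```
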
## **Table of Contents** - [Exporting the Ledger Chain](#exporting-the-ledger-chain) - - [Command Reference](#command-reference) - - [Bucket List Commands](#bucket-list-commands) - - [export_accounts](#export_accounts) - - [export_offers](#export_offers) - - [export_trustlines](#export_trustlines) - - [export_claimable_balances](#export_claimable_balances) - - [export_pools](#export_pools) - - [export_signers](#export_signers) - - [export_contract_data (futurenet, testnet)](#export_contract_data) - - [export_contract_code (futurenet, testnet)](#export_contract_code) - - [export_config_settings (futurenet, testnet)](#export_config_settings) - - [export_ttl (futurenet, testnet)](#export_ttl) - - [History Archive Commands](#history-archive-commands) - - [export_ledgers](#export_ledgers) - - [export_transactions](#export_transactions) - - [export_operations](#export_operations) - - [export_effects](#export_effects) - - [export_assets](#export_assets) - - [export_trades](#export_trades) - - [export_diagnostic_events (futurenet, testnet)](#export_diagnostic_events) - - [Stellar Core Commands](#stellar-core-commands) - - [export_ledger_entry_changes](#export_ledger_entry_changes) - - [export_orderbooks (unsupported)](#export_orderbooks-unsupported) - - [Utility Commands](#utility-commands) - - [get_ledger_range_from_times](#get_ledger_range_from_times) +- [Command Reference](#command-reference) + - [Export Commands](#export-commands) + - [export_ledgers](#export_ledgers) + - [export_transactions](#export_transactions) + - [export_operations](#export_operations) + - [export_effects](#export_effects) + - [export_assets](#export_assets) + - [export_trades](#export_trades) + - [export_diagnostic_events](#export_diagnostic_events) + - [export_ledger_entry_changes](#export_ledger_entry_changes) + - [Utility Commands](#utility-commands) + - [get_ledger_range_from_times](#get_ledger_range_from_times) - [Schemas](#schemas) - [Extensions](#extensions) - [Adding New Commands](#adding-new-commands) -
+
+--- -# Exporting the Ledger Chain +# **Exporting the Ledger Chain** ## **Docker** + 1. Download the latest version of docker [Docker](https://www.docker.com/get-started) -2. Pull the stellar-etl Docker image: `docker pull stellar/stellar-etl` -3. Run the Docker images with the desired stellar-etl command: `docker run stellar/stellar-etl stellar-etl [etl-command] [etl-command arguments]` +2. Pull the latest stellar-etl Docker image: `docker pull stellar/stellar-etl:latest` +3. Run the Docker images with the desired stellar-etl command: `docker run stellar/stellar-etl:latest stellar-etl [etl-command] [etl-command arguments]` ## **Manual Installation** -1. Install Golang v1.19.0 or later: https://golang.org/dl/ +1. Install Golang v1.22.1 or later: https://golang.org/dl/ 2. Ensure that your Go bin has been added to the PATH env variable: `export PATH=$PATH:$(go env GOPATH)/bin` -3. Download and install Stellar-Core v19.0.0 or later: https://github.com/stellar/stellar-core/blob/master/INSTALL.md - -4. Run `go get github.com/stellar/stellar-etl` to install the ETL - +3. If using captive-core, download and install Stellar-Core v20.0.0 or later: https://github.com/stellar/stellar-core/blob/master/INSTALL.md +4. Run `go install github.com/stellar/stellar-etl@latest` to install the ETL 5. Run export commands to export information about the legder -## **Command Reference** -- [Bucket List Commands](#bucket-list-commands) - - [export_accounts](#export_accounts) - - [export_offers](#export_offers) - - [export_trustlines](#export_trustlines) - - [export_claimable_balances](#export_claimable_balances) - - [export_pools](#export_pools) - - [export_signers](#export_signers) - - [export_contract_data](#export_contract_data) - - [export_contract_code](#export_contract_code) - - [export_config_settings](#export_config_settings) - - [export_ttl](#export_ttl) -- [History Archive Commands](#history-archive-commands) - - [export_ledgers](#export_ledgers) - - [export_transactions](#export_transactions) - - [export_operations](#export_operations) - - [export_effects](#export_effects) - - [export_assets](#export_assets) - - [export_trades](#export_trades) - - [export_diagnostic_events](#export_diagnostic_events) - - [Stellar Core Commands](#stellar-core-commands) - - [export_orderbooks (unsupported)](#export_orderbooks-unsupported) - - [Utility Commands](#utility-commands) - - [get_ledger_range_from_times](#get_ledger_range_from_times) - -Every command accepts a `-h` parameter, which provides a help screen containing information about the command, its usage, and its flags. - -Commands have the option to read from testnet with the `--testnet` flag, from futurenet with the `--futurenet` flag, and defaults to reading from mainnet without any flags. -> *_NOTE:_* Adding both flags will default to testnet. Each stellar-etl command can only run from one network at a time. - -
- -*** - -## **Bucket List Commands** - -These commands use the bucket list in order to ingest large amounts of data from the history of the stellar ledger. If you are trying to read large amounts of information in order to catch up to the current state of the ledger, these commands provide a good way to catchup quickly. However, they don't allow for custom start-ledger values. For updating within a user-defined range, see the Stellar Core commands. - -> *_NOTE:_* In order to get information within a specified ledger range for bucket list commands, see the export_ledger_entry_changes command. - -
- -### **export_accounts** - -```bash -> stellar-etl export_accounts --end-ledger 500000 --output exported_accounts.txt -``` - -Exports historical account data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get account information within a specified ledger range, see the export_ledger_entry_changes command. - -
- -### **export_offers** +## **Manual build for local development** -```bash -> stellar-etl export_offers --end-ledger 500000 --output exported_offers.txt -``` +1. Clone this repo `git clone https://github.com/stellar/stellar-etl` +2. Build stellar-etl with `go build` +3. Run export commands to export information about the legder -Exports historical offer data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command. - -
+> _*Note:*_ If using the GCS datastore, you can run the following to set GCP credentials to use in your shell -### **export_trustlines** - -```bash -> stellar-etl export_trustlines --end-ledger 500000 --output exported_trustlines.txt ``` - -Exports historical trustline data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get trustline information within a specified ledger range, see the export_ledger_entry_changes command. - -
- -### **export_claimable_balances** - -```bash -> stellar-etl export_claimable_balances --end-ledger 500000 --output exported_claimable_balances.txt +gcloud auth login +gcloud config set project dev-hubble +gcloud auth application-default login ``` -Exports claimable balances data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get claimable balances information within a specified ledger range, see the export_ledger_entry_changes command. +> _*Note:*_ Instructions for installing gcloud can be found [here](https://cloud.google.com/sdk/docs/install-sdk)
-### **export_pools** +--- -```bash -> stellar-etl export_pools --end-ledger 500000 --output exported_pools.txt -``` - -Exports historical liquidity pools data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get liquidity pools information within a specified ledger range, see the export_ledger_entry_changes command. - -
- -### **export_signers** - -```bash -> stellar-etl export_signers --end-ledger 500000 --output exported_signers.txt -``` - -Exports historical account signers data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get account signers information within a specified ledger range, see the export_ledger_entry_changes command. +# **Command Reference** -
- -### **export_contract_data** +- [Export Commands](#export-commands) + - [export_ledgers](#export_ledgers) + - [export_transactions](#export_transactions) + - [export_operations](#export_operations) + - [export_effects](#export_effects) + - [export_assets](#export_assets) + - [export_trades](#export_trades) + - [export_diagnostic_events](#export_diagnostic_events) + - [export_ledger_entry_changes](#export_ledger_entry_changes) +- [Utility Commands](#utility-commands) + - [get_ledger_range_from_times](#get_ledger_range_from_times) -```bash -> stellar-etl export_contract_data --end-ledger 500000 --output export_contract_data.txt -``` - -Exports historical contract data data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get contract data information within a specified ledger range, see the export_ledger_entry_changes command. - -
- -### **export_contract_code** +Every command accepts a `-h` parameter, which provides a help screen containing information about the command, its usage, and its flags. -```bash -> stellar-etl export_contract_code --end-ledger 500000 --output export_contract_code.txt -``` +Commands have the option to read from testnet with the `--testnet` flag, from futurenet with the `--futurenet` flag, and defaults to reading from mainnet without any flags. -Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get contract code information within a specified ledger range, see the export_ledger_entry_changes command. +> _*NOTE:*_ Adding both flags will default to testnet. Each stellar-etl command can only run from one network at a time.
-### **export_config_settings** +--- -```bash -> stellar-etl export_config_settings --end-ledger 500000 --output export_config_settings.txt -``` +## **Export Commands** -Exports historical config settings data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get config settings data information within a specified ledger range, see the export_ledger_entry_changes command. +These commands export information using the [Ledger Exporter](https://github.com/stellar/go/blob/master/exp/services/ledgerexporter/README.md) output files within a specified datastore (currently [datastore](https://github.com/stellar/go/tree/master/support/datastore) only supports GCS). This allows users to provide a start and end ledger range. The commands in this category export a list of everything that occurred within the provided range. All of the ranges are inclusive. -
+> _*NOTE:*_ The datastore must contain the expected compressed LedgerCloseMetaBatch XDR binary files as exported from [Ledger Exporter](https://github.com/stellar/go/blob/master/exp/services/ledgerexporter/README.md#exported-files). -### **export_ttl** +#### Common Flags -```bash -> stellar-etl export_ttl --end-ledger 500000 --output export_ttl.txt -``` +| Flag | Description | Default | +| -------------- | --------------------------------------------------------------------------------------------- | ----------------------- | +| start-ledger | The ledger sequence number for the beginning of the export period. Defaults to genesis ledger | 2 | +| end-ledger | The ledger sequence number for the end of the export range | 0 | +| strict-export | If set, transform errors will be fatal | false | +| testnet | If set, will connect to Testnet instead of Pubnet | false | +| futurenet | If set, will connect to Futurenet instead of Pubnet | false | +| extra-fields | Additional fields to append to output jsons. Used for appending metadata | --- | +| captive-core | If set, run captive core to retrieve data. Otherwise use TxMeta file datastore | false | +| datastore-path | Datastore bucket path to read txmeta files from | ledger-exporter/ledgers | +| buffer-size | Buffer size sets the max limit for the number of txmeta files that can be held in memory | 1000 | +| num-workers | Number of workers to spawn that read txmeta files from the datastore | 5 | +| retry-limit | Datastore GetLedger retry limit | 3 | +| retry-wait | Time in seconds to wait for GetLedger retry | 5 | -Exports historical expiration data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get expiration information within a specified ledger range, see the export_ledger_entry_changes command. +> _*NOTE:*_ Using captive-core requires a Stellar Core instance that is v20.0.0 or later. The commands use the Core instance to retrieve information about changes from the ledger. More information about the Stellar ledger information can be found [here](https://developers.stellar.org/network/horizon/api-reference/resources). +>
As the Stellar network grows, the Stellar Core instance has to catch up on an increasingly large amount of information. This catch-up process can add some overhead to the commands in this category. In order to avoid this overhead, prefer processing larger ranges instead of many small ones, or use unbounded mode. >

Recommended resources for running captive-core within a KubernetesPod: +> ``` +> {cpu: 3.5, memory: 20Gi, ephemeral-storage: 12Gi} +> ```
-*** - -## **History Archive Commands** - -These commands export information using the history archives. This allows users to provide a start and end ledger range. The commands in this category export a list of everything that occurred within the provided range. All of the ranges are inclusive. - -> *_NOTE:_* Commands except `export_ledgers` and `export_assets` also require Captive Core to export data. - -
+--- ### **export_ledgers** @@ -213,10 +126,12 @@ These commands export information using the history archives. This allows users --end-ledger 500000 --output exported_ledgers.txt ``` -This command exports ledgers within the provided range. +This command exports ledgers within the provided range.
+--- + ### **export_transactions** ```bash @@ -228,6 +143,8 @@ This command exports transactions within the provided range.
+--- + ### **export_operations** ```bash @@ -239,6 +156,8 @@ This command exports operations within the provided range.
+--- + ### **export_effects** ```bash @@ -250,7 +169,10 @@ This command exports effects within the provided range.
+--- + ### **export_assets** + ```bash > stellar-etl export_assets \ --start-ledger 1000 \ @@ -261,7 +183,10 @@ Exports the assets that are created from payment operations over a specified led
+--- + ### **export_trades** + ```bash > stellar-etl export_trades \ --start-ledger 1000 \ @@ -272,7 +197,10 @@ Exports trade data within the specified range to an output file
+--- + ### **export_diagnostic_events** + ```bash > stellar-etl export_diagnostic_events \ --start-ledger 1000 \ @@ -283,15 +211,7 @@ Exports diagnostic events data within the specified range to an output file
-*** - -## **Stellar Core Commands** - -These commands require a Stellar Core instance that is v19.0.0 or later. The commands use the Core instance to retrieve information about changes from the ledger. These changes can be in the form of accounts, offers, trustlines, claimable balances, liquidity pools, or account signers. - -As the Stellar network grows, the Stellar Core instance has to catch up on an increasingly large amount of information. This catch-up process can add some overhead to the commands in this category. In order to avoid this overhead, run prefer processing larger ranges instead of many small ones, or use unbounded mode. - -
+--- ### **export_ledger_entry_changes** @@ -302,82 +222,79 @@ As the Stellar network grows, the Stellar Core instance has to catch up on an in This command exports ledger changes within the provided ledger range. Flags can filter which ledger entry types are exported. If no data type flags are set, then by default all types are exported. If any are set, it is assumed that the others should not be exported. -Changes are exported in batches of a size defined by the `batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points for the nodes on the network, so it is beneficial to export in multiples of 64. +Changes are exported in batches of a size defined by the `--batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points for the nodes on the network, so it is beneficial to export in multiples of 64. This command has two modes: bounded and unbounded. #### **Bounded** - If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down. - -#### **Unbounded** -If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, the Stellar Core connects to the Stellar network and processes new changes as they occur on the network. Since the changes are continually exported in batches, this process can be continually run in the background in order to avoid the overhead of closing and starting new Stellar Core instances. - -
- -### **export_orderbooks (unsupported)** - -```bash -> stellar-etl export_orderbooks --start-ledger 1000 \ ---end-ledger 500000 --output exported_orderbooks_folder/ -``` -> *_NOTE:_* This is an expermental feature and is currently unsupported. +If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down. -This command exports orderbooks within the provided ledger range. Since exporting complete orderbooks at every single ledger would require an excessive amount of storage space, the output is normalized. Each batch that is exported contains multiple files, namely: `dimAccounts.txt`, `dimOffers.txt`, `dimMarkets.txt`, and `factEvents.txt`. The dim files relate a data structure to an ID. `dimMarkets`, for example, contains the buying and selling assets of a market, as well as the ID for that market. That ID is used in other places as a replacement for the full market information. This normalization process saves a significant amount of space (roughly 90% in our benchmarks). The `factEvents` file connects ledger numbers to the offer IDs that were present at that ledger. +#### **Unbounded (Currently Unsupported)** -Orderbooks are exported in batches of a size defined by the `batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points in that once they are available, so are the previous 63 nodes. It is beneficial to export in multiples of 64. +If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, stellar-etl will block and wait for the next sequentially written ledger file in the datastore. Since the changes are continually exported in batches, this process can be continually run in the background in order to avoid the overhead of closing and starting new stellar-etl instances. -This command has two modes: bounded and unbounded. +The following are the ledger entry type flags that can be used to export data: -#### **Bounded** - If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down. - -#### **Unbounded** -If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, the Stellar Core connects to the Stellar network and processes new orderbooks as they occur on the network. Since the changes are continually exported in batches, this process can be continually run in the background in order to avoid the overhead of closing and starting new Stellar Core instances. +- export-accounts +- export-trustlines +- export-offers +- export-pools +- export-balances +- export-contract-code +- export-contract-data +- export-config-settings +- export-ttl
-*** +--- ## **Utility Commands** +These commands aid in the usage of [Export Commands](#export-commands). + ### **get_ledger_range_from_times** + ```bash > stellar-etl get_ledger_range_from_times \ --start-time 2019-09-13T23:00:00+00:00 \ --end-time 2019-09-14T13:35:10+00:00 --output exported_range.txt ``` -This command exports takes in a start and end time and converts it to a ledger range. The ledger range that is returned will be the smallest possible ledger range that completely covers the provided time period. +This command takes in a start and end time and converts it to a ledger range. The ledger range that is returned will be the smallest possible ledger range that completely covers the provided time period.
-
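An editorial sketch (not in the original README) of how this utility is typically chained with the export commands above; the `<start>`/`<end>` placeholders stand for the ledger sequence numbers reported in `exported_range.txt`:

```bash
> stellar-etl get_ledger_range_from_times \
--start-time 2019-09-13T23:00:00+00:00 \
--end-time 2019-09-14T13:35:10+00:00 --output exported_range.txt
# read the start/end ledger sequence numbers from exported_range.txt, then:
> stellar-etl export_transactions --start-ledger <start> \
--end-ledger <end> --output exported_transactions.txt
```
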
+ +--- # Schemas See https://github.com/stellar/stellar-etl/blob/master/internal/transform/schema.go for the schemas of the data structures that are outputted by the ETL. -

+--- + # Extensions + This section covers some possible extensions or further work that can be done. ## **Adding New Commands** + In general, in order to add new commands, you need to add these files: - - `export_new_data_structure.go` in the `cmd` folder - - This file can be generated with cobra by calling: `cobra add {command}` - - This file will parse flags, create output files, get the transformed data from the input package, and then export the data. - - `export_new_data_structure_test.go` in the `cmd` folder - - This file will contain some tests for the newly added command. The `runCLI` function does most of the heavy lifting. All the tests need is the command arguments to test and the desired output. - - Test data should be stored in the `testdata/new_data_structure` folder - - `new_data_structure.go` in the `internal/input` folder - - This file will contain the methods needed to extract the new data structure from wherever it is located. This may be the history archives, the bucket list, or a captive core instance. - - This file should extract the data and transform it, and return the transformed data. - - If working with captive core, the methods need to work in the background. There should be methods that export batches of data and send them to a channel. There should be other methods that read from the channel and transform the data so it can be exported. +- `export_new_data_structure.go` in the `cmd` folder + - This file can be generated with cobra by calling: `cobra add {command}` + - This file will parse flags, create output files, get the transformed data from the input package, and then export the data. +- `export_new_data_structure_test.go` in the `cmd` folder + - This file will contain some tests for the newly added command. The `runCLI` function does most of the heavy lifting. All the tests need is the command arguments to test and the desired output. + - Test data should be stored in the `testdata/new_data_structure` folder +- `new_data_structure.go` in the `internal/input` folder + - This file will contain the methods needed to extract the new data structure from wherever it is located. This may be the history archives, the bucket list, a captive core instance, or a datastore. + - If working with captive core, the methods need to work in the background. There should be methods that export batches of data and send them to a channel. There should be other methods that read from the channel and transform the data so it can be exported. - `new_data_structure.go` in the `internal/transform` folder - - This file will contain the methods needed to transform the extracted data into a form that is suitable for BigQuery. - - The struct definition for the transformed object should be stored in `schemas.go` in the `internal/transform` folder. + - This file will contain the methods needed to transform the extracted data into a form that is suitable for BigQuery. + - The struct definition for the transformed object should be stored in `schemas.go` in the `internal/transform` folder. A good number of common methods are already written and stored in the `util` package. From 71e6cb44f7e3cd211902dd6ecf5b3d0a35acd2fc Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Fri, 31 May 2024 11:07:46 -0300 Subject: [PATCH 43/49] Release/CI enhancements (#247) * test new feature * README instructions * test release * test tag * v * echo * . * . 
* without v * final for release * testing patch versioning * generate new release * final version * reference branch name from PR not main --- .github/workflows/release.yml | 42 +++++++++++++++++++++++++++++------ README.md | 10 +++++++++ 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e84156a5..7d614830 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,15 +1,15 @@ name: Release Drafter and Publisher on: - push: - tags: - - v* + pull_request: + types: [closed] permissions: contents: read jobs: new_release: + if: github.event.pull_request.merged == true permissions: # write permission is required to create a github release contents: write @@ -21,17 +21,45 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - name: Get branch name + id: getbranch + run: echo ::set-output name=BRANCH::${GITHUB_HEAD_REF} - # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so I had to abbreviate it + # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so It had to be abbreviated - name: Get latest abbreviated tag id: gettag - run: echo ::set-output name=TAG::$(git describe --tags --abbrev=7) + run: echo ::set-output name=TAG::$(git describe --tags --abbrev=0) + + - name: Calculate next version + id: nextversion + run: | + BRANCH_NAME="${{ steps.getbranch.outputs.BRANCH }}" + CURRENT_VERSION="${{ steps.gettag.outputs.TAG }}" + CURRENT_VERSION="${CURRENT_VERSION#v}" # Remove the 'v' from the start of the version + IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION" + if [[ $BRANCH_NAME =~ ^feature/ ]]; then + VERSION_PARTS[1]=$((VERSION_PARTS[1] + 1)) + elif [[ $BRANCH_NAME =~ ^patch/ ]]; then + VERSION_PARTS[2]=$((VERSION_PARTS[2] + 1)) + elif [[ $BRANCH_NAME =~ ^release/ ]]; then + VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1)) + fi + NEXT_VERSION="v${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}" + echo ::set-output name=NEXT_VERSION::"$NEXT_VERSION" + + - name: Create and publish new tag + run: | + git tag ${{ steps.nextversion.outputs.NEXT_VERSION }} + git push origin ${{ steps.nextversion.outputs.NEXT_VERSION }} - uses: release-drafter/release-drafter@v5 with: commitish: master - name: "stellar-etl ${{ steps.gettag.outputs.TAG }}" - tag: ${{ github.ref }} + name: "stellar-etl ${{ steps.nextversion.outputs.NEXT_VERSION }}" + tag: ${{ steps.nextversion.outputs.NEXT_VERSION }} publish: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/README.md b/README.md index 544608ff..97cf6318 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,16 @@ The Stellar-ETL is a data pipeline that allows users to extract data from the history of the Stellar network. +## ** Before creating a branch ** + +Pay attention, it is very important to know if your modification to this repository is a release (breaking changes), a feature (functionalities) or a patch(to fix bugs). With that information, create your branch name like this: + +* ```release/``` +* ```feature/``` +* ```patch/``` + +If branch is already made, just rename it *before passing the pull request*. 
+ ## **Table of Contents** - [Exporting the Ledger Chain](#exporting-the-ledger-chain) From 6f6760791b01cba2523ba570d1e0643923a6db2b Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Mon, 3 Jun 2024 14:17:05 -0300 Subject: [PATCH 44/49] Hubble 406 - Feature / PR template (#252) * Create pull_request_template.md * final version * Update pull_request_template.md --- .github/pull_request_template.md | 36 ++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 .github/pull_request_template.md diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..09a19adf --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,36 @@ + + +
+ PR Checklist + +### PR Structure + +* [ ] This PR has reasonably narrow scope (if not, break it down into smaller PRs). +* [ ] This PR avoids mixing refactoring changes with feature changes (split into two PRs + otherwise). +* [ ] This PR's title starts with the jira ticket associated with the PR. + +### Thoroughness + +* [ ] This PR adds tests for the most critical parts of the new functionality or fixes. +* [ ] I've updated the README with the added features, breaking changes, and new instructions on how to use the repository. I've updated the description of the function with the changes that were made. + +### Release planning + +* [ ] I've decided if this PR requires a new major/minor/patch version according to + [semver](https://semver.org/), and I've changed the name of the BRANCH to release/* , feature/* or patch/* . +
+ +### What + +[TODO: Short statement about what is changing.] + +### Why + +[TODO: Why this change is being made. Include any context required to understand the why.] + +### Known limitations + +[TODO or N/A] From 382f0305b54bf83d8e2be3ab4becaee0073173cf Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Tue, 11 Jun 2024 11:17:56 -0300 Subject: [PATCH 45/49] CI enhancements (#253) --- .github/workflows/release.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7d614830..f10d8fb5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -31,7 +31,7 @@ jobs: # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so It had to be abbreviated - name: Get latest abbreviated tag id: gettag - run: echo ::set-output name=TAG::$(git describe --tags --abbrev=0) + run: echo ::set-output name=TAG::$(git describe --tags $(git rev-list --tags --max-count=1)) # get the latest tag across all branches and put it in the output TAG - name: Calculate next version id: nextversion @@ -40,12 +40,15 @@ jobs: CURRENT_VERSION="${{ steps.gettag.outputs.TAG }}" CURRENT_VERSION="${CURRENT_VERSION#v}" # Remove the 'v' from the start of the version IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION" - if [[ $BRANCH_NAME =~ ^feature/ ]]; then + if [[ $BRANCH_NAME =~ ^release/ ]]; then + VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1)) + VERSION_PARTS[1]=0 + VERSION_PARTS[2]=0 + elif [[ $BRANCH_NAME =~ ^feature/ ]]; then VERSION_PARTS[1]=$((VERSION_PARTS[1] + 1)) + VERSION_PARTS[2]=0 elif [[ $BRANCH_NAME =~ ^patch/ ]]; then VERSION_PARTS[2]=$((VERSION_PARTS[2] + 1)) - elif [[ $BRANCH_NAME =~ ^release/ ]]; then - VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1)) fi NEXT_VERSION="v${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}" echo ::set-output name=NEXT_VERSION::"$NEXT_VERSION" From 274b138473904193502c3bfd0a43531cab6feae0 Mon Sep 17 00:00:00 2001 From: chowbao Date: Thu, 13 Jun 2024 12:14:13 -0400 Subject: [PATCH 46/49] Fix cross contract contract_id bug (#254) --- internal/transform/operation.go | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/internal/transform/operation.go b/internal/transform/operation.go index b3a79def..6de15dbe 100644 --- a/internal/transform/operation.go +++ b/internal/transform/operation.go @@ -1030,9 +1030,14 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT details["type"] = "invoke_contract" + contractId, err := invokeArgs.ContractAddress.String() + if err != nil { + return nil, err + } + transactionEnvelope := getTransactionV1Envelope(transaction.Envelope) details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) - details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) + details["contract_id"] = contractId details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope) for _, param := range args { @@ -1633,9 +1638,14 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{}, details["type"] = "invoke_contract" + contractId, err := invokeArgs.ContractAddress.String() + if err != nil { + return nil, err + } + transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope) details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope) - details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope) + details["contract_id"] = contractId details["contract_code_hash"] 
= contractCodeHashFromTxEnvelope(transactionEnvelope) for _, param := range args { From 03a1afa158a0c774c92e81179095ca7fa14b9a00 Mon Sep 17 00:00:00 2001 From: chowbao Date: Thu, 20 Jun 2024 13:31:38 -0400 Subject: [PATCH 47/49] Update core image for consistency (no funcitonal change) (#256) --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 76842e8e..f3081f3a 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,7 +11,7 @@ COPY . . RUN go build -v -o /usr/local/bin ./... # stage 2: runtime enviroment -FROM stellar/unsafe-stellar-core:21.0.0-1812.rc1.a10329cca.focal +FROM stellar/stellar-core:21.0.0-1872.c6f474133.focal WORKDIR /etl From 29ab062f927a77fe53a5201790dae2293606bd26 Mon Sep 17 00:00:00 2001 From: chowbao Date: Mon, 24 Jun 2024 13:29:27 -0400 Subject: [PATCH 48/49] Create export_contract_events command (#255) Create export_contract_events command --- cmd/export_contract_events.go | 67 ++++++++ cmd/export_diagnostic_events.go | 85 ---------- internal/transform/contract_events.go | 153 ++++++++++++++++++ ...events_test.go => contract_events_test.go} | 59 +++++-- internal/transform/diagnostic_events.go | 71 -------- internal/transform/schema.go | 28 ++-- internal/utils/main.go | 148 +++++++++++++++-- 7 files changed, 420 insertions(+), 191 deletions(-) create mode 100644 cmd/export_contract_events.go delete mode 100644 cmd/export_diagnostic_events.go create mode 100644 internal/transform/contract_events.go rename internal/transform/{diagnostic_events_test.go => contract_events_test.go} (73%) delete mode 100644 internal/transform/diagnostic_events.go diff --git a/cmd/export_contract_events.go b/cmd/export_contract_events.go new file mode 100644 index 00000000..4bae06ff --- /dev/null +++ b/cmd/export_contract_events.go @@ -0,0 +1,67 @@ +package cmd + +import ( + "fmt" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/stellar/stellar-etl/internal/input" + "github.com/stellar/stellar-etl/internal/transform" + "github.com/stellar/stellar-etl/internal/utils" +) + +var contractEventsCmd = &cobra.Command{ + Use: "export_contract_events", + Short: "Exports the contract events over a specified range.", + Long: `Exports the contract events over a specified range to an output file.`, + Run: func(cmd *cobra.Command, args []string) { + cmdLogger.SetLevel(logrus.InfoLevel) + cmdArgs := utils.MustFlags(cmd.Flags(), cmdLogger) + + // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 GetEnvironmentDetails should be refactored + commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) + env := utils.GetEnvironmentDetails(commonArgs) + + transactions, err := input.GetTransactions(cmdArgs.StartNum, cmdArgs.EndNum, cmdArgs.Limit, env, cmdArgs.UseCaptiveCore) + if err != nil { + cmdLogger.Fatal("could not read transactions: ", err) + } + + outFile := mustOutFile(cmdArgs.Path) + numFailures := 0 + for _, transformInput := range transactions { + transformed, err := transform.TransformContractEvent(transformInput.Transaction, transformInput.LedgerHistory) + if err != nil { + ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq + cmdLogger.LogError(fmt.Errorf("could not transform contract events in transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq)) + numFailures += 1 + continue + } + + for _, contractEvent := range transformed { + _, err := exportEntry(contractEvent, outFile, cmdArgs.Extra) + if err != nil { + cmdLogger.LogError(fmt.Errorf("could not 
export contract event: %v", err)) + numFailures += 1 + continue + } + } + } + + outFile.Close() + + printTransformStats(len(transactions), numFailures) + + maybeUpload(cmdArgs.Credentials, cmdArgs.Bucket, cmdArgs.Provider, cmdArgs.Path) + }, +} + +func init() { + rootCmd.AddCommand(contractEventsCmd) + utils.AddCommonFlags(contractEventsCmd.Flags()) + utils.AddArchiveFlags("contract_events", contractEventsCmd.Flags()) + utils.AddCloudStorageFlags(contractEventsCmd.Flags()) + + contractEventsCmd.MarkFlagRequired("start-ledger") + contractEventsCmd.MarkFlagRequired("end-ledger") +} diff --git a/cmd/export_diagnostic_events.go b/cmd/export_diagnostic_events.go deleted file mode 100644 index 66ed6438..00000000 --- a/cmd/export_diagnostic_events.go +++ /dev/null @@ -1,85 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/stellar/stellar-etl/internal/input" - "github.com/stellar/stellar-etl/internal/transform" - "github.com/stellar/stellar-etl/internal/utils" -) - -var diagnosticEventsCmd = &cobra.Command{ - Use: "export_diagnostic_events", - Short: "Exports the diagnostic events over a specified range.", - Long: `Exports the diagnostic events over a specified range to an output file.`, - Run: func(cmd *cobra.Command, args []string) { - cmdLogger.SetLevel(logrus.InfoLevel) - commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger) - cmdLogger.StrictExport = commonArgs.StrictExport - startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger) - cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger) - env := utils.GetEnvironmentDetails(commonArgs) - - transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore) - if err != nil { - cmdLogger.Fatal("could not read transactions: ", err) - } - - outFile := mustOutFile(path) - numFailures := 0 - for _, transformInput := range transactions { - transformed, err, ok := transform.TransformDiagnosticEvent(transformInput.Transaction, transformInput.LedgerHistory) - if err != nil { - ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq - cmdLogger.LogError(fmt.Errorf("could not transform diagnostic events in transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq)) - numFailures += 1 - continue - } - - if !ok { - continue - } - for _, diagnosticEvent := range transformed { - _, err := exportEntry(diagnosticEvent, outFile, commonArgs.Extra) - if err != nil { - cmdLogger.LogError(fmt.Errorf("could not export diagnostic event: %v", err)) - numFailures += 1 - continue - } - } - } - - outFile.Close() - - printTransformStats(len(transactions), numFailures) - - maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path) - }, -} - -func init() { - rootCmd.AddCommand(diagnosticEventsCmd) - utils.AddCommonFlags(diagnosticEventsCmd.Flags()) - utils.AddArchiveFlags("diagnostic_events", diagnosticEventsCmd.Flags()) - utils.AddCloudStorageFlags(diagnosticEventsCmd.Flags()) - diagnosticEventsCmd.MarkFlagRequired("end-ledger") - - /* - Current flags: - start-ledger: the ledger sequence number for the beginning of the export period - end-ledger: the ledger sequence number for the end of the export range (*required) - - limit: maximum number of diagnostic events to export - TODO: measure a good default value that ensures all diagnostic events within a 5 minute period will be exported with a single call - The current max_tx_set_size is 1000 and there are 60 new 
ledgers in a 5 minute period: - 1000*60 = 60000 - - output-file: filename of the output file - - TODO: implement extra flags if possible - serialize-method: the method for serialization of the output data (JSON, XDR, etc) - start and end time as a replacement for start and end sequence numbers - */ -} diff --git a/internal/transform/contract_events.go b/internal/transform/contract_events.go new file mode 100644 index 00000000..f9ce1a8a --- /dev/null +++ b/internal/transform/contract_events.go @@ -0,0 +1,153 @@ +package transform + +import ( + "encoding/base64" + "fmt" + + "github.com/stellar/stellar-etl/internal/toid" + "github.com/stellar/stellar-etl/internal/utils" + + "github.com/stellar/go/ingest" + "github.com/stellar/go/strkey" + "github.com/stellar/go/xdr" +) + +// TransformContractEvent converts a transaction's contract events and diagnostic events into a form suitable for BigQuery. +// It is known that contract events are a subset of the diagnostic events XDR definition. We are opting to call all of these events +// contract events for better clarity to data analytics users. +func TransformContractEvent(transaction ingest.LedgerTransaction, lhe xdr.LedgerHeaderHistoryEntry) ([]ContractEventOutput, error) { + ledgerHeader := lhe.Header + outputTransactionHash := utils.HashToHexString(transaction.Result.TransactionHash) + outputLedgerSequence := uint32(ledgerHeader.LedgerSeq) + + transactionIndex := uint32(transaction.Index) + + outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64() + + outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime) + if err != nil { + return []ContractEventOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err) + } + + // GetDiagnosticEvents will return all contract events and diagnostic events emitted + contractEvents, err := transaction.GetDiagnosticEvents() + if err != nil { + return []ContractEventOutput{}, err + } + + var transformedContractEvents []ContractEventOutput + + for _, contractEvent := range contractEvents { + var outputContractId string + outputTopicsJson := make(map[string][]map[string]string, 1) + outputTopicsDecodedJson := make(map[string][]map[string]string, 1) + + outputInSuccessfulContractCall := contractEvent.InSuccessfulContractCall + event := contractEvent.Event + outputType := event.Type + outputTypeString := event.Type.String() + + eventTopics := getEventTopics(event.Body) + outputTopics, outputTopicsDecoded := serializeScValArray(eventTopics) + outputTopicsJson["topics"] = outputTopics + outputTopicsDecodedJson["topics_decoded"] = outputTopicsDecoded + + eventData := getEventData(event.Body) + outputData, outputDataDecoded := serializeScVal(eventData) + + // Convert the xdrContactId to string + // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 this should be a stellar/go/xdr function + if event.ContractId != nil { + contractId := *event.ContractId + contractIdByte, _ := contractId.MarshalBinary() + outputContractId, _ = strkey.Encode(strkey.VersionByteContract, contractIdByte) + } + + outputContractEventXDR, err := xdr.MarshalBase64(contractEvent) + if err != nil { + return []ContractEventOutput{}, err + } + + outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64() + outputSuccessful := transaction.Result.Successful() + + transformedDiagnosticEvent := ContractEventOutput{ + TransactionHash: outputTransactionHash, + 
TransactionID: outputTransactionID, + Successful: outputSuccessful, + LedgerSequence: outputLedgerSequence, + ClosedAt: outputCloseTime, + InSuccessfulContractCall: outputInSuccessfulContractCall, + ContractId: outputContractId, + Type: int32(outputType), + TypeString: outputTypeString, + Topics: outputTopicsJson, + TopicsDecoded: outputTopicsDecodedJson, + Data: outputData, + DataDecoded: outputDataDecoded, + ContractEventXDR: outputContractEventXDR, + } + + transformedContractEvents = append(transformedContractEvents, transformedDiagnosticEvent) + } + + return transformedContractEvents, nil +} + +// TODO this should be a stellar/go/xdr function +func getEventTopics(eventBody xdr.ContractEventBody) []xdr.ScVal { + switch eventBody.V { + case 0: + contractEventV0 := eventBody.MustV0() + return contractEventV0.Topics + default: + panic("unsupported event body version: " + string(eventBody.V)) + } +} + +// TODO this should be a stellar/go/xdr function +func getEventData(eventBody xdr.ContractEventBody) xdr.ScVal { + switch eventBody.V { + case 0: + contractEventV0 := eventBody.MustV0() + return contractEventV0.Data + default: + panic("unsupported event body version: " + string(eventBody.V)) + } +} + +// TODO this should also be used in the operations processor +func serializeScVal(scVal xdr.ScVal) (map[string]string, map[string]string) { + serializedData := map[string]string{} + serializedData["value"] = "n/a" + serializedData["type"] = "n/a" + + serializedDataDecoded := map[string]string{} + serializedDataDecoded["value"] = "n/a" + serializedDataDecoded["type"] = "n/a" + + if scValTypeName, ok := scVal.ArmForSwitch(int32(scVal.Type)); ok { + serializedData["type"] = scValTypeName + serializedDataDecoded["type"] = scValTypeName + if raw, err := scVal.MarshalBinary(); err == nil { + serializedData["value"] = base64.StdEncoding.EncodeToString(raw) + serializedDataDecoded["value"] = scVal.String() + } + } + + return serializedData, serializedDataDecoded +} + +// TODO this should also be used in the operations processor +func serializeScValArray(scVals []xdr.ScVal) ([]map[string]string, []map[string]string) { + data := make([]map[string]string, 0, len(scVals)) + dataDecoded := make([]map[string]string, 0, len(scVals)) + + for _, scVal := range scVals { + serializedData, serializedDataDecoded := serializeScVal(scVal) + data = append(data, serializedData) + dataDecoded = append(dataDecoded, serializedDataDecoded) + } + + return data, dataDecoded +} diff --git a/internal/transform/diagnostic_events_test.go b/internal/transform/contract_events_test.go similarity index 73% rename from internal/transform/diagnostic_events_test.go rename to internal/transform/contract_events_test.go index ed6f0c47..15e75703 100644 --- a/internal/transform/diagnostic_events_test.go +++ b/internal/transform/contract_events_test.go @@ -10,20 +10,20 @@ import ( "github.com/stellar/go/xdr" ) -func TestTransformDiagnosticEvent(t *testing.T) { +func TestTransformContractEvent(t *testing.T) { type inputStruct struct { transaction ingest.LedgerTransaction historyHeader xdr.LedgerHeaderHistoryEntry } type transformTest struct { input inputStruct - wantOutput []DiagnosticEventOutput + wantOutput []ContractEventOutput wantErr error } - hardCodedTransaction, hardCodedLedgerHeader, err := makeDiagnosticEventTestInput() + hardCodedTransaction, hardCodedLedgerHeader, err := makeContractEventTestInput() assert.NoError(t, err) - hardCodedOutput, err := makeDiagnosticEventTestOutput() + hardCodedOutput, err := 
makeContractEventTestOutput() assert.NoError(t, err) tests := []transformTest{} @@ -37,30 +37,61 @@ func TestTransformDiagnosticEvent(t *testing.T) { } for _, test := range tests { - actualOutput, actualError, _ := TransformDiagnosticEvent(test.input.transaction, test.input.historyHeader) + actualOutput, actualError := TransformContractEvent(test.input.transaction, test.input.historyHeader) assert.Equal(t, test.wantErr, actualError) assert.Equal(t, test.wantOutput, actualOutput) } } -func makeDiagnosticEventTestOutput() (output [][]DiagnosticEventOutput, err error) { - output = [][]DiagnosticEventOutput{{ - DiagnosticEventOutput{ +func makeContractEventTestOutput() (output [][]ContractEventOutput, err error) { + + topics := make(map[string][]map[string]string, 1) + topics["topics"] = []map[string]string{ + { + "type": "B", + "value": "AAAAAAAAAAE=", + }, + } + + topicsDecoded := make(map[string][]map[string]string, 1) + topicsDecoded["topics_decoded"] = []map[string]string{ + { + "type": "B", + "value": "true", + }, + } + + data := map[string]string{ + "type": "B", + "value": "AAAAAAAAAAE=", + } + + dataDecoded := map[string]string{ + "type": "B", + "value": "true", + } + + output = [][]ContractEventOutput{{ + ContractEventOutput{ TransactionHash: "a87fef5eeb260269c380f2de456aad72b59bb315aaac777860456e09dac0bafb", - LedgerSequence: 30521816, TransactionID: 131090201534533632, + Successful: false, + LedgerSequence: 30521816, ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC), InSuccessfulContractCall: true, - ExtV: 0, ContractId: "CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4", - Type: "ContractEventTypeDiagnostic", - BodyV: 0, - Body: "AAAAAQAAAAAAAAABAAAAAAAAAAE=", + Type: 2, + TypeString: "ContractEventTypeDiagnostic", + Topics: topics, + TopicsDecoded: topicsDecoded, + Data: data, + DataDecoded: dataDecoded, + ContractEventXDR: "AAAAAQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAEAAAAAAAAAAQAAAAAAAAAB", }, }} return } -func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, historyHeader []xdr.LedgerHeaderHistoryEntry, err error) { +func makeContractEventTestInput() (transaction []ingest.LedgerTransaction, historyHeader []xdr.LedgerHeaderHistoryEntry, err error) { hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR" hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb}) var hardCodedContractId xdr.Hash diff --git a/internal/transform/diagnostic_events.go b/internal/transform/diagnostic_events.go deleted file mode 100644 index b7ade6b3..00000000 --- a/internal/transform/diagnostic_events.go +++ /dev/null @@ -1,71 +0,0 @@ -package transform - -import ( - "fmt" - - "github.com/stellar/stellar-etl/internal/toid" - "github.com/stellar/stellar-etl/internal/utils" - - "github.com/stellar/go/ingest" - "github.com/stellar/go/strkey" - "github.com/stellar/go/xdr" -) - -// TransformDiagnosticEvent converts a transaction's diagnostic events from the history archive ingestion system into a form suitable for BigQuery -func TransformDiagnosticEvent(transaction ingest.LedgerTransaction, lhe xdr.LedgerHeaderHistoryEntry) ([]DiagnosticEventOutput, error, bool) { - ledgerHeader := lhe.Header - outputTransactionHash := utils.HashToHexString(transaction.Result.TransactionHash) - outputLedgerSequence := uint32(ledgerHeader.LedgerSeq) - - transactionIndex := 
uint32(transaction.Index) - - outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64() - - outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime) - if err != nil { - return []DiagnosticEventOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err), false - } - - diagnosticEvents, err := transaction.GetDiagnosticEvents() - if err != nil { - return []DiagnosticEventOutput{}, nil, false - } - - var transformedDiagnosticEvents []DiagnosticEventOutput - - for _, diagnoticEvent := range diagnosticEvents { - var outputContractId string - - outputInSuccessfulContractCall := diagnoticEvent.InSuccessfulContractCall - event := diagnoticEvent.Event - outputExtV := event.Ext.V - outputType := event.Type.String() - outputBodyV := event.Body.V - body, _ := event.Body.GetV0() - - outputBody, _ := xdr.MarshalBase64(body) - - if event.ContractId != nil { - contractId := *event.ContractId - contractIdByte, _ := contractId.MarshalBinary() - outputContractId, _ = strkey.Encode(strkey.VersionByteContract, contractIdByte) - } - - transformedDiagnosticEvent := DiagnosticEventOutput{ - TransactionHash: outputTransactionHash, - LedgerSequence: outputLedgerSequence, - TransactionID: outputTransactionID, - ClosedAt: outputCloseTime, - InSuccessfulContractCall: outputInSuccessfulContractCall, - ExtV: outputExtV, - ContractId: outputContractId, - Type: outputType, - BodyV: outputBodyV, - Body: outputBody, - } - - transformedDiagnosticEvents = append(transformedDiagnosticEvents, transformedDiagnosticEvent) - } - - return transformedDiagnosticEvents, nil, true -} diff --git a/internal/transform/schema.go b/internal/transform/schema.go index 59fec01a..29e34186 100644 --- a/internal/transform/schema.go +++ b/internal/transform/schema.go @@ -597,16 +597,20 @@ type TtlOutput struct { LedgerSequence uint32 `json:"ledger_sequence"` } -// DiagnosticEventOutput is a representation of soroban diagnostic events that currently are not stored in a BQ table -type DiagnosticEventOutput struct { - TransactionHash string `json:"transaction_hash"` - LedgerSequence uint32 `json:"ledger_sequence"` - TransactionID int64 `json:"transaction_id"` - ClosedAt time.Time `json:"closed_at"` - InSuccessfulContractCall bool `json:"in_successful_contract_call"` - ExtV int32 `json:"ext_v"` - ContractId string `json:"contract_id"` - Type string `json:"type"` - BodyV int32 `json:"body_v"` - Body string `json:"body"` +// ContractEventOutput is a representation of soroban contract events and diagnostic events +type ContractEventOutput struct { + TransactionHash string `json:"transaction_hash"` + TransactionID int64 `json:"transaction_id"` + Successful bool `json:"successful"` + LedgerSequence uint32 `json:"ledger_sequence"` + ClosedAt time.Time `json:"closed_at"` + InSuccessfulContractCall bool `json:"in_successful_contract_call"` + ContractId string `json:"contract_id"` + Type int32 `json:"type"` + TypeString string `json:"type_string"` + Topics map[string][]map[string]string `json:"topics"` + TopicsDecoded map[string][]map[string]string `json:"topics_decoded"` + Data map[string]string `json:"data"` + DataDecoded map[string]string `json:"data_decoded"` + ContractEventXDR string `json:"contract_event_xdr"` } diff --git a/internal/utils/main.go b/internal/utils/main.go index bcbd8bc8..fc54d646 100644 --- a/internal/utils/main.go +++ b/internal/utils/main.go @@ -227,7 +227,7 @@ func 
AddLPOperations(txMeta []xdr.OperationMeta, AssetA, AssetB xdr.Asset) []xdr return txMeta } -// AddCommonFlags adds the flags common to all commands: end-ledger, stdout, and strict-export +// AddCommonFlags adds the flags common to all commands: start-ledger, end-ledger, stdout, and strict-export func AddCommonFlags(flags *pflag.FlagSet) { flags.Uint32P("end-ledger", "e", 0, "The ledger sequence number for the end of the export range") flags.Bool("strict-export", false, "If set, transform errors will be fatal.") @@ -236,24 +236,20 @@ func AddCommonFlags(flags *pflag.FlagSet) { flags.StringToStringP("extra-fields", "u", map[string]string{}, "Additional fields to append to output jsons. Used for appending metadata") flags.Bool("captive-core", false, "If set, run captive core to retrieve data. Otherwise use TxMeta file datastore.") flags.String("datastore-path", "sdf-ledger-close-metas/ledgers", "Datastore bucket path to read txmeta files from.") - flags.Uint32("buffer-size", 5, "Buffer size sets the max limit for the number of txmeta files that can be held in memory.") - flags.Uint32("num-workers", 5, "Number of workers to spawn that read txmeta files from the datastore.") + flags.Uint32("buffer-size", 200, "Buffer size sets the max limit for the number of txmeta files that can be held in memory.") + flags.Uint32("num-workers", 10, "Number of workers to spawn that read txmeta files from the datastore.") flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.") flags.Uint32("retry-wait", 5, "Time in seconds to wait for GetLedger retry.") } -// AddArchiveFlags adds the history archive specific flags: start-ledger, output, and limit +// AddArchiveFlags adds the history archive specific flags: output, and limit +// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Rename AddArchiveFlags to something more relevant func AddArchiveFlags(objectName string, flags *pflag.FlagSet) { flags.Uint32P("start-ledger", "s", 2, "The ledger sequence number for the beginning of the export period. Defaults to genesis ledger") flags.StringP("output", "o", "exported_"+objectName+".txt", "Filename of the output file") flags.Int64P("limit", "l", -1, "Maximum number of "+objectName+" to export. If the limit is set to a negative number, all the objects in the provided range are exported") } -// AddBucketFlags adds the bucket list specifc flags: output -func AddBucketFlags(objectName string, flags *pflag.FlagSet) { - flags.StringP("output", "o", "exported_"+objectName+".txt", "Filename of the output file") -} - // AddCloudStorageFlags adds the cloud storage releated flags: cloud-storage-bucket, cloud-credentials func AddCloudStorageFlags(flags *pflag.FlagSet) { flags.String("cloud-storage-bucket", "stellar-etl-cli", "Cloud storage bucket to export to.") @@ -263,11 +259,13 @@ func AddCloudStorageFlags(flags *pflag.FlagSet) { } // AddCoreFlags adds the captive core specific flags: core-executable, core-config, batch-size, and output flags +// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Deprecate? 
func AddCoreFlags(flags *pflag.FlagSet, defaultFolder string) { flags.StringP("core-executable", "x", "", "Filepath to the stellar-core executable") flags.StringP("core-config", "c", "", "Filepath to the config file for stellar-core") flags.Uint32P("batch-size", "b", 64, "number of ledgers to export changes from in each batches") + // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Move output to different flag group flags.StringP("output", "o", defaultFolder, "Folder that will contain the output files") flags.Uint32P("start-ledger", "s", 2, "The ledger sequence number for the beginning of the export period. Defaults to genesis ledger") @@ -286,6 +284,138 @@ func AddExportTypeFlags(flags *pflag.FlagSet) { flags.BoolP("export-ttl", "", false, "set in order to export ttl changes") } +// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 better flags/params +// Some flags should be named better +type FlagValues struct { + StartNum uint32 + EndNum uint32 + StrictExport bool + IsTest bool + IsFuture bool + Extra map[string]string + UseCaptiveCore bool + DatastorePath string + BufferSize uint32 + NumWorkers uint32 + RetryLimit uint32 + RetryWait uint32 + Path string + Limit int64 + Bucket string + Credentials string + Provider string +} + +// MustFlags gets the values of the the flags for all commands. +// If any do not exist, it stops the program fatally using the logger +// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Not sure if all these arg checks are necessary +func MustFlags(flags *pflag.FlagSet, logger *EtlLogger) FlagValues { + endNum, err := flags.GetUint32("end-ledger") + if err != nil { + logger.Fatal("could not get end sequence number: ", err) + } + + strictExport, err := flags.GetBool("strict-export") + if err != nil { + logger.Fatal("could not get strict-export boolean: ", err) + } + + isTest, err := flags.GetBool("testnet") + if err != nil { + logger.Fatal("could not get testnet boolean: ", err) + } + + isFuture, err := flags.GetBool("futurenet") + if err != nil { + logger.Fatal("could not get futurenet boolean: ", err) + } + + extra, err := flags.GetStringToString("extra-fields") + if err != nil { + logger.Fatal("could not get extra fields string: ", err) + } + + useCaptiveCore, err := flags.GetBool("captive-core") + if err != nil { + logger.Fatal("could not get captive-core flag: ", err) + } + + datastorePath, err := flags.GetString("datastore-path") + if err != nil { + logger.Fatal("could not get datastore-bucket-path string: ", err) + } + + bufferSize, err := flags.GetUint32("buffer-size") + if err != nil { + logger.Fatal("could not get buffer-size uint32: ", err) + } + + numWorkers, err := flags.GetUint32("num-workers") + if err != nil { + logger.Fatal("could not get num-workers uint32: ", err) + } + + retryLimit, err := flags.GetUint32("retry-limit") + if err != nil { + logger.Fatal("could not get retry-limit uint32: ", err) + } + + retryWait, err := flags.GetUint32("retry-wait") + if err != nil { + logger.Fatal("could not get retry-wait uint32: ", err) + } + + startNum, err := flags.GetUint32("start-ledger") + if err != nil { + logger.Fatal("could not get start sequence number: ", err) + } + + path, err := flags.GetString("output") + if err != nil { + logger.Fatal("could not get output filename: ", err) + } + + limit, err := flags.GetInt64("limit") + if err != nil { + logger.Fatal("could not get limit: ", err) + } + + bucket, err := flags.GetString("cloud-storage-bucket") + if err != nil { + logger.Fatal("could not get cloud storage bucket: ", 
err) + } + + credentials, err := flags.GetString("cloud-credentials") + if err != nil { + logger.Fatal("could not get cloud credentials file: ", err) + } + + provider, err := flags.GetString("cloud-provider") + if err != nil { + logger.Fatal("could not get cloud provider: ", err) + } + + return FlagValues{ + StartNum: startNum, + EndNum: endNum, + StrictExport: strictExport, + IsTest: isTest, + IsFuture: isFuture, + Extra: extra, + UseCaptiveCore: useCaptiveCore, + DatastorePath: datastorePath, + BufferSize: bufferSize, + NumWorkers: numWorkers, + RetryLimit: retryLimit, + RetryWait: retryWait, + Path: path, + Limit: limit, + Bucket: bucket, + Credentials: credentials, + Provider: provider, + } +} + type CommonFlagValues struct { EndNum uint32 StrictExport bool From 3825a6eb9db13235bddffb4bb0fc2b8e3b822716 Mon Sep 17 00:00:00 2001 From: Laysa Bitencourt Date: Mon, 24 Jun 2024 18:12:02 -0300 Subject: [PATCH 49/49] [412 and 408 HUBBLE ] - Automated linting (#257) * show current errors * show linting errors * new config * CI ready * CI final * UpdateOrderbook unchanged --- .github/pull_request_template.md | 18 ++-- .github/release-drafter.yml | 10 +-- .github/workflows/codeql.yml | 36 ++++---- .github/workflows/internal.yml | 39 ++++----- .github/workflows/lint-tests.yml | 32 +++++++ .github/workflows/release.yml | 136 ++++++++++++++--------------- .gitignore | 1 + .golangci.yml | 11 +++ .pre-commit-config.yaml | 25 ++++++ README.md | 11 +-- cmd/export_ledger_entry_changes.go | 6 +- cmd/export_ledgers_test.go | 4 +- cmd/get_ledger_range_from_times.go | 2 +- internal/input/orderbooks.go | 4 +- internal/input/trades.go | 2 +- internal/toid/main.go | 33 ++++--- internal/toid/main_test.go | 6 +- internal/toid/synt_offer_id.go | 5 +- 18 files changed, 223 insertions(+), 158 deletions(-) create mode 100644 .github/workflows/lint-tests.yml create mode 100644 .golangci.yml create mode 100644 .pre-commit-config.yaml diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 09a19adf..34a4b107 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -4,23 +4,23 @@ change is, and why it is being made, with enough context for anyone to understan
PR Checklist - + ### PR Structure -* [ ] This PR has reasonably narrow scope (if not, break it down into smaller PRs). -* [ ] This PR avoids mixing refactoring changes with feature changes (split into two PRs - otherwise). -* [ ] This PR's title starts with the jira ticket associated with the PR. +- [ ] This PR has reasonably narrow scope (if not, break it down into smaller PRs). +- [ ] This PR avoids mixing refactoring changes with feature changes (split into two PRs + otherwise). +- [ ] This PR's title starts with the jira ticket associated with the PR. ### Thoroughness -* [ ] This PR adds tests for the most critical parts of the new functionality or fixes. -* [ ] I've updated the README with the added features, breaking changes, and new instructions on how to use the repository. I've updated the description of the function with the changes that were made. +- [ ] This PR adds tests for the most critical parts of the new functionality or fixes. +- [ ] I've updated the README with the added features, breaking changes, and new instructions on how to use the repository. I've updated the description of the function with the changes that were made. ### Release planning -* [ ] I've decided if this PR requires a new major/minor/patch version according to - [semver](https://semver.org/), and I've changed the name of the BRANCH to release/* , feature/* or patch/* . +- [ ] I've decided if this PR requires a new major/minor/patch version according to + [semver](https://semver.org/), and I've changed the name of the BRANCH to release/_ , feature/_ or patch/\* .
### What diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml index f627cd0e..b4505e02 100644 --- a/.github/release-drafter.yml +++ b/.github/release-drafter.yml @@ -1,5 +1,5 @@ -template: | - ## What's Changed - $CHANGES - - **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION \ No newline at end of file +template: | + ## What's Changed + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 5907fe11..92bc0800 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -2,11 +2,11 @@ name: "CodeQL" on: push: - branches: [ "master" ] + branches: ["master"] pull_request: - branches: [ "master" ] + branches: ["master"] schedule: - - cron: '42 15 * * 6' + - cron: "42 15 * * 6" jobs: analyze: @@ -21,21 +21,21 @@ jobs: fail-fast: false matrix: include: - - language: go - build-mode: autobuild - + - language: go + build-mode: autobuild + steps: - - name: Checkout repository - uses: actions/checkout@v4 + - name: Checkout repository + uses: actions/checkout@v4 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - build-mode: ${{ matrix.build-mode }} + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/internal.yml b/.github/workflows/internal.yml index 99fda785..54c55d14 100644 --- a/.github/workflows/internal.yml +++ b/.github/workflows/internal.yml @@ -2,32 +2,31 @@ name: internal on: pull_request: - branches: [ master ] + branches: [master] jobs: - build: runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1 - - name: Build Internal - working-directory: internal - run: go build ./... + - name: Checkout + uses: actions/checkout@v2 + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1 + - name: Build Internal + working-directory: internal + run: go build ./... unit-tests: runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up Go - uses: actions/setup-go@v2 - with: - go-version: 1 - - name: Run Internal Unit Tests - working-directory: internal - run: go test -v -cover ./... + - name: Checkout + uses: actions/checkout@v2 + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1 + - name: Run Internal Unit Tests + working-directory: internal + run: go test -v -cover ./... 
diff --git a/.github/workflows/lint-tests.yml b/.github/workflows/lint-tests.yml new file mode 100644 index 00000000..d0ae9c44 --- /dev/null +++ b/.github/workflows/lint-tests.yml @@ -0,0 +1,32 @@ +name: Linting + +on: + pull_request: + branches: + - master + +jobs: + pre-commit: + runs-on: ubuntu-latest + if: >- + github.event.pull_request.merged == false && + github.event.pull_request.state == 'open' + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + fetch-depth: 0 # Fetch all history + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1 + + - id: file_changes + uses: trilom/file-changes-action@v1.2.3 + with: + output: " " + + - uses: pre-commit/action@v3.0.0 + env: + extra_args: --color=always --files ${{ steps.file_changes.outputs.files}} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f10d8fb5..155d5986 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,68 +1,68 @@ -name: Release Drafter and Publisher - -on: - pull_request: - types: [closed] - -permissions: - contents: read - -jobs: - new_release: - if: github.event.pull_request.merged == true - permissions: - # write permission is required to create a github release - contents: write - # write permission is required for autolabeler - # otherwise, read permission is required at least - pull-requests: write - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - - name: Get branch name - id: getbranch - run: echo ::set-output name=BRANCH::${GITHUB_HEAD_REF} - - # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so It had to be abbreviated - - name: Get latest abbreviated tag - id: gettag - run: echo ::set-output name=TAG::$(git describe --tags $(git rev-list --tags --max-count=1)) # get the latest tag across all branches and put it in the output TAG - - - name: Calculate next version - id: nextversion - run: | - BRANCH_NAME="${{ steps.getbranch.outputs.BRANCH }}" - CURRENT_VERSION="${{ steps.gettag.outputs.TAG }}" - CURRENT_VERSION="${CURRENT_VERSION#v}" # Remove the 'v' from the start of the version - IFS='.' 
read -ra VERSION_PARTS <<< "$CURRENT_VERSION" - if [[ $BRANCH_NAME =~ ^release/ ]]; then - VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1)) - VERSION_PARTS[1]=0 - VERSION_PARTS[2]=0 - elif [[ $BRANCH_NAME =~ ^feature/ ]]; then - VERSION_PARTS[1]=$((VERSION_PARTS[1] + 1)) - VERSION_PARTS[2]=0 - elif [[ $BRANCH_NAME =~ ^patch/ ]]; then - VERSION_PARTS[2]=$((VERSION_PARTS[2] + 1)) - fi - NEXT_VERSION="v${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}" - echo ::set-output name=NEXT_VERSION::"$NEXT_VERSION" - - - name: Create and publish new tag - run: | - git tag ${{ steps.nextversion.outputs.NEXT_VERSION }} - git push origin ${{ steps.nextversion.outputs.NEXT_VERSION }} - - - uses: release-drafter/release-drafter@v5 - with: - commitish: master - name: "stellar-etl ${{ steps.nextversion.outputs.NEXT_VERSION }}" - tag: ${{ steps.nextversion.outputs.NEXT_VERSION }} - publish: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +name: Release Drafter and Publisher + +on: + pull_request: + types: [closed] + +permissions: + contents: read + +jobs: + new_release: + if: github.event.pull_request.merged == true + permissions: + # write permission is required to create a github release + contents: write + # write permission is required for autolabeler + # otherwise, read permission is required at least + pull-requests: write + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - name: Get branch name + id: getbranch + run: echo ::set-output name=BRANCH::${GITHUB_HEAD_REF} + + # ${{ github.ref }} was not giving v* as tag name, but refs/tags/v* instead, so It had to be abbreviated + - name: Get latest abbreviated tag + id: gettag + run: echo ::set-output name=TAG::$(git describe --tags $(git rev-list --tags --max-count=1)) # get the latest tag across all branches and put it in the output TAG + + - name: Calculate next version + id: nextversion + run: | + BRANCH_NAME="${{ steps.getbranch.outputs.BRANCH }}" + CURRENT_VERSION="${{ steps.gettag.outputs.TAG }}" + CURRENT_VERSION="${CURRENT_VERSION#v}" # Remove the 'v' from the start of the version + IFS='.' 
read -ra VERSION_PARTS <<< "$CURRENT_VERSION" + if [[ $BRANCH_NAME =~ ^release/ ]]; then + VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1)) + VERSION_PARTS[1]=0 + VERSION_PARTS[2]=0 + elif [[ $BRANCH_NAME =~ ^feature/ ]]; then + VERSION_PARTS[1]=$((VERSION_PARTS[1] + 1)) + VERSION_PARTS[2]=0 + elif [[ $BRANCH_NAME =~ ^patch/ ]]; then + VERSION_PARTS[2]=$((VERSION_PARTS[2] + 1)) + fi + NEXT_VERSION="v${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}" + echo ::set-output name=NEXT_VERSION::"$NEXT_VERSION" + + - name: Create and publish new tag + run: | + git tag ${{ steps.nextversion.outputs.NEXT_VERSION }} + git push origin ${{ steps.nextversion.outputs.NEXT_VERSION }} + + - uses: release-drafter/release-drafter@v5 + with: + commitish: master + name: "stellar-etl ${{ steps.nextversion.outputs.NEXT_VERSION }}" + tag: ${{ steps.nextversion.outputs.NEXT_VERSION }} + publish: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index a450b63a..a3b18857 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ debug *.csr *.key stellar-etl +env ### Credentials checks credentials.json diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..0e7b5035 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,11 @@ +linters: + disable: + - gosimple + - errcheck + - ineffassign + - staticcheck + + enable: + - goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode. + - importas # Enforces consistent import aliases. + - misspell # Finds commonly misspelled English words. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..5152a6b9 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,25 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-added-large-files # prevents giant files from being committed. + - id: check-case-conflict # checks for files that would conflict in case-insensitive filesystems. + - id: check-merge-conflict # checks for files that contain merge conflict strings. + - id: detect-private-key # detects the presence of private keys. + - id: end-of-file-fixer # ensures that a file is either empty, or ends with one newline. + - id: fix-byte-order-marker # removes utf-8 byte order marker. + - id: mixed-line-ending # replaces or checks mixed line ending. + - id: trailing-whitespace # trims trailing whitespace. + + - repo: https://github.com/golangci/golangci-lint + rev: v1.59.1 + hooks: + - id: golangci-lint + entry: golangci-lint run --fix + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 + hooks: + - id: prettier + files: \.(json|markdown|md|yaml|yml)$ + language_version: 14.21.3 diff --git a/README.md b/README.md index 97cf6318..ab21a96b 100644 --- a/README.md +++ b/README.md @@ -6,11 +6,11 @@ The Stellar-ETL is a data pipeline that allows users to extract data from the hi Pay attention, it is very important to know if your modification to this repository is a release (breaking changes), a feature (functionalities) or a patch(to fix bugs). With that information, create your branch name like this: -* ```release/``` -* ```feature/``` -* ```patch/``` +- `release/` +- `feature/` +- `patch/` -If branch is already made, just rename it *before passing the pull request*. +If branch is already made, just rename it _before passing the pull request_. 
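For context on what the new `.golangci.yml` and pre-commit hook enforce: `goimports` normalizes import grouping and formatting, `misspell` catches common typos in comments and strings (the `ocurred` → `occurred` and `accomodates` → `accommodates` fixes in the Go diffs below are this kind of change), and `importas` keeps import aliases consistent. A small illustrative sketch of code the hooks would clean up automatically via `golangci-lint run --fix` (file and identifiers are hypothetical, not part of the patch):

```go
// Hypothetical example, not part of the patch.
package example

// goimports groups imports: standard library first, then external packages,
// separated by a blank line, and keeps each group sorted.
import (
	"fmt"
	"time"

	"github.com/sirupsen/logrus"
)

// misspell would flag a comment like "this ocurred at close time"
// and rewrite it to "this occurred at close time".
func logClose(closedAt time.Time) {
	logrus.Info(fmt.Sprintf("ledger close occurred at %s", closedAt.UTC()))
}
```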
## **Table of Contents** @@ -120,7 +120,8 @@ These commands export information using the [Ledger Exporter](https://github.com > _*NOTE:*_ Using captive-core requires a Stellar Core instance that is v20.0.0 or later. The commands use the Core instance to retrieve information about changes from the ledger. More information about the Stellar ledger information can be found [here](https://developers.stellar.org/network/horizon/api-reference/resources). >
As the Stellar network grows, the Stellar Core instance has to catch up on an increasingly large amount of information. This catch-up process can add some overhead to the commands in this category. In order to avoid this overhead, prefer processing larger ranges instead of many small ones, or use unbounded mode. ->

Recommended resources for running captive-core within a KubernetesPod: +>

Recommended resources for running captive-core within a KubernetesPod: +> > ``` > {cpu: 3.5, memory: 20Gi, ephemeral-storage: 12Gi} > ``` diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go index b227defe..dfb4ae75 100644 --- a/cmd/export_ledger_entry_changes.go +++ b/cmd/export_ledger_entry_changes.go @@ -19,11 +19,11 @@ var exportLedgerEntryChangesCmd = &cobra.Command{ Use: "export_ledger_entry_changes", Short: "This command exports the changes in accounts, offers, trustlines and liquidity pools.", Long: `This command instantiates a stellar-core instance and uses it to export about accounts, offers, trustlines and liquidity pools. -The information is exported in batches determined by the batch-size flag. Each exported file will include the changes to the +The information is exported in batches determined by the batch-size flag. Each exported file will include the changes to the relevant data type that occurred during that batch. -If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are -confirmed by the Stellar network. +If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are +confirmed by the Stellar network. If no data type flags are set, then by default all of them are exported. If any are set, it is assumed that the others should not be exported.`, diff --git a/cmd/export_ledgers_test.go b/cmd/export_ledgers_test.go index 8006ea6d..9a1e1f63 100644 --- a/cmd/export_ledgers_test.go +++ b/cmd/export_ledgers_test.go @@ -4,7 +4,7 @@ import ( "bytes" "flag" "fmt" - "io/ioutil" + "io" "log" "os" "os/exec" @@ -229,7 +229,7 @@ func getGolden(t *testing.T, goldenFile string, actual string, update bool) (str return actual, nil } - wantOutput, err := ioutil.ReadAll(f) + wantOutput, err := io.ReadAll(f) if err != nil { return "", err } diff --git a/cmd/get_ledger_range_from_times.go b/cmd/get_ledger_range_from_times.go index 3edfff35..07cc5041 100644 --- a/cmd/get_ledger_range_from_times.go +++ b/cmd/get_ledger_range_from_times.go @@ -44,7 +44,7 @@ var getLedgerRangeFromTimesCmd = &cobra.Command{ cmdLogger.Fatal("could not get testnet boolean: ", err) } - isFuture, err := cmd.Flags().GetBool("futurenet") + isFuture, err := cmd.Flags().GetBool("futurenet") if err != nil { cmdLogger.Fatal("could not get futurenet boolean: ", err) } diff --git a/internal/input/orderbooks.go b/internal/input/orderbooks.go index 96ce8ca7..95c1f2bc 100644 --- a/internal/input/orderbooks.go +++ b/internal/input/orderbooks.go @@ -59,7 +59,6 @@ func NewOrderbookParser(logger *utils.EtlLogger) OrderbookParser { } } - func (o *OrderbookParser) parseOrderbook(orderbook []ingest.Change, seq uint32) { var group sync.WaitGroup allConverted := make([]transform.NormalizedOfferOutput, len(orderbook)) @@ -119,7 +118,7 @@ func (o *OrderbookParser) parseOrderbook(orderbook []ingest.Change, seq uint32) } } -// GetOfferChanges gets the offer changes that ocurred between the firstSeq ledger and nextSeq ledger +// GetOfferChanges gets the offer changes that occurred between the firstSeq ledger and nextSeq ledger func GetOfferChanges(core *ledgerbackend.CaptiveStellarCore, env utils.EnvironmentDetails, firstSeq, nextSeq uint32) (*ingest.ChangeCompactor, error) { offChanges := ingest.NewChangeCompactor() ctx := context.Background() @@ -206,7 +205,6 @@ func UpdateOrderbook(start, end uint32, orderbook []ingest.Change, core *ledgerb for _, change := range orderbook { 
changeCache.AddChange(change) } - orderbook = changeCache.GetChanges() } diff --git a/internal/input/trades.go b/internal/input/trades.go index 9c26cdf4..afdede71 100644 --- a/internal/input/trades.go +++ b/internal/input/trades.go @@ -45,7 +45,7 @@ func GetTrades(start, end uint32, limit int64, env utils.EnvironmentDetails, use return []TradeTransformInput{}, err } - closeTime, err := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime) + closeTime, _ := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime) for int64(len(tradeSlice)) < limit || limit < 0 { tx, err := txReader.Read() diff --git a/internal/toid/main.go b/internal/toid/main.go index 9ce468f5..e91f73c7 100644 --- a/internal/toid/main.go +++ b/internal/toid/main.go @@ -5,7 +5,6 @@ import ( "fmt" ) -// // ID represents the total order of Ledgers, Transactions and // Operations. // @@ -19,36 +18,35 @@ import ( // // The follow diagram shows this format: // -// 0 1 2 3 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | Ledger Sequence Number | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | Transaction Application Order | Op Index | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Ledger Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Transaction Application Order | Op Index | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // // By component: // // Ledger Sequence: 32-bits // -// A complete ledger sequence number in which the operation was validated. +// A complete ledger sequence number in which the operation was validated. // -// Expressed in network byte order. +// Expressed in network byte order. // // Transaction Application Order: 20-bits // -// The order that the transaction was applied within the ledger it was -// validated. Accommodates up to 1,048,575 transactions in a single ledger. +// The order that the transaction was applied within the ledger it was +// validated. Accommodates up to 1,048,575 transactions in a single ledger. // -// Expressed in network byte order. +// Expressed in network byte order. // // Operation Index: 12-bits // -// The index of the operation within its parent transaction. Accommodates up -// to 4095 operations per transaction. -// -// Expressed in network byte order. +// The index of the operation within its parent transaction. Accommodates up +// to 4095 operations per transaction. // +// Expressed in network byte order. // // Note: API Clients should not be interpreting this value. We will use it // as an opaque paging token that clients can parrot back to us after having read @@ -59,7 +57,6 @@ import ( // transaction as well. Given that this ID is only meant for ordering within a // single type of object, the sharing of ids across object types seems // acceptable. 
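The layout described in this comment packs cleanly into a single int64: ledger sequence in the top 32 bits, transaction application order in the next 20, operation index in the low 12. A standalone sketch of that packing (a simplified mirror of what `ID.ToInt64` does, with the overflow checks that the tests below exercise left out for brevity):

```go
package main

import "fmt"

// packTOID packs a ledger sequence, transaction application order, and
// operation index into one int64 using the 32/20/12-bit layout described
// above. The real ToInt64 panics when txOrder >= 1_048_576 or opIndex >= 4_096;
// those bounds checks are omitted here.
func packTOID(ledger, txOrder, opIndex int32) int64 {
	return int64(ledger)<<32 | int64(txOrder)<<12 | int64(opIndex)
}

func main() {
	// These match the expectations in main_test.go:
	fmt.Println(packTOID(0, 0, 1)) // 1
	fmt.Println(packTOID(0, 1, 0)) // 4096
	fmt.Println(packTOID(1, 0, 0)) // 4294967296
}
```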
-// type ID struct { LedgerSequence int32 TransactionOrder int32 diff --git a/internal/toid/main_test.go b/internal/toid/main_test.go index 70433b1a..cac6305b 100644 --- a/internal/toid/main_test.go +++ b/internal/toid/main_test.go @@ -18,7 +18,7 @@ func TestID_ToInt64(t *testing.T) { expected int64 shouldPanic bool }{ - // accomodates 12-bits of precision for the operation field + // accommodates 12-bits of precision for the operation field { id: &ID{0, 0, 1}, expected: 1, @@ -31,7 +31,7 @@ func TestID_ToInt64(t *testing.T) { id: &ID{0, 0, 4096}, shouldPanic: true, }, - // accomodates 20-bits of precision for the transaction field + // accommodates 20-bits of precision for the transaction field { id: &ID{0, 1, 0}, expected: 4096, @@ -44,7 +44,7 @@ func TestID_ToInt64(t *testing.T) { id: &ID{0, 1048576, 0}, shouldPanic: true, }, - // accomodates 32-bits of precision for the ledger field + // accommodates 32-bits of precision for the ledger field { id: &ID{1, 0, 0}, expected: 4294967296, diff --git a/internal/toid/synt_offer_id.go b/internal/toid/synt_offer_id.go index b0fe4e1b..39777109 100644 --- a/internal/toid/synt_offer_id.go +++ b/internal/toid/synt_offer_id.go @@ -22,8 +22,9 @@ const ( // Due to the 2nd bit being used, the largest possible toid is: // 0011111111111111111111111111111100000000000000000001000000000001 // \ ledger /\ transaction /\ op / -// = 1073741823 -// with avg. 5 sec close time will reach in ~170 years +// +// = 1073741823 +// with avg. 5 sec close time will reach in ~170 years func EncodeOfferId(id uint64, typ OfferIDType) int64 { // First ensure the bits we're going to change are 0s if id&mask != 0 {