diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000..34a4b107
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,36 @@
+
+
+
+## PR Checklist
+
+### PR Structure
+
+- [ ] This PR has reasonably narrow scope (if not, break it down into smaller PRs).
+- [ ] This PR avoids mixing refactoring changes with feature changes (split into two PRs
+ otherwise).
+- [ ] This PR's title starts with the Jira ticket associated with the PR.
+
+### Thoroughness
+
+- [ ] This PR adds tests for the most critical parts of the new functionality or fixes.
+- [ ] I've updated the README with any added features, breaking changes, or new instructions for using the repository, and I've updated the relevant function descriptions to reflect the changes that were made.
+
+### Release planning
+
+- [ ] I've decided whether this PR requires a new major/minor/patch version according to
+  [semver](https://semver.org/), and I've renamed the branch to `release/*`, `feature/*`, or `patch/*` accordingly.
+
+
+### What
+
+[TODO: Short statement about what is changing.]
+
+### Why
+
+[TODO: Why this change is being made. Include any context required to understand the why.]
+
+### Known limitations
+
+[TODO or N/A]
diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml
new file mode 100644
index 00000000..b4505e02
--- /dev/null
+++ b/.github/release-drafter.yml
@@ -0,0 +1,5 @@
+template: |
+ ## What's Changed
+ $CHANGES
+
+ **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 00000000..92bc0800
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,41 @@
+name: "CodeQL"
+
+on:
+ push:
+ branches: ["master"]
+ pull_request:
+ branches: ["master"]
+ schedule:
+ - cron: "42 15 * * 6"
+
+jobs:
+ analyze:
+ name: Analyze (${{ matrix.language }})
+ runs-on: ubuntu-latest
+ timeout-minutes: 360
+ permissions:
+ # required for all workflows
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - language: go
+ build-mode: autobuild
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ build-mode: ${{ matrix.build-mode }}
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/internal.yml b/.github/workflows/internal.yml
index 99fda785..54c55d14 100644
--- a/.github/workflows/internal.yml
+++ b/.github/workflows/internal.yml
@@ -2,32 +2,31 @@ name: internal
on:
pull_request:
- branches: [ master ]
+ branches: [master]
jobs:
-
build:
runs-on: ubuntu-latest
steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set up Go
- uses: actions/setup-go@v2
- with:
- go-version: 1
- - name: Build Internal
- working-directory: internal
- run: go build ./...
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Set up Go
+ uses: actions/setup-go@v2
+ with:
+ go-version: 1
+ - name: Build Internal
+ working-directory: internal
+ run: go build ./...
unit-tests:
runs-on: ubuntu-latest
steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Set up Go
- uses: actions/setup-go@v2
- with:
- go-version: 1
- - name: Run Internal Unit Tests
- working-directory: internal
- run: go test -v -cover ./...
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Set up Go
+ uses: actions/setup-go@v2
+ with:
+ go-version: 1
+ - name: Run Internal Unit Tests
+ working-directory: internal
+ run: go test -v -cover ./...
diff --git a/.github/workflows/lint-tests.yml b/.github/workflows/lint-tests.yml
new file mode 100644
index 00000000..d0ae9c44
--- /dev/null
+++ b/.github/workflows/lint-tests.yml
@@ -0,0 +1,32 @@
+name: Linting
+
+on:
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ pre-commit:
+ runs-on: ubuntu-latest
+ if: >-
+ github.event.pull_request.merged == false &&
+ github.event.pull_request.state == 'open'
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0 # Fetch all history
+
+ - name: Set up Go
+ uses: actions/setup-go@v2
+ with:
+ go-version: 1
+
+ - id: file_changes
+ uses: trilom/file-changes-action@v1.2.3
+ with:
+ output: " "
+
+ - uses: pre-commit/action@v3.0.0
+        with:
+          extra_args: --color=always --files ${{ steps.file_changes.outputs.files }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..155d5986
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,68 @@
+name: Release Drafter and Publisher
+
+on:
+ pull_request:
+ types: [closed]
+
+permissions:
+ contents: read
+
+jobs:
+ new_release:
+ if: github.event.pull_request.merged == true
+ permissions:
+ # write permission is required to create a github release
+ contents: write
+ # write permission is required for autolabeler
+ # otherwise, read permission is required at least
+ pull-requests: write
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: Get branch name
+ id: getbranch
+ run: echo ::set-output name=BRANCH::${GITHUB_HEAD_REF}
+
+      # ${{ github.ref }} gives refs/tags/v* rather than v* as the tag name, so the latest tag has to be abbreviated
+ - name: Get latest abbreviated tag
+ id: gettag
+ run: echo ::set-output name=TAG::$(git describe --tags $(git rev-list --tags --max-count=1)) # get the latest tag across all branches and put it in the output TAG
+
+ - name: Calculate next version
+ id: nextversion
+ run: |
+ BRANCH_NAME="${{ steps.getbranch.outputs.BRANCH }}"
+ CURRENT_VERSION="${{ steps.gettag.outputs.TAG }}"
+ CURRENT_VERSION="${CURRENT_VERSION#v}" # Remove the 'v' from the start of the version
+ IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION"
+ if [[ $BRANCH_NAME =~ ^release/ ]]; then
+ VERSION_PARTS[0]=$((VERSION_PARTS[0] + 1))
+ VERSION_PARTS[1]=0
+ VERSION_PARTS[2]=0
+ elif [[ $BRANCH_NAME =~ ^feature/ ]]; then
+ VERSION_PARTS[1]=$((VERSION_PARTS[1] + 1))
+ VERSION_PARTS[2]=0
+ elif [[ $BRANCH_NAME =~ ^patch/ ]]; then
+ VERSION_PARTS[2]=$((VERSION_PARTS[2] + 1))
+ fi
+ NEXT_VERSION="v${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
+ echo ::set-output name=NEXT_VERSION::"$NEXT_VERSION"
+
+ - name: Create and publish new tag
+ run: |
+ git tag ${{ steps.nextversion.outputs.NEXT_VERSION }}
+ git push origin ${{ steps.nextversion.outputs.NEXT_VERSION }}
+
+ - uses: release-drafter/release-drafter@v5
+ with:
+ commitish: master
+ name: "stellar-etl ${{ steps.nextversion.outputs.NEXT_VERSION }}"
+ tag: ${{ steps.nextversion.outputs.NEXT_VERSION }}
+ publish: true
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index a450b63a..a3b18857 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,6 +14,7 @@ debug
*.csr
*.key
stellar-etl
+env
### Credentials checks
credentials.json
diff --git a/.golangci.yml b/.golangci.yml
new file mode 100644
index 00000000..0e7b5035
--- /dev/null
+++ b/.golangci.yml
@@ -0,0 +1,11 @@
+linters:
+ disable:
+ - gosimple
+ - errcheck
+ - ineffassign
+ - staticcheck
+
+ enable:
+ - goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode.
+ - importas # Enforces consistent import aliases.
+ - misspell # Finds commonly misspelled English words.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..5152a6b9
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,25 @@
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.6.0
+ hooks:
+ - id: check-added-large-files # prevents giant files from being committed.
+ - id: check-case-conflict # checks for files that would conflict in case-insensitive filesystems.
+ - id: check-merge-conflict # checks for files that contain merge conflict strings.
+ - id: detect-private-key # detects the presence of private keys.
+ - id: end-of-file-fixer # ensures that a file is either empty, or ends with one newline.
+ - id: fix-byte-order-marker # removes utf-8 byte order marker.
+ - id: mixed-line-ending # replaces or checks mixed line ending.
+ - id: trailing-whitespace # trims trailing whitespace.
+
+ - repo: https://github.com/golangci/golangci-lint
+ rev: v1.59.1
+ hooks:
+ - id: golangci-lint
+ entry: golangci-lint run --fix
+
+ - repo: https://github.com/pre-commit/mirrors-prettier
+ rev: v3.1.0
+ hooks:
+ - id: prettier
+ files: \.(json|markdown|md|yaml|yml)$
+ language_version: 14.21.3
diff --git a/README.md b/README.md
index 1501fa3c..ab21a96b 100644
--- a/README.md
+++ b/README.md
@@ -1,210 +1,134 @@
+# **Stellar ETL**
-# Stellar ETL
The Stellar-ETL is a data pipeline that allows users to extract data from the history of the Stellar network.
+## **Before creating a branch**
+
+Before creating a branch, it is important to know whether your change to this repository is a release (breaking changes), a feature (new functionality), or a patch (bug fixes). Based on that, name your branch with one of the following prefixes (see the version mapping below):
+
+- `release/`
+- `feature/`
+- `patch/`
+
+If the branch has already been created, rename it _before opening the pull request_.
+
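+For reference, the release workflow in this PR derives the next version from the branch prefix. Assuming a (hypothetical) current tag of `v1.2.3`, the next tag would be:
+
+```
+release/*  ->  v2.0.0  (major bump)
+feature/*  ->  v1.3.0  (minor bump)
+patch/*    ->  v1.2.4  (patch bump)
+```
+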
## **Table of Contents**
- [Exporting the Ledger Chain](#exporting-the-ledger-chain)
- - [Command Reference](#command-reference)
- - [Bucket List Commands](#bucket-list-commands)
- - [export_accounts](#export_accounts)
- - [export_offers](#export_offers)
- - [export_trustlines](#export_trustlines)
- - [export_claimable_balances](#export_claimable_balances)
- - [export_pools](#export_pools)
- - [export_signers](#export_signers)
- - [export_contract_data (futurenet, testnet)](#export_contract_data)
- - [export_contract_code (futurenet, testnet)](#export_contract_code)
- - [export_config_settings (futurenet, testnet)](#export_config_settings)
- - [export_ttl (futurenet, testnet)](#export_ttl)
- - [History Archive Commands](#history-archive-commands)
- - [export_ledgers](#export_ledgers)
- - [export_transactions](#export_transactions)
- - [export_operations](#export_operations)
- - [export_effects](#export_effects)
- - [export_assets](#export_assets)
- - [export_trades](#export_trades)
- - [export_diagnostic_events (futurenet, testnet)](#export_diagnostic_events)
- - [Stellar Core Commands](#stellar-core-commands)
- - [export_ledger_entry_changes](#export_ledger_entry_changes)
- - [export_orderbooks (unsupported)](#export_orderbooks-unsupported)
- - [Utility Commands](#utility-commands)
- - [get_ledger_range_from_times](#get_ledger_range_from_times)
+- [Command Reference](#command-reference)
+ - [Export Commands](#export-commands)
+ - [export_ledgers](#export_ledgers)
+ - [export_transactions](#export_transactions)
+ - [export_operations](#export_operations)
+ - [export_effects](#export_effects)
+ - [export_assets](#export_assets)
+ - [export_trades](#export_trades)
+ - [export_diagnostic_events](#export_diagnostic_events)
+ - [export_ledger_entry_changes](#export_ledger_entry_changes)
+ - [Utility Commands](#utility-commands)
+ - [get_ledger_range_from_times](#get_ledger_range_from_times)
- [Schemas](#schemas)
- [Extensions](#extensions)
- [Adding New Commands](#adding-new-commands)
-
+
+---
-# Exporting the Ledger Chain
+# **Exporting the Ledger Chain**
## **Docker**
+
 1. Download and install the latest version of [Docker](https://www.docker.com/get-started)
-2. Pull the stellar-etl Docker image: `docker pull stellar/stellar-etl`
-3. Run the Docker images with the desired stellar-etl command: `docker run stellar/stellar-etl stellar-etl [etl-command] [etl-command arguments]`
+2. Pull the latest stellar-etl Docker image: `docker pull stellar/stellar-etl:latest`
+3. Run the Docker image with the desired stellar-etl command: `docker run stellar/stellar-etl:latest stellar-etl [etl-command] [etl-command arguments]`
## **Manual Installation**
-1. Install Golang v1.19.0 or later: https://golang.org/dl/
+1. Install Golang v1.22.1 or later: https://golang.org/dl/
2. Ensure that your Go bin has been added to the PATH env variable: `export PATH=$PATH:$(go env GOPATH)/bin`
-3. Download and install Stellar-Core v19.0.0 or later: https://github.com/stellar/stellar-core/blob/master/INSTALL.md
-
-4. Run `go get github.com/stellar/stellar-etl` to install the ETL
-
+3. If using captive-core, download and install Stellar-Core v20.0.0 or later: https://github.com/stellar/stellar-core/blob/master/INSTALL.md
+4. Run `go install github.com/stellar/stellar-etl@latest` to install the ETL
 5. Run export commands to export information about the ledger
-## **Command Reference**
-- [Bucket List Commands](#bucket-list-commands)
- - [export_accounts](#export_accounts)
- - [export_offers](#export_offers)
- - [export_trustlines](#export_trustlines)
- - [export_claimable_balances](#export_claimable_balances)
- - [export_pools](#export_pools)
- - [export_signers](#export_signers)
- - [export_contract_data](#export_contract_data)
- - [export_contract_code](#export_contract_code)
- - [export_config_settings](#export_config_settings)
- - [export_ttl](#export_ttl)
-- [History Archive Commands](#history-archive-commands)
- - [export_ledgers](#export_ledgers)
- - [export_transactions](#export_transactions)
- - [export_operations](#export_operations)
- - [export_effects](#export_effects)
- - [export_assets](#export_assets)
- - [export_trades](#export_trades)
- - [export_diagnostic_events](#export_diagnostic_events)
- - [Stellar Core Commands](#stellar-core-commands)
- - [export_orderbooks (unsupported)](#export_orderbooks-unsupported)
- - [Utility Commands](#utility-commands)
- - [get_ledger_range_from_times](#get_ledger_range_from_times)
-
-Every command accepts a `-h` parameter, which provides a help screen containing information about the command, its usage, and its flags.
-
-Commands have the option to read from testnet with the `--testnet` flag, from futurenet with the `--futurenet` flag, and defaults to reading from mainnet without any flags.
-> *_NOTE:_* Adding both flags will default to testnet. Each stellar-etl command can only run from one network at a time.
-
-
-
-***
-
-## **Bucket List Commands**
-
-These commands use the bucket list in order to ingest large amounts of data from the history of the stellar ledger. If you are trying to read large amounts of information in order to catch up to the current state of the ledger, these commands provide a good way to catchup quickly. However, they don't allow for custom start-ledger values. For updating within a user-defined range, see the Stellar Core commands.
-
-> *_NOTE:_* In order to get information within a specified ledger range for bucket list commands, see the export_ledger_entry_changes command.
-
-
-
-### **export_accounts**
-
-```bash
-> stellar-etl export_accounts --end-ledger 500000 --output exported_accounts.txt
-```
-
-Exports historical account data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get account information within a specified ledger range, see the export_ledger_entry_changes command.
+## **Manual Build for Local Development**
-
-
-### **export_offers**
-
-```bash
-> stellar-etl export_offers --end-ledger 500000 --output exported_offers.txt
-```
-
-Exports historical offer data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get offer information within a specified ledger range, see the export_ledger_entry_changes command.
-
-
-
-### **export_trustlines**
-
-```bash
-> stellar-etl export_trustlines --end-ledger 500000 --output exported_trustlines.txt
-```
-
-Exports historical trustline data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get trustline information within a specified ledger range, see the export_ledger_entry_changes command.
+1. Clone this repo: `git clone https://github.com/stellar/stellar-etl`
+2. Build stellar-etl with `go build`
+3. Run export commands to export information about the ledger
-
-
-### **export_claimable_balances**
+> _*Note:*_ If using the GCS datastore, you can run the following to set the GCP credentials to use in your shell:
-```bash
-> stellar-etl export_claimable_balances --end-ledger 500000 --output exported_claimable_balances.txt
```
-
-Exports claimable balances data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get claimable balances information within a specified ledger range, see the export_ledger_entry_changes command.
-
-
-
-### **export_pools**
-
-```bash
-> stellar-etl export_pools --end-ledger 500000 --output exported_pools.txt
+gcloud auth login
+gcloud config set project dev-hubble
+gcloud auth application-default login
```
-Exports historical liquidity pools data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get liquidity pools information within a specified ledger range, see the export_ledger_entry_changes command.
+> _*Note:*_ Instructions for installing gcloud can be found [here](https://cloud.google.com/sdk/docs/install-sdk)
-### **export_signers**
-
-```bash
-> stellar-etl export_signers --end-ledger 500000 --output exported_signers.txt
-```
+---
-Exports historical account signers data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get account signers information within a specified ledger range, see the export_ledger_entry_changes command.
+# **Command Reference**
-
+- [Export Commands](#export-commands)
+ - [export_ledgers](#export_ledgers)
+ - [export_transactions](#export_transactions)
+ - [export_operations](#export_operations)
+ - [export_effects](#export_effects)
+ - [export_assets](#export_assets)
+ - [export_trades](#export_trades)
+ - [export_diagnostic_events](#export_diagnostic_events)
+ - [export_ledger_entry_changes](#export_ledger_entry_changes)
+- [Utility Commands](#utility-commands)
+ - [get_ledger_range_from_times](#get_ledger_range_from_times)
-### **export_contract_data**
-
-```bash
-> stellar-etl export_contract_data --end-ledger 500000 --output export_contract_data.txt
-```
-
-Exports historical contract data data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get contract data information within a specified ledger range, see the export_ledger_entry_changes command.
-
-
-
-### **export_contract_code**
+Every command accepts a `-h` parameter, which provides a help screen containing information about the command, its usage, and its flags.
-```bash
-> stellar-etl export_contract_code --end-ledger 500000 --output export_contract_code.txt
-```
+Commands can read from testnet with the `--testnet` flag or from futurenet with the `--futurenet` flag; without either flag, they default to reading from mainnet.
-Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get contract code information within a specified ledger range, see the export_ledger_entry_changes command.
+> _*NOTE:*_ Adding both flags will default to testnet. Each stellar-etl command can only run from one network at a time.
-### **export_config_settings**
+---
-```bash
-> stellar-etl export_config_settings --end-ledger 500000 --output export_config_settings.txt
-```
+## **Export Commands**
-Exports historical config settings data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get config settings data information within a specified ledger range, see the export_ledger_entry_changes command.
+These commands export information using the [Ledger Exporter](https://github.com/stellar/go/blob/master/exp/services/ledgerexporter/README.md) output files stored in a specified datastore (the [datastore](https://github.com/stellar/go/tree/master/support/datastore) package currently only supports GCS). Users provide a start and end ledger range, and the commands in this category export everything that occurred within that range. All ranges are inclusive.
-
+> _*NOTE:*_ The datastore must contain the expected compressed LedgerCloseMetaBatch XDR binary files as exported from [Ledger Exporter](https://github.com/stellar/go/blob/master/exp/services/ledgerexporter/README.md#exported-files).
-### **export_ttl**
+#### Common Flags
-```bash
-> stellar-etl export_ttl --end-ledger 500000 --output export_ttl.txt
-```
+| Flag | Description | Default |
+| -------------- | --------------------------------------------------------------------------------------------- | ----------------------- |
+| start-ledger | The ledger sequence number for the beginning of the export period. Defaults to genesis ledger | 2 |
+| end-ledger | The ledger sequence number for the end of the export range | 0 |
+| strict-export | If set, transform errors will be fatal | false |
+| testnet | If set, will connect to Testnet instead of Pubnet | false |
+| futurenet | If set, will connect to Futurenet instead of Pubnet | false |
+| extra-fields | Additional fields to append to output jsons. Used for appending metadata | --- |
+| captive-core | If set, run captive core to retrieve data. Otherwise use TxMeta file datastore | false |
+| datastore-path | Datastore bucket path to read txmeta files from | ledger-exporter/ledgers |
+| buffer-size | Buffer size sets the max limit for the number of txmeta files that can be held in memory | 1000 |
+| num-workers | Number of workers to spawn that read txmeta files from the datastore | 5 |
+| retry-limit | Datastore GetLedger retry limit | 3 |
+| retry-wait | Time in seconds to wait for GetLedger retry | 5 |
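+
+For illustration only, a hypothetical `export_ledgers` run that sets a few of these common flags might look like this (the ledger range and datastore path are placeholders):
+
+```bash
+> stellar-etl export_ledgers \
+--start-ledger 1000 --end-ledger 500000 \
+--datastore-path ledger-exporter/ledgers \
+--num-workers 5 --retry-limit 3 \
+--output exported_ledgers.txt
+```
+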
-Exports historical expiration data from the genesis ledger to the provided end-ledger to an output file. The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it should be used in an initial data dump. In order to get expiration information within a specified ledger range, see the export_ledger_entry_changes command.
+> _*NOTE:*_ Using captive-core requires a Stellar Core instance that is v20.0.0 or later. The commands use the Core instance to retrieve information about changes from the ledger. More information about Stellar ledgers can be found [here](https://developers.stellar.org/network/horizon/api-reference/resources).
+>
+> As the Stellar network grows, the Stellar Core instance has to catch up on an increasingly large amount of information. This catch-up process can add some overhead to the commands in this category. To avoid this overhead, prefer processing larger ranges instead of many small ones, or use unbounded mode.
+>
+> Recommended resources for running captive-core within a Kubernetes pod:
+>
+> ```
+> {cpu: 3.5, memory: 20Gi, ephemeral-storage: 12Gi}
+> ```
-***
-
-## **History Archive Commands**
-
-These commands export information using the history archives. This allows users to provide a start and end ledger range. The commands in this category export a list of everything that occurred within the provided range. All of the ranges are inclusive.
-
-> *_NOTE:_* Commands except `export_ledgers` and `export_assets` also require Captive Core to export data.
-
-
+---
### **export_ledgers**
@@ -213,10 +137,12 @@ These commands export information using the history archives. This allows users
--end-ledger 500000 --output exported_ledgers.txt
```
-This command exports ledgers within the provided range.
+This command exports ledgers within the provided range.
+---
+
### **export_transactions**
```bash
@@ -228,6 +154,8 @@ This command exports transactions within the provided range.
+---
+
### **export_operations**
```bash
@@ -239,6 +167,8 @@ This command exports operations within the provided range.
+---
+
### **export_effects**
```bash
@@ -250,7 +180,10 @@ This command exports effects within the provided range.
+---
+
### **export_assets**
+
```bash
> stellar-etl export_assets \
--start-ledger 1000 \
@@ -261,7 +194,10 @@ Exports the assets that are created from payment operations over a specified led
+---
+
### **export_trades**
+
```bash
> stellar-etl export_trades \
--start-ledger 1000 \
@@ -272,7 +208,10 @@ Exports trade data within the specified range to an output file
+---
+
### **export_diagnostic_events**
+
```bash
> stellar-etl export_diagnostic_events \
--start-ledger 1000 \
@@ -283,15 +222,7 @@ Exports diagnostic events data within the specified range to an output file
-***
-
-## **Stellar Core Commands**
-
-These commands require a Stellar Core instance that is v19.0.0 or later. The commands use the Core instance to retrieve information about changes from the ledger. These changes can be in the form of accounts, offers, trustlines, claimable balances, liquidity pools, or account signers.
-
-As the Stellar network grows, the Stellar Core instance has to catch up on an increasingly large amount of information. This catch-up process can add some overhead to the commands in this category. In order to avoid this overhead, run prefer processing larger ranges instead of many small ones, or use unbounded mode.
-
-
+---
### **export_ledger_entry_changes**
@@ -302,82 +233,79 @@ As the Stellar network grows, the Stellar Core instance has to catch up on an in
This command exports ledger changes within the provided ledger range. Flags can filter which ledger entry types are exported. If no data type flags are set, then by default all types are exported. If any are set, it is assumed that the others should not be exported.
-Changes are exported in batches of a size defined by the `batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points for the nodes on the network, so it is beneficial to export in multiples of 64.
+Changes are exported in batches of a size defined by the `--batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points for the nodes on the network, so it is beneficial to export in multiples of 64.
This command has two modes: bounded and unbounded.
#### **Bounded**
- If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down.
-
-#### **Unbounded**
-If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, the Stellar Core connects to the Stellar network and processes new changes as they occur on the network. Since the changes are continually exported in batches, this process can be continually run in the background in order to avoid the overhead of closing and starting new Stellar Core instances.
-
-
-### **export_orderbooks (unsupported)**
+If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down.
-```bash
-> stellar-etl export_orderbooks --start-ledger 1000 \
---end-ledger 500000 --output exported_orderbooks_folder/
-```
+#### **Unbounded (Currently Unsupported)**
-> *_NOTE:_* This is an expermental feature and is currently unsupported.
+If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, stellar-etl will block and wait for the next sequentially written ledger file in the datastore. Since the changes are continually exported in batches, this process can run in the background to avoid the overhead of stopping and starting new stellar-etl instances.
-This command exports orderbooks within the provided ledger range. Since exporting complete orderbooks at every single ledger would require an excessive amount of storage space, the output is normalized. Each batch that is exported contains multiple files, namely: `dimAccounts.txt`, `dimOffers.txt`, `dimMarkets.txt`, and `factEvents.txt`. The dim files relate a data structure to an ID. `dimMarkets`, for example, contains the buying and selling assets of a market, as well as the ID for that market. That ID is used in other places as a replacement for the full market information. This normalization process saves a significant amount of space (roughly 90% in our benchmarks). The `factEvents` file connects ledger numbers to the offer IDs that were present at that ledger.
+The following ledger entry type flags can be used to filter which data is exported (see the example after the list):
-Orderbooks are exported in batches of a size defined by the `batch-size` flag. By default, the batch-size parameter is set to 64 ledgers, which corresponds to a five minute period of time. This batch size is convenient because checkpoint ledgers are created every 64 ledgers. Checkpoint ledgers act as anchoring points in that once they are available, so are the previous 63 nodes. It is beneficial to export in multiples of 64.
-
-This command has two modes: bounded and unbounded.
-
-#### **Bounded**
- If both a start and end ledger are provided, then the command runs in a bounded mode. This means that once all the ledgers in the range are processed and exported, the command shuts down.
-
-#### **Unbounded**
-If only a start ledger is provided, then the command runs in an unbounded fashion starting from the provided ledger. In this mode, the Stellar Core connects to the Stellar network and processes new orderbooks as they occur on the network. Since the changes are continually exported in batches, this process can be continually run in the background in order to avoid the overhead of closing and starting new Stellar Core instances.
+- export-accounts
+- export-trustlines
+- export-offers
+- export-pools
+- export-balances
+- export-contract-code
+- export-contract-data
+- export-config-settings
+- export-ttl
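+
+For example, a hypothetical bounded run that exports only account and trustline changes might look like this (the ledger range and output folder are placeholders):
+
+```bash
+> stellar-etl export_ledger_entry_changes \
+--start-ledger 1000 --end-ledger 500000 \
+--export-accounts --export-trustlines \
+--output exported_changes_folder/
+```
+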
-***
+---
## **Utility Commands**
+These commands aid in the usage of [Export Commands](#export-commands).
+
### **get_ledger_range_from_times**
+
```bash
> stellar-etl get_ledger_range_from_times \
--start-time 2019-09-13T23:00:00+00:00 \
--end-time 2019-09-14T13:35:10+00:00 --output exported_range.txt
```
-This command exports takes in a start and end time and converts it to a ledger range. The ledger range that is returned will be the smallest possible ledger range that completely covers the provided time period.
+This command takes in a start and end time and converts it to a ledger range. The ledger range that is returned will be the smallest possible ledger range that completely covers the provided time period.
-
+
+---
# Schemas
See https://github.com/stellar/stellar-etl/blob/master/internal/transform/schema.go for the schemas of the data structures that are outputted by the ETL.
-
+---
+
# Extensions
+
This section covers some possible extensions or further work that can be done.
## **Adding New Commands**
+
+In general, in order to add a new command, you need to add the following files (an example workflow is sketched at the end of this section):
- - `export_new_data_structure.go` in the `cmd` folder
- - This file can be generated with cobra by calling: `cobra add {command}`
- - This file will parse flags, create output files, get the transformed data from the input package, and then export the data.
- - `export_new_data_structure_test.go` in the `cmd` folder
- - This file will contain some tests for the newly added command. The `runCLI` function does most of the heavy lifting. All the tests need is the command arguments to test and the desired output.
- - Test data should be stored in the `testdata/new_data_structure` folder
- - `new_data_structure.go` in the `internal/input` folder
- - This file will contain the methods needed to extract the new data structure from wherever it is located. This may be the history archives, the bucket list, or a captive core instance.
- - This file should extract the data and transform it, and return the transformed data.
- - If working with captive core, the methods need to work in the background. There should be methods that export batches of data and send them to a channel. There should be other methods that read from the channel and transform the data so it can be exported.
+- `export_new_data_structure.go` in the `cmd` folder
+ - This file can be generated with cobra by calling: `cobra add {command}`
+ - This file will parse flags, create output files, get the transformed data from the input package, and then export the data.
+- `export_new_data_structure_test.go` in the `cmd` folder
+ - This file will contain some tests for the newly added command. The `runCLI` function does most of the heavy lifting. All the tests need is the command arguments to test and the desired output.
+ - Test data should be stored in the `testdata/new_data_structure` folder
+- `new_data_structure.go` in the `internal/input` folder
+ - This file will contain the methods needed to extract the new data structure from wherever it is located. This may be the history archives, the bucket list, a captive core instance, or a datastore.
+ - If working with captive core, the methods need to work in the background. There should be methods that export batches of data and send them to a channel. There should be other methods that read from the channel and transform the data so it can be exported.
- `new_data_structure.go` in the `internal/transform` folder
- - This file will contain the methods needed to transform the extracted data into a form that is suitable for BigQuery.
- - The struct definition for the transformed object should be stored in `schemas.go` in the `internal/transform` folder.
+ - This file will contain the methods needed to transform the extracted data into a form that is suitable for BigQuery.
+ - The struct definition for the transformed object should be stored in `schemas.go` in the `internal/transform` folder.
A good number of common methods are already written and stored in the `util` package.
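+
+As a rough illustration (the command and test names below are placeholders), the workflow for scaffolding and testing a new command might look like this:
+
+```bash
+# Scaffold the new command file in the cmd folder (assumes the cobra generator is installed)
+> cobra add export_new_data_structure
+
+# Build the ETL and run the tests for the new command
+> go build
+> go test -v ./cmd -run TestExportNewDataStructure
+```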
diff --git a/cmd/export_account_signers.go b/cmd/export_account_signers.go
deleted file mode 100644
index 73d01517..00000000
--- a/cmd/export_account_signers.go
+++ /dev/null
@@ -1,78 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var accountSignersCmd = &cobra.Command{
- Use: "export_signers",
- Short: "Exports the account signers data.",
- Long: `Exports historical account signers data from the genesis ledger to the provided end-ledger to an output file.
-The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
-should be used in an initial data dump. In order to get account information within a specified ledger range, see
-the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- accounts, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read accounts: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- numSigners := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, acc := range accounts {
- if acc.AccountSignersChanged() {
- transformed, err := transform.TransformSigners(acc, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not json transform account signer: %v", err))
- numFailures += 1
- continue
- }
-
- for _, entry := range transformed {
- numBytes, err := exportEntry(entry, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err))
- numFailures += 1
- continue
- }
- numSigners += 1
- totalNumBytes += numBytes
- }
- }
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(numSigners, numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(accountSignersCmd)
- utils.AddCommonFlags(accountSignersCmd.Flags())
- utils.AddBucketFlags("signers", accountSignersCmd.Flags())
- utils.AddCloudStorageFlags(accountSignersCmd.Flags())
- accountSignersCmd.MarkFlagRequired("end-ledger")
-}
diff --git a/cmd/export_account_signers_test.go b/cmd/export_account_signers_test.go
deleted file mode 100644
index 33193444..00000000
--- a/cmd/export_account_signers_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package cmd
-
-import (
-"testing"
-)
-
-func TestExportSigners(t *testing.T) {
- tests := []cliTest{
- {
- name: "signers: bucket list with exact checkpoint",
- args: []string{"export_signers", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")},
- golden: "bucket_read_exact.golden",
- wantErr: nil,
- sortForComparison: true,
- },
- {
- name: "signers: bucket list with end not on checkpoint",
- args: []string{"export_signers", "-e", "80210", "-o", gotTestDir(t, "bucket_read_off.txt")},
- golden: "bucket_read_off.golden",
- wantErr: nil,
- sortForComparison: true,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/signers/")
- }
-}
-
diff --git a/cmd/export_accounts.go b/cmd/export_accounts.go
deleted file mode 100644
index 9c6d7f76..00000000
--- a/cmd/export_accounts.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var accountsCmd = &cobra.Command{
- Use: "export_accounts",
- Short: "Exports the account data.",
- Long: `Exports historical account data from the genesis ledger to the provided end-ledger to an output file.
-The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
-should be used in an initial data dump. In order to get account information within a specified ledger range, see
-the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- accounts, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeAccount, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read accounts: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, acc := range accounts {
- transformed, err := transform.TransformAccount(acc, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not json transform account: %v", err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export entry: %v", err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(accounts), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(accountsCmd)
- utils.AddCommonFlags(accountsCmd.Flags())
- utils.AddBucketFlags("accounts", accountsCmd.Flags())
- utils.AddCloudStorageFlags(accountsCmd.Flags())
- accountsCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_accounts_test.go b/cmd/export_accounts_test.go
deleted file mode 100644
index c1981fbf..00000000
--- a/cmd/export_accounts_test.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportAccounts(t *testing.T) {
- tests := []cliTest{
- {
- name: "accounts: bucket list with exact checkpoint",
- args: []string{"export_accounts", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")},
- golden: "bucket_read_exact.golden",
- wantErr: nil,
- },
- {
- name: "accounts: bucket list with end not on checkpoint",
- args: []string{"export_accounts", "-e", "80210", "-o", gotTestDir(t, "bucket_read_off.txt")},
- golden: "bucket_read_off.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/accounts/")
- }
-}
diff --git a/cmd/export_all_history.go b/cmd/export_all_history.go
deleted file mode 100644
index 2d4f12f7..00000000
--- a/cmd/export_all_history.go
+++ /dev/null
@@ -1,207 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/toid"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-)
-
-var allHistoryCmd = &cobra.Command{
- Use: "export_all_history",
- Short: "Exports all stellar network history.",
- Long: `Exports historical stellar network data between provided start-ledger/end-ledger to output files.
-This is a temporary command used to reduce the amount of requests to history archives
-in order to mitigate egress costs for the entity hosting history archives.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
-
- allHistory, err := input.GetAllHistory(startNum, endNum, limit, env)
- if err != nil {
- cmdLogger.Fatal("could not read all history: ", err)
- }
-
- cmdLogger.Info("start doing other exports")
- getOperations(allHistory.Operations, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_operations.txt", env)
- getTrades(allHistory.Trades, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_trades.txt")
- getEffects(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_effects.txt", env)
- getTransactions(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_transactions.txt")
- getDiagnosticEvents(allHistory.Ledgers, extra, cloudStorageBucket, cloudCredentials, cloudProvider, path+"exported_diagnostic_events.txt")
- cmdLogger.Info("done doing other exports")
- },
-}
-
-func getOperations(operations []input.OperationTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string, env utils.EnvironmentDetails) {
- outFileOperations := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- for _, transformInput := range operations {
- transformed, err := transform.TransformOperation(transformInput.Operation, transformInput.OperationIndex, transformInput.Transaction, transformInput.LedgerSeqNum, transformInput.LedgerCloseMeta, env.NetworkPassphrase)
- if err != nil {
- txIndex := transformInput.Transaction.Index
- cmdLogger.LogError(fmt.Errorf("could not transform operation %d in transaction %d in ledger %d: %v", transformInput.OperationIndex, txIndex, transformInput.LedgerSeqNum, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFileOperations, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export operation: %v", err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFileOperations.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(operations), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-}
-
-func getTrades(trades []input.TradeTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) {
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- for _, tradeInput := range trades {
- trades, err := transform.TransformTrade(tradeInput.OperationIndex, tradeInput.OperationHistoryID, tradeInput.Transaction, tradeInput.CloseTime)
- if err != nil {
- parsedID := toid.Parse(tradeInput.OperationHistoryID)
- cmdLogger.LogError(fmt.Errorf("from ledger %d, transaction %d, operation %d: %v", parsedID.LedgerSequence, parsedID.TransactionOrder, parsedID.OperationOrder, err))
- numFailures += 1
- continue
- }
-
- for _, transformed := range trades {
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(err)
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(trades), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-}
-
-func getEffects(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string, env utils.EnvironmentDetails) {
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- for _, transformInput := range transactions {
- LedgerSeq := uint32(transformInput.LedgerHistory.Header.LedgerSeq)
- effects, err := transform.TransformEffect(transformInput.Transaction, LedgerSeq, transformInput.LedgerCloseMeta, env.NetworkPassphrase)
- if err != nil {
- txIndex := transformInput.Transaction.Index
- cmdLogger.Errorf("could not transform transaction %d in ledger %d: %v", txIndex, LedgerSeq, err)
- numFailures += 1
- continue
- }
-
- for _, transformed := range effects {
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(err)
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(transactions), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-}
-
-func getTransactions(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) {
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- for _, transformInput := range transactions {
- transformed, err := transform.TransformTransaction(transformInput.Transaction, transformInput.LedgerHistory)
- if err != nil {
- ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq
- cmdLogger.LogError(fmt.Errorf("could not transform transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(transactions), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-}
-
-func getDiagnosticEvents(transactions []input.LedgerTransformInput, extra map[string]string, cloudStorageBucket string, cloudCredentials string, cloudProvider string, path string) {
- outFile := mustOutFile(path)
- numFailures := 0
- for _, transformInput := range transactions {
- transformed, err, ok := transform.TransformDiagnosticEvent(transformInput.Transaction, transformInput.LedgerHistory)
- if err != nil {
- ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq
- cmdLogger.LogError(fmt.Errorf("could not transform diagnostic events in transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq))
- numFailures += 1
- continue
- }
-
- if !ok {
- continue
- }
- for _, diagnosticEvent := range transformed {
- _, err := exportEntry(diagnosticEvent, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export diagnostic event: %v", err))
- numFailures += 1
- continue
- }
- }
- }
-
- outFile.Close()
-
- printTransformStats(len(transactions), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-}
-
-func init() {
- rootCmd.AddCommand(allHistoryCmd)
- utils.AddCommonFlags(allHistoryCmd.Flags())
- utils.AddArchiveFlags("", allHistoryCmd.Flags())
- utils.AddCloudStorageFlags(allHistoryCmd.Flags())
- allHistoryCmd.MarkFlagRequired("end-ledger")
-}
diff --git a/cmd/export_assets.go b/cmd/export_assets.go
index 55647dab..8326c76b 100644
--- a/cmd/export_assets.go
+++ b/cmd/export_assets.go
@@ -16,14 +16,22 @@ var assetsCmd = &cobra.Command{
Long: `Exports the assets that are created from payment operations over a specified ledger range`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
+ env := utils.GetEnvironmentDetails(commonArgs)
outFile := mustOutFile(path)
- paymentOps, err := input.GetPaymentOperations(startNum, endNum, limit, isTest, isFuture)
+ var paymentOps []input.AssetTransformInput
+ var err error
+
+ if commonArgs.UseCaptiveCore {
+ paymentOps, err = input.GetPaymentOperationsHistoryArchive(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
+ } else {
+ paymentOps, err = input.GetPaymentOperations(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
+ }
if err != nil {
cmdLogger.Fatal("could not read asset: ", err)
}
@@ -47,7 +55,7 @@ var assetsCmd = &cobra.Command{
}
seenIDs[transformed.AssetID] = true
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.Error(err)
numFailures += 1
diff --git a/cmd/export_claimable_balances.go b/cmd/export_claimable_balances.go
deleted file mode 100644
index 89af046c..00000000
--- a/cmd/export_claimable_balances.go
+++ /dev/null
@@ -1,82 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var claimableBalancesCmd = &cobra.Command{
- Use: "export_claimable_balances",
- Short: "Exports the data on claimable balances made from the genesis ledger to a specified endpoint.",
- Long: `Exports historical offer data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- balances, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeClaimableBalance, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read balances: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, balance := range balances {
- transformed, err := transform.TransformClaimableBalance(balance, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform balance %+v: %v", balance, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export balance %+v: %v", balance, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(balances), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(claimableBalancesCmd)
- utils.AddCommonFlags(claimableBalancesCmd.Flags())
- utils.AddBucketFlags("claimable_balances", claimableBalancesCmd.Flags())
- utils.AddCloudStorageFlags(claimableBalancesCmd.Flags())
- claimableBalancesCmd.MarkFlagRequired("end-ledger")
-
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_claimable_balances_test.go b/cmd/export_claimable_balances_test.go
deleted file mode 100644
index fafcaa64..00000000
--- a/cmd/export_claimable_balances_test.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportClaimableBalances(t *testing.T) {
- tests := []cliTest{
- {
- name: "claimable balances",
- args: []string{"export_claimable_balances", "-e", "32878607", "-o", gotTestDir(t, "bucket_read.txt")},
- golden: "bucket_read.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/claimable_balances/")
- }
-}
diff --git a/cmd/export_config_setting.go b/cmd/export_config_setting.go
deleted file mode 100644
index 4de5d087..00000000
--- a/cmd/export_config_setting.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var configSettingCmd = &cobra.Command{
- Use: "export_config_setting",
- Short: "Exports the config setting information.",
- Long: `Exports historical config settings data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- settings, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeConfigSetting, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("Error getting ledger entries: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, setting := range settings {
- transformed, err := transform.TransformConfigSetting(setting, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform config setting %+v: %v", setting, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export config setting %+v: %v", setting, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(settings), numFailures)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-
- },
-}
-
-func init() {
- rootCmd.AddCommand(configSettingCmd)
- utils.AddCommonFlags(configSettingCmd.Flags())
- utils.AddBucketFlags("config_settings", configSettingCmd.Flags())
- utils.AddCloudStorageFlags(configSettingCmd.Flags())
- configSettingCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_config_setting_test.go b/cmd/export_config_setting_test.go
deleted file mode 100644
index a8114429..00000000
--- a/cmd/export_config_setting_test.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportConfigSetting(t *testing.T) {
- t.Skip("Skipping due to unstable data in Futurenet")
- // TODO: find ledger with data and create testdata
- tests := []cliTest{
- {
- name: "config setting",
- args: []string{"export_config_setting", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")},
- golden: "bucket_read.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/config_setting/")
- }
-}
diff --git a/cmd/export_contract_code.go b/cmd/export_contract_code.go
deleted file mode 100644
index 07e2ffee..00000000
--- a/cmd/export_contract_code.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var codeCmd = &cobra.Command{
- Use: "export_contract_code",
- Short: "Exports the contract code information.",
- Long: `Exports historical contract code data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- codes, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeContractCode, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("Error getting ledger entries: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, code := range codes {
- transformed, err := transform.TransformContractCode(code, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform contract code %+v: %v", code, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export contract code %+v: %v", code, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(codes), numFailures)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-
- },
-}
-
-func init() {
- rootCmd.AddCommand(codeCmd)
- utils.AddCommonFlags(codeCmd.Flags())
- utils.AddBucketFlags("contract_code", codeCmd.Flags())
- utils.AddCloudStorageFlags(codeCmd.Flags())
- codeCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_contract_code_test.go b/cmd/export_contract_code_test.go
deleted file mode 100644
index 74f51285..00000000
--- a/cmd/export_contract_code_test.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportContractCode(t *testing.T) {
- t.Skip("Skipping due to unstable data in Futurenet")
- // TODO: find ledger with data and create testdata
- tests := []cliTest{
- {
- name: "contract code",
- args: []string{"export_contract_code", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")},
- golden: "bucket_read.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/contract_code/")
- }
-}
diff --git a/cmd/export_contract_data.go b/cmd/export_contract_data.go
deleted file mode 100644
index fcd759d9..00000000
--- a/cmd/export_contract_data.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var dataCmd = &cobra.Command{
- Use: "export_contract_data",
- Short: "Exports the contract data information made from the genesis ledger to a specified endpoint.",
- Long: `Exports historical contract data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- datas, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeContractData, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("Error getting ledger entries: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, data := range datas {
- TransformContractData := transform.NewTransformContractDataStruct(transform.AssetFromContractData, transform.ContractBalanceFromContractData)
- transformed, err, ok := TransformContractData.TransformContractData(data, env.NetworkPassphrase, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform contract data %+v: %v", data, err))
- numFailures += 1
- continue
- }
-
- if !ok {
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export contract data %+v: %v", data, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(datas), numFailures)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-
- },
-}
-
-func init() {
- rootCmd.AddCommand(dataCmd)
- utils.AddCommonFlags(dataCmd.Flags())
- utils.AddBucketFlags("contract_data", dataCmd.Flags())
- utils.AddCloudStorageFlags(dataCmd.Flags())
- dataCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_contract_data_test.go b/cmd/export_contract_data_test.go
deleted file mode 100644
index 2e511e8e..00000000
--- a/cmd/export_contract_data_test.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportContractData(t *testing.T) {
- t.Skip("Skipping due to unstable data in Futurenet")
- // TODO: find ledger with data and create testdata
- tests := []cliTest{
- {
- name: "contract data",
- args: []string{"export_contract_data", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")},
- golden: "bucket_read.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/contract_data/")
- }
-}
diff --git a/cmd/export_contract_events.go b/cmd/export_contract_events.go
new file mode 100644
index 00000000..4bae06ff
--- /dev/null
+++ b/cmd/export_contract_events.go
@@ -0,0 +1,67 @@
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/sirupsen/logrus"
+ "github.com/spf13/cobra"
+ "github.com/stellar/stellar-etl/internal/input"
+ "github.com/stellar/stellar-etl/internal/transform"
+ "github.com/stellar/stellar-etl/internal/utils"
+)
+
+var contractEventsCmd = &cobra.Command{
+ Use: "export_contract_events",
+ Short: "Exports the contract events over a specified range.",
+ Long: `Exports the contract events over a specified range to an output file.`,
+ Run: func(cmd *cobra.Command, args []string) {
+ cmdLogger.SetLevel(logrus.InfoLevel)
+ cmdArgs := utils.MustFlags(cmd.Flags(), cmdLogger)
+
+ // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 GetEnvironmentDetails should be refactored
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ env := utils.GetEnvironmentDetails(commonArgs)
+
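+		// Contract events are emitted per transaction, so first fetch every transaction in the requested ledger range.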
+ transactions, err := input.GetTransactions(cmdArgs.StartNum, cmdArgs.EndNum, cmdArgs.Limit, env, cmdArgs.UseCaptiveCore)
+ if err != nil {
+ cmdLogger.Fatal("could not read transactions: ", err)
+ }
+
+ outFile := mustOutFile(cmdArgs.Path)
+ numFailures := 0
+ for _, transformInput := range transactions {
+ transformed, err := transform.TransformContractEvent(transformInput.Transaction, transformInput.LedgerHistory)
+ if err != nil {
+ ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq
+				cmdLogger.LogError(fmt.Errorf("could not transform contract events in transaction %d in ledger %d: %v", transformInput.Transaction.Index, ledgerSeq, err))
+ numFailures += 1
+ continue
+ }
+
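+			// A single transaction can emit multiple contract events; export each one as its own entry.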
+ for _, contractEvent := range transformed {
+ _, err := exportEntry(contractEvent, outFile, cmdArgs.Extra)
+ if err != nil {
+ cmdLogger.LogError(fmt.Errorf("could not export contract event: %v", err))
+ numFailures += 1
+ continue
+ }
+ }
+ }
+
+ outFile.Close()
+
+ printTransformStats(len(transactions), numFailures)
+
+ maybeUpload(cmdArgs.Credentials, cmdArgs.Bucket, cmdArgs.Provider, cmdArgs.Path)
+ },
+}
+
+func init() {
+ rootCmd.AddCommand(contractEventsCmd)
+ utils.AddCommonFlags(contractEventsCmd.Flags())
+ utils.AddArchiveFlags("contract_events", contractEventsCmd.Flags())
+ utils.AddCloudStorageFlags(contractEventsCmd.Flags())
+
+ contractEventsCmd.MarkFlagRequired("start-ledger")
+ contractEventsCmd.MarkFlagRequired("end-ledger")
+}
diff --git a/cmd/export_diagnostic_events.go b/cmd/export_diagnostic_events.go
deleted file mode 100644
index 17655bd9..00000000
--- a/cmd/export_diagnostic_events.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-)
-
-var diagnosticEventsCmd = &cobra.Command{
- Use: "export_diagnostic_events",
- Short: "Exports the diagnostic events over a specified range.",
- Long: `Exports the diagnostic events over a specified range to an output file.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
-
- transactions, err := input.GetTransactions(startNum, endNum, limit, env)
- if err != nil {
- cmdLogger.Fatal("could not read transactions: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- for _, transformInput := range transactions {
- transformed, err, ok := transform.TransformDiagnosticEvent(transformInput.Transaction, transformInput.LedgerHistory)
- if err != nil {
- ledgerSeq := transformInput.LedgerHistory.Header.LedgerSeq
- cmdLogger.LogError(fmt.Errorf("could not transform diagnostic events in transaction %d in ledger %d: ", transformInput.Transaction.Index, ledgerSeq))
- numFailures += 1
- continue
- }
-
- if !ok {
- continue
- }
- for _, diagnosticEvent := range transformed {
- _, err := exportEntry(diagnosticEvent, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export diagnostic event: %v", err))
- numFailures += 1
- continue
- }
- }
- }
-
- outFile.Close()
-
- printTransformStats(len(transactions), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(diagnosticEventsCmd)
- utils.AddCommonFlags(diagnosticEventsCmd.Flags())
- utils.AddArchiveFlags("diagnostic_events", diagnosticEventsCmd.Flags())
- utils.AddCloudStorageFlags(diagnosticEventsCmd.Flags())
- diagnosticEventsCmd.MarkFlagRequired("end-ledger")
-
- /*
- Current flags:
- start-ledger: the ledger sequence number for the beginning of the export period
- end-ledger: the ledger sequence number for the end of the export range (*required)
-
- limit: maximum number of diagnostic events to export
- TODO: measure a good default value that ensures all diagnostic events within a 5 minute period will be exported with a single call
- The current max_tx_set_size is 1000 and there are 60 new ledgers in a 5 minute period:
- 1000*60 = 60000
-
- output-file: filename of the output file
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- start and end time as a replacement for start and end sequence numbers
- */
-}
diff --git a/cmd/export_effects.go b/cmd/export_effects.go
index 3fff5009..b93aaf1c 100644
--- a/cmd/export_effects.go
+++ b/cmd/export_effects.go
@@ -14,15 +14,15 @@ var effectsCmd = &cobra.Command{
Long: "Exports the effects data over a specified range to an output file.",
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ env := utils.GetEnvironmentDetails(commonArgs)
- transactions, err := input.GetTransactions(startNum, endNum, limit, env)
+ transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
if err != nil {
- cmdLogger.Fatalf("could not read transactions in [%d, %d] (limit=%d): %v", startNum, endNum, limit, err)
+ cmdLogger.Fatalf("could not read transactions in [%d, %d] (limit=%d): %v", startNum, commonArgs.EndNum, limit, err)
}
outFile := mustOutFile(path)
@@ -39,7 +39,7 @@ var effectsCmd = &cobra.Command{
}
for _, transformed := range effects {
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(err)
numFailures += 1
diff --git a/cmd/export_ledger_entry_changes.go b/cmd/export_ledger_entry_changes.go
index 21489aff..dfb4ae75 100644
--- a/cmd/export_ledger_entry_changes.go
+++ b/cmd/export_ledger_entry_changes.go
@@ -1,12 +1,14 @@
package cmd
import (
+ "context"
"fmt"
"math"
"os"
"path/filepath"
"github.com/spf13/cobra"
+ "github.com/stellar/go/ingest/ledgerbackend"
"github.com/stellar/go/xdr"
"github.com/stellar/stellar-etl/internal/input"
"github.com/stellar/stellar-etl/internal/transform"
@@ -17,23 +19,25 @@ var exportLedgerEntryChangesCmd = &cobra.Command{
Use: "export_ledger_entry_changes",
Short: "This command exports the changes in accounts, offers, trustlines and liquidity pools.",
Long: `This command instantiates a stellar-core instance and uses it to export about accounts, offers, trustlines and liquidity pools.
-The information is exported in batches determined by the batch-size flag. Each exported file will include the changes to the
+The information is exported in batches determined by the batch-size flag. Each exported file will include the changes to the
relevant data type that occurred during that batch.
-If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are
-confirmed by the Stellar network.
+If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are
+confirmed by the Stellar network.
If no data type flags are set, then by default all of them are exported. If any are set, it is assumed that the others should not
be exported.`,
Run: func(cmd *cobra.Command, args []string) {
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
+ env := utils.GetEnvironmentDetails(commonArgs)
- execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger)
+ _, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger)
exports := utils.MustExportTypeFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
+
err := os.MkdirAll(outputFolder, os.ModePerm)
if err != nil {
cmdLogger.Fatalf("unable to mkdir %s: %v", outputFolder, err)
@@ -46,44 +50,40 @@ be exported.`,
// If none of the export flags are set, then we assume that everything should be exported
allFalse := true
for _, value := range exports {
- if true == value {
+ if value {
allFalse = false
break
}
}
if allFalse {
- for export_name, _ := range exports {
+ for export_name := range exports {
exports[export_name] = true
}
}
- if configPath == "" && endNum == 0 {
+ if configPath == "" && commonArgs.EndNum == 0 {
cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)")
}
- execPath, err = filepath.Abs(execPath)
- if err != nil {
- cmdLogger.Fatal("could not get absolute filepath for stellar-core executable: ", err)
- }
-
- configPath, err = filepath.Abs(configPath)
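+		// Create the ledger backend that StreamChanges will read from; the backend type is selected by commonArgs.UseCaptiveCore.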
+ ctx := context.Background()
+ backend, err := utils.CreateLedgerBackend(ctx, commonArgs.UseCaptiveCore, env)
if err != nil {
- cmdLogger.Fatal("could not get absolute filepath for the config file: ", err)
+			cmdLogger.Fatal("error creating a ledger backend: ", err)
}
- core, err := input.PrepareCaptiveCore(execPath, configPath, startNum, endNum, env)
+ err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(startNum, commonArgs.EndNum))
if err != nil {
- cmdLogger.Fatal("error creating a prepared captive core instance: ", err)
+			cmdLogger.Fatal("error preparing ledger range for the ledger backend: ", err)
}
- if endNum == 0 {
- endNum = math.MaxInt32
+ if commonArgs.EndNum == 0 {
+ commonArgs.EndNum = math.MaxInt32
}
changeChan := make(chan input.ChangeBatch)
closeChan := make(chan int)
- go input.StreamChanges(core, startNum, endNum, batchSize, changeChan, closeChan, env, cmdLogger)
+ go input.StreamChanges(&backend, startNum, commonArgs.EndNum, batchSize, changeChan, closeChan, env, cmdLogger)
for {
select {
@@ -126,7 +126,7 @@ be exported.`,
}
transformedOutputs["accounts"] = append(transformedOutputs["accounts"], acc)
}
- if change.AccountSignersChanged() {
+ if utils.AccountSignersChanged(change) {
signers, err := transform.TransformSigners(change, changes.LedgerHeaders[i])
if err != nil {
entry, _, _, _ := utils.ExtractEntryFromChange(change)
@@ -252,7 +252,16 @@ be exported.`,
}
}
- err := exportTransformedData(batch.BatchStart, batch.BatchEnd, outputFolder, transformedOutputs, cloudCredentials, cloudStorageBucket, cloudProvider, extra)
+ err := exportTransformedData(
+ batch.BatchStart,
+ batch.BatchEnd,
+ outputFolder,
+ transformedOutputs,
+ cloudCredentials,
+ cloudStorageBucket,
+ cloudProvider,
+ commonArgs.Extra,
+ )
if err != nil {
cmdLogger.LogError(err)
continue
@@ -295,7 +304,6 @@ func init() {
utils.AddCloudStorageFlags(exportLedgerEntryChangesCmd.Flags())
exportLedgerEntryChangesCmd.MarkFlagRequired("start-ledger")
- exportLedgerEntryChangesCmd.MarkFlagRequired("core-executable")
/*
Current flags:
start-ledger: the ledger sequence number for the beginning of the export period
diff --git a/cmd/export_ledger_entry_changes_test.go b/cmd/export_ledger_entry_changes_test.go
index 8a518e1d..364c3fde 100644
--- a/cmd/export_ledger_entry_changes_test.go
+++ b/cmd/export_ledger_entry_changes_test.go
@@ -6,7 +6,7 @@ import (
)
const coreExecutablePath = "../stellar-core/src/stellar-core"
-const coreConfigPath = "./docker/stellar-core.cfg"
+const coreConfigPath = "/etl/docker/stellar-core.cfg"
func TestExportChanges(t *testing.T) {
diff --git a/cmd/export_ledger_transaction.go b/cmd/export_ledger_transaction.go
index b8dca36e..7d07b9ec 100644
--- a/cmd/export_ledger_transaction.go
+++ b/cmd/export_ledger_transaction.go
@@ -16,13 +16,13 @@ var ledgerTransactionCmd = &cobra.Command{
Long: `Exports the ledger_transaction transaction data over a specified range to an output file.`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ env := utils.GetEnvironmentDetails(commonArgs)
- ledgerTransaction, err := input.GetTransactions(startNum, endNum, limit, env)
+ ledgerTransaction, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
if err != nil {
cmdLogger.Fatal("could not read ledger_transaction: ", err)
}
@@ -39,7 +39,7 @@ var ledgerTransactionCmd = &cobra.Command{
continue
}
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err))
numFailures += 1
diff --git a/cmd/export_ledgers.go b/cmd/export_ledgers.go
index 0bec34d2..e1dce45b 100644
--- a/cmd/export_ledgers.go
+++ b/cmd/export_ledgers.go
@@ -16,12 +16,20 @@ var ledgersCmd = &cobra.Command{
Long: `Exports ledger data within the specified range to an output file. Encodes ledgers as JSON objects and exports them to the output file.`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
+ env := utils.GetEnvironmentDetails(commonArgs)
- ledgers, err := input.GetLedgers(startNum, endNum, limit, isTest, isFuture)
+ var ledgers []utils.HistoryArchiveLedgerAndLCM
+ var err error
+
+ if commonArgs.UseCaptiveCore {
+ ledgers, err = input.GetLedgersHistoryArchive(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
+ } else {
+ ledgers, err = input.GetLedgers(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
+ }
if err != nil {
cmdLogger.Fatal("could not read ledgers: ", err)
}
@@ -30,15 +38,15 @@ var ledgersCmd = &cobra.Command{
numFailures := 0
totalNumBytes := 0
- for i, lcm := range ledgers {
- transformed, err := transform.TransformLedger(lcm)
+ for i, ledger := range ledgers {
+ transformed, err := transform.TransformLedger(ledger.Ledger, ledger.LCM)
if err != nil {
cmdLogger.LogError(fmt.Errorf("could not json transform ledger %d: %s", startNum+uint32(i), err))
numFailures += 1
continue
}
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(fmt.Errorf("could not export ledger %d: %s", startNum+uint32(i), err))
numFailures += 1
diff --git a/cmd/export_ledgers_test.go b/cmd/export_ledgers_test.go
index 68d3f0f5..9a1e1f63 100644
--- a/cmd/export_ledgers_test.go
+++ b/cmd/export_ledgers_test.go
@@ -4,7 +4,7 @@ import (
"bytes"
"flag"
"fmt"
- "io/ioutil"
+ "io"
"log"
"os"
"os/exec"
@@ -121,7 +121,7 @@ func indexOf(l []string, s string) int {
return -1
}
-func sortByName(files []os.FileInfo) {
+func sortByName(files []os.DirEntry) {
sort.Slice(files, func(i, j int) bool {
return files[i].Name() < files[j].Name()
})
@@ -144,14 +144,14 @@ func runCLITest(t *testing.T, test cliTest, goldenFolder string) {
// If the output arg specified is a directory, concat the contents for comparison.
if stat.IsDir() {
- files, err := ioutil.ReadDir(outLocation)
+ files, err := os.ReadDir(outLocation)
if err != nil {
log.Fatal(err)
}
var buf bytes.Buffer
sortByName(files)
for _, f := range files {
- b, err := ioutil.ReadFile(filepath.Join(outLocation, f.Name()))
+ b, err := os.ReadFile(filepath.Join(outLocation, f.Name()))
if err != nil {
log.Fatal(err)
}
@@ -160,7 +160,7 @@ func runCLITest(t *testing.T, test cliTest, goldenFolder string) {
testOutput = buf.Bytes()
} else {
// If the output is written to a file, read the contents of the file for comparison.
- testOutput, err = ioutil.ReadFile(outLocation)
+ testOutput, err = os.ReadFile(outLocation)
if err != nil {
log.Fatal(err)
}
@@ -197,16 +197,6 @@ func extractErrorMsg(loggerOutput string) string {
return loggerOutput[errIndex : errIndex+endIndex]
}
-func removeCoreLogging(loggerOutput string) string {
- endIndex := strings.Index(loggerOutput, "{\"")
- // if there is no bracket, then nothing was exported except logs
- if endIndex == -1 {
- return ""
- }
-
- return loggerOutput[endIndex:]
-}
-
func getLastSeqNum(archiveURLs []string) uint32 {
num, err := utils.GetLatestLedgerSequence(archiveURLs)
if err != nil {
@@ -218,10 +208,10 @@ func getLastSeqNum(archiveURLs []string) uint32 {
func getGolden(t *testing.T, goldenFile string, actual string, update bool) (string, error) {
t.Helper()
f, err := os.OpenFile(goldenFile, os.O_RDWR, 0644)
- defer f.Close()
if err != nil {
return "", err
}
+ defer f.Close()
// If the update flag is true, clear the current contents of the golden file and write the actual output
// This is useful for when new tests or added or functionality changes that breaks current tests
@@ -239,7 +229,7 @@ func getGolden(t *testing.T, goldenFile string, actual string, update bool) (str
return actual, nil
}
- wantOutput, err := ioutil.ReadAll(f)
+ wantOutput, err := io.ReadAll(f)
if err != nil {
return "", err
}
diff --git a/cmd/export_liquidity_pools.go b/cmd/export_liquidity_pools.go
deleted file mode 100644
index 7130b3e1..00000000
--- a/cmd/export_liquidity_pools.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var poolsCmd = &cobra.Command{
- Use: "export_pools",
- Short: "Exports the liquidity pools data.",
- Long: `Exports historical liquidity pools data from the genesis ledger to the provided end-ledger to an output file.
-The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
-should be used in an initial data dump. In order to get liqudity pools information within a specified ledger range, see
-the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- pools, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeLiquidityPool, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read accounts: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, pool := range pools {
- transformed, err := transform.TransformPool(pool, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform pool %+v: %v", pool, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export pool %+v: %v", pool, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(pools), numFailures)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-
- },
-}
-
-func init() {
- rootCmd.AddCommand(poolsCmd)
- utils.AddCommonFlags(poolsCmd.Flags())
- utils.AddBucketFlags("pools", poolsCmd.Flags())
- utils.AddCloudStorageFlags(poolsCmd.Flags())
- poolsCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_offers.go b/cmd/export_offers.go
deleted file mode 100644
index c1827c96..00000000
--- a/cmd/export_offers.go
+++ /dev/null
@@ -1,82 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-// offersCmd represents the offers command
-var offersCmd = &cobra.Command{
- Use: "export_offers",
- Short: "Exports the data on offers made from the genesis ledger to a specified endpoint.",
- Long: `Exports historical offer data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- offers, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeOffer, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read offers: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, offer := range offers {
- transformed, err := transform.TransformOffer(offer, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform offer %+v: %v", offer, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export offer %+v: %v", offer, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(offers), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(offersCmd)
- utils.AddCommonFlags(offersCmd.Flags())
- utils.AddBucketFlags("offers", offersCmd.Flags())
- utils.AddCloudStorageFlags(offersCmd.Flags())
- offersCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_offers_test.go b/cmd/export_offers_test.go
deleted file mode 100644
index a2ab9c40..00000000
--- a/cmd/export_offers_test.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportOffers(t *testing.T) {
- tests := []cliTest{
- {
- name: "offers: bucket list with exact checkpoint",
- args: []string{"export_offers", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.txt")},
- golden: "bucket_read_exact.golden",
- wantErr: nil,
- },
- {
- name: "offers: bucket list with end not on checkpoint",
- args: []string{"export_offers", "-e", "80210", "-o", gotTestDir(t, "bucket_read_offset.txt")},
- golden: "bucket_read_offset.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/offers/")
- }
-}
diff --git a/cmd/export_operations.go b/cmd/export_operations.go
index e82a2942..cbfb8d84 100644
--- a/cmd/export_operations.go
+++ b/cmd/export_operations.go
@@ -16,13 +16,13 @@ var operationsCmd = &cobra.Command{
Long: `Exports the operations data over a specified range. Each operation is an individual command that mutates the Stellar ledger.`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ env := utils.GetEnvironmentDetails(commonArgs)
- operations, err := input.GetOperations(startNum, endNum, limit, env)
+ operations, err := input.GetOperations(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
if err != nil {
cmdLogger.Fatal("could not read operations: ", err)
}
@@ -39,7 +39,7 @@ var operationsCmd = &cobra.Command{
continue
}
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(fmt.Errorf("could not export operation: %v", err))
numFailures += 1
diff --git a/cmd/export_orderbooks.go b/cmd/export_orderbooks.go
deleted file mode 100644
index 2c39e8d6..00000000
--- a/cmd/export_orderbooks.go
+++ /dev/null
@@ -1,185 +0,0 @@
-package cmd
-
-import (
- "bytes"
- "encoding/json"
- "math"
- "os"
- "path/filepath"
-
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-// exportOrderbooksCmd represents the exportOrderbooks command
-var exportOrderbooksCmd = &cobra.Command{
- Use: "export_orderbooks",
- Short: "This command exports the historical orderbooks",
- Long: `This command instantiates a stellar-core instance and uses it to export normalized orderbooks.
- The information is exported in batches determined by the batch-size flag. The normalized data is exported in multiple
- different files within the exported data folder. These files are dimAccounts.txt, dimOffers.txt, dimMarkets.txt, and factEvents.txt.
- These files contain normalized data that helps save storage space.
-
- If the end-ledger is omitted, then the stellar-core node will continue running and exporting information as new ledgers are
- confirmed by the Stellar network. In this unbounded case, a stellar-core config path is required to utilize the Captive Core toml.`,
- Run: func(cmd *cobra.Command, args []string) {
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
-
- execPath, configPath, startNum, batchSize, outputFolder := utils.MustCoreFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- if batchSize <= 0 {
- cmdLogger.Fatalf("batch-size (%d) must be greater than 0", batchSize)
- }
-
- if configPath == "" && endNum == 0 {
- cmdLogger.Fatal("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)")
- }
-
- var err error
- execPath, err = filepath.Abs(execPath)
- if err != nil {
- cmdLogger.Fatal("could not get absolute filepath for stellar-core executable: ", err)
- }
-
- configPath, err = filepath.Abs(configPath)
- if err != nil {
- cmdLogger.Fatal("could not get absolute filepath for the config file: ", err)
- }
-
- checkpointSeq := utils.GetMostRecentCheckpoint(startNum)
- core, err := input.PrepareCaptiveCore(execPath, configPath, checkpointSeq, endNum, env)
- if err != nil {
- cmdLogger.Fatal("error creating a prepared captive core instance: ", err)
- }
-
- orderbook, err := input.GetEntriesFromGenesis(checkpointSeq, xdr.LedgerEntryTypeOffer, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read initial orderbook: ", err)
- }
-
- orderbookChannel := make(chan input.OrderbookBatch)
-
- go input.StreamOrderbooks(core, startNum, endNum, batchSize, orderbookChannel, orderbook, env, cmdLogger)
-
- // If the end sequence number is defined, we work in a closed range and export a finite number of batches
- if endNum != 0 {
- batchCount := uint32(math.Ceil(float64(endNum-startNum+1) / float64(batchSize)))
- for i := uint32(0); i < batchCount; i++ {
- batchStart := startNum + i*batchSize
- // Subtract 1 from the end batch number because batches do not include the last batch in the range
- batchEnd := batchStart + batchSize - 1
- if batchEnd > endNum {
- batchEnd = endNum
- }
-
- parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger)
- exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, extra)
- }
- } else {
- // otherwise, we export in an unbounded manner where batches are constantly exported
- var batchNum uint32 = 0
- for {
- batchStart := startNum + batchNum*batchSize
- batchEnd := batchStart + batchSize - 1
- parser := input.ReceiveParsedOrderbooks(orderbookChannel, cmdLogger)
- exportOrderbook(batchStart, batchEnd, outputFolder, parser, cloudCredentials, cloudStorageBucket, cloudProvider, extra)
- batchNum++
- }
- }
- },
-}
-
-// writeSlice writes the slice either to a file.
-func writeSlice(file *os.File, slice [][]byte, extra map[string]string) error {
-
- for _, data := range slice {
- bytesToWrite := data
- if len(extra) > 0 {
- i := map[string]interface{}{}
- decoder := json.NewDecoder(bytes.NewReader(data))
- decoder.UseNumber()
- err := decoder.Decode(&i)
- if err != nil {
- return err
- }
- for k, v := range extra {
- i[k] = v
- }
- bytesToWrite, err = json.Marshal(i)
- if err != nil {
- return err
- }
- }
- file.WriteString(string(bytesToWrite) + "\n")
- }
-
- file.Close()
- return nil
-}
-
-func exportOrderbook(
- start, end uint32,
- folderPath string,
- parser *input.OrderbookParser,
- cloudCredentials, cloudStorageBucket, cloudProvider string,
- extra map[string]string) {
- marketsFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimMarkets"))
- offersFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimOffers"))
- accountsFilePath := filepath.Join(folderPath, exportFilename(start, end, "dimAccounts"))
- eventsFilePath := filepath.Join(folderPath, exportFilename(start, end, "factEvents"))
-
- marketsFile := mustOutFile(marketsFilePath)
- offersFile := mustOutFile(offersFilePath)
- accountsFile := mustOutFile(accountsFilePath)
- eventsFile := mustOutFile(eventsFilePath)
-
- err := writeSlice(marketsFile, parser.Markets, extra)
- if err != nil {
- cmdLogger.LogError(err)
- }
- err = writeSlice(offersFile, parser.Offers, extra)
- if err != nil {
- cmdLogger.LogError(err)
- }
- err = writeSlice(accountsFile, parser.Accounts, extra)
- if err != nil {
- cmdLogger.LogError(err)
- }
- err = writeSlice(eventsFile, parser.Events, extra)
- if err != nil {
- cmdLogger.LogError(err)
- }
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, marketsFilePath)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, offersFilePath)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, accountsFilePath)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, eventsFilePath)
-}
-
-func init() {
- rootCmd.AddCommand(exportOrderbooksCmd)
- utils.AddCommonFlags(exportOrderbooksCmd.Flags())
- utils.AddCoreFlags(exportOrderbooksCmd.Flags(), "orderbooks_output/")
- utils.AddCloudStorageFlags(exportOrderbooksCmd.Flags())
-
- exportOrderbooksCmd.MarkFlagRequired("start-ledger")
- /*
- Current flags:
- start-ledger: the ledger sequence number for the beginning of the export period
- end-ledger: the ledger sequence number for the end of the export range
-
- output-folder: folder that will contain the output files
- limit: maximum number of changes to export in a given batch; if negative then everything gets exported
- batch-size: size of the export batches
-
- core-executable: path to stellar-core executable
- core-config: path to stellar-core config file
- */
-}
diff --git a/cmd/export_orderbooks_test.go b/cmd/export_orderbooks_test.go
deleted file mode 100644
index bf1c043b..00000000
--- a/cmd/export_orderbooks_test.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package cmd
-
-import (
- "fmt"
- "testing"
-)
-
-func TestExportOrderbooks(t *testing.T) {
- tests := []cliTest{
- {
- name: "unbounded range with no config",
- args: []string{"export_orderbooks", "-x", coreExecutablePath, "-s", "100000"},
- golden: "",
- wantErr: fmt.Errorf("stellar-core needs a config file path when exporting ledgers continuously (endNum = 0)"),
- },
- {
- name: "0 batch size",
- args: []string{"export_orderbooks", "-b", "0", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "100000", "-e", "164000"},
- golden: "",
- wantErr: fmt.Errorf("batch-size (0) must be greater than 0"),
- },
- {
- name: "orderbook from single ledger",
- args: []string{"export_orderbooks", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "5000000", "-e", "5000000", "-o", gotTestDir(t, "single/")},
- golden: "single_ledger.golden",
- sortForComparison: true,
- wantErr: nil,
- },
- {
- name: "orderbooks from large range",
- args: []string{"export_orderbooks", "-x", coreExecutablePath, "-c", coreConfigPath, "-s", "6000000", "-e", "6001000", "-o", gotTestDir(t, "range/")},
- golden: "large_range_orderbooks.golden",
- sortForComparison: true,
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/orderbooks/")
- }
-}
diff --git a/cmd/export_trades.go b/cmd/export_trades.go
index 99ff880e..748cdb66 100644
--- a/cmd/export_trades.go
+++ b/cmd/export_trades.go
@@ -19,13 +19,13 @@ var tradesCmd = &cobra.Command{
Long: `Exports trade data within the specified range to an output file`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ env := utils.GetEnvironmentDetails(commonArgs)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- trades, err := input.GetTrades(startNum, endNum, limit, env)
+ trades, err := input.GetTrades(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
if err != nil {
cmdLogger.Fatal("could not read trades ", err)
}
@@ -43,7 +43,7 @@ var tradesCmd = &cobra.Command{
}
for _, transformed := range trades {
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(err)
numFailures += 1
diff --git a/cmd/export_transactions.go b/cmd/export_transactions.go
index 18717bd0..35f82bd2 100644
--- a/cmd/export_transactions.go
+++ b/cmd/export_transactions.go
@@ -16,13 +16,13 @@ var transactionsCmd = &cobra.Command{
Long: `Exports the transaction data over a specified range to an output file.`,
Run: func(cmd *cobra.Command, args []string) {
cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
+ commonArgs := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
+ cmdLogger.StrictExport = commonArgs.StrictExport
startNum, path, limit := utils.MustArchiveFlags(cmd.Flags(), cmdLogger)
cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ env := utils.GetEnvironmentDetails(commonArgs)
- transactions, err := input.GetTransactions(startNum, endNum, limit, env)
+ transactions, err := input.GetTransactions(startNum, commonArgs.EndNum, limit, env, commonArgs.UseCaptiveCore)
if err != nil {
cmdLogger.Fatal("could not read transactions: ", err)
}
@@ -39,7 +39,7 @@ var transactionsCmd = &cobra.Command{
continue
}
- numBytes, err := exportEntry(transformed, outFile, extra)
+ numBytes, err := exportEntry(transformed, outFile, commonArgs.Extra)
if err != nil {
cmdLogger.LogError(fmt.Errorf("could not export transaction: %v", err))
numFailures += 1
diff --git a/cmd/export_trustlines.go b/cmd/export_trustlines.go
deleted file mode 100644
index 68ab838d..00000000
--- a/cmd/export_trustlines.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-// trustlinesCmd represents the trustlines command
-var trustlinesCmd = &cobra.Command{
- Use: "export_trustlines",
- Short: "Exports the trustline data over a specified range.",
- Long: `Exports historical trustline data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get trustline information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- trustlines, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeTrustline, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("could not read trustlines: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, trust := range trustlines {
- transformed, err := transform.TransformTrustline(trust, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not json transform trustline %+v: %v", trust, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export trustline %+v: %v", trust, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
-
- outFile.Close()
-
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(trustlines), numFailures)
-
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
- },
-}
-
-func init() {
- rootCmd.AddCommand(trustlinesCmd)
- utils.AddCommonFlags(trustlinesCmd.Flags())
- utils.AddBucketFlags("trustlines", trustlinesCmd.Flags())
- utils.AddCloudStorageFlags(trustlinesCmd.Flags())
- trustlinesCmd.MarkFlagRequired("end-ledger")
-
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_trustlines_test.go b/cmd/export_trustlines_test.go
deleted file mode 100644
index 61a69281..00000000
--- a/cmd/export_trustlines_test.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportTrustlines(t *testing.T) {
- tests := []cliTest{
- {
- name: "trustlines: bucket list with exact checkpoint",
- args: []string{"export_trustlines", "-e", "78975", "-o", gotTestDir(t, "bucket_read_exact.golden")},
- golden: "bucket_read_exact.golden",
- wantErr: nil,
- },
- {
- name: "trustlines: bucket list with end not on checkpoint",
- args: []string{"export_trustlines", "-e", "139672", "-o", gotTestDir(t, "bucket_read_off.golden")},
- golden: "bucket_read_off.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/trustlines/")
- }
-}
diff --git a/cmd/export_ttl.go b/cmd/export_ttl.go
deleted file mode 100644
index c55ef571..00000000
--- a/cmd/export_ttl.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package cmd
-
-import (
- "fmt"
-
- "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/stellar/stellar-etl/internal/input"
- "github.com/stellar/stellar-etl/internal/transform"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/xdr"
-)
-
-var ttlCmd = &cobra.Command{
- Use: "export_ttl",
- Short: "Exports the ttl information.",
- Long: `Exports historical ttl data from the genesis ledger to the provided end-ledger to an output file.
- The command reads from the bucket list, which includes the full history of the Stellar ledger. As a result, it
- should be used in an initial data dump. In order to get offer information within a specified ledger range, see
- the export_ledger_entry_changes command.`,
- Run: func(cmd *cobra.Command, args []string) {
- cmdLogger.SetLevel(logrus.InfoLevel)
- endNum, strictExport, isTest, isFuture, extra := utils.MustCommonFlags(cmd.Flags(), cmdLogger)
- cmdLogger.StrictExport = strictExport
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- path := utils.MustBucketFlags(cmd.Flags(), cmdLogger)
- cloudStorageBucket, cloudCredentials, cloudProvider := utils.MustCloudStorageFlags(cmd.Flags(), cmdLogger)
-
- ttls, err := input.GetEntriesFromGenesis(endNum, xdr.LedgerEntryTypeTtl, env.ArchiveURLs)
- if err != nil {
- cmdLogger.Fatal("Error getting ledger entries: ", err)
- }
-
- outFile := mustOutFile(path)
- numFailures := 0
- totalNumBytes := 0
- var header xdr.LedgerHeaderHistoryEntry
- for _, ttl := range ttls {
- transformed, err := transform.TransformTtl(ttl, header)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not transform ttl %+v: %v", ttl, err))
- numFailures += 1
- continue
- }
-
- numBytes, err := exportEntry(transformed, outFile, extra)
- if err != nil {
- cmdLogger.LogError(fmt.Errorf("could not export ttl %+v: %v", ttl, err))
- numFailures += 1
- continue
- }
- totalNumBytes += numBytes
- }
- outFile.Close()
- cmdLogger.Info("Number of bytes written: ", totalNumBytes)
-
- printTransformStats(len(ttls), numFailures)
- maybeUpload(cloudCredentials, cloudStorageBucket, cloudProvider, path)
-
- },
-}
-
-func init() {
- rootCmd.AddCommand(ttlCmd)
- utils.AddCommonFlags(ttlCmd.Flags())
- utils.AddBucketFlags("ttl", ttlCmd.Flags())
- utils.AddCloudStorageFlags(ttlCmd.Flags())
- ttlCmd.MarkFlagRequired("end-ledger")
- /*
- Current flags:
- end-ledger: the ledger sequence number for the end of the export range (required)
- output-file: filename of the output file
- stdout: if set, output is printed to stdout
-
- TODO: implement extra flags if possible
- serialize-method: the method for serialization of the output data (JSON, XDR, etc)
- end time as a replacement for end sequence numbers
- */
-}
diff --git a/cmd/export_ttl_test.go b/cmd/export_ttl_test.go
deleted file mode 100644
index 3dc5d762..00000000
--- a/cmd/export_ttl_test.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package cmd
-
-import (
- "testing"
-)
-
-func TestExportttl(t *testing.T) {
- t.Skip("Skipping due to unstable data in Futurenet")
- // TODO: find ledger with data and create testdata
- tests := []cliTest{
- {
- name: "ttl",
- args: []string{"export_ttl", "-e", "78975", "-o", gotTestDir(t, "bucket_read.txt")},
- golden: "bucket_read.golden",
- wantErr: nil,
- },
- }
-
- for _, test := range tests {
- runCLITest(t, test, "testdata/ttl/")
- }
-}
diff --git a/cmd/get_ledger_range_from_times.go b/cmd/get_ledger_range_from_times.go
index 3edfff35..07cc5041 100644
--- a/cmd/get_ledger_range_from_times.go
+++ b/cmd/get_ledger_range_from_times.go
@@ -44,7 +44,7 @@ var getLedgerRangeFromTimesCmd = &cobra.Command{
cmdLogger.Fatal("could not get testnet boolean: ", err)
}
- isFuture, err := cmd.Flags().GetBool("futurenet")
+ isFuture, err := cmd.Flags().GetBool("futurenet")
if err != nil {
cmdLogger.Fatal("could not get futurenet boolean: ", err)
}
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 2fc7d358..f3081f3a 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,7 +1,5 @@
# stage 1: build stellar-etl app
-# golang 1.19, pinned by sha digest
-#FROM golang@sha256:04f76f956e51797a44847e066bde1341c01e09054d3878ae88c7f77f09897c4d AS build
-FROM golang:1.20.5-buster AS build
+FROM golang:1.22.1-alpine AS build
WORKDIR /usr/src/etl
@@ -13,13 +11,16 @@ COPY . .
RUN go build -v -o /usr/local/bin ./...
# stage 2: runtime environment
-FROM stellar/stellar-core:20.2.0-1716.rc3.34d82fc00.focal
+FROM stellar/stellar-core:21.0.0-1872.c6f474133.focal
WORKDIR /etl
COPY --from=build /usr/local/bin/stellar-etl /usr/local/bin/stellar-etl
COPY --from=build /usr/src/etl/docker docker
+# changing workdir to a new path in order to use mounted empty ephemeral volumes as storage
+WORKDIR /etl/data
+
# clear entrypoint from stellar-core image
ENTRYPOINT []
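
The runtime image now sets `WORKDIR /etl/data` so an externally mounted, empty ephemeral volume can back the working directory. A minimal run sketch under that assumption; the image tag, host directory, and export command below are illustrative and not part of this diff:

    # assumption: image built locally from this Dockerfile; any empty scratch directory works
    docker build -t stellar-etl:local -f docker/Dockerfile .
    docker run --rm -v "$(mktemp -d)":/etl/data stellar-etl:local \
      stellar-etl export_ledgers -s 1 -e 64 -o ledgers.txt

Because the stellar-core entrypoint is cleared, the full `stellar-etl ...` command has to be passed explicitly, and relative output paths resolve under the mounted /etl/data volume rather than inside the image layer.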
diff --git a/docker/stellar-core.cfg b/docker/stellar-core.cfg
index 449e0806..b7c710d8 100644
--- a/docker/stellar-core.cfg
+++ b/docker/stellar-core.cfg
@@ -2,6 +2,10 @@
# see https://developers.stellar.org/docs/run-core-node/
# for how to properly configure your environment
+ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true
+EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true
+EMIT_LEDGER_CLOSE_META_EXT_V1=true
+
#FAILURE_SAFETY is minimum number of nodes that are allowed to fail before you no longer have quorum
FAILURE_SAFETY=1
diff --git a/docker/stellar-core_futurenet.cfg b/docker/stellar-core_futurenet.cfg
index ecd4873a..832c75c4 100644
--- a/docker/stellar-core_futurenet.cfg
+++ b/docker/stellar-core_futurenet.cfg
@@ -6,6 +6,10 @@ PUBLIC_HTTP_PORT=false
NETWORK_PASSPHRASE="Test SDF Future Network ; October 2022"
+ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true
+EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true
+EMIT_LEDGER_CLOSE_META_EXT_V1=true
+
# DATABASE="sqlite3://stellar.db"
PEER_PORT=11725
diff --git a/docker/stellar-core_testnet.cfg b/docker/stellar-core_testnet.cfg
index e021da5e..2d8a0426 100644
--- a/docker/stellar-core_testnet.cfg
+++ b/docker/stellar-core_testnet.cfg
@@ -5,6 +5,10 @@ NETWORK_PASSPHRASE="Test SDF Network ; September 2015"
# DATABASE="sqlite3://stellar.db"
+ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true
+EMIT_SOROBAN_TRANSACTION_META_EXT_V1=true
+EMIT_LEDGER_CLOSE_META_EXT_V1=true
+
# Stellar Testnet validators
[[HOME_DOMAINS]]
HOME_DOMAIN="testnet.stellar.org"
diff --git a/go.mod b/go.mod
index 8c390d07..750af302 100644
--- a/go.mod
+++ b/go.mod
@@ -1,9 +1,11 @@
module github.com/stellar/stellar-etl
-go 1.19
+go 1.22
+
+toolchain go1.22.1
require (
- cloud.google.com/go/storage v1.32.0
+ cloud.google.com/go/storage v1.40.0
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
github.com/guregu/null v4.0.0+incompatible
github.com/lib/pq v1.10.9
@@ -13,49 +15,53 @@ require (
github.com/spf13/cobra v1.7.0
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.17.0
- github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546
- github.com/stretchr/testify v1.8.4
+ github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44
+ github.com/stretchr/testify v1.9.0
)
require (
- cloud.google.com/go v0.111.0 // indirect
- cloud.google.com/go/compute v1.23.3 // indirect
- cloud.google.com/go/compute/metadata v0.2.3 // indirect
- cloud.google.com/go/iam v1.1.5 // indirect
+ cloud.google.com/go v0.112.2 // indirect
+ cloud.google.com/go/auth v0.2.1 // indirect
+ cloud.google.com/go/auth/oauth2adapt v0.2.1 // indirect
+ cloud.google.com/go/compute/metadata v0.3.0 // indirect
+ cloud.google.com/go/iam v1.1.7 // indirect
github.com/Masterminds/squirrel v1.5.4 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
- github.com/aws/aws-sdk-go v1.45.26 // indirect
+ github.com/aws/aws-sdk-go v1.51.24 // indirect
github.com/beorn7/perks v1.0.1 // indirect
- github.com/cespare/xxhash/v2 v2.2.0 // indirect
+ github.com/cenkalti/backoff/v4 v4.3.0 // indirect
+ github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+ github.com/djherbis/fscache v0.10.1 // indirect
+ github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/go-errors/errors v1.5.1 // indirect
- github.com/go-logr/logr v1.2.4 // indirect
+ github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
- github.com/golang/protobuf v1.5.3 // indirect
+ github.com/golang/protobuf v1.5.4 // indirect
github.com/google/s2a-go v0.1.7 // indirect
- github.com/google/uuid v1.4.0 // indirect
+ github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
- github.com/googleapis/gax-go/v2 v2.12.0 // indirect
+ github.com/googleapis/gax-go/v2 v2.12.3 // indirect
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/holiman/uint256 v1.2.3 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jmoiron/sqlx v1.3.5 // indirect
+ github.com/klauspost/compress v1.17.0 // indirect
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
- github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.1.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
- github.com/prometheus/client_golang v1.17.0 // indirect
- github.com/prometheus/client_model v0.5.0 // indirect
- github.com/prometheus/common v0.44.0 // indirect
- github.com/prometheus/procfs v0.12.0 // indirect
+ github.com/prometheus/client_golang v1.19.0 // indirect
+ github.com/prometheus/client_model v0.6.1 // indirect
+ github.com/prometheus/common v0.53.0 // indirect
+ github.com/prometheus/procfs v0.13.0 // indirect
github.com/sagikazarmark/locafero v0.3.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 // indirect
@@ -63,29 +69,33 @@ require (
github.com/spf13/afero v1.10.0 // indirect
github.com/spf13/cast v1.5.1 // indirect
github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 // indirect
- github.com/stretchr/objx v0.5.1 // indirect
+ github.com/stretchr/objx v0.5.2 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
go.opencensus.io v0.24.0 // indirect
- go.opentelemetry.io/otel v1.19.0 // indirect
- go.opentelemetry.io/otel/metric v1.19.0 // indirect
- go.opentelemetry.io/otel/trace v1.19.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0 // indirect
+ go.opentelemetry.io/otel v1.25.0 // indirect
+ go.opentelemetry.io/otel/metric v1.25.0 // indirect
+ go.opentelemetry.io/otel/trace v1.25.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- golang.org/x/crypto v0.17.0 // indirect
- golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect
- golang.org/x/mod v0.13.0 // indirect
- golang.org/x/net v0.19.0 // indirect
- golang.org/x/oauth2 v0.13.0 // indirect
- golang.org/x/sync v0.4.0 // indirect
- golang.org/x/sys v0.16.0 // indirect
+ golang.org/x/crypto v0.22.0 // indirect
+ golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect
+ golang.org/x/mod v0.17.0 // indirect
+ golang.org/x/net v0.24.0 // indirect
+ golang.org/x/oauth2 v0.19.0 // indirect
+ golang.org/x/sync v0.7.0 // indirect
+ golang.org/x/sys v0.19.0 // indirect
golang.org/x/text v0.14.0 // indirect
- golang.org/x/tools v0.14.0 // indirect
- google.golang.org/api v0.149.0 // indirect
- google.golang.org/appengine v1.6.8 // indirect
- google.golang.org/genproto v0.0.0-20231212172506-995d672761c0 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 // indirect
- google.golang.org/grpc v1.60.1 // indirect
- google.golang.org/protobuf v1.32.0 // indirect
+ golang.org/x/time v0.5.0 // indirect
+ golang.org/x/tools v0.20.0 // indirect
+ google.golang.org/api v0.174.0 // indirect
+ google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect
+ google.golang.org/grpc v1.63.2 // indirect
+ google.golang.org/protobuf v1.33.0 // indirect
+ gopkg.in/djherbis/atime.v1 v1.0.0 // indirect
+ gopkg.in/djherbis/stream.v1 v1.3.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
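
The go.sum changes below follow mechanically from the module bumps above. A rough sketch of how an equivalent update could be reproduced locally, assuming the stellar/go and Cloud Storage bumps were the only ones requested by hand and everything else was pulled in transitively:

    # assumption: run from the repository root with Go 1.22.1 installed
    go get github.com/stellar/go@79f44c65cb44
    go get cloud.google.com/go/storage@v1.40.0
    go mod tidy -go=1.22   # rewrites go.mod and go.sum for the new language version

The newly listed packages (djherbis/fscache, the OpenTelemetry instrumentation modules, etc.) are transitive additions rather than direct imports, as their `// indirect` markers indicate.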
diff --git a/go.sum b/go.sum
index b2038cdc..7ba7615d 100644
--- a/go.sum
+++ b/go.sum
@@ -17,22 +17,24 @@ cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHOb
cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY=
-cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM=
-cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU=
+cloud.google.com/go v0.112.2 h1:ZaGT6LiG7dBzi6zNOvVZwacaXlmf3lRqnC4DQzqyRQw=
+cloud.google.com/go v0.112.2/go.mod h1:iEqjp//KquGIJV/m+Pk3xecgKNhV+ry+vVTsy4TbDms=
+cloud.google.com/go/auth v0.2.1 h1:RMl6PI2MH1Qc3CM7XNJJHGwbC4WHQppSAjL0Cvu/M/g=
+cloud.google.com/go/auth v0.2.1/go.mod h1:khQRBNrvNoHiHhV1iu2x8fSnlNbCaVHilznW5MAI5GY=
+cloud.google.com/go/auth/oauth2adapt v0.2.1 h1:VSPmMmUlT8CkIZ2PzD9AlLN+R3+D1clXMWHHa6vG/Ag=
+cloud.google.com/go/auth/oauth2adapt v0.2.1/go.mod h1:tOdK/k+D2e4GEwfBRA48dKNQiDsqIXxLh7VU319eV0g=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
-cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk=
-cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI=
-cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
-cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
+cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc=
+cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
-cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI=
-cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8=
+cloud.google.com/go/iam v1.1.7 h1:z4VHOhwKLF/+UYXAJDFwGtNF0b6gjsW1Pk9Ml0U/IoM=
+cloud.google.com/go/iam v1.1.7/go.mod h1:J4PMPg8TtyurAUvSmPj8FF3EDgY1SPRZxcUGrn7WXGA=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
@@ -43,8 +45,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
-cloud.google.com/go/storage v1.32.0 h1:5w6DxEGOnktmJHarxAOUywxVW9lbNWIzlzzUltG/3+o=
-cloud.google.com/go/storage v1.32.0/go.mod h1:Hhh/dogNRGca7IWv1RC2YqEn0c0G77ctA/OxflYkiD8=
+cloud.google.com/go/storage v1.40.0 h1:VEpDQV5CJxFmJ6ueWNsKxcr1QAYOXEgxDa+sBbJahPw=
+cloud.google.com/go/storage v1.40.0/go.mod h1:Rrj7/hKlG87BLqDJYtwR0fbPld8uJPbQ2ucUMY7Ir0g=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
@@ -53,14 +55,18 @@ github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA4
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f h1:zvClvFQwU++UpIUBGC8YmDlfhUrweEy1R1Fj1gu5iIM=
+github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
-github.com/aws/aws-sdk-go v1.45.26 h1:PJ2NJNY5N/yeobLYe1Y+xLdavBi67ZI8gvph6ftwVCg=
-github.com/aws/aws-sdk-go v1.45.26/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
+github.com/aws/aws-sdk-go v1.51.24 h1:nwL5MaommPkwb7Ixk24eWkdx5HY4of1gD10kFFVAl6A=
+github.com/aws/aws-sdk-go v1.51.24/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
+github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
-github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
@@ -75,6 +81,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/djherbis/fscache v0.10.1 h1:hDv+RGyvD+UDKyRYuLoVNbuRTnf2SrA2K3VyR1br9lk=
+github.com/djherbis/fscache v0.10.1/go.mod h1:yyPYtkNnnPXsW+81lAcQS6yab3G2CRfnPLotBvtbf0c=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -82,19 +90,25 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/structs v1.0.0 h1:BrX964Rv5uQ3wwS+KRUAJCBBw5PQmgJfJ6v4yly5QwU=
+github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
+github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955 h1:gmtGRvSexPU4B1T/yYo0sLOKzER1YT+b4kPxPpm0Ty4=
+github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955/go.mod h1:vmp8DIyckQMXOPl0AQVHt+7n5h7Gb7hS6CUydiV8QeA=
github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec=
+github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk=
github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ=
-github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
+github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
@@ -126,10 +140,8 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
-github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
-github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@@ -142,14 +154,16 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5 h1:oERTZ1buOUYlpmKaqlO5fYmz8cZ1rYu5DieJzF4ZVmU=
+github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw=
+github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
@@ -164,16 +178,17 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
-github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
-github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas=
-github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
+github.com/googleapis/gax-go/v2 v2.12.3 h1:5/zPPDvw8Q1SuXjrqrZslrqT7dL/uJT2CQii/cLCKqA=
+github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4=
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc=
+github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU=
github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw=
github.com/guregu/null v4.0.0+incompatible/go.mod h1:ePGpQaN9cw0tj45IR5E5ehMvsFlLlQZAkkOXZurJ3NM=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
@@ -187,9 +202,11 @@ github.com/holiman/uint256 v1.2.3/go.mod h1:SC8Ryt4n+UBbPbIBKaG9zbbDlp4jOru9xFZm
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk=
+github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31 h1:Aw95BEvxJ3K6o9GGv5ppCd1P8hkeIeEJ30FO+OhOJpM=
+github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31/go.mod h1:ks+b9deReOc7jgqp+e7LuFiCBH6Rm5hL32cLcEAArb4=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
@@ -200,12 +217,15 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM=
+github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
@@ -216,18 +236,21 @@ github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739 h1:ykXz+pRRTibcSjG1yRhpdSHInF8yZY/mfn+Rz2Nd1rE=
+github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739/go.mod h1:zUx1mhth20V3VKgL5jbd1BSQcW4Fy6Qs4PZvQwRFwzM=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
-github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
-github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db h1:eZgFHVkk9uOTaOQLC6tgjkzdp7Ays8eEVecBcfHZlJQ=
+github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
+github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
+github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI=
+github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
@@ -238,17 +261,18 @@ github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q=
-github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY=
+github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU=
+github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
-github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
-github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY=
-github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
-github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
-github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
+github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
+github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
+github.com/prometheus/common v0.53.0 h1:U2pL9w9nmJwJDa4qqLQ3ZaePJ6ZTwt7cMD3AG3+aLCE=
+github.com/prometheus/common v0.53.0/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U=
+github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o=
+github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
+github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ=
github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U=
@@ -257,6 +281,7 @@ github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWR
github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 h1:S4OC0+OBKz6mJnzuHioeEat74PuQ4Sgvbf8eus695sc=
github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2/go.mod h1:8zLRYR5npGjaOXgPSKat5+oOh+UHd8OdbS18iqX9F6Y=
github.com/sergi/go-diff v0.0.0-20161205080420-83532ca1c1ca h1:oR/RycYTFTVXzND5r4FdsvbnBn0HJXSVeNAnwaTXRwk=
+github.com/sergi/go-diff v0.0.0-20161205080420-83532ca1c1ca/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
@@ -271,15 +296,15 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI=
github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI=
-github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546 h1:FCLk33pNq5q/A5DfaBMFvyv4V2V0rxIgDurOf2sQHlw=
-github.com/stellar/go v0.0.0-20240111173100-ed7ae81c8546/go.mod h1:Ka4piwZT4Q9799f+BZeaKkAiYo4UpIWXyu0oSUbCVfM=
+github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44 h1:a53z95H5rfJ871NWopW1s57lmXvsC76KTIcYrWfvkrA=
+github.com/stellar/go v0.0.0-20240510213328-79f44c65cb44/go.mod h1:cHNYV5oK7fp9ZkRIC9fjQxvIArxGPk42q/f3YQBQll0=
github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE=
github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
-github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0=
-github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0=
+github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
@@ -287,25 +312,33 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
-github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
+github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.34.0 h1:d3AAQJ2DRcxJYHm7OXNXtXt2as1vMDfxeIcFvhmGGm4=
+github.com/valyala/fasthttp v1.34.0/go.mod h1:epZA5N+7pY6ZaEKRmstzOuYJx9HI8DI1oaCGZpdH4h0=
github.com/xdrpp/goxdr v0.1.1 h1:E1B2c6E8eYhOVyd7yEpOyopzTPirUeF6mVOfXfGyJyc=
+github.com/xdrpp/goxdr v0.1.1/go.mod h1:dXo1scL/l6s7iME1gxHWo2XCppbHEKZS7m/KyYWkNzA=
github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076 h1:KM4T3G70MiR+JtqplcYkNVoNz7pDwYaBxWBXQK804So=
+github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c h1:XZWnr3bsDQWAZg4Ne+cPoXRPILrNlPNQfxBuwLl43is=
+github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce h1:cVSRGH8cOveJNwFEEZLXtB+XMnRqKLjUP6V/ZFYQCXI=
+github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=
github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb h1:06WAhQa+mYv7BiOk13B/ywyTlkoE/S7uu6TBKU6FHnE=
+github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d h1:yJIizrfO599ot2kQ6Af1enICnwBD3XoxgX3MrMwot2M=
+github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg=
github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce h1:888GrqRxabUce7lj4OaoShPxodm3kXOMpSa85wdYzfY=
+github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@@ -314,13 +347,18 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs=
-go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY=
-go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE=
-go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8=
-go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o=
-go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg=
-go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0 h1:zvpPXY7RfYAGSdYQLjp6zxdJNSYD/+FFoCTQN9IPxBs=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.50.0/go.mod h1:BMn8NB1vsxTljvuorms2hyOs8IBuuBEq0pl7ltOfy30=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0 h1:cEPbyTSEHlQR89XVlyo78gqluF8Y3oMeBkXGWzQsfXY=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.50.0/go.mod h1:DKdbWcT4GH1D0Y3Sqt/PFXt2naRKDWtU+eE6oLdFNA8=
+go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k=
+go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg=
+go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA=
+go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s=
+go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw=
+go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc=
+go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM=
+go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
@@ -329,10 +367,9 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
-golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
-golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
+golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -343,8 +380,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
-golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
+golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY=
+golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -368,9 +405,8 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY=
-golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -404,10 +440,8 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
-golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
+golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
+golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -417,8 +451,8 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY=
-golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0=
+golang.org/x/oauth2 v0.19.0 h1:9+E/EZBCbTLNrbN35fHv/a/d/mOBatymz1zbtQrXpIg=
+golang.org/x/oauth2 v0.19.0/go.mod h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -429,9 +463,8 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
-golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -467,16 +500,11 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
-golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
+golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -485,13 +513,13 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
-golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
+golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@@ -539,14 +567,14 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
-golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc=
-golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
+golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY=
+golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
+golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
+golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@@ -566,8 +594,8 @@ google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz513
google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
-google.golang.org/api v0.149.0 h1:b2CqT6kG+zqJIVKRQ3ELJVLN1PwHZ6DJ3dW8yl82rgY=
-google.golang.org/api v0.149.0/go.mod h1:Mwn1B7JTXrzXtnvmzQE2BD6bYZQ8DShKZDZbeN9I7qI=
+google.golang.org/api v0.174.0 h1:zB1BWl7ocxfTea2aQ9mgdzXjnfPySllpPOskdnO+q34=
+google.golang.org/api v0.174.0/go.mod h1:aC7tB6j0HR1Nl0ni5ghpx6iLasmAX78Zkh/wgxAAjLg=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -575,8 +603,6 @@ google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
-google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -613,12 +639,12 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20231212172506-995d672761c0 h1:YJ5pD9rF8o9Qtta0Cmy9rdBwkSjrTCT6XTiUQVOtIos=
-google.golang.org/genproto v0.0.0-20231212172506-995d672761c0/go.mod h1:l/k7rMz0vFTBPy+tFSGvXEd3z+BcoG1k7EHbqm+YBsY=
-google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3 h1:EWIeHfGuUf00zrVZGEgYFxok7plSAXBGcH7NNdMAWvA=
-google.golang.org/genproto/googleapis/api v0.0.0-20231211222908-989df2bf70f3/go.mod h1:k2dtGpRrbsSyKcNPKKI5sstZkrNCZwpU/ns96JoHbGg=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 h1:6G8oQ016D88m1xAKljMlBOOGWDZkes4kMhgGFlf8WcQ=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917/go.mod h1:xtjpI3tXFPP051KaWnhvxkiubL/6dJ18vLVf7q2pTOU=
+google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be h1:g4aX8SUFA8V5F4LrSY5EclyGYw1OZN4HS1jTyjB9ZDc=
+google.golang.org/genproto v0.0.0-20240415180920-8c6c420018be/go.mod h1:FeSdT5fk+lkxatqJP38MsUicGqHax5cLtmy/6TAuxO4=
+google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be h1:Zz7rLWqp0ApfsR/l7+zSHhY3PMiH2xqgxlfYfAfNpoU=
+google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be/go.mod h1:dvdCTIoAGbkWbcIKBniID56/7XHTt6WfxXNMxuziJ+w=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -635,8 +661,8 @@ google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
-google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
-google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
+google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM=
+google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -647,21 +673,27 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
-google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
-google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
-google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/djherbis/atime.v1 v1.0.0 h1:eMRqB/JrLKocla2PBPKgQYg/p5UG4L6AUAs92aP7F60=
+gopkg.in/djherbis/atime.v1 v1.0.0/go.mod h1:hQIUStKmJfvf7xdh/wtK84qe+DsTV5LnA9lzxxtPpJ8=
+gopkg.in/djherbis/stream.v1 v1.3.1 h1:uGfmsOY1qqMjQQphhRBSGLyA9qumJ56exkRu9ASTjCw=
+gopkg.in/djherbis/stream.v1 v1.3.1/go.mod h1:aEV8CBVRmSpLamVJfM903Npic1IKmb2qS30VAZ+sssg=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0 h1:r5ptJ1tBxVAeqw4CrYWhXIMr0SybY3CDHuIbCg5CFVw=
+gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0/go.mod h1:WtiW9ZA1LdaWqtQRo1VbIL/v4XZ8NDta+O/kSpGgVek=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/internal/input/all_history.go b/internal/input/all_history.go
index 35fab98f..f38f5596 100644
--- a/internal/input/all_history.go
+++ b/internal/input/all_history.go
@@ -20,11 +20,10 @@ type AllHistoryTransformInput struct {
// GetAllHistory returns a slice of operations, trades, effects, transactions, diagnostic events
// for the ledgers in the provided range (inclusive on both ends)
-func GetAllHistory(start, end uint32, limit int64, env utils.EnvironmentDetails) (AllHistoryTransformInput, error) {
+func GetAllHistory(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) (AllHistoryTransformInput, error) {
ctx := context.Background()
- backend, err := env.CreateCaptiveCoreBackend()
-
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
if err != nil {
return AllHistoryTransformInput{}, err
}
diff --git a/internal/input/assets.go b/internal/input/assets.go
index 00e83984..0f00708f 100644
--- a/internal/input/assets.go
+++ b/internal/input/assets.go
@@ -3,9 +3,9 @@ package input
import (
"context"
- "github.com/stellar/stellar-etl/internal/transform"
"github.com/stellar/stellar-etl/internal/utils"
+ "github.com/stellar/go/ingest/ledgerbackend"
"github.com/stellar/go/xdr"
)
@@ -17,23 +17,24 @@ type AssetTransformInput struct {
}
// GetPaymentOperations returns a slice of payment operations that can include new assets from the ledgers in the provided range (inclusive on both ends)
-func GetPaymentOperations(start, end uint32, limit int64, isTest bool, isFuture bool) ([]AssetTransformInput, error) {
- env := utils.GetEnvironmentDetails(isTest, isFuture)
- backend, err := utils.CreateBackend(start, end, env.ArchiveURLs)
+func GetPaymentOperations(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]AssetTransformInput, error) {
+ ctx := context.Background()
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
if err != nil {
return []AssetTransformInput{}, err
}
assetSlice := []AssetTransformInput{}
- ctx := context.Background()
+ err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end))
+ panicIf(err)
for seq := start; seq <= end; seq++ {
// Get ledger from sequence number
- ledger, err := backend.GetLedgerArchive(ctx, seq)
+ ledger, err := backend.GetLedger(ctx, seq)
if err != nil {
return []AssetTransformInput{}, err
}
- transactionSet := transform.GetTransactionSet(ledger)
+ transactionSet := ledger.TransactionEnvelopes()
for txIndex, transaction := range transactionSet {
for opIndex, op := range transaction.Operations() {
diff --git a/internal/input/assets_history_archive.go b/internal/input/assets_history_archive.go
new file mode 100644
index 00000000..da7d5f83
--- /dev/null
+++ b/internal/input/assets_history_archive.go
@@ -0,0 +1,50 @@
+package input
+
+import (
+ "context"
+
+ "github.com/stellar/stellar-etl/internal/transform"
+ "github.com/stellar/stellar-etl/internal/utils"
+
+ "github.com/stellar/go/xdr"
+)
+
+// GetPaymentOperationsHistoryArchive returns a slice of payment operations that can include new assets from the ledgers in the provided range (inclusive on both ends)
+func GetPaymentOperationsHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]AssetTransformInput, error) {
+ backend, err := utils.CreateBackend(start, end, env.ArchiveURLs)
+ if err != nil {
+ return []AssetTransformInput{}, err
+ }
+
+ assetSlice := []AssetTransformInput{}
+ ctx := context.Background()
+ for seq := start; seq <= end; seq++ {
+ // Get ledger from sequence number
+ ledger, err := backend.GetLedgerArchive(ctx, seq)
+ if err != nil {
+ return []AssetTransformInput{}, err
+ }
+
+ transactionSet := transform.GetTransactionSet(ledger)
+
+ for txIndex, transaction := range transactionSet {
+ for opIndex, op := range transaction.Operations() {
+ if op.Body.Type == xdr.OperationTypePayment || op.Body.Type == xdr.OperationTypeManageSellOffer {
+ assetSlice = append(assetSlice, AssetTransformInput{
+ Operation: op,
+ OperationIndex: int32(opIndex),
+ TransactionIndex: int32(txIndex),
+ LedgerSeqNum: int32(seq),
+ })
+ }
+
+ }
+
+ }
+ if int64(len(assetSlice)) >= limit && limit >= 0 {
+ break
+ }
+ }
+
+ return assetSlice, nil
+}
diff --git a/internal/input/bucketlist_entries.go b/internal/input/bucketlist_entries.go
deleted file mode 100644
index 517641d5..00000000
--- a/internal/input/bucketlist_entries.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package input
-
-import (
- "context"
- "io"
-
- "github.com/stellar/go/historyarchive"
- "github.com/stellar/go/ingest"
- "github.com/stellar/go/xdr"
-
- "github.com/stellar/stellar-etl/internal/utils"
-)
-
-// GetEntriesFromGenesis returns a slice of ledger entries of the specified type for the ledgers starting from the genesis ledger and ending at end (inclusive)
-func GetEntriesFromGenesis(end uint32, entryType xdr.LedgerEntryType, archiveURLs []string) ([]ingest.Change, error) {
- archive, err := utils.CreateHistoryArchiveClient(archiveURLs)
- if err != nil {
- return []ingest.Change{}, err
- }
-
- latestNum, err := utils.GetLatestLedgerSequence(archiveURLs)
- if err != nil {
- return []ingest.Change{}, err
- }
-
- if err = utils.ValidateLedgerRange(2, end, latestNum); err != nil {
- return []ingest.Change{}, err
- }
-
- checkpointSeq, err := utils.GetCheckpointNum(end, latestNum)
- if err != nil {
- return []ingest.Change{}, err
- }
-
- return readBucketList(archive, checkpointSeq, entryType)
-}
-
-// readBucketList reads the bucket list for the specified checkpoint sequence number and returns a slice of ledger entries of the specified type
-func readBucketList(archive historyarchive.ArchiveInterface, checkpointSeq uint32, entryType xdr.LedgerEntryType) ([]ingest.Change, error) {
- changeReader, err := ingest.NewCheckpointChangeReader(context.Background(), archive, checkpointSeq)
- defer changeReader.Close()
- if err != nil {
- return []ingest.Change{}, err
- }
-
- entrySlice := []ingest.Change{}
- for {
- change, err := changeReader.Read()
- if err == io.EOF {
- break
- }
-
- if err != nil {
- return []ingest.Change{}, err
- }
-
- if change.Type == entryType {
- entrySlice = append(entrySlice, change)
- }
- }
-
- return entrySlice, nil
-}
diff --git a/internal/input/changes.go b/internal/input/changes.go
index 7dc7dfaa..d3011e29 100644
--- a/internal/input/changes.go
+++ b/internal/input/changes.go
@@ -84,7 +84,7 @@ func PrepareCaptiveCore(execPath string, tomlPath string, start, end uint32, env
// extractBatch gets the changes from the ledgers in the range [batchStart, batchEnd] and compacts them
func extractBatch(
batchStart, batchEnd uint32,
- core *ledgerbackend.CaptiveStellarCore,
+ backend *ledgerbackend.LedgerBackend,
env utils.EnvironmentDetails, logger *utils.EtlLogger) ChangeBatch {
dataTypes := []xdr.LedgerEntryType{
@@ -106,16 +106,11 @@ func extractBatch(
changeCompactors[dt] = ingest.NewChangeCompactor()
}
- latestLedger, err := core.GetLatestLedgerSequence(ctx)
- if err != nil {
- logger.Fatal("unable to get the latest ledger sequence: ", err)
- }
-
// if this ledger is available, we process its changes and move on to the next ledger by incrementing seq.
// Otherwise, nothing is incremented, and we try again on the next iteration of the loop
var header xdr.LedgerHeaderHistoryEntry
- if seq <= latestLedger {
- changeReader, err := ingest.NewLedgerChangeReader(ctx, core, env.NetworkPassphrase, seq)
+ if seq <= batchEnd {
+ changeReader, err := ingest.NewLedgerChangeReader(ctx, *backend, env.NetworkPassphrase, seq)
if err != nil {
logger.Fatal(fmt.Sprintf("unable to create change reader for ledger %d: ", seq), err)
}
@@ -163,14 +158,14 @@ func extractBatch(
// StreamChanges reads in ledgers, processes the changes, and send the changes to the channel matching their type
// Ledgers are processed in batches of size .
-func StreamChanges(core *ledgerbackend.CaptiveStellarCore, start, end, batchSize uint32, changeChannel chan ChangeBatch, closeChan chan int, env utils.EnvironmentDetails, logger *utils.EtlLogger) {
+func StreamChanges(backend *ledgerbackend.LedgerBackend, start, end, batchSize uint32, changeChannel chan ChangeBatch, closeChan chan int, env utils.EnvironmentDetails, logger *utils.EtlLogger) {
batchStart := start
batchEnd := uint32(math.Min(float64(batchStart+batchSize), float64(end)))
for batchStart < batchEnd {
if batchEnd < end {
batchEnd = uint32(batchEnd - 1)
}
- batch := ExtractBatch(batchStart, batchEnd, core, env, logger)
+ batch := ExtractBatch(batchStart, batchEnd, backend, env, logger)
changeChannel <- batch
// batchStart and batchEnd should not overlap
// overlapping batches causes duplicate record loads
diff --git a/internal/input/changes_test.go b/internal/input/changes_test.go
index cabed9ca..f5bce40d 100644
--- a/internal/input/changes_test.go
+++ b/internal/input/changes_test.go
@@ -129,7 +129,7 @@ func wrapLedgerEntry(entryType xdr.LedgerEntryType, entry xdr.LedgerEntry) Chang
func mockExtractBatch(
batchStart, batchEnd uint32,
- core *ledgerbackend.CaptiveStellarCore,
+ backend *ledgerbackend.LedgerBackend,
env utils.EnvironmentDetails, logger *utils.EtlLogger) ChangeBatch {
log.Errorf("mock called")
return ChangeBatch{
diff --git a/internal/input/ledger_range.go b/internal/input/ledger_range.go
index 81eb9630..2f778095 100644
--- a/internal/input/ledger_range.go
+++ b/internal/input/ledger_range.go
@@ -32,7 +32,11 @@ const avgCloseTime = time.Second * 5 // average time to close a stellar ledger
func GetLedgerRange(startTime, endTime time.Time, isTest bool, isFuture bool) (int64, int64, error) {
startTime = startTime.UTC()
endTime = endTime.UTC()
- env := utils.GetEnvironmentDetails(isTest, isFuture)
+ commonFlagValues := utils.CommonFlagValues{
+ IsTest: isTest,
+ IsFuture: isFuture,
+ }
+ env := utils.GetEnvironmentDetails(commonFlagValues)
if startTime.After(endTime) {
return 0, 0, fmt.Errorf("start time must be less than or equal to the end time")
diff --git a/internal/input/ledgers.go b/internal/input/ledgers.go
index 7c3865b9..d70012de 100644
--- a/internal/input/ledgers.go
+++ b/internal/input/ledgers.go
@@ -6,25 +6,74 @@ import (
"github.com/stellar/stellar-etl/internal/utils"
"github.com/stellar/go/historyarchive"
+ "github.com/stellar/go/ingest/ledgerbackend"
+ "github.com/stellar/go/xdr"
)
// GetLedgers returns a slice of ledger close metas for the ledgers in the provided range (inclusive on both ends)
-func GetLedgers(start, end uint32, limit int64, isTest bool, isFuturenet bool) ([]historyarchive.Ledger, error) {
- env := utils.GetEnvironmentDetails(isTest, isFuturenet)
- backend, err := utils.CreateBackend(start, end, env.ArchiveURLs)
+func GetLedgers(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]utils.HistoryArchiveLedgerAndLCM, error) {
+ ctx := context.Background()
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
if err != nil {
- return []historyarchive.Ledger{}, err
+ return []utils.HistoryArchiveLedgerAndLCM{}, err
}
- ledgerSlice := []historyarchive.Ledger{}
- ctx := context.Background()
+ ledgerSlice := []utils.HistoryArchiveLedgerAndLCM{}
+ err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end))
+ panicIf(err)
for seq := start; seq <= end; seq++ {
- ledger, err := backend.GetLedgerArchive(ctx, seq)
+ lcm, err := backend.GetLedger(ctx, seq)
if err != nil {
- return []historyarchive.Ledger{}, err
+ return []utils.HistoryArchiveLedgerAndLCM{}, err
+ }
+
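+		// The transaction set ext and result pairs live in different places depending on the
+		// LedgerCloseMeta version, so extract them per version before rebuilding the ledger.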
+ var ext xdr.TransactionHistoryEntryExt
+ var transactionResultPair []xdr.TransactionResultPair
+
+ switch lcm.V {
+ case 0:
+ ext = xdr.TransactionHistoryEntryExt{
+ V: 0,
+ GeneralizedTxSet: nil,
+ }
+ for _, transactionResultMeta := range lcm.V0.TxProcessing {
+ transactionResultPair = append(transactionResultPair, transactionResultMeta.Result)
+ }
+ case 1:
+ ext = xdr.TransactionHistoryEntryExt{
+ V: 1,
+ GeneralizedTxSet: &lcm.V1.TxSet,
+ }
+ for _, transactionResultMeta := range lcm.V1.TxProcessing {
+ transactionResultPair = append(transactionResultPair, transactionResultMeta.Result)
+ }
+ }
+
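+		// Rebuild a historyarchive.Ledger from the LedgerCloseMeta so downstream consumers keep
+		// the history archive shape alongside the raw LCM in HistoryArchiveLedgerAndLCM.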
+ ledger := historyarchive.Ledger{
+ Header: lcm.LedgerHeaderHistoryEntry(),
+ Transaction: xdr.TransactionHistoryEntry{
+ LedgerSeq: lcm.LedgerHeaderHistoryEntry().Header.LedgerSeq,
+ TxSet: xdr.TransactionSet{
+ PreviousLedgerHash: lcm.LedgerHeaderHistoryEntry().Header.PreviousLedgerHash,
+ Txs: lcm.TransactionEnvelopes(),
+ },
+ Ext: ext,
+ },
+ TransactionResult: xdr.TransactionHistoryResultEntry{
+ LedgerSeq: lcm.LedgerHeaderHistoryEntry().Header.LedgerSeq,
+ TxResultSet: xdr.TransactionResultSet{
+ Results: transactionResultPair,
+ },
+ Ext: xdr.TransactionHistoryResultEntryExt{},
+ },
+ }
+
+ ledgerLCM := utils.HistoryArchiveLedgerAndLCM{
+ Ledger: ledger,
+ LCM: lcm,
}
- ledgerSlice = append(ledgerSlice, ledger)
+ ledgerSlice = append(ledgerSlice, ledgerLCM)
if int64(len(ledgerSlice)) >= limit && limit >= 0 {
break
}
diff --git a/internal/input/ledgers_history_archive.go b/internal/input/ledgers_history_archive.go
new file mode 100644
index 00000000..5b42ba5c
--- /dev/null
+++ b/internal/input/ledgers_history_archive.go
@@ -0,0 +1,35 @@
+package input
+
+import (
+ "context"
+
+ "github.com/stellar/stellar-etl/internal/utils"
+)
+
+// GetLedgersHistoryArchive returns a slice of ledgers from the history archive for the ledgers in the provided range (inclusive on both ends)
+func GetLedgersHistoryArchive(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]utils.HistoryArchiveLedgerAndLCM, error) {
+ backend, err := utils.CreateBackend(start, end, env.ArchiveURLs)
+ if err != nil {
+ return []utils.HistoryArchiveLedgerAndLCM{}, err
+ }
+
+ ledgerSlice := []utils.HistoryArchiveLedgerAndLCM{}
+ ctx := context.Background()
+ for seq := start; seq <= end; seq++ {
+ ledger, err := backend.GetLedgerArchive(ctx, seq)
+ if err != nil {
+ return []utils.HistoryArchiveLedgerAndLCM{}, err
+ }
+
+ ledgerLCM := utils.HistoryArchiveLedgerAndLCM{
+ Ledger: ledger,
+ }
+
+ ledgerSlice = append(ledgerSlice, ledgerLCM)
+ if int64(len(ledgerSlice)) >= limit && limit >= 0 {
+ break
+ }
+ }
+
+ return ledgerSlice, nil
+}
diff --git a/internal/input/operations.go b/internal/input/operations.go
index e53437ae..0fca21ed 100644
--- a/internal/input/operations.go
+++ b/internal/input/operations.go
@@ -27,11 +27,10 @@ func panicIf(err error) {
}
// GetOperations returns a slice of operations for the ledgers in the provided range (inclusive on both ends)
-func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails) ([]OperationTransformInput, error) {
+func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]OperationTransformInput, error) {
ctx := context.Background()
- backend, err := env.CreateCaptiveCoreBackend()
-
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
if err != nil {
return []OperationTransformInput{}, err
}
@@ -40,15 +39,14 @@ func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails)
err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end))
panicIf(err)
for seq := start; seq <= end; seq++ {
- changeReader, err := ingest.NewLedgerChangeReader(ctx, backend, env.NetworkPassphrase, seq)
+ ledgerCloseMeta, err := backend.GetLedger(ctx, seq)
if err != nil {
- return []OperationTransformInput{}, err
+ return []OperationTransformInput{}, fmt.Errorf("error getting ledger seq %d from the backend: %v", seq, err)
}
- txReader := changeReader.LedgerTransactionReader
- ledgerCloseMeta, err := backend.GetLedger(ctx, seq)
+ txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta)
if err != nil {
- return nil, fmt.Errorf("error getting ledger seq %d from the backend: %v", seq, err)
+ return []OperationTransformInput{}, err
}
for int64(len(opSlice)) < limit || limit < 0 {
@@ -73,6 +71,7 @@ func GetOperations(start, end uint32, limit int64, env utils.EnvironmentDetails)
}
txReader.Close()
+
if int64(len(opSlice)) >= limit && limit >= 0 {
break
}
diff --git a/internal/input/orderbooks.go b/internal/input/orderbooks.go
index 96ce8ca7..95c1f2bc 100644
--- a/internal/input/orderbooks.go
+++ b/internal/input/orderbooks.go
@@ -59,7 +59,6 @@ func NewOrderbookParser(logger *utils.EtlLogger) OrderbookParser {
}
}
-
func (o *OrderbookParser) parseOrderbook(orderbook []ingest.Change, seq uint32) {
var group sync.WaitGroup
allConverted := make([]transform.NormalizedOfferOutput, len(orderbook))
@@ -119,7 +118,7 @@ func (o *OrderbookParser) parseOrderbook(orderbook []ingest.Change, seq uint32)
}
}
-// GetOfferChanges gets the offer changes that ocurred between the firstSeq ledger and nextSeq ledger
+// GetOfferChanges gets the offer changes that occurred between the firstSeq ledger and nextSeq ledger
func GetOfferChanges(core *ledgerbackend.CaptiveStellarCore, env utils.EnvironmentDetails, firstSeq, nextSeq uint32) (*ingest.ChangeCompactor, error) {
offChanges := ingest.NewChangeCompactor()
ctx := context.Background()
@@ -206,7 +205,6 @@ func UpdateOrderbook(start, end uint32, orderbook []ingest.Change, core *ledgerb
for _, change := range orderbook {
changeCache.AddChange(change)
}
-
orderbook = changeCache.GetChanges()
}
diff --git a/internal/input/trades.go b/internal/input/trades.go
index 3d69a02b..afdede71 100644
--- a/internal/input/trades.go
+++ b/internal/input/trades.go
@@ -10,6 +10,7 @@ import (
"github.com/stellar/go/ingest"
"github.com/stellar/go/ingest/ledgerbackend"
+ "github.com/stellar/go/support/errors"
"github.com/stellar/go/xdr"
)
@@ -22,26 +23,30 @@ type TradeTransformInput struct {
}
// GetTrades returns a slice of trades for the ledgers in the provided range (inclusive on both ends)
-func GetTrades(start, end uint32, limit int64, env utils.EnvironmentDetails) ([]TradeTransformInput, error) {
+func GetTrades(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]TradeTransformInput, error) {
ctx := context.Background()
- backend, err := env.CreateCaptiveCoreBackend()
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
+ if err != nil {
+ return []TradeTransformInput{}, err
+ }
tradeSlice := []TradeTransformInput{}
err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end))
panicIf(err)
for seq := start; seq <= end; seq++ {
- changeReader, err := ingest.NewLedgerChangeReader(ctx, backend, env.NetworkPassphrase, seq)
+ ledgerCloseMeta, err := backend.GetLedger(ctx, seq)
if err != nil {
- return []TradeTransformInput{}, err
+ return []TradeTransformInput{}, errors.Wrap(err, "error getting ledger from the backend")
}
- txReader := changeReader.LedgerTransactionReader
- closeTime, err := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime)
+ txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta)
if err != nil {
return []TradeTransformInput{}, err
}
+ closeTime, _ := utils.TimePointToUTCTimeStamp(txReader.GetHeader().Header.ScpValue.CloseTime)
+
for int64(len(tradeSlice)) < limit || limit < 0 {
tx, err := txReader.Read()
if err == io.EOF {
diff --git a/internal/input/transactions.go b/internal/input/transactions.go
index 8bc79fcd..b5e4019c 100644
--- a/internal/input/transactions.go
+++ b/internal/input/transactions.go
@@ -20,22 +20,24 @@ type LedgerTransformInput struct {
}
// GetTransactions returns a slice of transactions for the ledgers in the provided range (inclusive on both ends)
-func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetails) ([]LedgerTransformInput, error) {
+func GetTransactions(start, end uint32, limit int64, env utils.EnvironmentDetails, useCaptiveCore bool) ([]LedgerTransformInput, error) {
ctx := context.Background()
- backend, err := env.CreateCaptiveCoreBackend()
-
+ backend, err := utils.CreateLedgerBackend(ctx, useCaptiveCore, env)
if err != nil {
return []LedgerTransformInput{}, err
}
txSlice := []LedgerTransformInput{}
err = backend.PrepareRange(ctx, ledgerbackend.BoundedRange(start, end))
+ if err != nil {
+ return []LedgerTransformInput{}, err
+ }
panicIf(err)
for seq := start; seq <= end; seq++ {
ledgerCloseMeta, err := backend.GetLedger(ctx, seq)
if err != nil {
- return nil, errors.Wrap(err, "error getting ledger from the backend")
+ return []LedgerTransformInput{}, errors.Wrap(err, "error getting ledger from the backend")
}
txReader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(env.NetworkPassphrase, ledgerCloseMeta)
diff --git a/internal/toid/main.go b/internal/toid/main.go
index 9ce468f5..e91f73c7 100644
--- a/internal/toid/main.go
+++ b/internal/toid/main.go
@@ -5,7 +5,6 @@ import (
"fmt"
)
-//
// ID represents the total order of Ledgers, Transactions and
// Operations.
//
@@ -19,36 +18,35 @@ import (
//
// The follow diagram shows this format:
//
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | Ledger Sequence Number |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | Transaction Application Order | Op Index |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Ledger Sequence Number |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Transaction Application Order | Op Index |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
//
// By component:
//
// Ledger Sequence: 32-bits
//
-// A complete ledger sequence number in which the operation was validated.
+// A complete ledger sequence number in which the operation was validated.
//
-// Expressed in network byte order.
+// Expressed in network byte order.
//
// Transaction Application Order: 20-bits
//
-// The order that the transaction was applied within the ledger it was
-// validated. Accommodates up to 1,048,575 transactions in a single ledger.
+// The order that the transaction was applied within the ledger it was
+// validated. Accommodates up to 1,048,575 transactions in a single ledger.
//
-// Expressed in network byte order.
+// Expressed in network byte order.
//
// Operation Index: 12-bits
//
-// The index of the operation within its parent transaction. Accommodates up
-// to 4095 operations per transaction.
-//
-// Expressed in network byte order.
+// The index of the operation within its parent transaction. Accommodates up
+// to 4095 operations per transaction.
//
+// Expressed in network byte order.
//
// Note: API Clients should not be interpreting this value. We will use it
// as an opaque paging token that clients can parrot back to us after having read
@@ -59,7 +57,6 @@ import (
// transaction as well. Given that this ID is only meant for ordering within a
// single type of object, the sharing of ids across object types seems
// acceptable.
-//
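+//
+// For example, an ID with ledger sequence 1, transaction order 2 and operation
+// index 3 packs to (1 << 32) | (2 << 12) | 3 = 4294975491.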
type ID struct {
LedgerSequence int32
TransactionOrder int32
diff --git a/internal/toid/main_test.go b/internal/toid/main_test.go
index 70433b1a..cac6305b 100644
--- a/internal/toid/main_test.go
+++ b/internal/toid/main_test.go
@@ -18,7 +18,7 @@ func TestID_ToInt64(t *testing.T) {
expected int64
shouldPanic bool
}{
- // accomodates 12-bits of precision for the operation field
+ // accommodates 12-bits of precision for the operation field
{
id: &ID{0, 0, 1},
expected: 1,
@@ -31,7 +31,7 @@ func TestID_ToInt64(t *testing.T) {
id: &ID{0, 0, 4096},
shouldPanic: true,
},
- // accomodates 20-bits of precision for the transaction field
+ // accommodates 20-bits of precision for the transaction field
{
id: &ID{0, 1, 0},
expected: 4096,
@@ -44,7 +44,7 @@ func TestID_ToInt64(t *testing.T) {
id: &ID{0, 1048576, 0},
shouldPanic: true,
},
- // accomodates 32-bits of precision for the ledger field
+ // accommodates 32-bits of precision for the ledger field
{
id: &ID{1, 0, 0},
expected: 4294967296,
diff --git a/internal/toid/synt_offer_id.go b/internal/toid/synt_offer_id.go
index b0fe4e1b..39777109 100644
--- a/internal/toid/synt_offer_id.go
+++ b/internal/toid/synt_offer_id.go
@@ -22,8 +22,9 @@ const (
// Due to the 2nd bit being used, the largest possible toid is:
// 0011111111111111111111111111111100000000000000000001000000000001
// \ ledger /\ transaction /\ op /
-// = 1073741823
-// with avg. 5 sec close time will reach in ~170 years
+//
+// = 1073741823
+// with avg. 5 sec close time will reach in ~170 years
func EncodeOfferId(id uint64, typ OfferIDType) int64 {
// First ensure the bits we're going to change are 0s
if id&mask != 0 {
diff --git a/internal/transform/account.go b/internal/transform/account.go
index 1a605ac6..84f4c706 100644
--- a/internal/transform/account.go
+++ b/internal/transform/account.go
@@ -18,7 +18,7 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory
accountEntry, accountFound := ledgerEntry.Data.GetAccount()
if !accountFound {
- return AccountOutput{}, fmt.Errorf("Could not extract account data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return AccountOutput{}, fmt.Errorf("could not extract account data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
outputID, err := accountEntry.AccountId.GetAddress()
@@ -28,7 +28,7 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory
outputBalance := accountEntry.Balance
if outputBalance < 0 {
- return AccountOutput{}, fmt.Errorf("Balance is negative (%d) for account: %s", outputBalance, outputID)
+ return AccountOutput{}, fmt.Errorf("balance is negative (%d) for account: %s", outputBalance, outputID)
}
//The V1 struct is the first version of the extender from accountEntry. It contains information on liabilities, and in the future
@@ -39,17 +39,17 @@ func TransformAccount(ledgerChange ingest.Change, header xdr.LedgerHeaderHistory
liabilities := accountExtensionInfo.Liabilities
outputBuyingLiabilities, outputSellingLiabilities = liabilities.Buying, liabilities.Selling
if outputBuyingLiabilities < 0 {
- return AccountOutput{}, fmt.Errorf("The buying liabilities count is negative (%d) for account: %s", outputBuyingLiabilities, outputID)
+ return AccountOutput{}, fmt.Errorf("the buying liabilities count is negative (%d) for account: %s", outputBuyingLiabilities, outputID)
}
if outputSellingLiabilities < 0 {
- return AccountOutput{}, fmt.Errorf("The selling liabilities count is negative (%d) for account: %s", outputSellingLiabilities, outputID)
+ return AccountOutput{}, fmt.Errorf("the selling liabilities count is negative (%d) for account: %s", outputSellingLiabilities, outputID)
}
}
outputSequenceNumber := int64(accountEntry.SeqNum)
if outputSequenceNumber < 0 {
- return AccountOutput{}, fmt.Errorf("Account sequence number is negative (%d) for account: %s", outputSequenceNumber, outputID)
+ return AccountOutput{}, fmt.Errorf("account sequence number is negative (%d) for account: %s", outputSequenceNumber, outputID)
}
outputSequenceLedger := accountEntry.SeqLedger()
outputSequenceTime := accountEntry.SeqTime()
diff --git a/internal/transform/account_test.go b/internal/transform/account_test.go
index c99b66fc..af050c03 100644
--- a/internal/transform/account_test.go
+++ b/internal/transform/account_test.go
@@ -38,7 +38,7 @@ func TestTransformAccount(t *testing.T) {
},
},
},
- AccountOutput{}, fmt.Errorf("Could not extract account data from ledger entry; actual type is LedgerEntryTypeOffer"),
+ AccountOutput{}, fmt.Errorf("could not extract account data from ledger entry; actual type is LedgerEntryTypeOffer"),
},
{
inputStruct{wrapAccountEntry(xdr.AccountEntry{
@@ -46,7 +46,7 @@ func TestTransformAccount(t *testing.T) {
Balance: -1,
}, 0),
},
- AccountOutput{}, fmt.Errorf("Balance is negative (-1) for account: %s", genericAccountAddress),
+ AccountOutput{}, fmt.Errorf("balance is negative (-1) for account: %s", genericAccountAddress),
},
{
inputStruct{wrapAccountEntry(xdr.AccountEntry{
@@ -61,7 +61,7 @@ func TestTransformAccount(t *testing.T) {
},
}, 0),
},
- AccountOutput{}, fmt.Errorf("The buying liabilities count is negative (-1) for account: %s", genericAccountAddress),
+ AccountOutput{}, fmt.Errorf("the buying liabilities count is negative (-1) for account: %s", genericAccountAddress),
},
{
inputStruct{wrapAccountEntry(xdr.AccountEntry{
@@ -76,7 +76,7 @@ func TestTransformAccount(t *testing.T) {
},
}, 0),
},
- AccountOutput{}, fmt.Errorf("The selling liabilities count is negative (-2) for account: %s", genericAccountAddress),
+ AccountOutput{}, fmt.Errorf("the selling liabilities count is negative (-2) for account: %s", genericAccountAddress),
},
{
inputStruct{wrapAccountEntry(xdr.AccountEntry{
@@ -84,7 +84,7 @@ func TestTransformAccount(t *testing.T) {
SeqNum: -3,
}, 0),
},
- AccountOutput{}, fmt.Errorf("Account sequence number is negative (-3) for account: %s", genericAccountAddress),
+ AccountOutput{}, fmt.Errorf("account sequence number is negative (-3) for account: %s", genericAccountAddress),
},
{
inputStruct{
diff --git a/internal/transform/asset_test.go b/internal/transform/asset_test.go
index 6d57687e..f2f4170c 100644
--- a/internal/transform/asset_test.go
+++ b/internal/transform/asset_test.go
@@ -68,7 +68,7 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error)
inputEnvelope.Tx.SourceAccount = testAccount1
inputOperations := []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePayment,
@@ -79,7 +79,7 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error)
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePayment,
@@ -99,13 +99,13 @@ func makeAssetTestInput() (inputTransaction ingest.LedgerTransaction, err error)
func makeAssetTestOutput() (transformedAssets []AssetOutput) {
transformedAssets = []AssetOutput{
- AssetOutput{
+ {
AssetCode: "USDT",
AssetIssuer: "GBVVRXLMNCJQW3IDDXC3X6XCH35B5Q7QXNMMFPENSOGUPQO7WO7HGZPA",
AssetType: "credit_alphanum4",
AssetID: -8205667356306085451,
},
- AssetOutput{
+ {
AssetCode: "",
AssetIssuer: "",
AssetType: "native",
diff --git a/internal/transform/claimable_balance.go b/internal/transform/claimable_balance.go
index 2e2d954a..fb76b1d2 100644
--- a/internal/transform/claimable_balance.go
+++ b/internal/transform/claimable_balance.go
@@ -33,7 +33,7 @@ func TransformClaimableBalance(ledgerChange ingest.Change, header xdr.LedgerHead
}
balanceID, err := xdr.MarshalHex(balanceEntry.BalanceId)
if err != nil {
- return ClaimableBalanceOutput{}, fmt.Errorf("Invalid balanceId in op: %d", uint32(ledgerEntry.LastModifiedLedgerSeq))
+ return ClaimableBalanceOutput{}, fmt.Errorf("invalid balanceId in op: %d", uint32(ledgerEntry.LastModifiedLedgerSeq))
}
outputFlags := uint32(balanceEntry.Flags())
outputAsset, err := transformSingleAsset(balanceEntry.Asset)
diff --git a/internal/transform/config_setting.go b/internal/transform/config_setting.go
index c98d17c1..f110b2ab 100644
--- a/internal/transform/config_setting.go
+++ b/internal/transform/config_setting.go
@@ -18,7 +18,7 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH
configSetting, ok := ledgerEntry.Data.GetConfigSetting()
if !ok {
- return ConfigSettingOutput{}, fmt.Errorf("Could not extract config setting from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return ConfigSettingOutput{}, fmt.Errorf("could not extract config setting from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
configSettingId := configSetting.ConfigSettingId
@@ -48,7 +48,7 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH
writeFee1KbBucketListHigh := contractLedgerCost.WriteFee1KbBucketListHigh
bucketListWriteFeeGrowthFactor := contractLedgerCost.BucketListWriteFeeGrowthFactor
- contractHistoricalData, ok := configSetting.GetContractHistoricalData()
+ contractHistoricalData, _ := configSetting.GetContractHistoricalData()
feeHistorical1Kb := contractHistoricalData.FeeHistorical1Kb
contractMetaData, _ := configSetting.GetContractEvents()
@@ -66,9 +66,9 @@ func TransformConfigSetting(ledgerChange ingest.Change, header xdr.LedgerHeaderH
paramsMemBytes, _ := configSetting.GetContractCostParamsMemBytes()
contractCostParamsMemBytes := serializeParams(paramsMemBytes)
- contractDataKeySizeBytes, ok := configSetting.GetContractDataKeySizeBytes()
+ contractDataKeySizeBytes, _ := configSetting.GetContractDataKeySizeBytes()
- contractDataEntrySizeBytes, ok := configSetting.GetContractDataEntrySizeBytes()
+ contractDataEntrySizeBytes, _ := configSetting.GetContractDataEntrySizeBytes()
stateArchivalSettings, _ := configSetting.GetStateArchivalSettings()
maxEntryTtl := stateArchivalSettings.MaxEntryTtl
diff --git a/internal/transform/config_setting_test.go b/internal/transform/config_setting_test.go
index b780c3fe..59163e88 100644
--- a/internal/transform/config_setting_test.go
+++ b/internal/transform/config_setting_test.go
@@ -31,7 +31,7 @@ func TestTransformConfigSetting(t *testing.T) {
},
},
},
- ConfigSettingOutput{}, fmt.Errorf("Could not extract config setting from ledger entry; actual type is LedgerEntryTypeOffer"),
+ ConfigSettingOutput{}, fmt.Errorf("could not extract config setting from ledger entry; actual type is LedgerEntryTypeOffer"),
},
}
@@ -82,8 +82,8 @@ func makeConfigSettingTestInput() []ingest.Change {
}
func makeConfigSettingTestOutput() []ConfigSettingOutput {
- contractMapType := make([]map[string]string, 0, 0)
- bucket := make([]uint64, 0, 0)
+ contractMapType := make([]map[string]string, 0)
+ bucket := make([]uint64, 0)
return []ConfigSettingOutput{
{
diff --git a/internal/transform/contract_code.go b/internal/transform/contract_code.go
index 8bfcf574..b026d11d 100644
--- a/internal/transform/contract_code.go
+++ b/internal/transform/contract_code.go
@@ -17,7 +17,7 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi
contractCode, ok := ledgerEntry.Data.GetContractCode()
if !ok {
- return ContractCodeOutput{}, fmt.Errorf("Could not extract contract code from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return ContractCodeOutput{}, fmt.Errorf("could not extract contract code from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
// LedgerEntryChange must contain a contract code change to be parsed, otherwise skip
@@ -38,6 +38,31 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi
ledgerSequence := header.Header.LedgerSeq
+ var outputNInstructions uint32
+ var outputNFunctions uint32
+ var outputNGlobals uint32
+ var outputNTableEntries uint32
+ var outputNTypes uint32
+ var outputNDataSegments uint32
+ var outputNElemSegments uint32
+ var outputNImports uint32
+ var outputNExports uint32
+ var outputNDataSegmentBytes uint32
+
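+	// Wasm cost inputs are only present when the contract code entry carries the V1
+	// extension; otherwise these output fields keep their zero values.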
+ extV1, ok := contractCode.Ext.GetV1()
+ if ok {
+ outputNInstructions = uint32(extV1.CostInputs.NInstructions)
+ outputNFunctions = uint32(extV1.CostInputs.NFunctions)
+ outputNGlobals = uint32(extV1.CostInputs.NGlobals)
+ outputNTableEntries = uint32(extV1.CostInputs.NTableEntries)
+ outputNTypes = uint32(extV1.CostInputs.NTypes)
+ outputNDataSegments = uint32(extV1.CostInputs.NDataSegments)
+ outputNElemSegments = uint32(extV1.CostInputs.NElemSegments)
+ outputNImports = uint32(extV1.CostInputs.NImports)
+ outputNExports = uint32(extV1.CostInputs.NExports)
+ outputNDataSegmentBytes = uint32(extV1.CostInputs.NDataSegmentBytes)
+ }
+
transformedCode := ContractCodeOutput{
ContractCodeHash: contractCodeHash,
ContractCodeExtV: int32(contractCodeExtV),
@@ -47,6 +72,16 @@ func TransformContractCode(ledgerChange ingest.Change, header xdr.LedgerHeaderHi
ClosedAt: closedAt,
LedgerSequence: uint32(ledgerSequence),
LedgerKeyHash: ledgerKeyHash,
+ NInstructions: outputNInstructions,
+ NFunctions: outputNFunctions,
+ NGlobals: outputNGlobals,
+ NTableEntries: outputNTableEntries,
+ NTypes: outputNTypes,
+ NDataSegments: outputNDataSegments,
+ NElemSegments: outputNElemSegments,
+ NImports: outputNImports,
+ NExports: outputNExports,
+ NDataSegmentBytes: outputNDataSegmentBytes,
}
return transformedCode, nil
}
diff --git a/internal/transform/contract_code_test.go b/internal/transform/contract_code_test.go
index 812bd446..c15467a6 100644
--- a/internal/transform/contract_code_test.go
+++ b/internal/transform/contract_code_test.go
@@ -31,7 +31,7 @@ func TestTransformContractCode(t *testing.T) {
},
},
},
- ContractCodeOutput{}, fmt.Errorf("Could not extract contract code from ledger entry; actual type is LedgerEntryTypeOffer"),
+ ContractCodeOutput{}, fmt.Errorf("could not extract contract code from ledger entry; actual type is LedgerEntryTypeOffer"),
},
}
@@ -67,8 +67,22 @@ func makeContractCodeTestInput() []ingest.Change {
Type: xdr.LedgerEntryTypeContractCode,
ContractCode: &xdr.ContractCodeEntry{
Hash: hash,
- Ext: xdr.ExtensionPoint{
+ Ext: xdr.ContractCodeEntryExt{
V: 1,
+ V1: &xdr.ContractCodeEntryV1{
+ CostInputs: xdr.ContractCodeCostInputs{
+ NInstructions: 1,
+ NFunctions: 2,
+ NGlobals: 3,
+ NTableEntries: 4,
+ NTypes: 5,
+ NDataSegments: 6,
+ NElemSegments: 7,
+ NImports: 8,
+ NExports: 9,
+ NDataSegmentBytes: 10,
+ },
+ },
},
},
},
@@ -94,6 +108,16 @@ func makeContractCodeTestOutput() []ContractCodeOutput {
LedgerSequence: 10,
ClosedAt: time.Date(1970, time.January, 1, 0, 16, 40, 0, time.UTC),
LedgerKeyHash: "dfed061dbe464e0ff320744fcd604ac08b39daa74fa24110936654cbcb915ccc",
+ NInstructions: 1,
+ NFunctions: 2,
+ NGlobals: 3,
+ NTableEntries: 4,
+ NTypes: 5,
+ NDataSegments: 6,
+ NElemSegments: 7,
+ NImports: 8,
+ NExports: 9,
+ NDataSegmentBytes: 10,
},
}
}
diff --git a/internal/transform/contract_data.go b/internal/transform/contract_data.go
index 30fec49b..614c8655 100644
--- a/internal/transform/contract_data.go
+++ b/internal/transform/contract_data.go
@@ -10,27 +10,13 @@ import (
"github.com/stellar/stellar-etl/internal/utils"
)
-const (
- scDecimalPrecision = 7
-)
-
var (
- // https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/public_types.rs#L22
- nativeAssetSym = xdr.ScSymbol("Native")
// these are storage DataKey enum
// https://github.com/stellar/rs-soroban-env/blob/v0.0.16/soroban-env-host/src/native_contract/token/storage_types.rs#L23
balanceMetadataSym = xdr.ScSymbol("Balance")
- metadataSym = xdr.ScSymbol("METADATA")
- metadataNameSym = xdr.ScSymbol("name")
- metadataSymbolSym = xdr.ScSymbol("symbol")
- adminSym = xdr.ScSymbol("Admin")
issuerSym = xdr.ScSymbol("issuer")
assetCodeSym = xdr.ScSymbol("asset_code")
- alphaNum4Sym = xdr.ScSymbol("AlphaNum4")
- alphaNum12Sym = xdr.ScSymbol("AlphaNum12")
- decimalSym = xdr.ScSymbol("decimal")
assetInfoSym = xdr.ScSymbol("AssetInfo")
- decimalVal = xdr.Uint32(scDecimalPrecision)
assetInfoVec = &xdr.ScVec{
xdr.ScVal{
Type: xdr.ScValTypeScvSymbol,
@@ -67,7 +53,7 @@ func (t *TransformContractDataStruct) TransformContractData(ledgerChange ingest.
contractData, ok := ledgerEntry.Data.GetContractData()
if !ok {
- return ContractDataOutput{}, fmt.Errorf("Could not extract contract data from ledger entry; actual type is %s", ledgerEntry.Data.Type), false
+ return ContractDataOutput{}, fmt.Errorf("could not extract contract data from ledger entry; actual type is %s", ledgerEntry.Data.Type), false
}
if contractData.Key.Type.String() == "ScValTypeScvLedgerKeyNonce" {
@@ -100,7 +86,7 @@ func (t *TransformContractDataStruct) TransformContractData(ledgerChange ingest.
contractDataContractId, ok := contractData.Contract.GetContractId()
if !ok {
- return ContractDataOutput{}, fmt.Errorf("Could not extract contractId data information from contractData"), false
+ return ContractDataOutput{}, fmt.Errorf("could not extract contractId data information from contractData"), false
}
contractDataKeyType := contractData.Key.Type.String()
diff --git a/internal/transform/contract_data_test.go b/internal/transform/contract_data_test.go
index d09ab588..2777fae9 100644
--- a/internal/transform/contract_data_test.go
+++ b/internal/transform/contract_data_test.go
@@ -34,7 +34,7 @@ func TestTransformContractData(t *testing.T) {
},
},
"unit test",
- ContractDataOutput{}, fmt.Errorf("Could not extract contract data from ledger entry; actual type is LedgerEntryTypeOffer"),
+ ContractDataOutput{}, fmt.Errorf("could not extract contract data from ledger entry; actual type is LedgerEntryTypeOffer"),
},
}
diff --git a/internal/transform/contract_events.go b/internal/transform/contract_events.go
new file mode 100644
index 00000000..f9ce1a8a
--- /dev/null
+++ b/internal/transform/contract_events.go
@@ -0,0 +1,153 @@
+package transform
+
+import (
+ "encoding/base64"
+ "fmt"
+
+ "github.com/stellar/stellar-etl/internal/toid"
+ "github.com/stellar/stellar-etl/internal/utils"
+
+ "github.com/stellar/go/ingest"
+ "github.com/stellar/go/strkey"
+ "github.com/stellar/go/xdr"
+)
+
+// TransformContractEvent converts a transaction's contract events and diagnostic events into a form suitable for BigQuery.
+// It is known that contract events are a subset of the diagnostic events XDR definition. We are opting to call all of these events
+// contract events for better clarity to data analytics users.
+func TransformContractEvent(transaction ingest.LedgerTransaction, lhe xdr.LedgerHeaderHistoryEntry) ([]ContractEventOutput, error) {
+ ledgerHeader := lhe.Header
+ outputTransactionHash := utils.HashToHexString(transaction.Result.TransactionHash)
+ outputLedgerSequence := uint32(ledgerHeader.LedgerSeq)
+
+ transactionIndex := uint32(transaction.Index)
+
+ outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64()
+
+ outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime)
+ if err != nil {
+ return []ContractEventOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err)
+ }
+
+ // GetDiagnosticEvents will return all contract events and diagnostic events emitted
+ contractEvents, err := transaction.GetDiagnosticEvents()
+ if err != nil {
+ return []ContractEventOutput{}, err
+ }
+
+ var transformedContractEvents []ContractEventOutput
+
+ for _, contractEvent := range contractEvents {
+ var outputContractId string
+ outputTopicsJson := make(map[string][]map[string]string, 1)
+ outputTopicsDecodedJson := make(map[string][]map[string]string, 1)
+
+ outputInSuccessfulContractCall := contractEvent.InSuccessfulContractCall
+ event := contractEvent.Event
+ outputType := event.Type
+ outputTypeString := event.Type.String()
+
+ eventTopics := getEventTopics(event.Body)
+ outputTopics, outputTopicsDecoded := serializeScValArray(eventTopics)
+ outputTopicsJson["topics"] = outputTopics
+ outputTopicsDecodedJson["topics_decoded"] = outputTopicsDecoded
+
+ eventData := getEventData(event.Body)
+ outputData, outputDataDecoded := serializeScVal(eventData)
+
+ // Convert the xdrContactId to string
+ // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 this should be a stellar/go/xdr function
+ if event.ContractId != nil {
+ contractId := *event.ContractId
+ contractIdByte, _ := contractId.MarshalBinary()
+ outputContractId, _ = strkey.Encode(strkey.VersionByteContract, contractIdByte)
+ }
+
+ outputContractEventXDR, err := xdr.MarshalBase64(contractEvent)
+ if err != nil {
+ return []ContractEventOutput{}, err
+ }
+
+ outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64()
+ outputSuccessful := transaction.Result.Successful()
+
+ transformedDiagnosticEvent := ContractEventOutput{
+ TransactionHash: outputTransactionHash,
+ TransactionID: outputTransactionID,
+ Successful: outputSuccessful,
+ LedgerSequence: outputLedgerSequence,
+ ClosedAt: outputCloseTime,
+ InSuccessfulContractCall: outputInSuccessfulContractCall,
+ ContractId: outputContractId,
+ Type: int32(outputType),
+ TypeString: outputTypeString,
+ Topics: outputTopicsJson,
+ TopicsDecoded: outputTopicsDecodedJson,
+ Data: outputData,
+ DataDecoded: outputDataDecoded,
+ ContractEventXDR: outputContractEventXDR,
+ }
+
+ transformedContractEvents = append(transformedContractEvents, transformedDiagnosticEvent)
+ }
+
+ return transformedContractEvents, nil
+}
+
+// TODO this should be a stellar/go/xdr function
+func getEventTopics(eventBody xdr.ContractEventBody) []xdr.ScVal {
+ switch eventBody.V {
+ case 0:
+ contractEventV0 := eventBody.MustV0()
+ return contractEventV0.Topics
+ default:
+		panic(fmt.Sprintf("unsupported event body version: %d", eventBody.V))
+ }
+}
+
+// TODO this should be a stellar/go/xdr function
+func getEventData(eventBody xdr.ContractEventBody) xdr.ScVal {
+ switch eventBody.V {
+ case 0:
+ contractEventV0 := eventBody.MustV0()
+ return contractEventV0.Data
+ default:
+		panic(fmt.Sprintf("unsupported event body version: %d", eventBody.V))
+ }
+}
+
+// TODO this should also be used in the operations processor
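+// serializeScVal renders an ScVal as two {"type", "value"} maps: one holding the raw
+// base64-encoded XDR bytes and one holding the human-readable decoded string, with both
+// falling back to "n/a" when the value cannot be serialized.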
+func serializeScVal(scVal xdr.ScVal) (map[string]string, map[string]string) {
+ serializedData := map[string]string{}
+ serializedData["value"] = "n/a"
+ serializedData["type"] = "n/a"
+
+ serializedDataDecoded := map[string]string{}
+ serializedDataDecoded["value"] = "n/a"
+ serializedDataDecoded["type"] = "n/a"
+
+ if scValTypeName, ok := scVal.ArmForSwitch(int32(scVal.Type)); ok {
+ serializedData["type"] = scValTypeName
+ serializedDataDecoded["type"] = scValTypeName
+ if raw, err := scVal.MarshalBinary(); err == nil {
+ serializedData["value"] = base64.StdEncoding.EncodeToString(raw)
+ serializedDataDecoded["value"] = scVal.String()
+ }
+ }
+
+ return serializedData, serializedDataDecoded
+}
+
+// TODO this should also be used in the operations processor
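+// serializeScValArray applies serializeScVal to every element, returning the encoded and
+// decoded forms in matching order.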
+func serializeScValArray(scVals []xdr.ScVal) ([]map[string]string, []map[string]string) {
+ data := make([]map[string]string, 0, len(scVals))
+ dataDecoded := make([]map[string]string, 0, len(scVals))
+
+ for _, scVal := range scVals {
+ serializedData, serializedDataDecoded := serializeScVal(scVal)
+ data = append(data, serializedData)
+ dataDecoded = append(dataDecoded, serializedDataDecoded)
+ }
+
+ return data, dataDecoded
+}
diff --git a/internal/transform/diagnostic_events_test.go b/internal/transform/contract_events_test.go
similarity index 73%
rename from internal/transform/diagnostic_events_test.go
rename to internal/transform/contract_events_test.go
index 39a71de1..15e75703 100644
--- a/internal/transform/diagnostic_events_test.go
+++ b/internal/transform/contract_events_test.go
@@ -10,20 +10,20 @@ import (
"github.com/stellar/go/xdr"
)
-func TestTransformDiagnosticEvent(t *testing.T) {
+func TestTransformContractEvent(t *testing.T) {
type inputStruct struct {
transaction ingest.LedgerTransaction
historyHeader xdr.LedgerHeaderHistoryEntry
}
type transformTest struct {
input inputStruct
- wantOutput []DiagnosticEventOutput
+ wantOutput []ContractEventOutput
wantErr error
}
- hardCodedTransaction, hardCodedLedgerHeader, err := makeDiagnosticEventTestInput()
+ hardCodedTransaction, hardCodedLedgerHeader, err := makeContractEventTestInput()
assert.NoError(t, err)
- hardCodedOutput, err := makeDiagnosticEventTestOutput()
+ hardCodedOutput, err := makeContractEventTestOutput()
assert.NoError(t, err)
tests := []transformTest{}
@@ -37,30 +37,61 @@ func TestTransformDiagnosticEvent(t *testing.T) {
}
for _, test := range tests {
- actualOutput, actualError, _ := TransformDiagnosticEvent(test.input.transaction, test.input.historyHeader)
+ actualOutput, actualError := TransformContractEvent(test.input.transaction, test.input.historyHeader)
assert.Equal(t, test.wantErr, actualError)
assert.Equal(t, test.wantOutput, actualOutput)
}
}
-func makeDiagnosticEventTestOutput() (output [][]DiagnosticEventOutput, err error) {
- output = [][]DiagnosticEventOutput{{
- DiagnosticEventOutput{
+func makeContractEventTestOutput() (output [][]ContractEventOutput, err error) {
+
+ topics := make(map[string][]map[string]string, 1)
+ topics["topics"] = []map[string]string{
+ {
+ "type": "B",
+ "value": "AAAAAAAAAAE=",
+ },
+ }
+
+ topicsDecoded := make(map[string][]map[string]string, 1)
+ topicsDecoded["topics_decoded"] = []map[string]string{
+ {
+ "type": "B",
+ "value": "true",
+ },
+ }
+
+ data := map[string]string{
+ "type": "B",
+ "value": "AAAAAAAAAAE=",
+ }
+
+ dataDecoded := map[string]string{
+ "type": "B",
+ "value": "true",
+ }
+
+ output = [][]ContractEventOutput{{
+ ContractEventOutput{
TransactionHash: "a87fef5eeb260269c380f2de456aad72b59bb315aaac777860456e09dac0bafb",
- LedgerSequence: 30521816,
TransactionID: 131090201534533632,
+ Successful: false,
+ LedgerSequence: 30521816,
ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC),
InSuccessfulContractCall: true,
- ExtV: 0,
ContractId: "CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4",
- Type: "ContractEventTypeDiagnostic",
- BodyV: 0,
- Body: "AAAAAQAAAAAAAAABAAAAAAAAAAE=",
+ Type: 2,
+ TypeString: "ContractEventTypeDiagnostic",
+ Topics: topics,
+ TopicsDecoded: topicsDecoded,
+ Data: data,
+ DataDecoded: dataDecoded,
+ ContractEventXDR: "AAAAAQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAEAAAAAAAAAAQAAAAAAAAAB",
},
}}
return
}
-func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, historyHeader []xdr.LedgerHeaderHistoryEntry, err error) {
+func makeContractEventTestInput() (transaction []ingest.LedgerTransaction, historyHeader []xdr.LedgerHeaderHistoryEntry, err error) {
hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR"
hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb})
var hardCodedContractId xdr.Hash
@@ -98,7 +129,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his
}
genericResultResults := &[]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeCreateAccount,
CreateAccountResult: &xdr.CreateAccountResult{
@@ -118,7 +149,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his
}
transaction = []ingest.LedgerTransaction{
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -140,7 +171,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount2,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -166,7 +197,7 @@ func makeDiagnosticEventTestInput() (transaction []ingest.LedgerTransaction, his
},
}
historyHeader = []xdr.LedgerHeaderHistoryEntry{
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521816,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
diff --git a/internal/transform/diagnostic_events.go b/internal/transform/diagnostic_events.go
deleted file mode 100644
index 46161c07..00000000
--- a/internal/transform/diagnostic_events.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package transform
-
-import (
- "fmt"
-
- "github.com/stellar/stellar-etl/internal/toid"
- "github.com/stellar/stellar-etl/internal/utils"
-
- "github.com/stellar/go/ingest"
- "github.com/stellar/go/strkey"
- "github.com/stellar/go/xdr"
-)
-
-// TransformDiagnosticEvent converts a transaction's diagnostic events from the history archive ingestion system into a form suitable for BigQuery
-func TransformDiagnosticEvent(transaction ingest.LedgerTransaction, lhe xdr.LedgerHeaderHistoryEntry) ([]DiagnosticEventOutput, error, bool) {
- ledgerHeader := lhe.Header
- outputTransactionHash := utils.HashToHexString(transaction.Result.TransactionHash)
- outputLedgerSequence := uint32(ledgerHeader.LedgerSeq)
-
- transactionIndex := uint32(transaction.Index)
-
- outputTransactionID := toid.New(int32(outputLedgerSequence), int32(transactionIndex), 0).ToInt64()
-
- outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime)
- if err != nil {
- return []DiagnosticEventOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err), false
- }
-
- transactionMeta, ok := transaction.UnsafeMeta.GetV3()
- if !ok {
- return []DiagnosticEventOutput{}, nil, false
- }
-
- if transactionMeta.SorobanMeta == nil {
- return []DiagnosticEventOutput{}, nil, false
- }
-
- var transformedDiagnosticEvents []DiagnosticEventOutput
-
- for _, diagnoticEvent := range transactionMeta.SorobanMeta.DiagnosticEvents {
- var outputContractId string
-
- outputInSuccessfulContractCall := diagnoticEvent.InSuccessfulContractCall
- event := diagnoticEvent.Event
- outputExtV := event.Ext.V
- outputType := event.Type.String()
- outputBodyV := event.Body.V
- body, ok := event.Body.GetV0()
- if !ok {
- continue
- }
-
- outputBody, err := xdr.MarshalBase64(body)
- if err != nil {
- continue
- }
-
- if event.ContractId != nil {
- contractId := *event.ContractId
- contractIdByte, _ := contractId.MarshalBinary()
- outputContractId, _ = strkey.Encode(strkey.VersionByteContract, contractIdByte)
- }
-
- transformedDiagnosticEvent := DiagnosticEventOutput{
- TransactionHash: outputTransactionHash,
- LedgerSequence: outputLedgerSequence,
- TransactionID: outputTransactionID,
- ClosedAt: outputCloseTime,
- InSuccessfulContractCall: outputInSuccessfulContractCall,
- ExtV: outputExtV,
- ContractId: outputContractId,
- Type: outputType,
- BodyV: outputBodyV,
- Body: outputBody,
- }
-
- transformedDiagnosticEvents = append(transformedDiagnosticEvents, transformedDiagnosticEvent)
- }
-
- return transformedDiagnosticEvents, nil, true
-}
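
The `TransformDiagnosticEvent` transform deleted above is superseded by the contract-event transform whose test fixture appears earlier in this diff (`Topics`, `TopicsDecoded`, `Data`, `DataDecoded`, `ContractEventXDR`). Below is a minimal sketch of how the base64 topic and data fields could be derived from an event body, assuming those columns carry base64-encoded XDR the way the old `Body` column did; the `*Decoded` variants are presumably a JSON-friendly rendering and are not shown here.

```go
// Sketch only: split a contract event body into base64-encoded topics and data,
// mirroring what the fixture above suggests the new columns carry. Uses the
// same github.com/stellar/go/xdr helpers the deleted transform used.
func encodeEventBody(event xdr.ContractEvent) (topics []string, data string, err error) {
	body, ok := event.Body.GetV0()
	if !ok {
		return nil, "", fmt.Errorf("unsupported contract event body version: %d", event.Body.V)
	}
	for _, topic := range body.Topics {
		encoded, err := xdr.MarshalBase64(topic)
		if err != nil {
			return nil, "", err
		}
		topics = append(topics, encoded)
	}
	data, err = xdr.MarshalBase64(body.Data)
	return topics, data, err
}
```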
diff --git a/internal/transform/effects.go b/internal/transform/effects.go
index f1867347..711ed60e 100644
--- a/internal/transform/effects.go
+++ b/internal/transform/effects.go
@@ -131,7 +131,7 @@ func (operation *transactionOperationWrapper) effects() ([]EffectOutput, error)
case xdr.OperationTypeRestoreFootprint:
err = wrapper.addRestoreFootprintExpirationEffect()
default:
- return nil, fmt.Errorf("Unknown operation type: %s", op.Body.Type)
+ return nil, fmt.Errorf("unknown operation type: %s", op.Body.Type)
}
if err != nil {
return nil, err
@@ -905,7 +905,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change
balanceID, err := xdr.MarshalHex(op.BalanceId)
if err != nil {
- return fmt.Errorf("Invalid balanceId in op: %d", e.operation.index)
+ return fmt.Errorf("invalid balanceId in op: %d", e.operation.index)
}
var cBalance xdr.ClaimableBalanceEntry
@@ -919,7 +919,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change
cBalance = change.Pre.Data.MustClaimableBalance()
preBalanceID, err := xdr.MarshalHex(cBalance.BalanceId)
if err != nil {
- return fmt.Errorf("Invalid balanceId in meta changes for op: %d", e.operation.index)
+ return fmt.Errorf("invalid balanceId in meta changes for op: %d", e.operation.index)
}
if preBalanceID == balanceID {
@@ -930,7 +930,7 @@ func (e *effectsWrapper) addClaimClaimableBalanceEffects(changes []ingest.Change
}
if !found {
- return fmt.Errorf("Change not found for balanceId : %s", balanceID)
+ return fmt.Errorf("change not found for balanceId : %s", balanceID)
}
details := map[string]interface{}{
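
The error-string edits in this file (and the similar ones below) are stylistic: Go error strings are conventionally lowercase and unpunctuated so they compose cleanly when wrapped, and linters such as staticcheck's ST1005 check flag the capitalized form. An illustrative wrapping site (the caller and its names are hypothetical):

```go
// A lowercase error string reads as one sentence once a caller wraps it:
if err := processOperation(op); err != nil {
	return fmt.Errorf("transforming operation %d: %w", index, err)
	// -> "transforming operation 3: unknown operation type: ..."
}
```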
diff --git a/internal/transform/effects_test.go b/internal/transform/effects_test.go
index d99cfb4c..09fd2722 100644
--- a/internal/transform/effects_test.go
+++ b/internal/transform/effects_test.go
@@ -78,7 +78,7 @@ func TestEffectsCoversAllOperationTypes(t *testing.T) {
}
// calling effects should error due to the unknown operation
_, err := operation.effects()
- assert.Contains(t, err.Error(), "Unknown operation type")
+ assert.Contains(t, err.Error(), "unknown operation type")
}
func TestOperationEffects(t *testing.T) {
@@ -349,6 +349,7 @@ func TestOperationEffects(t *testing.T) {
harCodedCloseMetaInput := makeLedgerCloseMeta()
LedgerClosed, err := utils.GetCloseTime(harCodedCloseMetaInput)
+ assert.NoError(t, err)
revokeSponsorshipMeta, revokeSponsorshipEffects := getRevokeSponsorshipMeta(t)
@@ -2565,12 +2566,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
"id": poolIDStr,
"reserves": []base.AssetAmount{
{
- "native",
- "0.0000200",
+ Asset: "native",
+ Amount: "0.0000200",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000100",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000100",
},
},
"total_shares": "0.0001000",
@@ -2623,12 +2624,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
"id": poolIDStr,
"reserves": []base.AssetAmount{
{
- "native",
- "0.0000250",
+ Asset: "native",
+ Amount: "0.0000250",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000160",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000160",
},
},
"total_shares": "0.0001010",
@@ -2637,12 +2638,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
},
"reserves_deposited": []base.AssetAmount{
{
- "native",
- "0.0000050",
+ Asset: "native",
+ Amount: "0.0000050",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000060",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000060",
},
},
"shares_received": "0.0000010",
@@ -2685,12 +2686,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
"id": poolIDStr,
"reserves": []base.AssetAmount{
{
- "native",
- "0.0000189",
+ Asset: "native",
+ Amount: "0.0000189",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000094",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000094",
},
},
"total_shares": "0.0000990",
@@ -2699,12 +2700,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
},
"reserves_received": []base.AssetAmount{
{
- "native",
- "0.0000011",
+ Asset: "native",
+ Amount: "0.0000011",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000006",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000006",
},
},
"shares_redeemed": "0.0000010",
@@ -2805,12 +2806,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
"id": poolIDStr,
"reserves": []base.AssetAmount{
{
- "native",
- "0.0000189",
+ Asset: "native",
+ Amount: "0.0000189",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000094",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000094",
},
},
"total_shares": "0.0000990",
@@ -3002,12 +3003,12 @@ func TestLiquidityPoolEffects(t *testing.T) {
"id": poolIDStr,
"reserves": []base.AssetAmount{
{
- "native",
- "0.0000200",
+ Asset: "native",
+ Amount: "0.0000200",
},
{
- "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
- "0.0000100",
+ Asset: "USD:GAUJETIZVEP2NRYLUESJ3LS66NVCEGMON4UDCBCSBEVPIID773P2W6AY",
+ Amount: "0.0000100",
},
},
"total_shares": "0.0001000",
@@ -3326,29 +3327,16 @@ func getRevokeSponsorshipMeta(t *testing.T) (string, []EffectOutput) {
type ClaimClaimableBalanceEffectsTestSuite struct {
suite.Suite
- ops []xdr.Operation
- tx ingest.LedgerTransaction
}
type CreateClaimableBalanceEffectsTestSuite struct {
suite.Suite
- ops []xdr.Operation
- tx ingest.LedgerTransaction
}
const (
networkPassphrase = "Arbitrary Testing Passphrase"
)
-type effect struct {
- address string
- addressMuxed null.String
- operationID int64
- details map[string]interface{}
- effectType EffectType
- order uint32
-}
-
func TestInvokeHostFunctionEffects(t *testing.T) {
randAddr := func() string {
return keypair.MustRandom().Address()
diff --git a/internal/transform/ledger.go b/internal/transform/ledger.go
index 2239eaae..9324e21e 100644
--- a/internal/transform/ledger.go
+++ b/internal/transform/ledger.go
@@ -12,7 +12,7 @@ import (
)
// TransformLedger converts a ledger from the history archive ingestion system into a form suitable for BigQuery
-func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) {
+func TransformLedger(inputLedger historyarchive.Ledger, lcm xdr.LedgerCloseMeta) (LedgerOutput, error) {
ledgerHeader := inputLedger.Header.Header
outputSequence := uint32(ledgerHeader.LedgerSeq)
@@ -55,6 +55,15 @@ func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) {
outputProtocolVersion := uint32(ledgerHeader.LedgerVersion)
+ var outputSorobanFeeWrite1Kb int64
+ lcmV1, ok := lcm.GetV1()
+ if ok {
+ extV1, ok := lcmV1.Ext.GetV1()
+ if ok {
+ outputSorobanFeeWrite1Kb = int64(extV1.SorobanFeeWrite1Kb)
+ }
+ }
+
transformedLedger := LedgerOutput{
Sequence: outputSequence,
LedgerID: outputLedgerID,
@@ -73,6 +82,7 @@ func TransformLedger(inputLedger historyarchive.Ledger) (LedgerOutput, error) {
BaseReserve: outputBaseReserve,
MaxTxSetSize: outputMaxTxSetSize,
ProtocolVersion: outputProtocolVersion,
+ SorobanFeeWrite1Kb: outputSorobanFeeWrite1Kb,
}
return transformedLedger, nil
}
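
The `SorobanFeeWrite1Kb` value only exists when the close meta is V1 and carries the V1 extension, so the hunk above walks both optional layers and leaves the output at zero otherwise. Condensed into a standalone sketch using the same accessors:

```go
// Sketch: extract SorobanFeeWrite1Kb from a LedgerCloseMeta, defaulting to 0
// when the meta is V0 or lacks the V1 extension. Mirrors the logic above.
func sorobanFeeWrite1Kb(lcm xdr.LedgerCloseMeta) int64 {
	lcmV1, ok := lcm.GetV1()
	if !ok {
		return 0 // V0 close meta: no Soroban fee information
	}
	extV1, ok := lcmV1.Ext.GetV1()
	if !ok {
		return 0 // V1 close meta without the fee extension
	}
	return int64(extV1.SorobanFeeWrite1Kb)
}
```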
@@ -93,7 +103,7 @@ func extractCounts(ledger historyarchive.Ledger) (transactionCount int32, operat
results := ledger.TransactionResult.TxResultSet.Results
txCount := len(transactions)
if txCount != len(results) {
- err = fmt.Errorf("The number of transactions and results are different (%d != %d)", txCount, len(results))
+ err = fmt.Errorf("the number of transactions and results are different (%d != %d)", txCount, len(results))
return
}
@@ -107,7 +117,7 @@ func extractCounts(ledger historyarchive.Ledger) (transactionCount int32, operat
if results[i].Result.Successful() {
operationResults, ok := results[i].Result.OperationResults()
if !ok {
- err = fmt.Errorf("Could not access operation results for result %d", i)
+ err = fmt.Errorf("could not access operation results for result %d", i)
return
}
diff --git a/internal/transform/ledger_test.go b/internal/transform/ledger_test.go
index ba40e28d..0e58e2e8 100644
--- a/internal/transform/ledger_test.go
+++ b/internal/transform/ledger_test.go
@@ -14,7 +14,7 @@ import (
func TestTransformLedger(t *testing.T) {
type transformTest struct {
- input historyarchive.Ledger
+ input utils.HistoryArchiveLedgerAndLCM
wantOutput LedgerOutput
wantErr error
}
@@ -26,10 +26,23 @@ func TestTransformLedger(t *testing.T) {
tests := []transformTest{
{
- historyarchive.Ledger{
- Header: xdr.LedgerHeaderHistoryEntry{
- Header: xdr.LedgerHeader{
- TotalCoins: -1,
+ utils.HistoryArchiveLedgerAndLCM{
+ Ledger: historyarchive.Ledger{
+ Header: xdr.LedgerHeaderHistoryEntry{
+ Header: xdr.LedgerHeader{
+ TotalCoins: -1,
+ },
+ },
+ },
+ LCM: xdr.LedgerCloseMeta{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaV1{
+ Ext: xdr.LedgerCloseMetaExt{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaExtV1{
+ SorobanFeeWrite1Kb: xdr.Int64(1234),
+ },
+ },
},
},
},
@@ -37,10 +50,23 @@ func TestTransformLedger(t *testing.T) {
fmt.Errorf("the total number of coins (-1) is negative for ledger 0 (ledger id=0)"),
},
{
- historyarchive.Ledger{
- Header: xdr.LedgerHeaderHistoryEntry{
- Header: xdr.LedgerHeader{
- FeePool: -1,
+ utils.HistoryArchiveLedgerAndLCM{
+ Ledger: historyarchive.Ledger{
+ Header: xdr.LedgerHeaderHistoryEntry{
+ Header: xdr.LedgerHeader{
+ FeePool: -1,
+ },
+ },
+ },
+ LCM: xdr.LedgerCloseMeta{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaV1{
+ Ext: xdr.LedgerCloseMetaExt{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaExtV1{
+ SorobanFeeWrite1Kb: xdr.Int64(1234),
+ },
+ },
},
},
},
@@ -55,7 +81,7 @@ func TestTransformLedger(t *testing.T) {
}
for _, test := range tests {
- actualOutput, actualError := TransformLedger(test.input)
+ actualOutput, actualError := TransformLedger(test.input.Ledger, test.input.LCM)
assert.Equal(t, test.wantErr, actualError)
assert.Equal(t, test.wantOutput, actualOutput)
}
@@ -88,11 +114,12 @@ func makeLedgerTestOutput() (output LedgerOutput, err error) {
SuccessfulTransactionCount: 1,
FailedTransactionCount: 1,
TxSetOperationCount: "13",
+ SorobanFeeWrite1Kb: 1234,
}
return
}
-func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) {
+func makeLedgerTestInput() (lcm utils.HistoryArchiveLedgerAndLCM, err error) {
hardCodedTxSet := xdr.TransactionSet{
Txs: []xdr.TransactionEnvelope{
utils.CreateSampleTx(0, 3),
@@ -103,7 +130,7 @@ func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) {
utils.CreateSampleResultPair(false, 3),
utils.CreateSampleResultPair(true, 10),
}
- lcm = historyarchive.Ledger{
+ ledger := historyarchive.Ledger{
Header: xdr.LedgerHeaderHistoryEntry{
Header: xdr.LedgerHeader{
LedgerSeq: 30578981,
@@ -130,5 +157,21 @@ func makeLedgerTestInput() (lcm historyarchive.Ledger, err error) {
Ext: xdr.TransactionHistoryResultEntryExt{},
},
}
+
+ lcm = utils.HistoryArchiveLedgerAndLCM{
+ Ledger: ledger,
+ LCM: xdr.LedgerCloseMeta{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaV1{
+ Ext: xdr.LedgerCloseMetaExt{
+ V: 1,
+ V1: &xdr.LedgerCloseMetaExtV1{
+ SorobanFeeWrite1Kb: xdr.Int64(1234),
+ },
+ },
+ },
+ },
+ }
+
return lcm, nil
}
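
Because `TransformLedger` now needs both the history-archive ledger and its close meta, the tests bundle the pair in `utils.HistoryArchiveLedgerAndLCM`. A compact usage sketch with the same field names as the fixture above (values illustrative):

```go
// Sketch: bundle a ledger with its close meta and run the updated transform.
input := utils.HistoryArchiveLedgerAndLCM{
	Ledger: historyarchive.Ledger{
		Header: xdr.LedgerHeaderHistoryEntry{
			Header: xdr.LedgerHeader{LedgerSeq: 30578981},
		},
	},
	LCM: xdr.LedgerCloseMeta{
		V: 1,
		V1: &xdr.LedgerCloseMetaV1{
			Ext: xdr.LedgerCloseMetaExt{
				V:  1,
				V1: &xdr.LedgerCloseMetaExtV1{SorobanFeeWrite1Kb: xdr.Int64(1234)},
			},
		},
	},
}
output, err := TransformLedger(input.Ledger, input.LCM)
// On success, output.SorobanFeeWrite1Kb carries 1234.
```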
diff --git a/internal/transform/ledger_transaction_test.go b/internal/transform/ledger_transaction_test.go
index 7e53e63d..2c471744 100644
--- a/internal/transform/ledger_transaction_test.go
+++ b/internal/transform/ledger_transaction_test.go
@@ -45,7 +45,7 @@ func TestTransformTx(t *testing.T) {
func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err error) {
output = []LedgerTransactionOutput{
- LedgerTransactionOutput{
+ {
TxEnvelope: "AAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAABX5ABjydzAABBtwAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAABLP////8AAAABAAAAAAAAAAAAAAAAAAAAAA==",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
@@ -54,7 +54,7 @@ func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err er
LedgerSequence: 30521816,
ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC),
},
- LedgerTransactionOutput{
+ {
TxEnvelope: "AAAABQAAAABnzACGTDuJFoxqr+C8NHCe0CHFBXLi+YhhNCIILCIpcgAAAAAAABwgAAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAAAAAACFPY2AAAAfQAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAABLAAAAAGof+9e6yYCacOA8t5Faq1ytZuzFaqsd3hgRW4J2sC6+wAAAAAAAABkAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
@@ -63,7 +63,7 @@ func makeLedgerTransactionTestOutput() (output []LedgerTransactionOutput, err er
LedgerSequence: 30521817,
ClosedAt: time.Date(2020, time.July, 9, 5, 28, 42, 0, time.UTC),
},
- LedgerTransactionOutput{
+ {
TxEnvelope: "AAAAAgAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAGQBpLyvsiV6gwAAAAIAAAABAAAAAAAAAAAAAAAAXwardAAAAAEAAAAFAAAACgAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAMCAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
TxResult: "qH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAAAZP////8AAAABAAAAAAAAAAAAAAAAAAAAAA==",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
@@ -79,7 +79,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR"
hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb})
genericResultResults := &[]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeCreateAccount,
CreateAccountResult: &xdr.CreateAccountResult{
@@ -106,7 +106,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
Ed25519: source.Ed25519,
}
transaction = []ingest.LedgerTransaction{
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -128,7 +128,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount2,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -152,7 +152,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
},
},
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -179,7 +179,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount2,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -208,7 +208,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
Result: xdr.InnerTransactionResultResult{
Code: xdr.TransactionResultCodeTxSuccess,
Results: &[]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
CreateAccountResult: &xdr.CreateAccountResult{},
},
@@ -217,14 +217,12 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
},
},
- Results: &[]xdr.OperationResult{
- xdr.OperationResult{},
- },
+ Results: &[]xdr.OperationResult{{}},
},
},
},
},
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -253,7 +251,7 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount4,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -279,19 +277,19 @@ func makeLedgerTransactionTestInput() (transaction []ingest.LedgerTransaction, h
},
}
historyHeader = []xdr.LedgerHeaderHistoryEntry{
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521816,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
},
},
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521817,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
},
},
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521818,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
diff --git a/internal/transform/liquidity_pool.go b/internal/transform/liquidity_pool.go
index 30cbaa24..eacc6be3 100644
--- a/internal/transform/liquidity_pool.go
+++ b/internal/transform/liquidity_pool.go
@@ -22,17 +22,17 @@ func TransformPool(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEnt
lp, ok := ledgerEntry.Data.GetLiquidityPool()
if !ok {
- return PoolOutput{}, fmt.Errorf("Could not extract liquidity pool data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return PoolOutput{}, fmt.Errorf("could not extract liquidity pool data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
cp, ok := lp.Body.GetConstantProduct()
if !ok {
- return PoolOutput{}, fmt.Errorf("Could not extract constant product information for liquidity pool %s", xdr.Hash(lp.LiquidityPoolId).HexString())
+ return PoolOutput{}, fmt.Errorf("could not extract constant product information for liquidity pool %s", xdr.Hash(lp.LiquidityPoolId).HexString())
}
poolType, ok := xdr.LiquidityPoolTypeToString[lp.Body.Type]
if !ok {
- return PoolOutput{}, fmt.Errorf("Unknown liquidity pool type: %d", lp.Body.Type)
+ return PoolOutput{}, fmt.Errorf("unknown liquidity pool type: %d", lp.Body.Type)
}
var assetAType, assetACode, assetAIssuer string
@@ -44,6 +44,9 @@ func TransformPool(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEnt
var assetBType, assetBCode, assetBIssuer string
err = cp.Params.AssetB.Extract(&assetBType, &assetBCode, &assetBIssuer)
+ if err != nil {
+ return PoolOutput{}, err
+ }
assetBID := FarmHashAsset(assetBCode, assetBIssuer, assetBType)
closedAt, err := utils.TimePointToUTCTimeStamp(header.Header.ScpValue.CloseTime)
diff --git a/internal/transform/liquidity_pool_test.go b/internal/transform/liquidity_pool_test.go
index 8a60f59d..af97587a 100644
--- a/internal/transform/liquidity_pool_test.go
+++ b/internal/transform/liquidity_pool_test.go
@@ -61,19 +61,6 @@ func TestTransformPool(t *testing.T) {
}
}
-func wrapPoolEntry(poolEntry xdr.LiquidityPoolEntry, lastModified int) ingest.Change {
- return ingest.Change{
- Type: xdr.LedgerEntryTypeLiquidityPool,
- Pre: &xdr.LedgerEntry{
- LastModifiedLedgerSeq: xdr.Uint32(lastModified),
- Data: xdr.LedgerEntryData{
- Type: xdr.LedgerEntryTypeLiquidityPool,
- LiquidityPool: &poolEntry,
- },
- },
- }
-}
-
func makePoolTestInput() ingest.Change {
ledgerEntry := xdr.LedgerEntry{
LastModifiedLedgerSeq: 30705278,
diff --git a/internal/transform/offer.go b/internal/transform/offer.go
index fcc4336d..b3008d30 100644
--- a/internal/transform/offer.go
+++ b/internal/transform/offer.go
@@ -18,7 +18,7 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn
offerEntry, offerFound := ledgerEntry.Data.GetOffer()
if !offerFound {
- return OfferOutput{}, fmt.Errorf("Could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return OfferOutput{}, fmt.Errorf("could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
outputSellerID, err := offerEntry.SellerId.GetAddress()
@@ -28,7 +28,7 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn
outputOfferID := int64(offerEntry.OfferId)
if outputOfferID < 0 {
- return OfferOutput{}, fmt.Errorf("OfferID is negative (%d) for offer from account: %s", outputOfferID, outputSellerID)
+ return OfferOutput{}, fmt.Errorf("offerID is negative (%d) for offer from account: %s", outputOfferID, outputSellerID)
}
outputSellingAsset, err := transformSingleAsset(offerEntry.Selling)
@@ -43,21 +43,21 @@ func TransformOffer(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEn
outputAmount := offerEntry.Amount
if outputAmount < 0 {
- return OfferOutput{}, fmt.Errorf("Amount is negative (%d) for offer %d", outputAmount, outputOfferID)
+ return OfferOutput{}, fmt.Errorf("amount is negative (%d) for offer %d", outputAmount, outputOfferID)
}
outputPriceN := int32(offerEntry.Price.N)
if outputPriceN < 0 {
- return OfferOutput{}, fmt.Errorf("Price numerator is negative (%d) for offer %d", outputPriceN, outputOfferID)
+ return OfferOutput{}, fmt.Errorf("price numerator is negative (%d) for offer %d", outputPriceN, outputOfferID)
}
outputPriceD := int32(offerEntry.Price.D)
if outputPriceD == 0 {
- return OfferOutput{}, fmt.Errorf("Price denominator is 0 for offer %d", outputOfferID)
+ return OfferOutput{}, fmt.Errorf("price denominator is 0 for offer %d", outputOfferID)
}
if outputPriceD < 0 {
- return OfferOutput{}, fmt.Errorf("Price denominator is negative (%d) for offer %d", outputPriceD, outputOfferID)
+ return OfferOutput{}, fmt.Errorf("price denominator is negative (%d) for offer %d", outputPriceD, outputOfferID)
}
var outputPrice float64
diff --git a/internal/transform/offer_normalized.go b/internal/transform/offer_normalized.go
index 83eced04..0276509e 100644
--- a/internal/transform/offer_normalized.go
+++ b/internal/transform/offer_normalized.go
@@ -25,7 +25,7 @@ func TransformOfferNormalized(ledgerChange ingest.Change, ledgerSeq uint32) (Nor
return NormalizedOfferOutput{}, fmt.Errorf("offer %d is deleted", transformed.OfferID)
}
- buyingAsset, sellingAsset, err := extractAssets(ledgerChange, transformed)
+ buyingAsset, sellingAsset, err := extractAssets(ledgerChange)
if err != nil {
return NormalizedOfferOutput{}, err
}
@@ -57,7 +57,7 @@ func TransformOfferNormalized(ledgerChange ingest.Change, ledgerSeq uint32) (Nor
}
// extractAssets extracts the buying and selling assets as strings of the format code:issuer
-func extractAssets(ledgerChange ingest.Change, transformed OfferOutput) (string, string, error) {
+func extractAssets(ledgerChange ingest.Change) (string, string, error) {
ledgerEntry, _, _, err := utils.ExtractEntryFromChange(ledgerChange)
if err != nil {
return "", "", err
@@ -65,7 +65,7 @@ func extractAssets(ledgerChange ingest.Change, transformed OfferOutput) (string,
offerEntry, offerFound := ledgerEntry.Data.GetOffer()
if !offerFound {
- return "", "", fmt.Errorf("Could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return "", "", fmt.Errorf("could not extract offer data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
var sellType, sellCode, sellIssuer string
diff --git a/internal/transform/offer_test.go b/internal/transform/offer_test.go
index 303693eb..8279d620 100644
--- a/internal/transform/offer_test.go
+++ b/internal/transform/offer_test.go
@@ -37,7 +37,7 @@ func TestTransformOffer(t *testing.T) {
},
},
},
- OfferOutput{}, fmt.Errorf("Could not extract offer data from ledger entry; actual type is LedgerEntryTypeAccount"),
+ OfferOutput{}, fmt.Errorf("could not extract offer data from ledger entry; actual type is LedgerEntryTypeAccount"),
},
{
inputStruct{wrapOfferEntry(xdr.OfferEntry{
@@ -45,7 +45,7 @@ func TestTransformOffer(t *testing.T) {
OfferId: -1,
}, 0),
},
- OfferOutput{}, fmt.Errorf("OfferID is negative (-1) for offer from account: %s", genericAccountAddress),
+ OfferOutput{}, fmt.Errorf("offerID is negative (-1) for offer from account: %s", genericAccountAddress),
},
{
inputStruct{wrapOfferEntry(xdr.OfferEntry{
@@ -53,7 +53,7 @@ func TestTransformOffer(t *testing.T) {
Amount: -2,
}, 0),
},
- OfferOutput{}, fmt.Errorf("Amount is negative (-2) for offer 0"),
+ OfferOutput{}, fmt.Errorf("amount is negative (-2) for offer 0"),
},
{
inputStruct{wrapOfferEntry(xdr.OfferEntry{
@@ -64,7 +64,7 @@ func TestTransformOffer(t *testing.T) {
},
}, 0),
},
- OfferOutput{}, fmt.Errorf("Price numerator is negative (-3) for offer 0"),
+ OfferOutput{}, fmt.Errorf("price numerator is negative (-3) for offer 0"),
},
{
inputStruct{wrapOfferEntry(xdr.OfferEntry{
@@ -75,7 +75,7 @@ func TestTransformOffer(t *testing.T) {
},
}, 0),
},
- OfferOutput{}, fmt.Errorf("Price denominator is negative (-4) for offer 0"),
+ OfferOutput{}, fmt.Errorf("price denominator is negative (-4) for offer 0"),
},
{
inputStruct{wrapOfferEntry(xdr.OfferEntry{
@@ -86,7 +86,7 @@ func TestTransformOffer(t *testing.T) {
},
}, 0),
},
- OfferOutput{}, fmt.Errorf("Price denominator is 0 for offer 0"),
+ OfferOutput{}, fmt.Errorf("price denominator is 0 for offer 0"),
},
{
inputStruct{
diff --git a/internal/transform/operation.go b/internal/transform/operation.go
index 9229419b..6de15dbe 100644
--- a/internal/transform/operation.go
+++ b/internal/transform/operation.go
@@ -48,7 +48,7 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti
outputOperationType := int32(operation.Body.Type)
if outputOperationType < 0 {
- return OperationOutput{}, fmt.Errorf("The operation type (%d) is negative for operation %d (operation id=%d)", outputOperationType, operationIndex, outputOperationID)
+ return OperationOutput{}, fmt.Errorf("the operation type (%d) is negative for operation %d (operation id=%d)", outputOperationType, operationIndex, outputOperationID)
}
outputDetails, err := extractOperationDetails(operation, transaction, operationIndex, network)
@@ -66,15 +66,31 @@ func TransformOperation(operation xdr.Operation, operationIndex int32, transacti
return OperationOutput{}, err
}
+ var outputOperationResultCode string
+ var outputOperationTraceCode string
+ outputOperationResults, ok := transaction.Result.Result.OperationResults()
+ if ok {
+ outputOperationResultCode = outputOperationResults[operationIndex].Code.String()
+ operationResultTr, ok := outputOperationResults[operationIndex].GetTr()
+ if ok {
+ outputOperationTraceCode, err = mapOperationTrace(operationResultTr)
+ if err != nil {
+ return OperationOutput{}, err
+ }
+ }
+ }
+
transformedOperation := OperationOutput{
- SourceAccount: outputSourceAccount,
- SourceAccountMuxed: outputSourceAccountMuxed.String,
- Type: outputOperationType,
- TypeString: outputOperationTypeString,
- TransactionID: outputTransactionID,
- OperationID: outputOperationID,
- OperationDetails: outputDetails,
- ClosedAt: outputCloseTime,
+ SourceAccount: outputSourceAccount,
+ SourceAccountMuxed: outputSourceAccountMuxed.String,
+ Type: outputOperationType,
+ TypeString: outputOperationTypeString,
+ TransactionID: outputTransactionID,
+ OperationID: outputOperationID,
+ OperationDetails: outputDetails,
+ ClosedAt: outputCloseTime,
+ OperationResultCode: outputOperationResultCode,
+ OperationTraceCode: outputOperationTraceCode,
}
return transformedOperation, nil
@@ -140,11 +156,76 @@ func mapOperationType(operation xdr.Operation) (string, error) {
case xdr.OperationTypeRestoreFootprint:
op_string_type = "restore_footprint"
default:
- return op_string_type, fmt.Errorf("Unknown operation type: %s", operation.Body.Type.String())
+ return op_string_type, fmt.Errorf("unknown operation type: %s", operation.Body.Type.String())
}
return op_string_type, nil
}
+func mapOperationTrace(operationTrace xdr.OperationResultTr) (string, error) {
+ var operationTraceDescription string
+ operationType := operationTrace.Type
+
+ switch operationType {
+ case xdr.OperationTypeCreateAccount:
+ operationTraceDescription = operationTrace.CreateAccountResult.Code.String()
+ case xdr.OperationTypePayment:
+ operationTraceDescription = operationTrace.PaymentResult.Code.String()
+ case xdr.OperationTypePathPaymentStrictReceive:
+ operationTraceDescription = operationTrace.PathPaymentStrictReceiveResult.Code.String()
+ case xdr.OperationTypePathPaymentStrictSend:
+ operationTraceDescription = operationTrace.PathPaymentStrictSendResult.Code.String()
+ case xdr.OperationTypeManageBuyOffer:
+ operationTraceDescription = operationTrace.ManageBuyOfferResult.Code.String()
+ case xdr.OperationTypeManageSellOffer:
+ operationTraceDescription = operationTrace.ManageSellOfferResult.Code.String()
+ case xdr.OperationTypeCreatePassiveSellOffer:
+ operationTraceDescription = operationTrace.CreatePassiveSellOfferResult.Code.String()
+ case xdr.OperationTypeSetOptions:
+ operationTraceDescription = operationTrace.SetOptionsResult.Code.String()
+ case xdr.OperationTypeChangeTrust:
+ operationTraceDescription = operationTrace.ChangeTrustResult.Code.String()
+ case xdr.OperationTypeAllowTrust:
+ operationTraceDescription = operationTrace.AllowTrustResult.Code.String()
+ case xdr.OperationTypeAccountMerge:
+ operationTraceDescription = operationTrace.AccountMergeResult.Code.String()
+ case xdr.OperationTypeInflation:
+ operationTraceDescription = operationTrace.InflationResult.Code.String()
+ case xdr.OperationTypeManageData:
+ operationTraceDescription = operationTrace.ManageDataResult.Code.String()
+ case xdr.OperationTypeBumpSequence:
+ operationTraceDescription = operationTrace.BumpSeqResult.Code.String()
+ case xdr.OperationTypeCreateClaimableBalance:
+ operationTraceDescription = operationTrace.CreateClaimableBalanceResult.Code.String()
+ case xdr.OperationTypeClaimClaimableBalance:
+ operationTraceDescription = operationTrace.ClaimClaimableBalanceResult.Code.String()
+ case xdr.OperationTypeBeginSponsoringFutureReserves:
+ operationTraceDescription = operationTrace.BeginSponsoringFutureReservesResult.Code.String()
+ case xdr.OperationTypeEndSponsoringFutureReserves:
+ operationTraceDescription = operationTrace.EndSponsoringFutureReservesResult.Code.String()
+ case xdr.OperationTypeRevokeSponsorship:
+ operationTraceDescription = operationTrace.RevokeSponsorshipResult.Code.String()
+ case xdr.OperationTypeClawback:
+ operationTraceDescription = operationTrace.ClawbackResult.Code.String()
+ case xdr.OperationTypeClawbackClaimableBalance:
+ operationTraceDescription = operationTrace.ClawbackClaimableBalanceResult.Code.String()
+ case xdr.OperationTypeSetTrustLineFlags:
+ operationTraceDescription = operationTrace.SetTrustLineFlagsResult.Code.String()
+ case xdr.OperationTypeLiquidityPoolDeposit:
+ operationTraceDescription = operationTrace.LiquidityPoolDepositResult.Code.String()
+ case xdr.OperationTypeLiquidityPoolWithdraw:
+ operationTraceDescription = operationTrace.LiquidityPoolWithdrawResult.Code.String()
+ case xdr.OperationTypeInvokeHostFunction:
+ operationTraceDescription = operationTrace.InvokeHostFunctionResult.Code.String()
+ case xdr.OperationTypeExtendFootprintTtl:
+ operationTraceDescription = operationTrace.ExtendFootprintTtlResult.Code.String()
+ case xdr.OperationTypeRestoreFootprint:
+ operationTraceDescription = operationTrace.RestoreFootprintResult.Code.String()
+ default:
+ return operationTraceDescription, fmt.Errorf("unknown operation type: %s", operationTrace.Type.String())
+ }
+ return operationTraceDescription, nil
+}
+
func PoolIDToString(id xdr.PoolId) string {
return xdr.Hash(id).HexString()
}
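
The two new columns come straight from the transaction result: `OperationResultCode` is the outer `OperationResults()[i].Code` string, and `OperationTraceCode` is the per-type inner code that `mapOperationTrace` pulls out of the result union. A hedged sketch for a successful create-account result (enum names from `github.com/stellar/go/xdr`; the printed string is what `Code.String()` typically yields):

```go
// Sketch: what mapOperationTrace returns for one concrete result type.
// Other operation types follow the same pattern via the switch above.
tr := xdr.OperationResultTr{
	Type: xdr.OperationTypeCreateAccount,
	CreateAccountResult: &xdr.CreateAccountResult{
		Code: xdr.CreateAccountResultCodeCreateAccountSuccess,
	},
}
traceCode, err := mapOperationTrace(tr)
if err != nil {
	panic(err)
}
fmt.Println(traceCode) // typically "CreateAccountResultCodeCreateAccountSuccess"
```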
@@ -196,7 +277,7 @@ func getLiquidityPoolAndProductDelta(operationIndex int32, transaction ingest.Le
return lp, delta, nil
}
- return nil, nil, fmt.Errorf("Liquidity pool change not found")
+ return nil, nil, fmt.Errorf("liquidity pool change not found")
}
func getOperationSourceAccount(operation xdr.Operation, transaction ingest.LedgerTransaction) xdr.MuxedAccount {
@@ -483,7 +564,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeCreateAccount:
op, ok := operation.Body.GetCreateAccountOp()
if !ok {
- return details, fmt.Errorf("Could not access CreateAccount info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access CreateAccount info for this operation (index %d)", operationIndex)
}
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "funder"); err != nil {
@@ -495,7 +576,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypePayment:
op, ok := operation.Body.GetPaymentOp()
if !ok {
- return details, fmt.Errorf("Could not access Payment info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access Payment info for this operation (index %d)", operationIndex)
}
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil {
@@ -512,7 +593,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypePathPaymentStrictReceive:
op, ok := operation.Body.GetPathPaymentStrictReceiveOp()
if !ok {
- return details, fmt.Errorf("Could not access PathPaymentStrictReceive info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access PathPaymentStrictReceive info for this operation (index %d)", operationIndex)
}
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil {
@@ -534,16 +615,16 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
if transaction.Result.Successful() {
allOperationResults, ok := transaction.Result.OperationResults()
if !ok {
- return details, fmt.Errorf("Could not access any results for this transaction")
+ return details, fmt.Errorf("could not access any results for this transaction")
}
currentOperationResult := allOperationResults[operationIndex]
resultBody, ok := currentOperationResult.GetTr()
if !ok {
- return details, fmt.Errorf("Could not access result body for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access result body for this operation (index %d)", operationIndex)
}
result, ok := resultBody.GetPathPaymentStrictReceiveResult()
if !ok {
- return details, fmt.Errorf("Could not access PathPaymentStrictReceive result info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access PathPaymentStrictReceive result info for this operation (index %d)", operationIndex)
}
details["source_amount"] = utils.ConvertStroopValueToReal(result.SendAmount())
}
@@ -553,7 +634,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypePathPaymentStrictSend:
op, ok := operation.Body.GetPathPaymentStrictSendOp()
if !ok {
- return details, fmt.Errorf("Could not access PathPaymentStrictSend info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access PathPaymentStrictSend info for this operation (index %d)", operationIndex)
}
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "from"); err != nil {
@@ -575,16 +656,16 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
if transaction.Result.Successful() {
allOperationResults, ok := transaction.Result.OperationResults()
if !ok {
- return details, fmt.Errorf("Could not access any results for this transaction")
+ return details, fmt.Errorf("could not access any results for this transaction")
}
currentOperationResult := allOperationResults[operationIndex]
resultBody, ok := currentOperationResult.GetTr()
if !ok {
- return details, fmt.Errorf("Could not access result body for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access result body for this operation (index %d)", operationIndex)
}
result, ok := resultBody.GetPathPaymentStrictSendResult()
if !ok {
- return details, fmt.Errorf("Could not access GetPathPaymentStrictSendResult result info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access GetPathPaymentStrictSendResult result info for this operation (index %d)", operationIndex)
}
details["amount"] = utils.ConvertStroopValueToReal(result.DestAmount())
}
@@ -594,7 +675,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeManageBuyOffer:
op, ok := operation.Body.GetManageBuyOfferOp()
if !ok {
- return details, fmt.Errorf("Could not access ManageBuyOffer info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access ManageBuyOffer info for this operation (index %d)", operationIndex)
}
details["offer_id"] = int64(op.OfferId)
@@ -613,7 +694,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeManageSellOffer:
op, ok := operation.Body.GetManageSellOfferOp()
if !ok {
- return details, fmt.Errorf("Could not access ManageSellOffer info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access ManageSellOffer info for this operation (index %d)", operationIndex)
}
details["offer_id"] = int64(op.OfferId)
@@ -632,7 +713,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeCreatePassiveSellOffer:
op, ok := operation.Body.GetCreatePassiveSellOfferOp()
if !ok {
- return details, fmt.Errorf("Could not access CreatePassiveSellOffer info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access CreatePassiveSellOffer info for this operation (index %d)", operationIndex)
}
details["amount"] = utils.ConvertStroopValueToReal(op.Amount)
@@ -650,7 +731,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeSetOptions:
op, ok := operation.Body.GetSetOptionsOp()
if !ok {
- return details, fmt.Errorf("Could not access GetSetOptions info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access GetSetOptions info for this operation (index %d)", operationIndex)
}
if op.InflationDest != nil {
@@ -693,7 +774,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeChangeTrust:
op, ok := operation.Body.GetChangeTrustOp()
if !ok {
- return details, fmt.Errorf("Could not access GetChangeTrust info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access GetChangeTrust info for this operation (index %d)", operationIndex)
}
if op.Line.Type == xdr.AssetTypeAssetTypePoolShare {
@@ -715,7 +796,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeAllowTrust:
op, ok := operation.Body.GetAllowTrustOp()
if !ok {
- return details, fmt.Errorf("Could not access AllowTrust info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access AllowTrust info for this operation (index %d)", operationIndex)
}
if err := addAssetDetailsToOperationDetails(details, op.Asset.ToAsset(sourceAccount.ToAccountId()), ""); err != nil {
@@ -739,7 +820,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeAccountMerge:
destinationAccount, ok := operation.Body.GetDestination()
if !ok {
- return details, fmt.Errorf("Could not access Destination info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access Destination info for this operation (index %d)", operationIndex)
}
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "account"); err != nil {
@@ -754,7 +835,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeManageData:
op, ok := operation.Body.GetManageDataOp()
if !ok {
- return details, fmt.Errorf("Could not access GetManageData info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access GetManageData info for this operation (index %d)", operationIndex)
}
details["name"] = string(op.DataName)
@@ -767,7 +848,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.OperationTypeBumpSequence:
op, ok := operation.Body.GetBumpSequenceOp()
if !ok {
- return details, fmt.Errorf("Could not access BumpSequence info for this operation (index %d)", operationIndex)
+ return details, fmt.Errorf("could not access BumpSequence info for this operation (index %d)", operationIndex)
}
details["bump_to"] = fmt.Sprintf("%d", op.BumpTo)
@@ -781,7 +862,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
op := operation.Body.MustClaimClaimableBalanceOp()
balanceID, err := xdr.MarshalHex(op.BalanceId)
if err != nil {
- return details, fmt.Errorf("Invalid balanceId in op: %d", operationIndex)
+ return details, fmt.Errorf("invalid balanceId in op: %d", operationIndex)
}
details["balance_id"] = balanceID
if err := addAccountAndMuxedAccountDetails(details, sourceAccount, "claimant"); err != nil {
@@ -827,7 +908,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
op := operation.Body.MustClawbackClaimableBalanceOp()
balanceID, err := xdr.MarshalHex(op.BalanceId)
if err != nil {
- return details, fmt.Errorf("Invalid balanceId in op: %d", operationIndex)
+ return details, fmt.Errorf("invalid balanceId in op: %d", operationIndex)
}
details["balance_id"] = balanceID
@@ -949,8 +1030,14 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
details["type"] = "invoke_contract"
+ contractId, err := invokeArgs.ContractAddress.String()
+ if err != nil {
+ return nil, err
+ }
+
transactionEnvelope := getTransactionV1Envelope(transaction.Envelope)
- details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
+ details["contract_id"] = contractId
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
for _, param := range args {
@@ -987,6 +1074,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
details["type"] = "create_contract"
transactionEnvelope := getTransactionV1Envelope(transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
@@ -1008,6 +1096,7 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
case xdr.HostFunctionTypeHostFunctionTypeUploadContractWasm:
details["type"] = "upload_wasm"
transactionEnvelope := getTransactionV1Envelope(transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
default:
panic(fmt.Errorf("unknown host function type: %s", op.HostFunction.Type))
@@ -1018,16 +1107,18 @@ func extractOperationDetails(operation xdr.Operation, transaction ingest.LedgerT
details["extend_to"] = op.ExtendTo
transactionEnvelope := getTransactionV1Envelope(transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
case xdr.OperationTypeRestoreFootprint:
details["type"] = "restore_footprint"
transactionEnvelope := getTransactionV1Envelope(transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
default:
- return details, fmt.Errorf("Unknown operation type: %s", operation.Body.Type.String())
+ return details, fmt.Errorf("unknown operation type: %s", operation.Body.Type.String())
}
sponsor, err := getSponsor(operation, transaction, operationIndex)
@@ -1420,7 +1511,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
op := operation.operation.Body.MustClaimClaimableBalanceOp()
balanceID, err := xdr.MarshalHex(op.BalanceId)
if err != nil {
- panic(fmt.Errorf("Invalid balanceId in op: %d", operation.index))
+ panic(fmt.Errorf("invalid balanceId in op: %d", operation.index))
}
details["balance_id"] = balanceID
addAccountAndMuxedAccountDetails(details, *source, "claimant")
@@ -1453,7 +1544,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
op := operation.operation.Body.MustClawbackClaimableBalanceOp()
balanceID, err := xdr.MarshalHex(op.BalanceId)
if err != nil {
- panic(fmt.Errorf("Invalid balanceId in op: %d", operation.index))
+ panic(fmt.Errorf("invalid balanceId in op: %d", operation.index))
}
details["balance_id"] = balanceID
case xdr.OperationTypeSetTrustLineFlags:
@@ -1547,8 +1638,14 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
details["type"] = "invoke_contract"
+ contractId, err := invokeArgs.ContractAddress.String()
+ if err != nil {
+ return nil, err
+ }
+
transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope)
- details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
+ details["contract_id"] = contractId
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
for _, param := range args {
@@ -1585,6 +1682,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
details["type"] = "create_contract"
transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
@@ -1606,6 +1704,7 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
case xdr.HostFunctionTypeHostFunctionTypeUploadContractWasm:
details["type"] = "upload_wasm"
transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
default:
panic(fmt.Errorf("unknown host function type: %s", op.HostFunction.Type))
@@ -1616,16 +1715,18 @@ func (operation *transactionOperationWrapper) Details() (map[string]interface{},
details["extend_to"] = op.ExtendTo
transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
case xdr.OperationTypeRestoreFootprint:
details["type"] = "restore_footprint"
transactionEnvelope := getTransactionV1Envelope(operation.transaction.Envelope)
+ details["ledger_key_hash"] = ledgerKeyHashFromTxEnvelope(transactionEnvelope)
details["contract_id"] = contractIdFromTxEnvelope(transactionEnvelope)
details["contract_code_hash"] = contractCodeHashFromTxEnvelope(transactionEnvelope)
default:
- panic(fmt.Errorf("Unknown operation type: %s", operation.OperationType()))
+ panic(fmt.Errorf("unknown operation type: %s", operation.OperationType()))
}
sponsor, err := operation.getSponsor()
@@ -1701,6 +1802,23 @@ func contractCodeHashFromTxEnvelope(transactionEnvelope xdr.TransactionV1Envelop
return ""
}
+func ledgerKeyHashFromTxEnvelope(transactionEnvelope xdr.TransactionV1Envelope) []string {
+ var ledgerKeyHash []string
+ for _, ledgerKey := range transactionEnvelope.Tx.Ext.SorobanData.Resources.Footprint.ReadOnly {
+ if utils.LedgerKeyToLedgerKeyHash(ledgerKey) != "" {
+ ledgerKeyHash = append(ledgerKeyHash, utils.LedgerKeyToLedgerKeyHash(ledgerKey))
+ }
+ }
+
+ for _, ledgerKey := range transactionEnvelope.Tx.Ext.SorobanData.Resources.Footprint.ReadWrite {
+ if utils.LedgerKeyToLedgerKeyHash(ledgerKey) != "" {
+ ledgerKeyHash = append(ledgerKeyHash, utils.LedgerKeyToLedgerKeyHash(ledgerKey))
+ }
+ }
+
+ return ledgerKeyHash
+}
+
func contractCodeFromContractData(ledgerKey xdr.LedgerKey) string {
contractCode, ok := ledgerKey.GetContractCode()
if !ok {
@@ -2027,7 +2145,7 @@ func (operation *transactionOperationWrapper) Participants() ([]xdr.AccountId, e
case xdr.OperationTypeRestoreFootprint:
// the only direct participant is the source_account
default:
- return participants, fmt.Errorf("Unknown operation type: %s", op.Body.Type)
+ return participants, fmt.Errorf("unknown operation type: %s", op.Body.Type)
}
sponsor, err := operation.getSponsor()
@@ -2053,25 +2171,3 @@ func dedupeParticipants(in []xdr.AccountId) (out []xdr.AccountId) {
}
return
}
-
-// OperationsParticipants returns a map with all participants per operation
-func operationsParticipants(transaction ingest.LedgerTransaction, sequence uint32) (map[int64][]xdr.AccountId, error) {
- participants := map[int64][]xdr.AccountId{}
-
- for opi, op := range transaction.Envelope.Operations() {
- operation := transactionOperationWrapper{
- index: uint32(opi),
- transaction: transaction,
- operation: op,
- ledgerSequence: sequence,
- }
-
- p, err := operation.Participants()
- if err != nil {
- return participants, errors.Wrapf(err, "reading operation %v participants", operation.ID())
- }
- participants[operation.ID()] = p
- }
-
- return participants, nil
-}
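
`ledgerKeyHashFromTxEnvelope`, added above, walks the read-only and read-write halves of the Soroban footprint and keeps every key for which `utils.LedgerKeyToLedgerKeyHash` produces a non-empty hash. A usage sketch against a minimal envelope; the footprint path is the one the helper reads, and all values are illustrative:

```go
// Sketch: an envelope whose Soroban footprint references one contract-code
// key; the new ledger_key_hash detail is built from envelopes shaped like this.
envelope := xdr.TransactionV1Envelope{
	Tx: xdr.Transaction{
		Ext: xdr.TransactionExt{
			V: 1,
			SorobanData: &xdr.SorobanTransactionData{
				Resources: xdr.SorobanResources{
					Footprint: xdr.LedgerFootprint{
						ReadOnly: []xdr.LedgerKey{
							{
								Type:         xdr.LedgerEntryTypeContractCode,
								ContractCode: &xdr.LedgerKeyContractCode{Hash: xdr.Hash{1, 2, 3}},
							},
						},
					},
				},
			},
		},
	},
}
hashes := ledgerKeyHashFromTxEnvelope(envelope)
// hashes holds one entry per footprint key that yielded a non-empty hash.
```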
diff --git a/internal/transform/operation_test.go b/internal/transform/operation_test.go
index 7ae5b341..97959107 100644
--- a/internal/transform/operation_test.go
+++ b/internal/transform/operation_test.go
@@ -45,12 +45,12 @@ func TestTransformOperation(t *testing.T) {
{
negativeOpTypeInput,
OperationOutput{},
- fmt.Errorf("The operation type (-1) is negative for operation 1 (operation id=4098)"),
+ fmt.Errorf("the operation type (-1) is negative for operation 1 (operation id=4098)"),
},
{
unknownOpTypeInput,
OperationOutput{},
- fmt.Errorf("Unknown operation type: "),
+ fmt.Errorf("unknown operation type: "),
},
}
hardCodedInputTransaction, err := makeOperationTestInput()
@@ -129,7 +129,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
hardCodedDataValue := xdr.DataValue([]byte{0x76, 0x61, 0x6c, 0x75, 0x65})
hardCodedSequenceNumber := xdr.SequenceNumber(100)
inputOperations := []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeCreateAccount,
@@ -139,7 +139,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePayment,
@@ -150,7 +150,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePayment,
@@ -161,7 +161,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: &testAccount3,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -175,7 +175,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeManageSellOffer,
@@ -191,7 +191,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeCreatePassiveSellOffer,
@@ -206,7 +206,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeSetOptions,
@@ -223,7 +223,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeChangeTrust,
@@ -233,7 +233,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeChangeTrust,
@@ -243,7 +243,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeAllowTrust,
@@ -254,20 +254,20 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeAccountMerge,
Destination: &testAccount4,
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeInflation,
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeManageData,
@@ -277,7 +277,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeBumpSequence,
@@ -286,7 +286,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeManageBuyOffer,
@@ -302,7 +302,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictSend,
@@ -316,7 +316,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeCreateClaimableBalance,
@@ -327,7 +327,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: &testAccount3,
Body: xdr.OperationBody{
Type: xdr.OperationTypeClaimClaimableBalance,
@@ -336,7 +336,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeBeginSponsoringFutureReserves,
@@ -345,7 +345,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -358,7 +358,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -373,7 +373,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -388,7 +388,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -404,7 +404,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -420,7 +420,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -436,7 +436,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeRevokeSponsorship,
@@ -451,7 +451,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeClawback,
@@ -462,7 +462,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeClawbackClaimableBalance,
@@ -471,7 +471,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeSetTrustLineFlags,
@@ -483,7 +483,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeLiquidityPoolDeposit,
@@ -502,7 +502,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeLiquidityPoolWithdraw,
@@ -514,7 +514,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeInvokeHostFunction,
@@ -533,7 +533,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
// },
// },
//},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeInvokeHostFunction,
@@ -557,7 +557,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
// },
// },
//},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeInvokeHostFunction,
@@ -584,7 +584,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
// },
// },
//},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeInvokeHostFunction,
@@ -596,7 +596,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
// },
// },
//},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeBumpFootprintExpiration,
@@ -608,7 +608,7 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
// },
// },
//},
- //xdr.Operation{
+ //{
// SourceAccount: nil,
// Body: xdr.OperationBody{
// Type: xdr.OperationTypeRestoreFootprint,
@@ -622,11 +622,35 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
}
inputEnvelope.Tx.Operations = inputOperations
results := []xdr.OperationResult{
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeCreateAccount,
+ CreateAccountResult: &xdr.CreateAccountResult{
+ Code: xdr.CreateAccountResultCodeCreateAccountSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypePayment,
+ PaymentResult: &xdr.PaymentResult{
+ Code: xdr.PaymentResultCodePaymentSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypePayment,
+ PaymentResult: &xdr.PaymentResult{
+ Code: xdr.PaymentResultCodePaymentSuccess,
+ },
+ },
+ },
// There needs to be a true result for path payment receive and send
- xdr.OperationResult{
+ {
Code: xdr.OperationResultCodeOpInner,
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -638,18 +662,106 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeManageSellOffer,
+ ManageSellOfferResult: &xdr.ManageSellOfferResult{
+ Code: xdr.ManageSellOfferResultCodeManageSellOfferSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeManageSellOffer,
+ ManageSellOfferResult: &xdr.ManageSellOfferResult{
+ Code: xdr.ManageSellOfferResultCodeManageSellOfferSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeSetOptions,
+ SetOptionsResult: &xdr.SetOptionsResult{
+ Code: xdr.SetOptionsResultCodeSetOptionsSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeChangeTrust,
+ ChangeTrustResult: &xdr.ChangeTrustResult{
+ Code: xdr.ChangeTrustResultCodeChangeTrustSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeChangeTrust,
+ ChangeTrustResult: &xdr.ChangeTrustResult{
+ Code: xdr.ChangeTrustResultCodeChangeTrustSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeAllowTrust,
+ AllowTrustResult: &xdr.AllowTrustResult{
+ Code: xdr.AllowTrustResultCodeAllowTrustSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeAccountMerge,
+ AccountMergeResult: &xdr.AccountMergeResult{
+ Code: xdr.AccountMergeResultCodeAccountMergeSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeInflation,
+ InflationResult: &xdr.InflationResult{
+ Code: xdr.InflationResultCodeInflationSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeManageData,
+ ManageDataResult: &xdr.ManageDataResult{
+ Code: xdr.ManageDataResultCodeManageDataSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeBumpSequence,
+ BumpSeqResult: &xdr.BumpSequenceResult{
+ Code: xdr.BumpSequenceResultCodeBumpSequenceSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeManageBuyOffer,
+ ManageBuyOfferResult: &xdr.ManageBuyOfferResult{
+ Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess,
+ },
+ },
+ },
+ {
Code: xdr.OperationResultCodeOpInner,
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictSend,
@@ -661,27 +773,147 @@ func makeOperationTestInput() (inputTransaction ingest.LedgerTransaction, err er
},
},
},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- xdr.OperationResult{},
- //xdr.OperationResult{},
- //xdr.OperationResult{},
- //xdr.OperationResult{},
- //xdr.OperationResult{},
- //xdr.OperationResult{},
- //xdr.OperationResult{},
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeCreateClaimableBalance,
+ CreateClaimableBalanceResult: &xdr.CreateClaimableBalanceResult{
+ Code: xdr.CreateClaimableBalanceResultCodeCreateClaimableBalanceSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeClaimClaimableBalance,
+ ClaimClaimableBalanceResult: &xdr.ClaimClaimableBalanceResult{
+ Code: xdr.ClaimClaimableBalanceResultCodeClaimClaimableBalanceSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeBeginSponsoringFutureReserves,
+ BeginSponsoringFutureReservesResult: &xdr.BeginSponsoringFutureReservesResult{
+ Code: xdr.BeginSponsoringFutureReservesResultCodeBeginSponsoringFutureReservesSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeRevokeSponsorship,
+ RevokeSponsorshipResult: &xdr.RevokeSponsorshipResult{
+ Code: xdr.RevokeSponsorshipResultCodeRevokeSponsorshipSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeClawback,
+ ClawbackResult: &xdr.ClawbackResult{
+ Code: xdr.ClawbackResultCodeClawbackSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeClawbackClaimableBalance,
+ ClawbackClaimableBalanceResult: &xdr.ClawbackClaimableBalanceResult{
+ Code: xdr.ClawbackClaimableBalanceResultCodeClawbackClaimableBalanceSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeSetTrustLineFlags,
+ SetTrustLineFlagsResult: &xdr.SetTrustLineFlagsResult{
+ Code: xdr.SetTrustLineFlagsResultCodeSetTrustLineFlagsSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeLiquidityPoolDeposit,
+ LiquidityPoolDepositResult: &xdr.LiquidityPoolDepositResult{
+ Code: xdr.LiquidityPoolDepositResultCodeLiquidityPoolDepositSuccess,
+ },
+ },
+ },
+ {
+ Code: xdr.OperationResultCodeOpInner,
+ Tr: &xdr.OperationResultTr{
+ Type: xdr.OperationTypeLiquidityPoolWithdraw,
+ LiquidityPoolWithdrawResult: &xdr.LiquidityPoolWithdrawResult{
+ Code: xdr.LiquidityPoolWithdrawResultCodeLiquidityPoolWithdrawSuccess,
+ },
+ },
+ },
+ //{},
+ //{},
+ //{},
+ //{},
+ //{},
+ //{},
}
inputTransaction.Result.Result.Result.Results = &results
inputTransaction.Envelope.V1 = &inputEnvelope
@@ -693,7 +925,7 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
hardCodedDestAccountAddress := testAccount4Address
hardCodedLedgerClose := genericCloseTime.UTC()
transformedOperations = []OperationOutput{
- OperationOutput{
+ {
SourceAccount: hardCodedSourceAccountAddress,
Type: 0,
TypeString: "create_account",
@@ -704,9 +936,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"funder": hardCodedSourceAccountAddress,
"starting_balance": 2.5,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "CreateAccountResultCodeCreateAccountSuccess",
},
- OperationOutput{
+ {
Type: 1,
TypeString: "payment",
SourceAccount: hardCodedSourceAccountAddress,
@@ -721,9 +955,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_issuer": hardCodedDestAccountAddress,
"asset_id": int64(-8205667356306085451),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "PaymentResultCodePaymentSuccess",
},
- OperationOutput{
+ {
Type: 1,
TypeString: "payment",
SourceAccount: hardCodedSourceAccountAddress,
@@ -736,9 +972,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_type": "native",
"asset_id": int64(-5706705804583548011),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "PaymentResultCodePaymentSuccess",
},
- OperationOutput{
+ {
Type: 2,
TypeString: "path_payment_strict_receive",
SourceAccount: hardCodedSourceAccountAddress,
@@ -756,9 +994,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_id": int64(-5706705804583548011),
"path": []Path{usdtAssetPath},
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "PathPaymentStrictReceiveResultCodePathPaymentStrictReceiveSuccess",
},
- OperationOutput{
+ {
Type: 3,
TypeString: "manage_sell_offer",
SourceAccount: hardCodedSourceAccountAddress,
@@ -779,9 +1019,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"buying_asset_type": "native",
"buying_asset_id": int64(-5706705804583548011),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess",
},
- OperationOutput{
+ {
Type: 4,
TypeString: "create_passive_sell_offer",
SourceAccount: hardCodedSourceAccountAddress,
@@ -801,9 +1043,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"selling_asset_type": "native",
"selling_asset_id": int64(-5706705804583548011),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ManageSellOfferResultCodeManageSellOfferSuccess",
},
- OperationOutput{
+ {
Type: 5,
TypeString: "set_options",
SourceAccount: hardCodedSourceAccountAddress,
@@ -823,9 +1067,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"signer_key": "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF",
"signer_weight": uint32(1),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "SetOptionsResultCodeSetOptionsSuccess",
},
- OperationOutput{
+ {
Type: 6,
TypeString: "change_trust",
SourceAccount: hardCodedSourceAccountAddress,
@@ -840,9 +1086,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_issuer": hardCodedDestAccountAddress,
"asset_id": int64(6690054458235693884),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess",
},
- OperationOutput{
+ {
Type: 6,
TypeString: "change_trust",
SourceAccount: hardCodedSourceAccountAddress,
@@ -854,9 +1102,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_type": "liquidity_pool_shares",
"liquidity_pool_id": "185a6b384c651552ba09b32851b79f5f6ab61e80883d303f52bea1406a4923f0",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ChangeTrustResultCodeChangeTrustSuccess",
},
- OperationOutput{
+ {
Type: 7,
TypeString: "allow_trust",
SourceAccount: hardCodedSourceAccountAddress,
@@ -871,9 +1121,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_issuer": hardCodedSourceAccountAddress,
"asset_id": int64(8485542065083974675),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "AllowTrustResultCodeAllowTrustSuccess",
},
- OperationOutput{
+ {
Type: 8,
TypeString: "account_merge",
SourceAccount: hardCodedSourceAccountAddress,
@@ -883,18 +1135,22 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"account": hardCodedSourceAccountAddress,
"into": hardCodedDestAccountAddress,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "AccountMergeResultCodeAccountMergeSuccess",
},
- OperationOutput{
- Type: 9,
- TypeString: "inflation",
- SourceAccount: hardCodedSourceAccountAddress,
- TransactionID: 4096,
- OperationID: 4108,
- OperationDetails: map[string]interface{}{},
- ClosedAt: hardCodedLedgerClose,
+ {
+ Type: 9,
+ TypeString: "inflation",
+ SourceAccount: hardCodedSourceAccountAddress,
+ TransactionID: 4096,
+ OperationID: 4108,
+ OperationDetails: map[string]interface{}{},
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "InflationResultCodeInflationSuccess",
},
- OperationOutput{
+ {
Type: 10,
TypeString: "manage_data",
SourceAccount: hardCodedSourceAccountAddress,
@@ -904,9 +1160,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"name": "test",
"value": base64.StdEncoding.EncodeToString([]byte{0x76, 0x61, 0x6c, 0x75, 0x65}),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ManageDataResultCodeManageDataSuccess",
},
- OperationOutput{
+ {
Type: 11,
TypeString: "bump_sequence",
SourceAccount: hardCodedSourceAccountAddress,
@@ -915,9 +1173,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"bump_to": "100",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "BumpSequenceResultCodeBumpSequenceSuccess",
},
- OperationOutput{
+ {
Type: 12,
TypeString: "manage_buy_offer",
SourceAccount: hardCodedSourceAccountAddress,
@@ -938,9 +1198,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"buying_asset_id": int64(-5706705804583548011),
"offer_id": int64(100),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ManageBuyOfferResultCodeManageBuyOfferSuccess",
},
- OperationOutput{
+ {
Type: 13,
TypeString: "path_payment_strict_send",
SourceAccount: hardCodedSourceAccountAddress,
@@ -958,9 +1220,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_type": "native",
"asset_id": int64(-5706705804583548011),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "PathPaymentStrictSendResultCodePathPaymentStrictSendSuccess",
},
- OperationOutput{
+ {
Type: 14,
TypeString: "create_claimable_balance",
SourceAccount: hardCodedSourceAccountAddress,
@@ -971,9 +1235,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"amount": 123456.789,
"claimants": []Claimant{testClaimantDetails},
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "CreateClaimableBalanceResultCodeCreateClaimableBalanceSuccess",
},
- OperationOutput{
+ {
Type: 15,
TypeString: "claim_claimable_balance",
SourceAccount: testAccount3Address,
@@ -983,9 +1249,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"claimant": hardCodedSourceAccountAddress,
"balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ClaimClaimableBalanceResultCodeClaimClaimableBalanceSuccess",
},
- OperationOutput{
+ {
Type: 16,
TypeString: "begin_sponsoring_future_reserves",
SourceAccount: hardCodedSourceAccountAddress,
@@ -994,9 +1262,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"sponsored_id": hardCodedDestAccountAddress,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "BeginSponsoringFutureReservesResultCodeBeginSponsoringFutureReservesSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1006,9 +1276,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"signer_account_id": hardCodedDestAccountAddress,
"signer_key": "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1017,9 +1289,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"account_id": hardCodedDestAccountAddress,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1028,9 +1302,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"claimable_balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1040,9 +1316,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"data_account_id": hardCodedDestAccountAddress,
"data_name": "test",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1051,9 +1329,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"offer_id": int64(100),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1063,9 +1343,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"trustline_account_id": testAccount3Address,
"trustline_asset": "USTT:GBT4YAEGJQ5YSFUMNKX6BPBUOCPNAIOFAVZOF6MIME2CECBMEIUXFZZN",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 18,
TypeString: "revoke_sponsorship",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1074,9 +1356,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"liquidity_pool_id": "0102030405060708090000000000000000000000000000000000000000000000",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "RevokeSponsorshipResultCodeRevokeSponsorshipSuccess",
},
- OperationOutput{
+ {
Type: 19,
TypeString: "clawback",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1090,9 +1374,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"asset_type": "credit_alphanum4",
"asset_id": int64(-8205667356306085451),
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ClawbackResultCodeClawbackSuccess",
},
- OperationOutput{
+ {
Type: 20,
TypeString: "clawback_claimable_balance",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1101,9 +1387,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
OperationDetails: map[string]interface{}{
"balance_id": "000000000102030405060708090000000000000000000000000000000000000000000000",
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "ClawbackClaimableBalanceResultCodeClawbackClaimableBalanceSuccess",
},
- OperationOutput{
+ {
Type: 21,
TypeString: "set_trust_line_flags",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1120,9 +1408,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"set_flags": []int32{4},
"set_flags_s": []string{"clawback_enabled"},
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "SetTrustLineFlagsResultCodeSetTrustLineFlagsSuccess",
},
- OperationOutput{
+ {
Type: 22,
TypeString: "liquidity_pool_deposit",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1152,9 +1442,11 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
},
"shares_received": 0.0000002,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "LiquidityPoolDepositResultCodeLiquidityPoolDepositSuccess",
},
- OperationOutput{
+ {
Type: 23,
TypeString: "liquidity_pool_withdraw",
SourceAccount: hardCodedSourceAccountAddress,
@@ -1174,7 +1466,9 @@ func makeOperationTestOutputs() (transformedOperations []OperationOutput) {
"reserve_b_min_amount": 0.0000001,
"shares": 0.0000004,
},
- ClosedAt: hardCodedLedgerClose,
+ ClosedAt: hardCodedLedgerClose,
+ OperationResultCode: "OperationResultCodeOpInner",
+ OperationTraceCode: "LiquidityPoolWithdrawResultCodeLiquidityPoolWithdrawSuccess",
},
//OperationOutput{
// Type: 24,
diff --git a/internal/transform/schema.go b/internal/transform/schema.go
index b5b3f9e8..6b7c9c65 100644
--- a/internal/transform/schema.go
+++ b/internal/transform/schema.go
@@ -28,42 +28,50 @@ type LedgerOutput struct {
MaxTxSetSize uint32 `json:"max_tx_set_size"`
ProtocolVersion uint32 `json:"protocol_version"`
LedgerID int64 `json:"id"`
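+	// SorobanFeeWrite1Kb is the Soroban fee charged per 1KB written for this ledger (presumably sourced from the network's Soroban config settings).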
+ SorobanFeeWrite1Kb int64 `json:"soroban_fee_write_1kb"`
}
// TransactionOutput is a representation of a transaction that aligns with the BigQuery table history_transactions
type TransactionOutput struct {
- TransactionHash string `json:"transaction_hash"`
- LedgerSequence uint32 `json:"ledger_sequence"`
- Account string `json:"account"`
- AccountMuxed string `json:"account_muxed,omitempty"`
- AccountSequence int64 `json:"account_sequence"`
- MaxFee uint32 `json:"max_fee"`
- FeeCharged int64 `json:"fee_charged"`
- OperationCount int32 `json:"operation_count"`
- TxEnvelope string `json:"tx_envelope"`
- TxResult string `json:"tx_result"`
- TxMeta string `json:"tx_meta"`
- TxFeeMeta string `json:"tx_fee_meta"`
- CreatedAt time.Time `json:"created_at"`
- MemoType string `json:"memo_type"`
- Memo string `json:"memo"`
- TimeBounds string `json:"time_bounds"`
- Successful bool `json:"successful"`
- TransactionID int64 `json:"id"`
- FeeAccount string `json:"fee_account,omitempty"`
- FeeAccountMuxed string `json:"fee_account_muxed,omitempty"`
- InnerTransactionHash string `json:"inner_transaction_hash,omitempty"`
- NewMaxFee uint32 `json:"new_max_fee,omitempty"`
- LedgerBounds string `json:"ledger_bounds"`
- MinAccountSequence null.Int `json:"min_account_sequence"`
- MinAccountSequenceAge null.Int `json:"min_account_sequence_age"`
- MinAccountSequenceLedgerGap null.Int `json:"min_account_sequence_ledger_gap"`
- ExtraSigners pq.StringArray `json:"extra_signers"`
- ClosedAt time.Time `json:"closed_at"`
- ResourceFee int64 `json:"resource_fee"`
- SorobanResourcesInstructions uint32 `json:"soroban_resources_instructions"`
- SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"`
- SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"`
+ TransactionHash string `json:"transaction_hash"`
+ LedgerSequence uint32 `json:"ledger_sequence"`
+ Account string `json:"account"`
+ AccountMuxed string `json:"account_muxed,omitempty"`
+ AccountSequence int64 `json:"account_sequence"`
+ MaxFee uint32 `json:"max_fee"`
+ FeeCharged int64 `json:"fee_charged"`
+ OperationCount int32 `json:"operation_count"`
+ TxEnvelope string `json:"tx_envelope"`
+ TxResult string `json:"tx_result"`
+ TxMeta string `json:"tx_meta"`
+ TxFeeMeta string `json:"tx_fee_meta"`
+ CreatedAt time.Time `json:"created_at"`
+ MemoType string `json:"memo_type"`
+ Memo string `json:"memo"`
+ TimeBounds string `json:"time_bounds"`
+ Successful bool `json:"successful"`
+ TransactionID int64 `json:"id"`
+ FeeAccount string `json:"fee_account,omitempty"`
+ FeeAccountMuxed string `json:"fee_account_muxed,omitempty"`
+ InnerTransactionHash string `json:"inner_transaction_hash,omitempty"`
+ NewMaxFee uint32 `json:"new_max_fee,omitempty"`
+ LedgerBounds string `json:"ledger_bounds"`
+ MinAccountSequence null.Int `json:"min_account_sequence"`
+ MinAccountSequenceAge null.Int `json:"min_account_sequence_age"`
+ MinAccountSequenceLedgerGap null.Int `json:"min_account_sequence_ledger_gap"`
+ ExtraSigners pq.StringArray `json:"extra_signers"`
+ ClosedAt time.Time `json:"closed_at"`
+ ResourceFee int64 `json:"resource_fee"`
+ SorobanResourcesInstructions uint32 `json:"soroban_resources_instructions"`
+ SorobanResourcesReadBytes uint32 `json:"soroban_resources_read_bytes"`
+ SorobanResourcesWriteBytes uint32 `json:"soroban_resources_write_bytes"`
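+	// TransactionResultCode is the transaction's result code string; the fee fields below it
+	// break the charged fee down into the inclusion fee (bid and charged), the resource fee
+	// refund, and the non-refundable, refundable and rent portions of the resource fee.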
+ TransactionResultCode string `json:"transaction_result_code"`
+ InclusionFeeBid int64 `json:"inclusion_fee_bid"`
+ InclusionFeeCharged int64 `json:"inclusion_fee_charged"`
+ ResourceFeeRefund int64 `json:"resource_fee_refund"`
+ TotalNonRefundableResourceFeeCharged int64 `json:"non_refundable_resource_fee_charged"`
+ TotalRefundableResourceFeeCharged int64 `json:"refundable_resource_fee_charged"`
+ RentFeeCharged int64 `json:"rent_fee_charged"`
}
type LedgerTransactionOutput struct {
@@ -118,14 +126,16 @@ type AccountSignerOutput struct {
// OperationOutput is a representation of an operation that aligns with the BigQuery table history_operations
type OperationOutput struct {
- SourceAccount string `json:"source_account"`
- SourceAccountMuxed string `json:"source_account_muxed,omitempty"`
- Type int32 `json:"type"`
- TypeString string `json:"type_string"`
- OperationDetails map[string]interface{} `json:"details"` //Details is a JSON object that varies based on operation type
- TransactionID int64 `json:"transaction_id"`
- OperationID int64 `json:"id"`
- ClosedAt time.Time `json:"closed_at"`
+ SourceAccount string `json:"source_account"`
+ SourceAccountMuxed string `json:"source_account_muxed,omitempty"`
+ Type int32 `json:"type"`
+ TypeString string `json:"type_string"`
+ OperationDetails map[string]interface{} `json:"details"` //Details is a JSON object that varies based on operation type
+ TransactionID int64 `json:"transaction_id"`
+ OperationID int64 `json:"id"`
+ ClosedAt time.Time `json:"closed_at"`
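+	// OperationResultCode is the top-level result code (e.g. OperationResultCodeOpInner) and
+	// OperationTraceCode is the operation-specific result code (e.g. CreateAccountResultCodeCreateAccountSuccess).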
+ OperationResultCode string `json:"operation_result_code"`
+ OperationTraceCode string `json:"operation_trace_code"`
}
// ClaimableBalanceOutput is a representation of a claimable balance that aligns with the BigQuery table claimable_balances
@@ -511,6 +521,16 @@ type ContractCodeOutput struct {
LedgerSequence uint32 `json:"ledger_sequence"`
LedgerKeyHash string `json:"ledger_key_hash"`
//ContractCodeCode string `json:"contract_code"`
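+	// Counts describing the contract's Wasm module (instructions, functions, globals, table
+	// entries, types, data/elem segments, imports, exports and data segment bytes).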
+ NInstructions uint32 `json:"n_instructions"`
+ NFunctions uint32 `json:"n_functions"`
+ NGlobals uint32 `json:"n_globals"`
+ NTableEntries uint32 `json:"n_table_entries"`
+ NTypes uint32 `json:"n_types"`
+ NDataSegments uint32 `json:"n_data_segments"`
+ NElemSegments uint32 `json:"n_elem_segments"`
+ NImports uint32 `json:"n_imports"`
+ NExports uint32 `json:"n_exports"`
+ NDataSegmentBytes uint32 `json:"n_data_segment_bytes"`
}
// ConfigSettingOutput is a representation of soroban config settings that aligns with the BigQuery table config_settings
@@ -576,16 +596,20 @@ type TtlOutput struct {
LedgerSequence uint32 `json:"ledger_sequence"`
}
-// DiagnosticEventOutput is a representation of soroban diagnostic events that currently are not stored in a BQ table
-type DiagnosticEventOutput struct {
- TransactionHash string `json:"transaction_hash"`
- LedgerSequence uint32 `json:"ledger_sequence"`
- TransactionID int64 `json:"transaction_id"`
- ClosedAt time.Time `json:"closed_at"`
- InSuccessfulContractCall bool `json:"in_successful_contract_call"`
- ExtV int32 `json:"ext_v"`
- ContractId string `json:"contract_id"`
- Type string `json:"type"`
- BodyV int32 `json:"body_v"`
- Body string `json:"body"`
+// ContractEventOutput is a representation of soroban contract events and diagnostic events
+type ContractEventOutput struct {
+ TransactionHash string `json:"transaction_hash"`
+ TransactionID int64 `json:"transaction_id"`
+ Successful bool `json:"successful"`
+ LedgerSequence uint32 `json:"ledger_sequence"`
+ ClosedAt time.Time `json:"closed_at"`
+ InSuccessfulContractCall bool `json:"in_successful_contract_call"`
+ ContractId string `json:"contract_id"`
+ Type int32 `json:"type"`
+ TypeString string `json:"type_string"`
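+	// Topics and Data are kept in both raw and decoded form; ContractEventXDR preserves the original event XDR.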
+ Topics map[string][]map[string]string `json:"topics"`
+ TopicsDecoded map[string][]map[string]string `json:"topics_decoded"`
+ Data map[string]string `json:"data"`
+ DataDecoded map[string]string `json:"data_decoded"`
+ ContractEventXDR string `json:"contract_event_xdr"`
}
diff --git a/internal/transform/test_variables_test.go b/internal/transform/test_variables_test.go
index 9621723b..e007aa19 100644
--- a/internal/transform/test_variables_test.go
+++ b/internal/transform/test_variables_test.go
@@ -95,64 +95,6 @@ var testAccount4Address = "GBVVRXLMNCJQW3IDDXC3X6XCH35B5Q7QXNMMFPENSOGUPQO7WO7HG
var testAccount4ID, _ = xdr.AddressToAccountId(testAccount4Address)
var testAccount4 = testAccount4ID.ToMuxedAccount()
-// a selection of hardcoded Liquidity Pools
-var lpDepositChanges = []xdr.OperationMeta{
- {
- Changes: xdr.LedgerEntryChanges{
- xdr.LedgerEntryChange{
- Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
- State: &xdr.LedgerEntry{
- Data: xdr.LedgerEntryData{
- Type: xdr.LedgerEntryTypeLiquidityPool,
- LiquidityPool: &xdr.LiquidityPoolEntry{
- LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9},
- Body: xdr.LiquidityPoolEntryBody{
- Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct,
- ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{
- Params: xdr.LiquidityPoolConstantProductParameters{
- AssetA: lpAssetA,
- AssetB: lpAssetB,
- Fee: 30,
- },
- ReserveA: 100000,
- ReserveB: 1000,
- TotalPoolShares: 500,
- PoolSharesTrustLineCount: 25,
- },
- },
- },
- },
- },
- },
- xdr.LedgerEntryChange{
- Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated,
- Updated: &xdr.LedgerEntry{
- Data: xdr.LedgerEntryData{
- Type: xdr.LedgerEntryTypeLiquidityPool,
- LiquidityPool: &xdr.LiquidityPoolEntry{
- LiquidityPoolId: xdr.PoolId{1, 2, 3, 4, 5, 6, 7, 8, 9},
- Body: xdr.LiquidityPoolEntryBody{
- Type: xdr.LiquidityPoolTypeLiquidityPoolConstantProduct,
- ConstantProduct: &xdr.LiquidityPoolEntryConstantProduct{
- Params: xdr.LiquidityPoolConstantProductParameters{
- AssetA: lpAssetA,
- AssetB: lpAssetB,
- Fee: 30,
- },
- ReserveA: 101000,
- ReserveB: 1100,
- TotalPoolShares: 502,
- PoolSharesTrustLineCount: 26,
- },
- },
- },
- },
- },
- },
- },
- },
-}
-
// a selection of hardcoded assets and their AssetOutput representations
var usdtAsset = xdr.Asset{
@@ -225,23 +167,12 @@ var ethTrustLineAsset = xdr.TrustLineAsset{
},
}
-var ethAssetPath = Path{
- AssetType: "credit_alphanum4",
- AssetCode: "ETH",
- AssetIssuer: testAccount1Address,
-}
-
var liquidityPoolAsset = xdr.TrustLineAsset{
Type: xdr.AssetTypeAssetTypePoolShare,
LiquidityPoolId: &xdr.PoolId{1, 3, 4, 5, 7, 9},
}
var nativeAsset = xdr.MustNewNativeAsset()
-var nativeAssetPath = Path{
- AssetType: "native",
-}
-
-var nativeTrustLineAsset = xdr.MustNewNativeAsset().ToTrustLineAsset()
var genericClaimableBalance = xdr.ClaimableBalanceId{
Type: xdr.ClaimableBalanceIdTypeClaimableBalanceIdTypeV0,
diff --git a/internal/transform/trade.go b/internal/transform/trade.go
index 27dfc0da..06ac05f0 100644
--- a/internal/transform/trade.go
+++ b/internal/transform/trade.go
@@ -20,11 +20,11 @@ import (
func TransformTrade(operationIndex int32, operationID int64, transaction ingest.LedgerTransaction, ledgerCloseTime time.Time) ([]TradeOutput, error) {
operationResults, ok := transaction.Result.OperationResults()
if !ok {
- return []TradeOutput{}, fmt.Errorf("Could not get any results from this transaction")
+ return []TradeOutput{}, fmt.Errorf("could not get any results from this transaction")
}
if !transaction.Result.Successful() {
- return []TradeOutput{}, fmt.Errorf("Transaction failed; no trades")
+ return []TradeOutput{}, fmt.Errorf("transaction failed; no trades")
}
operation := transaction.Envelope.Operations()[operationIndex]
@@ -50,7 +50,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest.
outputSellingAmount := claimOffer.AmountSold()
if outputSellingAmount < 0 {
- return []TradeOutput{}, fmt.Errorf("Amount sold is negative (%d) for operation at index %d", outputSellingAmount, operationIndex)
+ return []TradeOutput{}, fmt.Errorf("amount sold is negative (%d) for operation at index %d", outputSellingAmount, operationIndex)
}
var outputBuyingAssetType, outputBuyingAssetCode, outputBuyingAssetIssuer string
@@ -62,7 +62,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest.
outputBuyingAmount := int64(claimOffer.AmountBought())
if outputBuyingAmount < 0 {
- return []TradeOutput{}, fmt.Errorf("Amount bought is negative (%d) for operation at index %d", outputBuyingAmount, operationIndex)
+ return []TradeOutput{}, fmt.Errorf("amount bought is negative (%d) for operation at index %d", outputBuyingAmount, operationIndex)
}
if outputSellingAmount == 0 && outputBuyingAmount == 0 {
@@ -87,7 +87,7 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest.
tradeType = int32(2)
var fee uint32
if fee, err = findPoolFee(transaction, operationIndex, id); err != nil {
- return []TradeOutput{}, fmt.Errorf("Cannot parse fee for liquidity pool %v", liquidityPoolID)
+ return []TradeOutput{}, fmt.Errorf("cannot parse fee for liquidity pool %v", liquidityPoolID)
}
outputPoolFee = null.IntFrom(int64(fee))
@@ -156,25 +156,25 @@ func TransformTrade(operationIndex int32, operationID int64, transaction ingest.
func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex int32, operationType xdr.OperationType) (claimedOffers []xdr.ClaimAtom, BuyingOffer *xdr.OfferEntry, sellerIsExact null.Bool, err error) {
if operationIndex >= int32(len(operationResults)) {
- err = fmt.Errorf("Operation index of %d is out of bounds in result slice (len = %d)", operationIndex, len(operationResults))
+ err = fmt.Errorf("operation index of %d is out of bounds in result slice (len = %d)", operationIndex, len(operationResults))
return
}
if operationResults[operationIndex].Tr == nil {
- err = fmt.Errorf("Could not get result Tr for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get result Tr for operation at index %d", operationIndex)
return
}
operationTr, ok := operationResults[operationIndex].GetTr()
if !ok {
- err = fmt.Errorf("Could not get result Tr for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get result Tr for operation at index %d", operationIndex)
return
}
switch operationType {
case xdr.OperationTypeManageBuyOffer:
var buyOfferResult xdr.ManageBuyOfferResult
if buyOfferResult, ok = operationTr.GetManageBuyOfferResult(); !ok {
- err = fmt.Errorf("Could not get ManageBuyOfferResult for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get ManageBuyOfferResult for operation at index %d", operationIndex)
return
}
if success, ok := buyOfferResult.GetSuccess(); ok {
@@ -183,12 +183,12 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex
return
}
- err = fmt.Errorf("Could not get ManageOfferSuccess for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get ManageOfferSuccess for operation at index %d", operationIndex)
case xdr.OperationTypeManageSellOffer:
var sellOfferResult xdr.ManageSellOfferResult
if sellOfferResult, ok = operationTr.GetManageSellOfferResult(); !ok {
- err = fmt.Errorf("Could not get ManageSellOfferResult for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get ManageSellOfferResult for operation at index %d", operationIndex)
return
}
@@ -198,7 +198,7 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex
return
}
- err = fmt.Errorf("Could not get ManageOfferSuccess for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get ManageOfferSuccess for operation at index %d", operationIndex)
case xdr.OperationTypeCreatePassiveSellOffer:
// KNOWN ISSUE: stellar-core creates results for CreatePassiveOffer operations
@@ -219,7 +219,7 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex
var pathSendResult xdr.PathPaymentStrictSendResult
sellerIsExact = null.BoolFrom(false)
if pathSendResult, ok = operationTr.GetPathPaymentStrictSendResult(); !ok {
- err = fmt.Errorf("Could not get PathPaymentStrictSendResult for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get PathPaymentStrictSendResult for operation at index %d", operationIndex)
return
}
@@ -229,13 +229,13 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex
return
}
- err = fmt.Errorf("Could not get PathPaymentStrictSendSuccess for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get PathPaymentStrictSendSuccess for operation at index %d", operationIndex)
case xdr.OperationTypePathPaymentStrictReceive:
var pathReceiveResult xdr.PathPaymentStrictReceiveResult
sellerIsExact = null.BoolFrom(true)
if pathReceiveResult, ok = operationTr.GetPathPaymentStrictReceiveResult(); !ok {
- err = fmt.Errorf("Could not get PathPaymentStrictReceiveResult for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get PathPaymentStrictReceiveResult for operation at index %d", operationIndex)
return
}
@@ -244,10 +244,10 @@ func extractClaimedOffers(operationResults []xdr.OperationResult, operationIndex
return
}
- err = fmt.Errorf("Could not get GetPathPaymentStrictReceiveSuccess for operation at index %d", operationIndex)
+ err = fmt.Errorf("could not get GetPathPaymentStrictReceiveSuccess for operation at index %d", operationIndex)
default:
- err = fmt.Errorf("Operation of type %s at index %d does not result in trades", operationType, operationIndex)
+ err = fmt.Errorf("operation of type %s at index %d does not result in trades", operationType, operationIndex)
return
}
@@ -386,7 +386,7 @@ func roundingSlippage(t ingest.LedgerTransaction, operationIndex int32, trade xd
}
return null.IntFrom(int64(roundingSlippageBips)), nil
default:
- return null.Int{}, fmt.Errorf("Unexpected trade operation type: %v", op.Body.Type)
+ return null.Int{}, fmt.Errorf("unexpected trade operation type: %v", op.Body.Type)
}
}
diff --git a/internal/transform/trade_test.go b/internal/transform/trade_test.go
index 8e963366..8cc46241 100644
--- a/internal/transform/trade_test.go
+++ b/internal/transform/trade_test.go
@@ -65,14 +65,14 @@ func TestTransformTrade(t *testing.T) {
noTrEnvelope := genericManageBuyOfferEnvelope
noTrInput.transaction.Envelope.V1 = &noTrEnvelope
noTrInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{
- xdr.OperationResult{Tr: nil},
+ {Tr: nil},
}, true)
failedResultInput := genericInput
failedResultEnvelope := genericManageBuyOfferEnvelope
failedResultInput.transaction.Envelope.V1 = &failedResultEnvelope
failedResultInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeManageBuyOffer,
ManageBuyOfferResult: &xdr.ManageBuyOfferResult{
@@ -85,14 +85,14 @@ func TestTransformTrade(t *testing.T) {
negBaseAmountEnvelope := genericManageBuyOfferEnvelope
negBaseAmountInput.transaction.Envelope.V1 = &negBaseAmountEnvelope
negBaseAmountInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeManageBuyOffer,
ManageBuyOfferResult: &xdr.ManageBuyOfferResult{
Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess,
Success: &xdr.ManageOfferSuccessResult{
OffersClaimed: []xdr.ClaimAtom{
- xdr.ClaimAtom{
+ {
Type: xdr.ClaimAtomTypeClaimAtomTypeOrderBook,
OrderBook: &xdr.ClaimOfferAtom{
SellerId: genericAccountID,
@@ -109,14 +109,14 @@ func TestTransformTrade(t *testing.T) {
negCounterAmountEnvelope := genericManageBuyOfferEnvelope
negCounterAmountInput.transaction.Envelope.V1 = &negCounterAmountEnvelope
negCounterAmountInput.transaction.Result = wrapOperationsResultsSlice([]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeManageBuyOffer,
ManageBuyOfferResult: &xdr.ManageBuyOfferResult{
Code: xdr.ManageBuyOfferResultCodeManageBuyOfferSuccess,
Success: &xdr.ManageOfferSuccessResult{
OffersClaimed: []xdr.ClaimAtom{
- xdr.ClaimAtom{
+ {
Type: xdr.ClaimAtomTypeClaimAtomTypeOrderBook,
OrderBook: &xdr.ClaimOfferAtom{
SellerId: genericAccountID,
@@ -132,31 +132,31 @@ func TestTransformTrade(t *testing.T) {
tests := []transformTest{
{
wrongTypeInput,
- []TradeOutput{}, fmt.Errorf("Operation of type OperationTypeBumpSequence at index 0 does not result in trades"),
+ []TradeOutput{}, fmt.Errorf("operation of type OperationTypeBumpSequence at index 0 does not result in trades"),
},
{
resultOutOfRangeInput,
- []TradeOutput{}, fmt.Errorf("Operation index of 0 is out of bounds in result slice (len = 0)"),
+ []TradeOutput{}, fmt.Errorf("operation index of 0 is out of bounds in result slice (len = 0)"),
},
{
failedTxInput,
- []TradeOutput{}, fmt.Errorf("Transaction failed; no trades"),
+ []TradeOutput{}, fmt.Errorf("transaction failed; no trades"),
},
{
noTrInput,
- []TradeOutput{}, fmt.Errorf("Could not get result Tr for operation at index 0"),
+ []TradeOutput{}, fmt.Errorf("could not get result Tr for operation at index 0"),
},
{
failedResultInput,
- []TradeOutput{}, fmt.Errorf("Could not get ManageOfferSuccess for operation at index 0"),
+ []TradeOutput{}, fmt.Errorf("could not get ManageOfferSuccess for operation at index 0"),
},
{
negBaseAmountInput,
- []TradeOutput{}, fmt.Errorf("Amount sold is negative (-1) for operation at index 0"),
+ []TradeOutput{}, fmt.Errorf("amount sold is negative (-1) for operation at index 0"),
},
{
negCounterAmountInput,
- []TradeOutput{}, fmt.Errorf("Amount bought is negative (-2) for operation at index 0"),
+ []TradeOutput{}, fmt.Errorf("amount bought is negative (-2) for operation at index 0"),
},
}
@@ -240,23 +240,21 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
}
inputOperations := []xdr.Operation{
-
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeManageSellOffer,
ManageSellOfferOp: &xdr.ManageSellOfferOp{},
},
},
-
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeManageBuyOffer,
ManageBuyOfferOp: &xdr.ManageBuyOfferOp{},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictSend,
@@ -265,7 +263,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.Operation{
+ {
SourceAccount: &testAccount3,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -274,21 +272,21 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.Operation{
+ {
SourceAccount: &testAccount3,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictSend,
PathPaymentStrictSendOp: &xdr.PathPaymentStrictSendOp{},
},
},
- xdr.Operation{
+ {
SourceAccount: &testAccount3,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
PathPaymentStrictReceiveOp: &xdr.PathPaymentStrictReceiveOp{},
},
},
- xdr.Operation{
+ {
SourceAccount: nil,
Body: xdr.OperationBody{
Type: xdr.OperationTypeCreatePassiveSellOffer,
@@ -298,7 +296,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
}
inputEnvelope.Tx.Operations = inputOperations
results := []xdr.OperationResult{
- xdr.OperationResult{
+ {
Code: xdr.OperationResultCodeOpInner,
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeManageSellOffer,
@@ -313,7 +311,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeManageBuyOffer,
ManageBuyOfferResult: &xdr.ManageBuyOfferResult{
@@ -326,7 +324,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationResult{
+ {
Code: xdr.OperationResultCodeOpInner,
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictSend,
@@ -340,7 +338,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationResult{
+ {
Code: xdr.OperationResultCodeOpInner,
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -354,7 +352,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictSend,
PathPaymentStrictSendResult: &xdr.PathPaymentStrictSendResult{
@@ -367,7 +365,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypePathPaymentStrictReceive,
PathPaymentStrictReceiveResult: &xdr.PathPaymentStrictReceiveResult{
@@ -380,7 +378,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeCreatePassiveSellOffer,
CreatePassiveSellOfferResult: &xdr.ManageSellOfferResult{
@@ -395,7 +393,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
unsafeMeta := xdr.TransactionMetaV1{
Operations: []xdr.OperationMeta{
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -431,7 +429,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -467,7 +465,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -535,7 +533,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -602,7 +600,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -656,7 +654,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{
+ {
Changes: xdr.LedgerEntryChanges{
xdr.LedgerEntryChange{
Type: xdr.LedgerEntryChangeTypeLedgerEntryState,
@@ -710,7 +708,7 @@ func makeTradeTestInput() (inputTransaction ingest.LedgerTransaction) {
},
},
},
- xdr.OperationMeta{},
+ {},
}}
inputTransaction.Result.Result.Result.Results = &results
@@ -834,13 +832,13 @@ func makeTradeTestOutput() [][]TradeOutput {
offerTwoOutputSecondPlace.SellerIsExact = null.BoolFrom(false)
output := [][]TradeOutput{
- []TradeOutput{offerOneOutput},
- []TradeOutput{offerTwoOutput},
- []TradeOutput{onePriceIsAmount, offerTwoOutputSecondPlace},
- []TradeOutput{twoPriceIsAmount, offerOneOutputSecondPlace},
- []TradeOutput{lPOneOutput},
- []TradeOutput{lPTwoOutput},
- []TradeOutput{},
+ {offerOneOutput},
+ {offerTwoOutput},
+ {onePriceIsAmount, offerTwoOutputSecondPlace},
+ {twoPriceIsAmount, offerOneOutputSecondPlace},
+ {lPOneOutput},
+ {lPTwoOutput},
+ {},
}
return output
}
diff --git a/internal/transform/transaction.go b/internal/transform/transaction.go
index baad3770..f4eb255d 100644
--- a/internal/transform/transaction.go
+++ b/internal/transform/transaction.go
@@ -33,17 +33,14 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe
outputAccountSequence := transaction.Envelope.SeqNum()
if outputAccountSequence < 0 {
- return TransactionOutput{}, fmt.Errorf("The account's sequence number (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputAccountSequence, outputLedgerSequence, transactionIndex, outputTransactionID)
+ return TransactionOutput{}, fmt.Errorf("the account's sequence number (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputAccountSequence, outputLedgerSequence, transactionIndex, outputTransactionID)
}
outputMaxFee := transaction.Envelope.Fee()
- if outputMaxFee < 0 {
- return TransactionOutput{}, fmt.Errorf("The fee (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputMaxFee, outputLedgerSequence, transactionIndex, outputTransactionID)
- }
outputFeeCharged := int64(transaction.Result.Result.FeeCharged)
if outputFeeCharged < 0 {
- return TransactionOutput{}, fmt.Errorf("The fee charged (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputFeeCharged, outputLedgerSequence, transactionIndex, outputTransactionID)
+ return TransactionOutput{}, fmt.Errorf("the fee charged (%d) is negative for ledger %d; transaction %d (transaction id=%d)", outputFeeCharged, outputLedgerSequence, transactionIndex, outputTransactionID)
}
outputOperationCount := int32(len(transaction.Envelope.Operations()))
@@ -94,7 +91,7 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe
if timeBound != nil {
if timeBound.MaxTime < timeBound.MinTime && timeBound.MaxTime != 0 {
- return TransactionOutput{}, fmt.Errorf("The max time is earlier than the min time (%d < %d) for ledger %d; transaction %d (transaction id=%d)",
+ return TransactionOutput{}, fmt.Errorf("the max time is earlier than the min time (%d < %d) for ledger %d; transaction %d (transaction id=%d)",
timeBound.MaxTime, timeBound.MinTime, outputLedgerSequence, transactionIndex, outputTransactionID)
}
@@ -134,20 +131,52 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe
// Note: MaxFee and FeeCharged is the sum of base transaction fees + Soroban fees
// Breakdown of Soroban fees can be calculated by the config_setting resource pricing * the resources used
+ var sorobanData xdr.SorobanTransactionData
+ var hasSorobanData bool
var outputResourceFee int64
var outputSorobanResourcesInstructions uint32
var outputSorobanResourcesReadBytes uint32
var outputSorobanResourcesWriteBytes uint32
+ var outputInclusionFeeBid int64
+ var outputInclusionFeeCharged int64
+ var outputResourceFeeRefund int64
+ var outputTotalNonRefundableResourceFeeCharged int64
+ var outputTotalRefundableResourceFeeCharged int64
+ var outputRentFeeCharged int64
+
+ // Soroban data can exist in V1 and FeeBump transaction envelopes
+ switch transaction.Envelope.Type {
+ case xdr.EnvelopeTypeEnvelopeTypeTx:
+ sorobanData, hasSorobanData = transaction.Envelope.V1.Tx.Ext.GetSorobanData()
+ case xdr.EnvelopeTypeEnvelopeTypeTxFeeBump:
+ sorobanData, hasSorobanData = transaction.Envelope.FeeBump.Tx.InnerTx.V1.Tx.Ext.GetSorobanData()
+ }
+
+ if hasSorobanData {
+ outputResourceFee = int64(sorobanData.ResourceFee)
+ outputSorobanResourcesInstructions = uint32(sorobanData.Resources.Instructions)
+ outputSorobanResourcesReadBytes = uint32(sorobanData.Resources.ReadBytes)
+ outputSorobanResourcesWriteBytes = uint32(sorobanData.Resources.WriteBytes)
+ outputInclusionFeeBid = int64(transaction.Envelope.Fee()) - outputResourceFee
- transactionEnvelopeV1, ok := transaction.Envelope.GetV1()
- if ok {
- sorobanData, ok := transactionEnvelopeV1.Tx.Ext.GetSorobanData()
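+ // The fee deducted up front (visible in the fee-processing changes) covers both the resource fee and
+ // the inclusion fee, so the inclusion fee charged is that balance delta minus the resource fee.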
+ accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(transaction.FeeChanges, sourceAccount.Address())
+ initialFeeCharged := accountBalanceStart - accountBalanceEnd
+ outputInclusionFeeCharged = initialFeeCharged - outputResourceFee
+
+ meta, ok := transaction.UnsafeMeta.GetV3()
if ok {
- outputResourceFee = int64(sorobanData.ResourceFee)
- outputSorobanResourcesInstructions = uint32(sorobanData.Resources.Instructions)
- outputSorobanResourcesReadBytes = uint32(sorobanData.Resources.ReadBytes)
- outputSorobanResourcesWriteBytes = uint32(sorobanData.Resources.WriteBytes)
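+ // Any resource fee refund is applied after execution, so it appears as a balance increase in TxChangesAfter.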
+ accountBalanceStart, accountBalanceEnd := getAccountBalanceFromLedgerEntryChanges(meta.TxChangesAfter, sourceAccount.Address())
+ outputResourceFeeRefund = accountBalanceEnd - accountBalanceStart
+ extV1, ok := meta.SorobanMeta.Ext.GetV1()
+ if ok {
+ outputTotalNonRefundableResourceFeeCharged = int64(extV1.TotalNonRefundableResourceFeeCharged)
+ outputTotalRefundableResourceFeeCharged = int64(extV1.TotalRefundableResourceFeeCharged)
+ outputRentFeeCharged = int64(extV1.RentFeeCharged)
+ }
}
+
+ // TODO: FeeCharged is calculated incorrectly in protocol 20. Remove when protocol is updated and the bug is fixed
+ outputFeeCharged = outputResourceFee - outputResourceFeeRefund + outputInclusionFeeCharged
}
outputCloseTime, err := utils.TimePointToUTCTimeStamp(ledgerHeader.ScpValue.CloseTime)
@@ -155,35 +184,44 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe
return TransactionOutput{}, fmt.Errorf("for ledger %d; transaction %d (transaction id=%d): %v", outputLedgerSequence, transactionIndex, outputTransactionID, err)
}
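+ // Record the transaction-level result code by its enum name (e.g. "TransactionResultCodeTxFailed").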
+ outputTxResultCode := transaction.Result.Result.Result.Code.String()
+
outputSuccessful := transaction.Result.Successful()
transformedTransaction := TransactionOutput{
- TransactionHash: outputTransactionHash,
- LedgerSequence: outputLedgerSequence,
- TransactionID: outputTransactionID,
- Account: outputAccount,
- AccountSequence: outputAccountSequence,
- MaxFee: outputMaxFee,
- FeeCharged: outputFeeCharged,
- OperationCount: outputOperationCount,
- TxEnvelope: outputTxEnvelope,
- TxResult: outputTxResult,
- TxMeta: outputTxMeta,
- TxFeeMeta: outputTxFeeMeta,
- CreatedAt: outputCreatedAt,
- MemoType: outputMemoType,
- Memo: outputMemoContents,
- TimeBounds: outputTimeBounds,
- Successful: outputSuccessful,
- LedgerBounds: outputLedgerBound,
- MinAccountSequence: outputMinSequence,
- MinAccountSequenceAge: outputMinSequenceAge,
- MinAccountSequenceLedgerGap: outputMinSequenceLedgerGap,
- ExtraSigners: formatSigners(transaction.Envelope.ExtraSigners()),
- ClosedAt: outputCloseTime,
- ResourceFee: outputResourceFee,
- SorobanResourcesInstructions: outputSorobanResourcesInstructions,
- SorobanResourcesReadBytes: outputSorobanResourcesReadBytes,
- SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes,
+ TransactionHash: outputTransactionHash,
+ LedgerSequence: outputLedgerSequence,
+ TransactionID: outputTransactionID,
+ Account: outputAccount,
+ AccountSequence: outputAccountSequence,
+ MaxFee: outputMaxFee,
+ FeeCharged: outputFeeCharged,
+ OperationCount: outputOperationCount,
+ TxEnvelope: outputTxEnvelope,
+ TxResult: outputTxResult,
+ TxMeta: outputTxMeta,
+ TxFeeMeta: outputTxFeeMeta,
+ CreatedAt: outputCreatedAt,
+ MemoType: outputMemoType,
+ Memo: outputMemoContents,
+ TimeBounds: outputTimeBounds,
+ Successful: outputSuccessful,
+ LedgerBounds: outputLedgerBound,
+ MinAccountSequence: outputMinSequence,
+ MinAccountSequenceAge: outputMinSequenceAge,
+ MinAccountSequenceLedgerGap: outputMinSequenceLedgerGap,
+ ExtraSigners: formatSigners(transaction.Envelope.ExtraSigners()),
+ ClosedAt: outputCloseTime,
+ ResourceFee: outputResourceFee,
+ SorobanResourcesInstructions: outputSorobanResourcesInstructions,
+ SorobanResourcesReadBytes: outputSorobanResourcesReadBytes,
+ SorobanResourcesWriteBytes: outputSorobanResourcesWriteBytes,
+ TransactionResultCode: outputTxResultCode,
+ InclusionFeeBid: outputInclusionFeeBid,
+ InclusionFeeCharged: outputInclusionFeeCharged,
+ ResourceFeeRefund: outputResourceFeeRefund,
+ TotalNonRefundableResourceFeeCharged: outputTotalNonRefundableResourceFeeCharged,
+ TotalRefundableResourceFeeCharged: outputTotalRefundableResourceFeeCharged,
+ RentFeeCharged: outputRentFeeCharged,
}
// Add Muxed Account Details, if exists
@@ -213,6 +251,36 @@ func TransformTransaction(transaction ingest.LedgerTransaction, lhe xdr.LedgerHe
return transformedTransaction, nil
}
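+// getAccountBalanceFromLedgerEntryChanges returns the source account's balance (in stroops) before and
+// after the given ledger entry changes: State entries provide the starting balance, Updated entries the ending balance.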
+func getAccountBalanceFromLedgerEntryChanges(changes xdr.LedgerEntryChanges, sourceAccountAddress string) (int64, int64) {
+ var accountBalanceStart int64
+ var accountBalanceEnd int64
+
+ for _, change := range changes {
+ switch change.Type {
+ case xdr.LedgerEntryChangeTypeLedgerEntryUpdated:
+ accountEntry, ok := change.Updated.Data.GetAccount()
+ if !ok {
+ continue
+ }
+
+ if accountEntry.AccountId.Address() == sourceAccountAddress {
+ accountBalanceEnd = int64(accountEntry.Balance)
+ }
+ case xdr.LedgerEntryChangeTypeLedgerEntryState:
+ accountEntry, ok := change.State.Data.GetAccount()
+ if !ok {
+ continue
+ }
+
+ if accountEntry.AccountId.Address() == sourceAccountAddress {
+ accountBalanceStart = int64(accountEntry.Balance)
+ }
+ }
+ }
+
+ return accountBalanceStart, accountBalanceEnd
+}
+
func formatSigners(s []xdr.SignerKey) pq.StringArray {
if s == nil {
return nil
diff --git a/internal/transform/transaction_test.go b/internal/transform/transaction_test.go
index bf21e463..fd1d8b47 100644
--- a/internal/transform/transaction_test.go
+++ b/internal/transform/transaction_test.go
@@ -47,20 +47,20 @@ func TestTransformTransaction(t *testing.T) {
assert.NoError(t, err)
tests := []transformTest{
- transformTest{
+ {
negativeSeqInput,
TransactionOutput{},
- fmt.Errorf("The account's sequence number (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"),
+ fmt.Errorf("the account's sequence number (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"),
},
{
badFeeChargedInput,
TransactionOutput{},
- fmt.Errorf("The fee charged (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"),
+ fmt.Errorf("the fee charged (-1) is negative for ledger 0; transaction 1 (transaction id=4096)"),
},
{
badTimeboundInput,
TransactionOutput{},
- fmt.Errorf("The max time is earlier than the min time (100 < 1594586912) for ledger 0; transaction 1 (transaction id=4096)"),
+ fmt.Errorf("the max time is earlier than the min time (100 < 1594586912) for ledger 0; transaction 1 (transaction id=4096)"),
},
}
@@ -82,7 +82,7 @@ func TestTransformTransaction(t *testing.T) {
func makeTransactionTestOutput() (output []TransactionOutput, err error) {
correctTime, err := time.Parse("2006-1-2 15:04:05 MST", "2020-07-09 05:28:42 UTC")
output = []TransactionOutput{
- TransactionOutput{
+ {
TxEnvelope: "AAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAABX5ABjydzAABBtwAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
TxResult: "AAAAAAAAASz/////AAAAAQAAAAAAAAAAAAAAAAAAAAA=",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
@@ -105,8 +105,9 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) {
SorobanResourcesInstructions: 0,
SorobanResourcesReadBytes: 0,
SorobanResourcesWriteBytes: 0,
+ TransactionResultCode: "TransactionResultCodeTxFailed",
},
- TransactionOutput{
+ {
TxEnvelope: "AAAABQAAAABnzACGTDuJFoxqr+C8NHCe0CHFBXLi+YhhNCIILCIpcgAAAAAAABwgAAAAAgAAAACI4aa0pXFSj6qfJuIObLw/5zyugLRGYwxb7wFSr3B9eAAAAAACFPY2AAAAfQAAAAEAAAAAAAAAAAAAAABfBqt0AAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
TxResult: "AAAAAAAAASwAAAABqH/vXusmAmnDgPLeRWqtcrWbsxWqrHd4YEVuCdrAuvsAAAAAAAAAZAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
@@ -132,10 +133,11 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) {
SorobanResourcesInstructions: 0,
SorobanResourcesReadBytes: 0,
SorobanResourcesWriteBytes: 0,
+ TransactionResultCode: "TransactionResultCodeTxFeeBumpInnerSuccess", //inner fee bump success
},
- TransactionOutput{
+ {
TxEnvelope: "AAAAAgAAAAAcR0GXGO76pFs4y38vJVAanjnLg4emNun7zAx0pHcDGAAAAGQBpLyvsiV6gwAAAAIAAAABAAAAAAAAAAAAAAAAXwardAAAAAEAAAAFAAAACgAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAMCAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAABdITDVhQ2dvelFISVc3c1NjNVhkY2ZtUgAAAAABAAAAAQAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAAIAAAAAAAAAAAAAAAAAAAAAAQIDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",
- TxResult: "AAAAAAAAAGT/////AAAAAQAAAAAAAAAAAAAAAAAAAAA=",
+ TxResult: "AAAAAAAAAGT////5AAAAAA==",
TxMeta: "AAAAAQAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAACAAAAAwAAAAAAAAAFAQIDBAUGBwgJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFVU1NEAAAAAGtY3WxokwttAx3Fu/riPvoew/C7WMK8jZONR8Hfs75zAAAAHgAAAAAAAYagAAAAAAAAA+gAAAAAAAAB9AAAAAAAAAAZAAAAAAAAAAEAAAAAAAAABQECAwQFBgcICQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVVNTRAAAAABrWN1saJMLbQMdxbv64j76HsPwu1jCvI2TjUfB37O+cwAAAB4AAAAAAAGKiAAAAAAAAARMAAAAAAAAAfYAAAAAAAAAGgAAAAAAAAAA",
TxFeeMeta: "AAAAAA==",
TransactionHash: "a87fef5eeb260269c380f2de456aad72b59bb315aaac777860456e09dac0bafb",
@@ -160,6 +162,7 @@ func makeTransactionTestOutput() (output []TransactionOutput, err error) {
SorobanResourcesInstructions: 0,
SorobanResourcesReadBytes: 0,
SorobanResourcesWriteBytes: 0,
+ TransactionResultCode: "TransactionResultCodeTxInsufficientBalance",
},
}
return
@@ -168,7 +171,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
hardCodedMemoText := "HL5aCgozQHIW7sSc5XdcfmR"
hardCodedTransactionHash := xdr.Hash([32]byte{0xa8, 0x7f, 0xef, 0x5e, 0xeb, 0x26, 0x2, 0x69, 0xc3, 0x80, 0xf2, 0xde, 0x45, 0x6a, 0xad, 0x72, 0xb5, 0x9b, 0xb3, 0x15, 0xaa, 0xac, 0x77, 0x78, 0x60, 0x45, 0x6e, 0x9, 0xda, 0xc0, 0xba, 0xfb})
genericResultResults := &[]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
Type: xdr.OperationTypeCreateAccount,
CreateAccountResult: &xdr.CreateAccountResult{
@@ -195,7 +198,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
Ed25519: source.Ed25519,
}
transaction = []ingest.LedgerTransaction{
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -217,7 +220,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount2,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -241,7 +244,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
},
},
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -268,7 +271,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount2,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -297,7 +300,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
Result: xdr.InnerTransactionResultResult{
Code: xdr.TransactionResultCodeTxSuccess,
Results: &[]xdr.OperationResult{
- xdr.OperationResult{
+ {
Tr: &xdr.OperationResultTr{
CreateAccountResult: &xdr.CreateAccountResult{},
},
@@ -306,14 +309,12 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
},
},
- Results: &[]xdr.OperationResult{
- xdr.OperationResult{},
- },
+ Results: &[]xdr.OperationResult{{}},
},
},
},
},
- ingest.LedgerTransaction{
+ {
Index: 1,
UnsafeMeta: hardCodedMeta,
Envelope: xdr.TransactionEnvelope{
@@ -342,7 +343,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
},
Operations: []xdr.Operation{
- xdr.Operation{
+ {
SourceAccount: &testAccount4,
Body: xdr.OperationBody{
Type: xdr.OperationTypePathPaymentStrictReceive,
@@ -360,7 +361,7 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
Result: xdr.TransactionResult{
FeeCharged: 100,
Result: xdr.TransactionResultResult{
- Code: xdr.TransactionResultCodeTxFailed,
+ Code: xdr.TransactionResultCodeTxInsufficientBalance,
Results: genericResultResults,
},
},
@@ -368,19 +369,19 @@ func makeTransactionTestInput() (transaction []ingest.LedgerTransaction, history
},
}
historyHeader = []xdr.LedgerHeaderHistoryEntry{
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521816,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
},
},
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521817,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
},
},
- xdr.LedgerHeaderHistoryEntry{
+ {
Header: xdr.LedgerHeader{
LedgerSeq: 30521818,
ScpValue: xdr.StellarValue{CloseTime: 1594272522},
diff --git a/internal/transform/trustline.go b/internal/transform/trustline.go
index 3099f306..dd0f3c26 100644
--- a/internal/transform/trustline.go
+++ b/internal/transform/trustline.go
@@ -22,7 +22,7 @@ func TransformTrustline(ledgerChange ingest.Change, header xdr.LedgerHeaderHisto
trustEntry, ok := ledgerEntry.Data.GetTrustLine()
if !ok {
- return TrustlineOutput{}, fmt.Errorf("Could not extract trustline data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return TrustlineOutput{}, fmt.Errorf("could not extract trustline data from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
outputAccountID, err := trustEntry.AccountId.GetAddress()
@@ -86,12 +86,12 @@ func trustLineEntryToLedgerKeyString(trustLine xdr.TrustLineEntry) (string, erro
ledgerKey := &xdr.LedgerKey{}
err := ledgerKey.SetTrustline(trustLine.AccountId, trustLine.Asset)
if err != nil {
- return "", fmt.Errorf("Error running ledgerKey.SetTrustline when calculating ledger key")
+ return "", fmt.Errorf("error running ledgerKey.SetTrustline when calculating ledger key")
}
key, err := ledgerKey.MarshalBinary()
if err != nil {
- return "", fmt.Errorf("Error running MarshalBinaryCompress when calculating ledger key")
+ return "", fmt.Errorf("error running MarshalBinaryCompress when calculating ledger key")
}
return base64.StdEncoding.EncodeToString(key), nil
diff --git a/internal/transform/trustline_test.go b/internal/transform/trustline_test.go
index 24efa2f5..437fd7dc 100644
--- a/internal/transform/trustline_test.go
+++ b/internal/transform/trustline_test.go
@@ -37,7 +37,7 @@ func TestTransformTrustline(t *testing.T) {
},
},
},
- TrustlineOutput{}, fmt.Errorf("Could not extract trustline data from ledger entry; actual type is LedgerEntryTypeOffer"),
+ TrustlineOutput{}, fmt.Errorf("could not extract trustline data from ledger entry; actual type is LedgerEntryTypeOffer"),
},
}
diff --git a/internal/transform/ttl.go b/internal/transform/ttl.go
index cb9218e1..c0e6fe9c 100644
--- a/internal/transform/ttl.go
+++ b/internal/transform/ttl.go
@@ -17,7 +17,7 @@ func TransformTtl(ledgerChange ingest.Change, header xdr.LedgerHeaderHistoryEntr
ttl, ok := ledgerEntry.Data.GetTtl()
if !ok {
- return TtlOutput{}, fmt.Errorf("Could not extract ttl from ledger entry; actual type is %s", ledgerEntry.Data.Type)
+ return TtlOutput{}, fmt.Errorf("could not extract ttl from ledger entry; actual type is %s", ledgerEntry.Data.Type)
}
// LedgerEntryChange must contain a ttl change to be parsed, otherwise skip
diff --git a/internal/transform/ttl_test.go b/internal/transform/ttl_test.go
index 4d49a54b..8f14e089 100644
--- a/internal/transform/ttl_test.go
+++ b/internal/transform/ttl_test.go
@@ -31,7 +31,7 @@ func TestTransformTtl(t *testing.T) {
},
},
},
- TtlOutput{}, fmt.Errorf("Could not extract ttl from ledger entry; actual type is LedgerEntryTypeOffer"),
+ TtlOutput{}, fmt.Errorf("could not extract ttl from ledger entry; actual type is LedgerEntryTypeOffer"),
},
}
diff --git a/internal/utils/main.go b/internal/utils/main.go
index 1a36eabb..fc54d646 100644
--- a/internal/utils/main.go
+++ b/internal/utils/main.go
@@ -16,6 +16,7 @@ import (
"github.com/stellar/go/ingest/ledgerbackend"
"github.com/stellar/go/keypair"
"github.com/stellar/go/network"
+ "github.com/stellar/go/support/datastore"
"github.com/stellar/go/support/storage"
"github.com/stellar/go/txnbuild"
"github.com/stellar/go/xdr"
@@ -226,27 +227,29 @@ func AddLPOperations(txMeta []xdr.OperationMeta, AssetA, AssetB xdr.Asset) []xdr
return txMeta
}
-// AddCommonFlags adds the flags common to all commands: end-ledger, stdout, and strict-export
+// AddCommonFlags adds the flags common to all commands: end-ledger, strict-export, testnet, futurenet, extra-fields, and the txmeta datastore flags (captive-core, datastore-path, buffer-size, num-workers, retry-limit, retry-wait)
func AddCommonFlags(flags *pflag.FlagSet) {
flags.Uint32P("end-ledger", "e", 0, "The ledger sequence number for the end of the export range")
flags.Bool("strict-export", false, "If set, transform errors will be fatal.")
flags.Bool("testnet", false, "If set, will connect to Testnet instead of Mainnet.")
flags.Bool("futurenet", false, "If set, will connect to Futurenet instead of Mainnet.")
flags.StringToStringP("extra-fields", "u", map[string]string{}, "Additional fields to append to output jsons. Used for appending metadata")
+ flags.Bool("captive-core", false, "If set, run captive core to retrieve data. Otherwise use TxMeta file datastore.")
+ flags.String("datastore-path", "sdf-ledger-close-metas/ledgers", "Datastore bucket path to read txmeta files from.")
+ flags.Uint32("buffer-size", 200, "Buffer size sets the max limit for the number of txmeta files that can be held in memory.")
+ flags.Uint32("num-workers", 10, "Number of workers to spawn that read txmeta files from the datastore.")
+ flags.Uint32("retry-limit", 3, "Datastore GetLedger retry limit.")
+ flags.Uint32("retry-wait", 5, "Time in seconds to wait for GetLedger retry.")
}
-// AddArchiveFlags adds the history archive specific flags: start-ledger, output, and limit
+// AddArchiveFlags adds the history archive specific flags: start-ledger, output, and limit
+// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Rename AddArchiveFlags to something more relevant
func AddArchiveFlags(objectName string, flags *pflag.FlagSet) {
flags.Uint32P("start-ledger", "s", 2, "The ledger sequence number for the beginning of the export period. Defaults to genesis ledger")
flags.StringP("output", "o", "exported_"+objectName+".txt", "Filename of the output file")
flags.Int64P("limit", "l", -1, "Maximum number of "+objectName+" to export. If the limit is set to a negative number, all the objects in the provided range are exported")
}
-// AddBucketFlags adds the bucket list specifc flags: output
-func AddBucketFlags(objectName string, flags *pflag.FlagSet) {
- flags.StringP("output", "o", "exported_"+objectName+".txt", "Filename of the output file")
-}
-
// AddCloudStorageFlags adds the cloud storage releated flags: cloud-storage-bucket, cloud-credentials
func AddCloudStorageFlags(flags *pflag.FlagSet) {
flags.String("cloud-storage-bucket", "stellar-etl-cli", "Cloud storage bucket to export to.")
@@ -256,11 +259,13 @@ func AddCloudStorageFlags(flags *pflag.FlagSet) {
}
// AddCoreFlags adds the captive core specific flags: core-executable, core-config, batch-size, and output flags
+// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Deprecate?
func AddCoreFlags(flags *pflag.FlagSet, defaultFolder string) {
flags.StringP("core-executable", "x", "", "Filepath to the stellar-core executable")
flags.StringP("core-config", "c", "", "Filepath to the config file for stellar-core")
flags.Uint32P("batch-size", "b", 64, "number of ledgers to export changes from in each batches")
+ // TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Move output to different flag group
flags.StringP("output", "o", defaultFolder, "Folder that will contain the output files")
flags.Uint32P("start-ledger", "s", 2, "The ledger sequence number for the beginning of the export period. Defaults to genesis ledger")
@@ -279,33 +284,223 @@ func AddExportTypeFlags(flags *pflag.FlagSet) {
flags.BoolP("export-ttl", "", false, "set in order to export ttl changes")
}
-// MustCommonFlags gets the values of the the flags common to all commands: end-ledger and strict-export. If any do not exist, it stops the program fatally using the logger
-func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) (endNum uint32, strictExport, isTest bool, isFuture bool, extra map[string]string) {
+// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 improve flag/param handling;
+// some flags should be renamed to something more descriptive
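+// FlagValues holds the parsed values of every flag used by the export commands.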
+type FlagValues struct {
+ StartNum uint32
+ EndNum uint32
+ StrictExport bool
+ IsTest bool
+ IsFuture bool
+ Extra map[string]string
+ UseCaptiveCore bool
+ DatastorePath string
+ BufferSize uint32
+ NumWorkers uint32
+ RetryLimit uint32
+ RetryWait uint32
+ Path string
+ Limit int64
+ Bucket string
+ Credentials string
+ Provider string
+}
+
+// MustFlags gets the values of the flags for all commands.
+// If any do not exist, it stops the program fatally using the logger
+// TODO: https://stellarorg.atlassian.net/browse/HUBBLE-386 Not sure if all these arg checks are necessary
+func MustFlags(flags *pflag.FlagSet, logger *EtlLogger) FlagValues {
endNum, err := flags.GetUint32("end-ledger")
if err != nil {
logger.Fatal("could not get end sequence number: ", err)
}
- strictExport, err = flags.GetBool("strict-export")
+ strictExport, err := flags.GetBool("strict-export")
if err != nil {
logger.Fatal("could not get strict-export boolean: ", err)
}
- isTest, err = flags.GetBool("testnet")
+ isTest, err := flags.GetBool("testnet")
if err != nil {
logger.Fatal("could not get testnet boolean: ", err)
}
- isFuture, err = flags.GetBool("futurenet")
+ isFuture, err := flags.GetBool("futurenet")
if err != nil {
logger.Fatal("could not get futurenet boolean: ", err)
}
- extra, err = flags.GetStringToString("extra-fields")
+ extra, err := flags.GetStringToString("extra-fields")
if err != nil {
logger.Fatal("could not get extra fields string: ", err)
}
- return
+
+ useCaptiveCore, err := flags.GetBool("captive-core")
+ if err != nil {
+ logger.Fatal("could not get captive-core flag: ", err)
+ }
+
+ datastorePath, err := flags.GetString("datastore-path")
+ if err != nil {
+ logger.Fatal("could not get datastore-bucket-path string: ", err)
+ }
+
+ bufferSize, err := flags.GetUint32("buffer-size")
+ if err != nil {
+ logger.Fatal("could not get buffer-size uint32: ", err)
+ }
+
+ numWorkers, err := flags.GetUint32("num-workers")
+ if err != nil {
+ logger.Fatal("could not get num-workers uint32: ", err)
+ }
+
+ retryLimit, err := flags.GetUint32("retry-limit")
+ if err != nil {
+ logger.Fatal("could not get retry-limit uint32: ", err)
+ }
+
+ retryWait, err := flags.GetUint32("retry-wait")
+ if err != nil {
+ logger.Fatal("could not get retry-wait uint32: ", err)
+ }
+
+ startNum, err := flags.GetUint32("start-ledger")
+ if err != nil {
+ logger.Fatal("could not get start sequence number: ", err)
+ }
+
+ path, err := flags.GetString("output")
+ if err != nil {
+ logger.Fatal("could not get output filename: ", err)
+ }
+
+ limit, err := flags.GetInt64("limit")
+ if err != nil {
+ logger.Fatal("could not get limit: ", err)
+ }
+
+ bucket, err := flags.GetString("cloud-storage-bucket")
+ if err != nil {
+ logger.Fatal("could not get cloud storage bucket: ", err)
+ }
+
+ credentials, err := flags.GetString("cloud-credentials")
+ if err != nil {
+ logger.Fatal("could not get cloud credentials file: ", err)
+ }
+
+ provider, err := flags.GetString("cloud-provider")
+ if err != nil {
+ logger.Fatal("could not get cloud provider: ", err)
+ }
+
+ return FlagValues{
+ StartNum: startNum,
+ EndNum: endNum,
+ StrictExport: strictExport,
+ IsTest: isTest,
+ IsFuture: isFuture,
+ Extra: extra,
+ UseCaptiveCore: useCaptiveCore,
+ DatastorePath: datastorePath,
+ BufferSize: bufferSize,
+ NumWorkers: numWorkers,
+ RetryLimit: retryLimit,
+ RetryWait: retryWait,
+ Path: path,
+ Limit: limit,
+ Bucket: bucket,
+ Credentials: credentials,
+ Provider: provider,
+ }
+}
+
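+// CommonFlagValues holds the parsed values of the flags registered by AddCommonFlags.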
+type CommonFlagValues struct {
+ EndNum uint32
+ StrictExport bool
+ IsTest bool
+ IsFuture bool
+ Extra map[string]string
+ UseCaptiveCore bool
+ DatastorePath string
+ BufferSize uint32
+ NumWorkers uint32
+ RetryLimit uint32
+ RetryWait uint32
+}
+
+// MustCommonFlags gets the values of the flags common to all commands: end-ledger, strict-export, testnet, futurenet, extra-fields, and the txmeta datastore flags.
+// If any do not exist, it stops the program fatally using the logger
+func MustCommonFlags(flags *pflag.FlagSet, logger *EtlLogger) CommonFlagValues {
+ endNum, err := flags.GetUint32("end-ledger")
+ if err != nil {
+ logger.Fatal("could not get end sequence number: ", err)
+ }
+
+ strictExport, err := flags.GetBool("strict-export")
+ if err != nil {
+ logger.Fatal("could not get strict-export boolean: ", err)
+ }
+
+ isTest, err := flags.GetBool("testnet")
+ if err != nil {
+ logger.Fatal("could not get testnet boolean: ", err)
+ }
+
+ isFuture, err := flags.GetBool("futurenet")
+ if err != nil {
+ logger.Fatal("could not get futurenet boolean: ", err)
+ }
+
+ extra, err := flags.GetStringToString("extra-fields")
+ if err != nil {
+ logger.Fatal("could not get extra fields string: ", err)
+ }
+
+ useCaptiveCore, err := flags.GetBool("captive-core")
+ if err != nil {
+ logger.Fatal("could not get captive-core flag: ", err)
+ }
+
+ datastorePath, err := flags.GetString("datastore-path")
+ if err != nil {
+ logger.Fatal("could not get datastore-bucket-path string: ", err)
+ }
+
+ bufferSize, err := flags.GetUint32("buffer-size")
+ if err != nil {
+ logger.Fatal("could not get buffer-size uint32: ", err)
+ }
+
+ numWorkers, err := flags.GetUint32("num-workers")
+ if err != nil {
+ logger.Fatal("could not get num-workers uint32: ", err)
+ }
+
+ retryLimit, err := flags.GetUint32("retry-limit")
+ if err != nil {
+ logger.Fatal("could not get retry-limit uint32: ", err)
+ }
+
+ retryWait, err := flags.GetUint32("retry-wait")
+ if err != nil {
+ logger.Fatal("could not get retry-wait uint32: ", err)
+ }
+
+ return CommonFlagValues{
+ EndNum: endNum,
+ StrictExport: strictExport,
+ IsTest: isTest,
+ IsFuture: isFuture,
+ Extra: extra,
+ UseCaptiveCore: useCaptiveCore,
+ DatastorePath: datastorePath,
+ BufferSize: bufferSize,
+ NumWorkers: numWorkers,
+ RetryLimit: retryLimit,
+ RetryWait: retryWait,
+ }
}
// MustArchiveFlags gets the values of the the history archive specific flags: start-ledger, output, and limit
@@ -404,7 +599,7 @@ func MustExportTypeFlags(flags *pflag.FlagSet, logger *EtlLogger) map[string]boo
"export-ttl": false,
}
- for export_name, _ := range exports {
+ for export_name := range exports {
exports[export_name], err = flags.GetBool(export_name)
if err != nil {
logger.Fatalf("could not get %s flag: %v", export_name, err)
@@ -622,30 +817,38 @@ type EnvironmentDetails struct {
ArchiveURLs []string
BinaryPath string
CoreConfig string
+ Network string
+ CommonFlagValues CommonFlagValues
}
// GetPassphrase returns the correct Network Passphrase based on env preference
-func GetEnvironmentDetails(isTest bool, isFuture bool) (details EnvironmentDetails) {
- if isTest {
+func GetEnvironmentDetails(commonFlags CommonFlagValues) (details EnvironmentDetails) {
+ if commonFlags.IsTest {
// testnet passphrase to be used for testing
details.NetworkPassphrase = network.TestNetworkPassphrase
details.ArchiveURLs = testArchiveURLs
details.BinaryPath = "/usr/bin/stellar-core"
- details.CoreConfig = "docker/stellar-core_testnet.cfg"
+ details.CoreConfig = "/etl/docker/stellar-core_testnet.cfg"
+ details.Network = "testnet"
+ details.CommonFlagValues = commonFlags
return details
- } else if isFuture {
+ } else if commonFlags.IsFuture {
// details.NetworkPassphrase = network.FutureNetworkPassphrase
details.NetworkPassphrase = "Test SDF Future Network ; October 2022"
details.ArchiveURLs = futureArchiveURLs
details.BinaryPath = "/usr/bin/stellar-core"
- details.CoreConfig = "docker/stellar-core_futurenet.cfg"
+ details.CoreConfig = "/etl/docker/stellar-core_futurenet.cfg"
+ details.Network = "futurenet"
+ details.CommonFlagValues = commonFlags
return details
} else {
// default: mainnet
details.NetworkPassphrase = network.PublicNetworkPassphrase
details.ArchiveURLs = mainArchiveURLs
details.BinaryPath = "/usr/bin/stellar-core"
- details.CoreConfig = "docker/stellar-core.cfg"
+ details.CoreConfig = "/etl/docker/stellar-core.cfg"
+ details.Network = "pubnet"
+ details.CommonFlagValues = commonFlags
return details
}
}
@@ -684,6 +887,9 @@ func (e EnvironmentDetails) GetUnboundedLedgerCloseMeta(end uint32) (xdr.LedgerC
ctx := context.Background()
backend, err := e.CreateCaptiveCoreBackend()
+ if err != nil {
+ return xdr.LedgerCloseMeta{}, err
+ }
ledgerRange := ledgerbackend.UnboundedRange(end)
@@ -713,3 +919,135 @@ func LedgerEntryToLedgerKeyHash(ledgerEntry xdr.LedgerEntry) string {
return ledgerKeyHash
}
+
+// CreateLedgerBackend creates a ledger backend using either captive core or the txmeta datastore.
+// Defaults to the datastore.
+func CreateLedgerBackend(ctx context.Context, useCaptiveCore bool, env EnvironmentDetails) (ledgerbackend.LedgerBackend, error) {
+ // Create ledger backend from captive core
+ if useCaptiveCore {
+ backend, err := env.CreateCaptiveCoreBackend()
+ if err != nil {
+ return nil, err
+ }
+ return backend, nil
+ }
+
+ // Create ledger backend from datastore
+ params := make(map[string]string)
+ params["destination_bucket_path"] = env.CommonFlagValues.DatastorePath
+ dataStoreConfig := datastore.DataStoreConfig{
+ Type: "GCS",
+ Params: params,
+ }
+
+ dataStore, err := datastore.NewDataStore(ctx, dataStoreConfig, env.Network)
+ if err != nil {
+ return nil, err
+ }
+
+ // TODO: In the future these will come from a config file written by ledgerexporter
+ // Hard code ledger batch values for now
+ ledgerBatchConfig := datastore.LedgerBatchConfig{
+ LedgersPerFile: 1,
+ FilesPerPartition: 64000,
+ }
+
+ BSBackendConfig := ledgerbackend.BufferedStorageBackendConfig{
+ LedgerBatchConfig: ledgerBatchConfig,
+ DataStore: dataStore,
+ BufferSize: env.CommonFlagValues.BufferSize,
+ NumWorkers: env.CommonFlagValues.NumWorkers,
+ RetryLimit: env.CommonFlagValues.RetryLimit,
+ RetryWait: time.Duration(env.CommonFlagValues.RetryWait) * time.Second,
+ }
+
+ backend, err := ledgerbackend.NewBufferedStorageBackend(ctx, BSBackendConfig)
+ if err != nil {
+ return nil, err
+ }
+ return backend, nil
+}
+
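+// LedgerKeyToLedgerKeyHash returns the hex-encoded hash of the binary-encoded ledger key.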
+func LedgerKeyToLedgerKeyHash(ledgerKey xdr.LedgerKey) string {
+ ledgerKeyByte, _ := ledgerKey.MarshalBinary()
+ hashedLedgerKeyByte := hash.Hash(ledgerKeyByte)
+ ledgerKeyHash := hex.EncodeToString(hashedLedgerKeyByte[:])
+
+ return ledgerKeyHash
+}
+
+// AccountSignersChanged returns true if account signers have changed.
+// Notice: this will return true on master key changes too!
+func AccountSignersChanged(c ingest.Change) bool {
+ if c.Type != xdr.LedgerEntryTypeAccount {
+ panic("This should not be called on changes other than Account changes")
+ }
+
+ // New account so new master key (which is also a signer)
+ if c.Pre == nil {
+ return true
+ }
+
+ // Account merged. An account being merged can still have signers.
+ // c.Pre != nil at this point.
+ if c.Post == nil {
+ return true
+ }
+
+ // c.Pre != nil && c.Post != nil at this point.
+ preAccountEntry := c.Pre.Data.MustAccount()
+ postAccountEntry := c.Post.Data.MustAccount()
+
+ preSigners := preAccountEntry.SignerSummary()
+ postSigners := postAccountEntry.SignerSummary()
+
+ if len(preSigners) != len(postSigners) {
+ return true
+ }
+
+ for postSigner, postWeight := range postSigners {
+ preWeight, exist := preSigners[postSigner]
+ if !exist {
+ return true
+ }
+
+ if preWeight != postWeight {
+ return true
+ }
+ }
+
+ preSignerSponsors := preAccountEntry.SignerSponsoringIDs()
+ postSignerSponsors := postAccountEntry.SignerSponsoringIDs()
+
+ if len(preSignerSponsors) != len(postSignerSponsors) {
+ return true
+ }
+
+ for i := 0; i < len(preSignerSponsors); i++ {
+ preSponsor := preSignerSponsors[i]
+ postSponsor := postSignerSponsors[i]
+
+ if preSponsor == nil && postSponsor != nil {
+ return true
+ } else if preSponsor != nil && postSponsor == nil {
+ return true
+ } else if preSponsor != nil && postSponsor != nil {
+ preSponsorAccountID := xdr.AccountId(*preSponsor)
+ preSponsorAddress := preSponsorAccountID.Address()
+
+ postSponsorAccountID := xdr.AccountId(*postSponsor)
+ postSponsorAddress := postSponsorAccountID.Address()
+
+ if preSponsorAddress != postSponsorAddress {
+ return true
+ }
+ }
+ }
+
+ return false
+}
+
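+// HistoryArchiveLedgerAndLCM pairs a history archive ledger entry with its corresponding LedgerCloseMeta.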
+type HistoryArchiveLedgerAndLCM struct {
+ Ledger historyarchive.Ledger
+ LCM xdr.LedgerCloseMeta
+}