# Merge branch 'new-main' into feat/mtg-979

Showing 53 changed files with 2,147 additions and 1,476 deletions.
Changes to the example environment configuration:

```diff
@@ -35,12 +35,12 @@ INGESTER_RPC_HOST='https://rpc:port'
 INGESTER_BACKFILLER_SOURCE_MODE=RPC #RPC or Bigtable
 INGESTER_BIG_TABLE_CONFIG='{creds="/usr/src/app/creds.json", timeout=1000}'
 
-INGESTER_RUN_SEQUENCE_CONSISTENT_CHECKER=true
+INGESTER_RUN_SEQUENCE_CONSISTENT_CHECKER=false # experimental, enable only for testing purposes
 # Optional, required only if the fork cleaner needs to run; default is false. Unstable: it removes forked items, but also removes some valid leaves. Recommended for testing purposes only!
 INGESTER_RUN_FORK_CLEANER=false
 INGESTER_RUN_BUBBLEGUM_BACKFILLER=true
 
-INGESTER_BACKFILLER_MODE=PersistAndIngest
+INGESTER_BACKFILLER_MODE=PersistAndIngest # the only available option; the variable will be removed
 INGESTER_SLOT_UNTIL=0
 INGESTER_SLOT_START_FROM=0
 INGESTER_WORKERS_COUNT=100
@@ -57,14 +57,21 @@ INGESTER_PROFILING_FILE_PATH="/path/to/profiling"
 
 INGESTER_FILE_STORAGE_PATH_CONTAINER="/usr/src/app/file_storage"
 INGESTER_FILE_STORAGE_PATH="path/to/file/storage"
-INGESTER_MIGRATION_STORAGE_PATH=/path/to/migration_storage
+INGESTER_MIGRATION_STORAGE_PATH=/path/to/migration_storage # requires explanation
 
 INGESTER_ROCKS_FLUSH_BEFORE_BACKUP=false
 INGESTER_ROCKS_INTERVAL_IN_SECONDS=3600
 INGESTER_ROCKS_SYNC_INTERVAL_SECONDS=2
 
 INGESTER_SYNCHRONIZER_DUMP_PATH="/path/to/dump"
 
+INGESTER_DISABLE_SYNCHRONIZER=true
+INGESTER_SKIP_CHECK_TREE_GAPS=true
+# path to the slots data, required for the backfiller to work
+INGESTER_SLOTS_DB_PATH=/path/to/slots-data
+INGESTER_SECONDARY_SLOTS_DB_PATH=/path/to/secondary/ingester-slots # should be removed
 
+# a common log level for all instances; will be overridden by specific log levels, requires refactoring
+RUST_LOG=info
 # API instance config
 API_LOG_LEVEL=info
@@ -91,21 +98,23 @@ API_CONSISTENCE_SYNCHRONIZATION_API_THRESHOLD=1000000
 API_CONSISTENCE_BACKFILLING_SLOTS_THRESHOLD=500
 
 # if set to true, the API will not check whether the tree the user requests assets from has any gaps
-API_SKIP_CHECK_TREE_GAPS=false
+API_SKIP_CHECK_TREE_GAPS=true
 
 # Synchronizer instance config
 SYNCHRONIZER_LOG_LEVEL=info
 
 SYNCHRONIZER_DATABASE_CONFIG='{max_postgres_connections=100, url="postgres://user:[email protected]:5432/database"}'
 SYNCHRONIZER_ROCKS_DB_PATH_CONTAINER="/usr/src/rocksdb-data"
-SYNCHRONIZER_ROCKS_DB_SECONDARY_PATH_CONTAINER="path/to/rocks/secondary/db"
+SYNCHRONIZER_ROCKS_DB_SECONDARY_PATH_CONTAINER="path/to/rocks/secondary/db" # should be removed
 
 SYNCHRONIZER_METRICS_PORT=6091
 
 SYNCHRONIZER_DUMP_PATH="/path/to/migration_data"
 
 SYNCHRONIZER_DUMP_SYNCHRONIZER_BATCH_SIZE=10000
-SYNCHRONIZER_DUMP_SYNC_THRESHOLD=50000000
+# threshold on the number of updates not being synchronized for the synchronizer to dump-load on start
+# 150M is a rough threshold above which the synchronizer will likely complete a full dump-load cycle faster than doing an incremental sync
+SYNCHRONIZER_DUMP_SYNC_THRESHOLD=150000000
 
 SYNCHRONIZER_PARALLEL_TASKS=30
 
```
New file (+89 lines):
# GPA tool

## Script Description

This script interacts with the Metaplex Core program on SVM blockchains to retrieve the assets associated with a specific collection and to process a CSV file containing hexadecimal keys. It converts those keys to Base58 and identifies the keys that are missing from the `assets_v3.csv` file. The result is saved to a new CSV file.

---

## Prerequisites

### Node.js and npm
Ensure you have **Node.js** installed (v14 or higher recommended).

### Install Dependencies
Run the following command to install the required packages:

```bash
npm install @metaplex-foundation/umi-bundle-defaults @metaplex-foundation/mpl-core @metaplex-foundation/umi bs58
```

Note: `fs` is a Node.js built-in module and does not need to be installed. You will also need `ts-node` (and `typescript`) available to run the script.

### Set Up an SVM RPC Node
The script uses the RPC endpoint `https://api.mainnet-beta.solana.com`. You can replace this with your preferred RPC endpoint if needed.
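For example, a minimal sketch of making the endpoint configurable (the `RPC_URL` variable name is an illustration, not part of the script):

```typescript
import { createUmi } from '@metaplex-foundation/umi-bundle-defaults';
import { mplCore } from '@metaplex-foundation/mpl-core';

// Hypothetical override: fall back to the public mainnet endpoint
// when no RPC_URL environment variable is provided.
const endpoint = process.env.RPC_URL ?? 'https://api.mainnet-beta.solana.com';
const umi = createUmi(endpoint, 'processed').use(mplCore());
```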
---

## How to Use

1. **Set Up the Collection Public Key**
   Replace the placeholder public key `C8uYT2W93pBcmMVxSoUyzTW5mKVFTEpMNEPx1Y15MFyk` with the public key of the Core collection you want to query.

2. **Prepare the CSV File**
   - Create a CSV file named `assets_v3.csv` in the root directory.
   - This file can be created by selecting keys from the Postgres database; see the export example after this list. Here is the SQL query that selects asset keys for the collection referenced above: `select ast_pubkey from assets_v3 where ast_collection = decode('a57708125d64ff943f1adf2fa45bfb7c0d8e581d6f3d036d6e41d64cd70434f3', 'HEX');`
   - The file must include a header row with `ast_pubkey` as the column name.
   - Each subsequent row should contain a hexadecimal-encoded key (e.g., `0x1234abcd`).

3. **Run the Script**
   Execute the script with:

   ```bash
   ts-node app.ts
   ```

4. **Output**
   - The number of assets in the collection is printed to the console.
   - Missing keys (those not found in the PG data) are written to `absentKeys.csv`.
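As referenced in step 2, one way to produce `assets_v3.csv` directly from Postgres is psql's `\copy`; a minimal sketch, assuming the connection string below is replaced with your own (`encode(..., 'hex')` yields bare hex without a prefix, which the script accepts):

```bash
# Export asset keys for the collection straight to assets_v3.csv,
# including the ast_pubkey header row the script expects.
psql "postgres://user:password@localhost:5432/database" -c \
  "\copy (select encode(ast_pubkey, 'hex') as ast_pubkey from assets_v3 where ast_collection = decode('a57708125d64ff943f1adf2fa45bfb7c0d8e581d6f3d036d6e41d64cd70434f3', 'HEX')) to 'assets_v3.csv' with csv header"
```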
---

## Functionality Overview

### `main`
1. Connects to the SVM RPC.
2. Fetches all assets associated with the specified collection key.
3. Extracts the public keys of those assets.

### `processCsvToBase58`
1. Reads the `assets_v3.csv` file.
2. Validates the file format.
3. Converts the hexadecimal keys from the file to Base58.
4. Compares these keys with the blockchain data.
5. Writes the absent keys to `absentKeys.csv`.
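Step 3 boils down to a single `bs58.encode` over the decoded bytes; a minimal standalone sketch (the `hexToBase58` helper name is just for illustration):

```typescript
import bs58 from 'bs58';

// Convert a hex-encoded key (with or without a 0x prefix) to Base58.
function hexToBase58(hex: string): string {
  const stripped = hex.startsWith('0x') ? hex.slice(2) : hex;
  return bs58.encode(Buffer.from(stripped, 'hex'));
}

// Example with the 32-byte collection key from the SQL query above:
console.log(hexToBase58('a57708125d64ff943f1adf2fa45bfb7c0d8e581d6f3d036d6e41d64cd70434f3'));
```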
---

## Example

### Input: `assets_v3.csv`

```csv
ast_pubkey
0x1234abcd
0x5678ef90
```

### Output: `absentKeys.csv`

```csv
base58_key
3QJmV3qfvL9SuYo34YihAfMZhD2xBn84cvTL9W5ddWKH
```

---

## Notes
- Update the RPC endpoint and collection key as needed.
- Ensure you have sufficient permissions to read/write files in the directory.

Feel free to extend the script with additional functionality or integrate it into a larger SVM application.
New file (+95 lines):
```typescript
import { createUmi } from '@metaplex-foundation/umi-bundle-defaults';
import {
  mplCore,
  getAssetV1GpaBuilder,
  Key,
  updateAuthority,
} from '@metaplex-foundation/mpl-core';
import { generateSigner, publicKey, signerIdentity } from '@metaplex-foundation/umi';
import * as fs from 'fs';
import bs58 from 'bs58';

const umi = createUmi('https://api.mainnet-beta.solana.com', 'processed').use(mplCore());
// The script only reads on-chain data, so a throwaway signer is enough.
const payer = generateSigner(umi);

umi.use(signerIdentity(payer));

async function main() {
  const collectionKey = publicKey('C8uYT2W93pBcmMVxSoUyzTW5mKVFTEpMNEPx1Y15MFyk');

  // Fetch every AssetV1 account whose update authority is the given collection.
  const assetsByCollection = await getAssetV1GpaBuilder(umi)
    .whereField('key', Key.AssetV1)
    .whereField(
      'updateAuthority',
      updateAuthority('Collection', [collectionKey])
    )
    .getDeserialized();

  const publicKeyMap: string[] = [];

  for (const element of assetsByCollection) {
    publicKeyMap.push(element.publicKey.toString());
  }

  console.log(assetsByCollection.length);

  await processCsvToBase58('./assets_v3.csv', publicKeyMap);
}

async function processCsvToBase58(filePath: string, keysFromTheNetwork: string[]): Promise<void> {
  try {
    if (!fs.existsSync(filePath)) {
      console.error('File does not exist:', filePath);
      return;
    }

    const csvData = fs.readFileSync(filePath, 'utf-8');
    const rows = csvData.split('\n').filter(row => row.trim() !== '');

    const header = rows[0].trim();
    if (header !== 'ast_pubkey') {
      console.error('Invalid CSV format. Expected header: "ast_pubkey".');
      return;
    }

    const keysFromTheDB: string[] = [];

    // Convert each hex-encoded key from the CSV to Base58.
    const hexValues = rows.slice(1);
    hexValues.forEach((hex, index) => {
      try {
        let trimmedHex = hex.trim();

        if (trimmedHex.startsWith('0x')) {
          trimmedHex = trimmedHex.slice(2);
        }

        if (!/^([0-9A-Fa-f]+)$/.test(trimmedHex)) {
          console.warn(`Invalid HEX value at row ${index + 2}:`, hex);
          return;
        }

        const buffer = Buffer.from(trimmedHex, 'hex');
        const base58 = bs58.encode(buffer);
        keysFromTheDB.push(base58);
      } catch (error) {
        console.error(`Error processing row ${index + 2}:`, error);
      }
    });

    // Keys present on-chain but absent from the database dump.
    // A Set keeps the lookup O(1) per key instead of scanning the array.
    const dbKeySet = new Set(keysFromTheDB);
    const absentKeys = keysFromTheNetwork.filter(key => !dbKeySet.has(key));

    const csvContent = 'base58_key\n' + absentKeys.join('\n');

    fs.writeFileSync('./absentKeys.csv', csvContent, 'utf-8');
  } catch (error) {
    console.error('Error reading or processing the file:', error);
  }
}

main().catch(console.error);
```
New file (+22 lines):
```json
{
  "name": "core-demo",
  "packageManager": "[email protected]",
  "dependencies": {
    "@metaplex-foundation/mpl-core": "^1.1.1",
    "@metaplex-foundation/mpl-inscription": "^0.8.1",
    "@metaplex-foundation/umi": "^0.9.2",
    "@metaplex-foundation/umi-bundle-defaults": "^0.9.2",
    "@solana/web3.js": "^1.95.8",
    "bs58": "^6.0.0"
  },
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC"
}
```
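Note that `ts-node`, which the README uses to run the script, is not declared here; a minimal way to add it, assuming npm:

```bash
npm install --save-dev ts-node typescript
```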