From fbbbd17d7de0e45b1bb7ceed7570e2d48c404ce2 Mon Sep 17 00:00:00 2001 From: Luis Mastrangelo Date: Wed, 24 Apr 2024 12:31:01 -0300 Subject: [PATCH] chore: remove unused config files (#159) Signed-off-by: Luis Mastrangelo --- .circleci/config.yml | 79 ----- .circleci/continue_config.yml | 283 ------------------ .circleci/new_branch.yml | 57 ---- .circleci/nightly.yml | 99 ------- .circleci/ssh.config | 2 - .circleci/test-chains-regularly.yml | 28 -- .gitattributes | 2 - .github/PULL_REQUEST_TEMPLATE/release.md | 25 -- .gitignore | 1 - .vscode/launch.json | 285 ------------------ .vscode/settings.json | 3 - environments/.env.dev | 98 ------ environments/.env.latest | 97 ------ environments/.env.secrets.gpg | Bin 2794 -> 0 bytes environments/.env.stable | 93 ------ environments/build-ipfs.yaml | 7 - environments/build-monitor.yaml | 7 - environments/build-s3.yaml | 7 - environments/grafana.yaml | 110 ------- environments/ipfs.yaml | 32 -- environments/loki-config.yaml | 52 ---- environments/monitor.yaml | 32 -- environments/promtail-config.yaml | 25 -- environments/repository.yaml | 30 -- environments/s3.yaml | 21 -- environments/server.yaml | 34 --- environments/ui.yaml | 28 -- package.json | 8 +- scripts/build_and_publish_docker_images.sh | 23 -- scripts/check-s3-backup.mjs | 88 ------ scripts/decrypt.sh | 5 - scripts/deploy.sh | 21 -- scripts/encrypt.sh | 6 - scripts/find_replace.sh | 86 ------ scripts/hedera-reset-previewnet.sh | 9 - scripts/hedera-reset-testnet.sh | 9 - scripts/monitor_ci.js | 62 ---- scripts/monitor_ci.sh | 33 --- scripts/publish_to_npm.sh | 51 ---- scripts/setup.sh | 27 -- scripts/test_new_chain_support.sh | 32 -- scripts/updateChains.mjs | 9 - services/ipfs/Dockerfile.ipfs | 16 - services/ipfs/README.md | 12 - services/ipfs/cron.job | 4 - services/ipfs/entrypoint.sh | 49 --- services/ipfs/init-config.sh | 36 --- services/ipfs/ipfs-latest.key.gpg | Bin 156 -> 0 bytes services/ipfs/ipfs-stable.key.gpg | 1 - services/ipfs/publish.sh | 57 ---- services/s3sync/Dockerfile.s3 | 10 - services/s3sync/entrypoint.sh | 13 - services/s3sync/login_sync_s3.sh | 19 -- src/Dockerfile.monitor | 12 - src/monitor/gateway.ts | 33 --- src/monitor/monitor.ts | 328 --------------------- src/monitor/pending-contract.ts | 107 ------- src/monitor/source-fetcher.ts | 302 ------------------- src/monitor/util.ts | 54 ---- test/monitor.js | 275 ----------------- tslint.json | 13 - ui/.env.development | 4 - ui/.env.production | 4 - ui/.gitignore | 4 - ui/.vscode/settings.json | 3 - 65 files changed, 2 insertions(+), 3360 deletions(-) delete mode 100644 .circleci/config.yml delete mode 100644 .circleci/continue_config.yml delete mode 100644 .circleci/new_branch.yml delete mode 100644 .circleci/nightly.yml delete mode 100755 .circleci/ssh.config delete mode 100644 .circleci/test-chains-regularly.yml delete mode 100644 .gitattributes delete mode 100644 .github/PULL_REQUEST_TEMPLATE/release.md delete mode 100644 .vscode/launch.json delete mode 100644 .vscode/settings.json delete mode 100644 environments/.env.dev delete mode 100644 environments/.env.latest delete mode 100644 environments/.env.secrets.gpg delete mode 100644 environments/.env.stable delete mode 100644 environments/build-ipfs.yaml delete mode 100644 environments/build-monitor.yaml delete mode 100644 environments/build-s3.yaml delete mode 100644 environments/grafana.yaml delete mode 100644 environments/ipfs.yaml delete mode 100644 environments/loki-config.yaml delete mode 100644 environments/monitor.yaml delete mode 100644 
environments/promtail-config.yaml delete mode 100644 environments/repository.yaml delete mode 100644 environments/s3.yaml delete mode 100644 environments/server.yaml delete mode 100644 environments/ui.yaml delete mode 100755 scripts/build_and_publish_docker_images.sh delete mode 100644 scripts/check-s3-backup.mjs delete mode 100755 scripts/decrypt.sh delete mode 100755 scripts/deploy.sh delete mode 100755 scripts/encrypt.sh delete mode 100755 scripts/find_replace.sh delete mode 100755 scripts/hedera-reset-previewnet.sh delete mode 100755 scripts/hedera-reset-testnet.sh delete mode 100755 scripts/monitor_ci.js delete mode 100755 scripts/monitor_ci.sh delete mode 100755 scripts/publish_to_npm.sh delete mode 100755 scripts/setup.sh delete mode 100755 scripts/test_new_chain_support.sh delete mode 100644 scripts/updateChains.mjs delete mode 100644 services/ipfs/Dockerfile.ipfs delete mode 100644 services/ipfs/README.md delete mode 100644 services/ipfs/cron.job delete mode 100755 services/ipfs/entrypoint.sh delete mode 100644 services/ipfs/init-config.sh delete mode 100644 services/ipfs/ipfs-latest.key.gpg delete mode 100644 services/ipfs/ipfs-stable.key.gpg delete mode 100755 services/ipfs/publish.sh delete mode 100644 services/s3sync/Dockerfile.s3 delete mode 100644 services/s3sync/entrypoint.sh delete mode 100755 services/s3sync/login_sync_s3.sh delete mode 100644 src/Dockerfile.monitor delete mode 100644 src/monitor/gateway.ts delete mode 100755 src/monitor/monitor.ts delete mode 100644 src/monitor/pending-contract.ts delete mode 100644 src/monitor/source-fetcher.ts delete mode 100644 src/monitor/util.ts delete mode 100644 test/monitor.js delete mode 100644 tslint.json delete mode 100644 ui/.env.development delete mode 100644 ui/.env.production delete mode 100644 ui/.vscode/settings.json diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 40011d7e..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,79 +0,0 @@ -# Config that filters the build jobs: only run new builds for the paths that are changed. -# Runs the continue_config.yml subsequently. -version: 2.1 - -# this allows you to use CircleCI's dynamic configuration feature -setup: true - -# Pipeline parameters for nightly and regular test chain triggers. -parameters: - run-nightly: - type: boolean - default: false - run-test-chains-regularly: - type: boolean - default: false - -# the path-filtering orb is required to continue a pipeline based on -# the path of an updated fileset see https://circleci.com/docs/2.0/using-dynamic-configuration/ -orbs: - path-filtering: circleci/path-filtering@0.1.1 - continuation: circleci/continuation@0.3.1 - -# Can add multiple workflows in setup since only one of these will run. Otherwise it's not possible: https://support.circleci.com/hc/en-us/articles/360060934851--Max-number-of-workflows-exceeded-error -workflows: - always-run: - # Don't run on scheduled pipelines and only on master or staging. - when: - and: - - not: << pipeline.parameters.run-nightly >> - - not: << pipeline.parameters.run-test-chains-regularly >> - - or: - - equal: [ master, << pipeline.git.branch >> ] - - equal: [ staging, << pipeline.git.branch >> ] - jobs: - # the path-filtering/filter job determines which pipeline parameters to update. - - path-filtering/filter: - name: check-updated-modules - # h5ai-nginx without /.* part since it's a submodule. 
git diff outputs the folder name only, not files under it - mapping: | - services/ipfs/.* run-build-ipfs true - services/s3sync/.* run-build-s3 true - h5ai-nginx run-build-repository true - src/.* run-build-server true - packages/.* run-build-server true - environments/.* run-build-server true - scripts/.* run-build-server true - package.json run-build-server true - ui/.* run-build-ui true - # Compare against the last build of the branch not the default "main" branch - base-revision: << pipeline.git.base_revision >> - config-path: .circleci/continue_config.yml - - new-branch: - when: - and: - - not: << pipeline.parameters.run-nightly >> - - not: << pipeline.parameters.run-test-chains-regularly >> - - not: - equal: [ master, << pipeline.git.branch >> ] - - not: - equal: [ staging, << pipeline.git.branch >> ] - jobs: - - continuation/continue: - name: continue-new-branch - configuration_path: .circleci/new_branch.yml - - nightly: - when: << pipeline.parameters.run-nightly >> - jobs: - - continuation/continue: - name: continue-nightly - configuration_path: .circleci/nightly.yml - - test-chains-regularly: - when: << pipeline.parameters.run-test-chains-regularly >> - jobs: - - continuation/continue: - name: continue-test-chains-regularly - configuration_path: .circleci/test-chains-regularly.yml diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml deleted file mode 100644 index 34b7b5fd..00000000 --- a/.circleci/continue_config.yml +++ /dev/null @@ -1,283 +0,0 @@ -# Main config for workflows that will be conditionally run according to the parameters set at the initial config.yml. Only runs builds for paths that are changed. -# Also contains workflows that will be run on all branches such as node tests and test-new-chain -version: 2.1 - -# Pipeline parameters to run module builds if related files have been changed -parameters: - run-build-ipfs: - type: boolean - default: false - run-build-s3: - type: boolean - default: false - run-build-repository: - type: boolean - default: false - run-build-ui: - type: boolean - default: false - run-build-server: - type: boolean - default: false - -################ -### ALIASES #### -################ -aliases: - - &build-base - docker: - - image: cimg/base:2021.04 - parameters: - run-build: - type: boolean - default: false - steps: - - when: - condition: << parameters.run-build >> - steps: - - checkout - - run: - name: "Pull Submodules" - command: | - git submodule init - git submodule update --remote - - setup_remote_docker: - version: 20.10.2 - - run: - name: Build and push docker images - command: | - ./scripts/build_and_publish_docker_images.sh - - run: echo 0 - - &filter-base - filters: - branches: - only: - - staging - - master - - &monitor-e2e-base - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install node-fetch dotenv - command: npm install node-fetch dotenv - - run: - name: monitor test - command: ./scripts/monitor_ci.sh - no_output_timeout: 30m - docker: - - image: cimg/node:16.15 - - &verification-e2e-base - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dotenv - command: npm install dotenv - - run: - name: verification test - command: ./scripts/verification-e2e.sh - docker: - - image: cimg/node:16.15 - - -#################### -#### WORKFLOWS ##### -#################### -workflows: - build_publish_deploy_docker: - # Run build_publish_deploy when on staging or master AND when a module is changed and something needs to be built and deployed. 
- when: - and: - - or: - - equal: [ master, << pipeline.git.branch >> ] - - equal: [ staging, << pipeline.git.branch >> ] - - or: - [ - << pipeline.parameters.run-build-ipfs >>, - << pipeline.parameters.run-build-repository >>, - << pipeline.parameters.run-build-s3 >>, - << pipeline.parameters.run-build-server >>, - << pipeline.parameters.run-build-ui >>, - ] - jobs: - - tests-node-v16 - - npm-publish: - filters: - branches: - only: - - master - requires: - - tests-node-v16 - - build-push-ipfs: - run-build: << pipeline.parameters.run-build-ipfs >> - # Always build monitor if there's a change in the server - - build-push-monitor: - run-build: << pipeline.parameters.run-build-server >> - - build-push-repository: - run-build: << pipeline.parameters.run-build-repository >> - - build-push-s3: - run-build: << pipeline.parameters.run-build-s3 >> - - build-push-server: - run-build: << pipeline.parameters.run-build-server >> - - build-push-ui: - run-build: << pipeline.parameters.run-build-ui >> - - deploy: - requires: - - build-push-ipfs - - build-push-monitor - - build-push-repository - - build-push-s3 - - build-push-server - - build-push-ui - - monitor-e2e-goerli: - requires: - - deploy - - monitor-e2e-sepolia: - requires: - - deploy - - verification-e2e-goerli: - requires: - - deploy - - verification-e2e-sepolia: - requires: - - deploy - - # Tests workflow for not staging or master - tests: - when: - and: - - not: - equal: [ master, << pipeline.git.branch >> ] - - not: - equal: [ staging, << pipeline.git.branch >> ] - jobs: - - tests-node-v16 - - # Has to run always. Can't regex filter on add-chain-{chainId} branch names see: https://stackoverflow.com/questions/55839004/circleci-regex-filtering-match-within-string - test-new-chain: - jobs: - - test-new-chain - - -####################### -### JOB DEFINITIONS ### -####################### -jobs: - build-push-ipfs: - <<: *build-base - environment: - SERVICE: "ipfs" - build-push-monitor: - <<: *build-base - environment: - SERVICE: "monitor" - build-push-repository: - <<: *build-base - environment: - SERVICE: "repository" - build-push-s3: - <<: *build-base - environment: - SERVICE: "s3" - build-push-server: - <<: *build-base - environment: - SERVICE: "server" - build-push-ui: - <<: *build-base - environment: - SERVICE: "ui" - deploy: - docker: - - image: cimg/base:2021.04 - steps: - - checkout - - setup_remote_docker: - version: 20.10.2 - - add_ssh_keys: - fingerprints: - - "a9:2f:97:c3:64:10:80:c7:8e:2f:db:f7:a3:4a:53:66" - - run: - name: Deploy - no_output_timeout: 30m - command: | - ./scripts/deploy.sh - tests-node-v16: - docker: - - image: cimg/node:16.15 - working_directory: ~/source-verify - parameters: - run_coveralls: - type: boolean - default: false - steps: - - run: - name: Versions - command: npm version - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: install puppeteer dependencies - command: sudo apt-get update && sudo apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget - - run: - name: lint - command: npm run lint - - run: - name: tsc and test - command: npx lerna 
run build && npx lerna run test --stream - - run: - name: coverage - command: npm run cov:send - test-new-chain: - docker: - - image: cimg/node:16.15 - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: build - command: npx lerna run build - - run: - name: test new chain PR - command: ./scripts/test_new_chain_support.sh - monitor-e2e-goerli: - <<: *monitor-e2e-base - environment: - CHAIN_ID: 5 - CHAIN_NAME: goerli - monitor-e2e-sepolia: - <<: *monitor-e2e-base - environment: - CHAIN_ID: '11155111' - CHAIN_NAME: sepolia - verification-e2e-goerli: - <<: *verification-e2e-base - environment: - CHAIN_ID: 5 - CHAIN_NAME: goerli - verification-e2e-sepolia: - <<: *verification-e2e-base - environment: - CHAIN_ID: '11155111' - CHAIN_NAME: sepolia - npm-publish: - working_directory: ~/source-verify - docker: - - image: cimg/node:16.15 - steps: - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: build everything - command: npx lerna run build - - run: - name: Publish npm package - command: ./scripts/publish_to_npm.sh \ No newline at end of file diff --git a/.circleci/new_branch.yml b/.circleci/new_branch.yml deleted file mode 100644 index fcb1fc7d..00000000 --- a/.circleci/new_branch.yml +++ /dev/null @@ -1,57 +0,0 @@ -version: 2.1 - -workflows: - # Always run - node-build: - jobs: - - node-v16 - # Has to run always. Can't regex-filter on add-chain-{chainId} branch names, see: https://stackoverflow.com/questions/55839004/circleci-regex-filtering-match-within-string - test-new-chain: - jobs: - - test-new-chain - -jobs: - node-v16: - docker: - - image: cimg/node:16.15 - working_directory: ~/source-verify - parameters: - run_coveralls: - type: boolean - default: false - steps: - - run: - name: Versions - command: npm version - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: install puppeteer dependencies - command: sudo apt-get update && sudo apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget - - - run: - name: lint - command: npm run lint - - run: - name: tsc and test - command: npx lerna run build && npx lerna run test --stream - - run: - name: coverage - command: npx lerna run cov:send - test-new-chain: - docker: - - image: cimg/node:16.15 - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: build - command: npx lerna run build - - run: - name: test new chain PR - command: ./scripts/test_new_chain_support.sh diff --git a/.circleci/nightly.yml b/.circleci/nightly.yml deleted file mode 100644 index e1312308..00000000 --- a/.circleci/nightly.yml +++ /dev/null @@ -1,99 +0,0 @@ -version: 2.1 - -# Parameters that can be passed to this pipeline have to be defined here too. Used with nightly.
-parameters: - run-nightly: - type: boolean - default: false - -aliases: - - &monitor-e2e-base - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install node-fetch dotenv - command: npm install node-fetch dotenv - - run: - name: monitor test - command: ./scripts/monitor_ci.sh - no_output_timeout: 30m - docker: - - image: cimg/node:16.15 - - &verification-e2e-base - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dotenv - command: npm install dotenv - - run: - name: verification test - command: ./scripts/verification-e2e.sh - - store_artifacts: - path: ./verified-contracts - docker: - - image: cimg/node:16.15 - -workflows: - e2e-tests: - jobs: - - monitor-e2e-goerli - - monitor-e2e-sepolia - - verification-e2e-goerli - - verification-e2e-sepolia - s3-backup-check: - jobs: - - check-s3-backup - etherscan-instances: - jobs: - - check-etherscan-instances - -jobs: - monitor-e2e-goerli: - <<: *monitor-e2e-base - environment: - CHAIN_ID: 5 - CHAIN_NAME: goerli - monitor-e2e-sepolia: - <<: *monitor-e2e-base - environment: - CHAIN_ID: '11155111' - CHAIN_NAME: sepolia - verification-e2e-goerli: - <<: *verification-e2e-base - environment: - CHAIN_ID: 5 - CHAIN_NAME: goerli - verification-e2e-sepolia: - <<: *verification-e2e-base - environment: - CHAIN_ID: '11155111' - CHAIN_NAME: sepolia - check-s3-backup: - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dependencies - command: npm install @aws-sdk/client-s3 - - run: - name: backup test - command: node --experimental-fetch ./scripts/check-s3-backup.mjs - docker: - - image: cimg/node:16.15 - check-etherscan-instances: - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: build - command: npx lerna run build - - run: - name: test - command: npx mocha test/etherscan/etherscanInstances.js --exit - docker: - - image: cimg/node:16.15 \ No newline at end of file diff --git a/.circleci/ssh.config b/.circleci/ssh.config deleted file mode 100755 index f30d239b..00000000 --- a/.circleci/ssh.config +++ /dev/null @@ -1,2 +0,0 @@ -Host * - StrictHostKeyChecking no diff --git a/.circleci/test-chains-regularly.yml b/.circleci/test-chains-regularly.yml deleted file mode 100644 index f3181a08..00000000 --- a/.circleci/test-chains-regularly.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: 2.1 - -# Parameters that can be passed to this pipeline have to be defined here too.
-parameters: - run-test-chains-regularly: - type: boolean - default: false - -workflows: - test-chains-regularly: - jobs: - - test-chains - -jobs: - test-chains: - working_directory: ~/source-verify - steps: - - checkout - - run: - name: install dependencies - command: npm install && npx lerna bootstrap - - run: - name: build and test - command: npx lerna run build && npx lerna run test:chains - - store_artifacts: # Store and publish test results for reference - path: ./chain-tests-report - docker: - - image: cimg/node:16.15 diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 10f85611..00000000 --- a/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -environments/.env.secrets filter=git-crypt diff=git-crypt -.gitattributes !filter !diff diff --git a/.github/PULL_REQUEST_TEMPLATE/release.md b/.github/PULL_REQUEST_TEMPLATE/release.md deleted file mode 100644 index 542bcbfb..00000000 --- a/.github/PULL_REQUEST_TEMPLATE/release.md +++ /dev/null @@ -1,25 +0,0 @@ -# Release - -## Changes - - - -- New supported chains: - - X Chain (43) - - Chainname (chainId) - - - -- Fix: ... -- - ... - -## Checklist - - - -- [ ] I have bumped the versions of the packages under `packages/`, if necessary - - [ ] `lib-sourcify` - - [ ] `bytecode-utils` - - [ ] `contract-call-decoder` -- [ ] All tests are passing diff --git a/.gitignore b/.gitignore index 2a430907..7b171783 100644 --- a/.gitignore +++ b/.gitignore @@ -17,7 +17,6 @@ tmp metacoin-source-verify/ **/synced **/sort.log -**/.env.secrets logs **/too_big.txt chain-tests-report/ diff --git a/.vscode/launch.json b/.vscode/launch.json deleted file mode 100644 index e7bbc894..00000000 --- a/.vscode/launch.json +++ /dev/null @@ -1,285 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. 
- // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "type": "node", - "request": "launch", - "name": "Server", - "program": "${workspaceFolder}/dist/server/server.js", - "envFile": "${workspaceFolder}/environments/.env", - "preLaunchTask": "npm: build:lerna", - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "env": { - // "DEBUG": "express:*" // Debug all express modules * - }, - "smartStep": true, - "console": "integratedTerminal", - "outputCapture": "std", - }, - { - "type": "node", - "request": "launch", - "name": "Server - without build", - "program": "${workspaceFolder}/dist/server/server.js", - "envFile": "${workspaceFolder}/environments/.env", - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "env": { - // "DEBUG": "express:*" // Debug all express modules * - }, - "smartStep": true, - "console": "integratedTerminal", - "outputCapture": "std", - }, - { - // Make sure the UI webserver is already running with `npm start` - "name": "UI", - "type": "chrome", - "request": "launch", - "url": "http://localhost:3000", - "webRoot": "${workspaceRoot}/ui/src", - }, - { - "type": "node", - "request": "launch", - "name": "Monitor", - "program": "${workspaceFolder}/dist/monitor/monitor.js", - "envFile": "${workspaceFolder}/environments/.env", - "preLaunchTask": "npm: build:lerna", - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "smartStep": true, - "console": "integratedTerminal", - "outputCapture": "std" - }, - { - "type": "node", - "request": "launch", - "name": "Monitor - without build", - "program": "${workspaceFolder}/dist/monitor/monitor.js", - "envFile": "${workspaceFolder}/environments/.env", - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "smartStep": true, - "console": "integratedTerminal", - "outputCapture": "std" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - All", - "program": "${workspaceFolder}/node_modules/.bin/lerna", - "env": { - "TESTING": "true", - }, - "args": [ - "run", - "test", - "--stream" - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "smartStep": true, - "skipFiles": [ - "/**", - "node_modules/**" - ], - "console": "integratedTerminal", - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Server", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - // "DEBUG": "express:*", // Debug all express modules * - "TESTING": "true", - }, - "args": [ - "${workspaceFolder}/test/server.js", - "--no-timeout", - // Run a single test when debugging - // "--grep=v0.6.12", - "--exit", - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js", - ], - "smartStep": true, - "console": "integratedTerminal", - // "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Etherscan", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - // "DEBUG": "express:*", // Debug all express modules * - "TESTING": "true", - }, - "args": [ - "${workspaceFolder}/test/etherscan.js", - "--no-timeout", - // Run a single test when debugging - // "--grep=v0.6.12", - "--exit", - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - 
"${workspaceFolder}/packages/**/build/**/*.js", - ], - "smartStep": true, - "console": "integratedTerminal", - // "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Etherscan Instances", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - "TESTING": "true", - }, - "args": [ - "${workspaceFolder}/test/etherscan/etherscanInstances.js", - "--no-timeout", - // Run a single test when debugging - // "--grep=v0.6.12", - "--exit", - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js", - ], - "smartStep": true, - "console": "integratedTerminal", - // "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - lib-sourcify", - "cwd": "${workspaceFolder}/packages/lib-sourcify", - "program": "./node_modules/.bin/mocha", - "env": { - // "DEBUG": "express:*", // Debug all express modules * - "TESTING": "true", - }, - "args": [ - "-r", - "ts-node/register", - "./test/**/*.spec.ts", - "--no-timeout", - // Run a single test when debugging - // "--grep=v0.6.12", - "--exit", - ], - "sourceMaps": true, - "smartStep": true, - "console": "integratedTerminal", - // "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Contract Call Decoder", - "cwd": "${workspaceFolder}/packages/contract-call-decoder", - "program": "./node_modules/ava/cli.js", - "env": { - "TESTING": "true", - }, - "args": [ - ], - "outFiles": [ - "./**/*.js", - "${workspaceFolder}/packages/contract-call-decoder/build/**/*.js" - ], - "smartStep": true, - "console": "integratedTerminal", - // "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Monitor", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - "TESTING": "true", - }, - "args": [ - "${workspaceFolder}/test/monitor.js", - "--no-timeout" - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js", - ], - "smartStep": true, - "console": "integratedTerminal", - "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Chains", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - "TESTING": "true", - // "NEW_CHAIN_ID": "11155111" - }, - "args": [ - "${workspaceFolder}/test/chains/chain-tests.js", - "--no-timeout", - // Run a single test when debugging - // "--grep=Optimism", - "--exit", - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "smartStep": true, - "console": "integratedTerminal", - "internalConsoleOptions": "neverOpen" - }, - { - "type": "node", - "request": "launch", - "name": "Mocha - Source Fetcher", - "program": "${workspaceRoot}/node_modules/.bin/mocha", - "env": { - "TESTING": "true", - }, - "args": [ - "${workspaceFolder}/test/sourceFetcher.js", - "--no-timeout", - // Run a single test when debugging - // "--grep=fallback", - "--exit", - ], - "outFiles": [ - "${workspaceFolder}/dist/**/*.js", - "${workspaceFolder}/packages/**/build/**/*.js" - ], - "smartStep": true, - "console": "integratedTerminal", - "internalConsoleOptions": "neverOpen" - }, - ] -} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 23fd35f0..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "editor.formatOnSave": true -} \ No newline at end of file 
diff --git a/environments/.env.dev b/environments/.env.dev deleted file mode 100644 index 0b5085fa..00000000 --- a/environments/.env.dev +++ /dev/null @@ -1,98 +0,0 @@ -# Server config -REPOSITORY_PATH=/tmp/sourcify/repository -SOLC_REPO=/tmp/solc-bin/linux-amd64 -SOLJSON_REPO=/tmp/solc-bin/soljson -SERVER_PORT=5555 -SESSION_SECRET=yoursessionsecret -IPFS_GATEWAY=https://ipfs.io/ipfs/ - -# Repository web server config -REPOSITORY_SERVER_URL=http://localhost:10000 - -# Monitor config -MONITOR_PORT=80 -MONITOR_FETCH_TIMEOUT=300000 - -# S3 config -AWS_S3_ACCESS_KEY_ID=xxx -AWS_S3_SECRET_ACCESS_KEY=xxx -BUCKET_NAME=s3://sourcify-backup-s3 -# Chainsafe Storage S3 config -CHAINSAFE_S3_ACCESS_KEY_ID=xxx -CHAINSAFE_S3_SECRET_ACCESS_KEY=xxx - -# IPFS config -IPFS_SECRET=xxx -# IPNS=k51qzi5uqu5dkuzo866rys9qexfvbfdwxjc20njcln808mzjrhnorgu5rh30lb -IPNS=repo.staging.sourcify.dev # Use DNSLink -# Leave API blank if you don't run an ipfs node -IPFS_API= -# IPs to announce with the ipfs id -PUBLIC_IP=xxx -LOCAL_IP=xxx -# Pinning services -ESTUARY_PINNING_SECRET=xxx -WEB3_STORAGE_PINNING_SECRET=xxx - -# NPM config -NPM_TOKEN=xxx - -# Docker config -## Relevant if you're running in a container -## Where to mount the downloaded compilers directory on the host machine -SOLC_REPO_HOST=/home/gather/staging/data/solc-bin/linux-amd64 -SOLJSON_REPO_HOST=/home/gather/staging/data/solc-bin/bin -# Repository path in the host machine -REPOSITORY_PATH_HOST=/home/sourcify/production/data/repository -## Ports to access containers from the host -SERVER_EXTERNAL_PORT=5555 -UI_EXTERNAL_PORT=1234 -REPOSITORY_SERVER_EXTERNAL_PORT=10000 -MONITOR_EXTERNAL_PORT=3000 -IPFS_GW_EXTERNAL_PORT=5050 -IPFS_LIBP2P_EXTERNAL_PORT=4002 -IPFS_API_EXTERNAL_PORT=5002 -SERVER_URL=https://staging.sourcify.dev/server - -# Custom nodes -NODE_URL_MAINNET= -NODE_URL_GOERLI= -NODE_URL_SEPOLIA= -CF_ACCESS_CLIENT_ID= -CF_ACCESS_CLIENT_SECRET= - -# Other config -TESTING=false -TAG=latest -NODE_ENV=development -INFURA_ID=xxx -ALCHEMY_ID=xxx -# Optional, if not set will use ALCHEMY_ID -ALCHEMY_ID_OPTIMISM= -# Optional, if not set will use ALCHEMY_ID -ALCHEMY_ID_ARBITRUM= -# Optional, needed for the Import from Etherscan functionality for each Etherscan instance -ETHERSCAN_API_KEY= -ARBISCAN_API_KEY= -POLYGONSCAN_API_KEY= -BSCSCAN_API_KEY= -SNOWTRACE_API_KEY= -CELOSCAN_API_KEY= -MOONSCAN_MOONBEAM_API_KEY= -MOONSCAN_MOONRIVER_API_KEY= -BOBASCAN_API_KEY= -GNOSISSCAN_API_KEY= -OPTIMISMSCAN_API_KEY= -CRONOSCAN_API_KEY= - -# Needed to call create2 APIs, format is: TOKEN1,TOKEN2,...
-CREATE2_CLIENT_TOKENS= - -GRAFANA_PATH=/home/gather/staging/data/grafana -GRAFANA_HTTP_USER=xxx -GRAFANA_HTTP_PASS=xxx -GRAFANA_EXTERNAL_PORT=3000 -GRAFANA_LOKI_EXTERNAL_PORT=3100 -GRAFANA_PROMETHEUS_EXTERNAL_PORT=9090 -# Use if you have a running Loki instance, otherwise leave blank -# GRAFANA_LOKI_URL=http://localhost:3100 \ No newline at end of file diff --git a/environments/.env.latest b/environments/.env.latest deleted file mode 100644 index cbe10063..00000000 --- a/environments/.env.latest +++ /dev/null @@ -1,97 +0,0 @@ -# Server config -SERVER_PORT=80 -SOLC_REPO=/home/data/solc-bin/linux-amd64 -SOLJSON_REPO=/home/data/solc-bin/soljson -SESSION_SECRET=xxx -IPFS_GATEWAY=xxx -REPOSITORY_PATH=/home/data/repository - -# Repository web server config -REPOSITORY_SERVER_PORT=80 -REPOSITORY_SERVER_URL=https://repo.staging.sourcify.dev - -# Monitor config -MONITOR_PORT=80 -MONITOR_FETCH_TIMEOUT=300000 -FALLBACK_IPFS_GATEWAY=xxx - -# S3 config -AWS_S3_ACCESS_KEY_ID=xxx -AWS_S3_SECRET_ACCESS_KEY=xxx -BUCKET_NAME=s3://sourcify-backup-s3 -# Chainsafe Storage S3 config -CHAINSAFE_S3_ACCESS_KEY_ID=xxx -CHAINSAFE_S3_SECRET_ACCESS_KEY=xxx - -# IPFS config -IPFS_SECRET=xxx -# IPNS=k51qzi5uqu5dkuzo866rys9qexfvbfdwxjc20njcln808mzjrhnorgu5rh30lb -IPNS=repo.staging.sourcify.dev # Use DNSLink -# Leave API blank if you don't run an ipfs node -# IPFS_API=http://ipfs-latest:5001 -# IPs to announce with the ipfs id -PUBLIC_IP=xxx -LOCAL_IP=xxx -# Pinning services -ESTUARY_PINNING_SECRET=xxx -WEB3_STORAGE_PINNING_SECRET=xxx - -# NPM config -NPM_TOKEN=xxx - -# Docker config -## Relevant if you're running in a container -## Where to mount the downloaded compilers directory on the host machine -SOLC_REPO_HOST=/home/gather/staging/data/solc-bin/linux-amd64 -SOLJSON_REPO_HOST=/home/gather/staging/data/solc-bin/bin -# Repository path in the host machine -REPOSITORY_PATH_HOST=/home/gather/staging/data/repository -## Ports to access containers from the host -SERVER_EXTERNAL_PORT=5000 -UI_EXTERNAL_PORT=1234 -REPOSITORY_SERVER_EXTERNAL_PORT=10000 -MONITOR_EXTERNAL_PORT=3000 -IPFS_GW_EXTERNAL_PORT=5050 -IPFS_LIBP2P_EXTERNAL_PORT=4002 -IPFS_API_EXTERNAL_PORT=5002 -SERVER_URL=https://staging.sourcify.dev/server - -# Custom nodes -NODE_URL_MAINNET=https://rpc.mainnet.ethpandaops.io -NODE_URL_GOERLI=https://rpc.goerli.ethpandaops.io -NODE_URL_SEPOLIA=https://rpc.sepolia.ethpandaops.io -CF_ACCESS_CLIENT_ID=xxx -CF_ACCESS_CLIENT_SECRET=xxx - -# Other config -TESTING=false -TAG=latest -NODE_ENV=production -INFURA_ID=xxx -ALCHEMY_ID=xxx -ALCHEMY_ID_OPTIMISM=xxx -ALCHEMY_ID_ARBITRUM=xxx - -# Etherscan API keys -ETHERSCAN_API_KEY=xxx -ARBISCAN_API_KEY=xxx -POLYGONSCAN_API_KEY=xxx -BSCSCAN_API_KEY=xxx -SNOWTRACE_API_KEY=xxx -CELOSCAN_API_KEY=xxx -MOONSCAN_MOONBEAM_API_KEY=xxx -MOONSCAN_MOONRIVER_API_KEY=xxx -BOBASCAN_API_KEY=xxx -GNOSISSCAN_API_KEY=xxx -OPTIMISMSCAN_API_KEY=xxx -CRONOSCAN_API_KEY=xxx - -CREATE2_CLIENT_TOKENS=xxx - -GRAFANA_PATH=/home/gather/staging/data/grafana -GRAFANA_HTTP_USER=xxx -GRAFANA_HTTP_PASS=xxx -GRAFANA_EXTERNAL_PORT=13000 -GRAFANA_LOKI_EXTERNAL_PORT=13100 -GRAFANA_PROMETHEUS_EXTERNAL_PORT=9090 -GRAFANA_LOKI_URL=http://grafana-loki-latest:3100 \ No newline at end of file diff --git a/environments/.env.secrets.gpg b/environments/.env.secrets.gpg deleted file mode 100644 index 8d3c645df739f824bbd95f6e0f805cb259124310..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2794 [base85-encoded binary patch data omitted]
diff --git a/environments/.env.stable b/environments/.env.stable deleted file mode 100644 index acccecf0..00000000 --- a/environments/.env.stable +++ /dev/null @@ -1,93 +0,0 @@ -# Server config -SERVER_PORT=80 -SOLC_REPO=/home/data/solc-bin/linux-amd64 -SOLJSON_REPO=/home/data/solc-bin/soljson -SESSION_SECRET=xxx -IPFS_GATEWAY=xxx -REPOSITORY_PATH=/home/data/repository - -# Repository web server config -REPOSITORY_SERVER_PORT=80 -REPOSITORY_SERVER_URL=https://repo.sourcify.dev - -# Monitor config -MONITOR_PORT=80 -MONITOR_FETCH_TIMEOUT=300000 - -# S3 config -AWS_S3_ACCESS_KEY_ID=xxx -AWS_S3_SECRET_ACCESS_KEY=xxx -BUCKET_NAME=s3://sourcify-backup-s3 - -# IPFS config -IPFS_SECRET=xxx -# IPNS=k51qzi5uqu5dll0ocge71eudqnrgnogmbr37gsgl12uubsinphjoknl6bbi41p -IPNS=repo.sourcify.dev # Use DNSLink -# Leave API blank if you don't run an ipfs node -# IPFS_API=http://ipfs-stable:5001 -# IPs to announce with the ipfs id -PUBLIC_IP=xxx -LOCAL_IP=xxx -# Pinning services -ESTUARY_PINNING_SECRET=xxx -WEB3_STORAGE_PINNING_SECRET=xxx - -# NPM config -NPM_TOKEN=xxx - -# Docker config -## Relevant if you're running in a container -## Where to mount the downloaded compilers directory on the host machine -SOLC_REPO_HOST=/home/sourcify/production/data/solc-bin/linux-amd64 -SOLJSON_REPO_HOST=/home/sourcify/production/data/solc-bin/bin -# Repository path in the host machine -REPOSITORY_PATH_HOST=/home/sourcify/production/data/repository -## Ports to access containers from the host -SERVER_EXTERNAL_PORT=5002 -UI_EXTERNAL_PORT=1235 -REPOSITORY_SERVER_EXTERNAL_PORT=10001 -MONITOR_EXTERNAL_PORT=3001
-IPFS_GW_EXTERNAL_PORT=5051 -IPFS_LIBP2P_EXTERNAL_PORT=4003 -IPFS_API_EXTERNAL_PORT=5003 -SERVER_URL=https://sourcify.dev/server - -# Custom nodes -NODE_URL_MAINNET=https://rpc.mainnet.ethpandaops.io -NODE_URL_GOERLI=https://rpc.goerli.ethpandaops.io -NODE_URL_SEPOLIA=https://rpc.sepolia.ethpandaops.io -CF_ACCESS_CLIENT_ID=xxx -CF_ACCESS_CLIENT_SECRET=xxx - -# Other config -TESTING=false -TAG=stable -NODE_ENV=production -INFURA_ID=xxx -ALCHEMY_ID=xxx -ALCHEMY_ID_OPTIMISM=xxx -ALCHEMY_ID_ARBITRUM=xxx - -# Etherscan API keys -ETHERSCAN_API_KEY=xxx -ARBISCAN_API_KEY=xxx -POLYGONSCAN_API_KEY=xxx -BSCSCAN_API_KEY=xxx -SNOWTRACE_API_KEY=xxx -CELOSCAN_API_KEY=xxx -MOONSCAN_MOONBEAM_API_KEY=xxx -MOONSCAN_MOONRIVER_API_KEY=xxx -BOBASCAN_API_KEY=xxx -GNOSISSCAN_API_KEY=xxx -OPTIMISMSCAN_API_KEY=xxx -CRONOSCAN_API_KEY=xxx - -CREATE2_CLIENT_TOKENS=xxx - -GRAFANA_PATH=/home/sourcify/production/data/grafana -GRAFANA_HTTP_USER=xxx -GRAFANA_HTTP_PASS=xxx -GRAFANA_EXTERNAL_PORT=13000 -GRAFANA_LOKI_EXTERNAL_PORT=13100 -GRAFANA_PROMETHEUS_EXTERNAL_PORT=9090 -GRAFANA_LOKI_URL=http://grafana-loki-stable:3100 \ No newline at end of file diff --git a/environments/build-ipfs.yaml b/environments/build-ipfs.yaml deleted file mode 100644 index d5bc7e4d..00000000 --- a/environments/build-ipfs.yaml +++ /dev/null @@ -1,7 +0,0 @@ -version: "3.7" -services: - ipfs: - image: ethereum/source-verify:ipfs-${TAG} - build: - context: ../services/ipfs/ - dockerfile: ./Dockerfile.ipfs diff --git a/environments/build-monitor.yaml b/environments/build-monitor.yaml deleted file mode 100644 index 573df94c..00000000 --- a/environments/build-monitor.yaml +++ /dev/null @@ -1,7 +0,0 @@ -version: "3.7" -services: - monitor: - image: ethereum/source-verify:monitor-${TAG} - build: - context: ../ - dockerfile: src/Dockerfile.monitor diff --git a/environments/build-s3.yaml b/environments/build-s3.yaml deleted file mode 100644 index ef79613b..00000000 --- a/environments/build-s3.yaml +++ /dev/null @@ -1,7 +0,0 @@ -version: "3.7" -services: - s3sync: - image: ethereum/source-verify:s3-${TAG} - build: - context: ../services/s3sync - dockerfile: Dockerfile.s3 diff --git a/environments/grafana.yaml b/environments/grafana.yaml deleted file mode 100644 index 27e72e95..00000000 --- a/environments/grafana.yaml +++ /dev/null @@ -1,110 +0,0 @@ -version: "3.7" - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -networks: - source-verify: - -volumes: - prometheus-data: - driver: local - loki-data: - driver: local - -services: - loki: - container_name: grafana-loki-${TAG} - image: grafana/loki:2.8.2 - ports: - - "${GRAFANA_LOKI_EXTERNAL_PORT}:3100" - volumes: - - ./loki-config.yaml:/etc/loki/local-config.yaml - - loki-data:/loki/ # Persist loki filesystem storage, outside the sourcify/ folder - command: -config.file=/etc/loki/local-config.yaml - networks: - - source-verify - restart: always - - promtail: - container_name: grafana-promtail-${TAG} - image: grafana/promtail:2.8.2 - volumes: - - /var/log:/var/log - - ./promtail-config.yaml:/etc/promtail/config.yml - command: -config.file=/etc/promtail/config.yml - networks: - - source-verify - restart: always - - grafana: - container_name: grafana-grafana-${TAG} - image: grafana/grafana:9.5.3 - ports: - - "${GRAFANA_EXTERNAL_PORT}:3000" - networks: - - source-verify - volumes: - - type: bind - source: ${GRAFANA_PATH} - target: /var/lib/grafana - environment: - - HTTP_USER="${GRAFANA_HTTP_USER}" - - HTTP_PASS="${GRAFANA_HTTP_PASS}" - restart: always - - 
prometheus: - image: prom/prometheus:latest - container_name: grafana-prometheus-${TAG} - ports: - - "${GRAFANA_PROMETHEUS_EXTERNAL_PORT}:9090" - volumes: - - /etc/prometheus:/etc/prometheus - - prometheus-data:/prometheus - restart: unless-stopped - command: - - "--config.file=/etc/prometheus/prometheus.yml" - networks: - - source-verify - - cadvisor: - image: gcr.io/cadvisor/cadvisor:v0.47.0 - container_name: grafana-cadvisor-${TAG} - volumes: - - /:/rootfs:ro - - /var/run:/var/run:ro - - /sys:/sys:ro - - /var/lib/docker/:/var/lib/docker:ro - - /dev/disk/:/dev/disk:ro - devices: - - /dev/kmsg - restart: unless-stopped - privileged: true - networks: - - source-verify - - node_exporter: - image: quay.io/prometheus/node-exporter:latest - container_name: grafana-node_exporter-${TAG} - command: - - "--path.rootfs=/host" - pid: host - restart: unless-stopped - volumes: - - "/:/host:ro,rslave" - networks: - - source-verify - - nginx_exporter: - image: nginx/nginx-prometheus-exporter:0.10.0 - container_name: grafana-nginx_exporter-${TAG} - command: - - -nginx.scrape-uri - - https://sourcify.dev/nginx_status - restart: unless-stopped - networks: - - source-verify diff --git a/environments/ipfs.yaml b/environments/ipfs.yaml deleted file mode 100644 index 629556a5..00000000 --- a/environments/ipfs.yaml +++ /dev/null @@ -1,32 +0,0 @@ -version: "3.7" - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -networks: - source-verify: - -services: - ipfs: - image: ethereum/source-verify:ipfs-${TAG} - container_name: ipfs-${TAG} - ports: - - "${IPFS_GW_EXTERNAL_PORT}:8080" - - "${IPFS_LIBP2P_EXTERNAL_PORT}:4001" - - "${IPFS_API_EXTERNAL_PORT}:5001" - env_file: - - .env - restart: always - networks: - - source-verify - volumes: - - type: bind - source: $REPOSITORY_PATH - target: /repository - - type: bind - source: ../services/ipfs/ipfs-${TAG}.key - target: /sourcify/ipfs-${TAG}.key diff --git a/environments/loki-config.yaml b/environments/loki-config.yaml deleted file mode 100644 index c5d1b7f0..00000000 --- a/environments/loki-config.yaml +++ /dev/null @@ -1,52 +0,0 @@ -auth_enabled: false - -server: - http_listen_port: 3100 - -common: - path_prefix: /loki - storage: - filesystem: - chunks_directory: /loki/chunks - rules_directory: /loki/rules - replication_factor: 1 - ring: - kvstore: - store: inmemory - -schema_config: - configs: - - from: 2020-10-24 - store: boltdb-shipper - object_store: filesystem - schema: v11 - index: - prefix: index_ - period: 24h - -# Avoid too many outstanding requests https://github.com/grafana/loki/issues/5123 -query_scheduler: - max_outstanding_requests_per_tenant: 4096 -frontend: - max_outstanding_per_tenant: 4096 -query_range: - parallelise_shardable_queries: true -limits_config: - split_queries_by_interval: 15m - max_query_parallelism: 32 - -ruler: - alertmanager_url: http://localhost:9093 - -# By default, Loki will send anonymous, but uniquely-identifiable usage and configuration -# analytics to Grafana Labs. These statistics are sent to https://stats.grafana.org/ -# -# Statistics help us better understand how Loki is used, and they show us performance -# levels for most users. This helps us prioritize features and documentation. -# For more information on what's sent, look at -# https://github.com/grafana/loki/blob/main/pkg/usagestats/stats.go -# Refer to the buildReport method to see what goes into a report. 
-# -# If you would like to disable reporting, uncomment the following lines: -analytics: - reporting_enabled: false \ No newline at end of file diff --git a/environments/monitor.yaml b/environments/monitor.yaml deleted file mode 100644 index b052a98f..00000000 --- a/environments/monitor.yaml +++ /dev/null @@ -1,32 +0,0 @@ -version: "3.7" - -x-project-base: - &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -networks: - source-verify: - -services: - monitor: - <<: *project-base - image: ethereum/source-verify:monitor-${TAG} - container_name: monitor-${TAG} - volumes: - - type: bind - source: $REPOSITORY_PATH_HOST - target: $REPOSITORY_PATH - - type: bind - source: $SOLC_REPO_HOST - target: $SOLC_REPO - read_only: true - - type: bind - source: $SOLJSON_REPO_HOST - target: $SOLJSON_REPO - read_only: true - ports: - - "${MONITOR_EXTERNAL_PORT}:${MONITOR_PORT}" diff --git a/environments/promtail-config.yaml b/environments/promtail-config.yaml deleted file mode 100644 index 3ab5f8a7..00000000 --- a/environments/promtail-config.yaml +++ /dev/null @@ -1,25 +0,0 @@ -server: - http_listen_port: 9080 - -positions: - filename: /var/log/positions.yaml # default - -clients: - - url: http://loki:3100/loki/api/v1/push - -scrape_configs: -- job_name: system - static_configs: - - targets: - - localhost - labels: - job: varlogs - __path__: /var/log/*log -- job_name: nginx - static_configs: - - targets: - - localhost - labels: - job: nginx - __path__: /var/log/nginx/*log - host: promtail \ No newline at end of file diff --git a/environments/repository.yaml b/environments/repository.yaml deleted file mode 100644 index f6a4dbf8..00000000 --- a/environments/repository.yaml +++ /dev/null @@ -1,30 +0,0 @@ -version: "3.7" - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify -networks: - source-verify: -# volumes: -# db: -# h5ai: -# driver: local - -services: - repository: - <<: *project-base - image: ghcr.io/hashgraph/hedera-sourcify:repository-${TAG} - container_name: repository-${TAG} - volumes: - - type: bind - source: $REPOSITORY_PATH_HOST - target: $REPOSITORY_PATH - read_only: true - - type: bind - source: ./docker-config.json - target: /redirects/config.json - ports: - - ${REPOSITORY_SERVER_EXTERNAL_PORT}:80 diff --git a/environments/s3.yaml b/environments/s3.yaml deleted file mode 100644 index d1b2faf2..00000000 --- a/environments/s3.yaml +++ /dev/null @@ -1,21 +0,0 @@ -version: "3.7" - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -networks: - source-verify: - -services: - s3sync: - <<: *project-base - image: ethereum/source-verify:s3-${TAG} - container_name: s3-${TAG} - volumes: - - type: bind - source: $REPOSITORY_PATH_HOST - target: $REPOSITORY_PATH diff --git a/environments/server.yaml b/environments/server.yaml deleted file mode 100644 index ebce40f8..00000000 --- a/environments/server.yaml +++ /dev/null @@ -1,34 +0,0 @@ -version: "3.7" - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -networks: - source-verify: - -services: - server: - <<: *project-base - image: ghcr.io/hashgraph/hedera-sourcify:server-${TAG} - container_name: server-${TAG} - ports: - - "${SERVER_EXTERNAL_PORT}:${SERVER_PORT}" - volumes: - - type: bind - source: $REPOSITORY_PATH_HOST - target: $REPOSITORY_PATH - - type: bind - source: $SOLC_REPO_HOST - target: $SOLC_REPO - - type: bind - source: $SOLJSON_REPO_HOST - target: $SOLJSON_REPO - 
healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:${SERVER_PORT}/health"] - interval: 30s - timeout: 10s - retries: 10 diff --git a/environments/ui.yaml b/environments/ui.yaml deleted file mode 100644 index ab0aed7c..00000000 --- a/environments/ui.yaml +++ /dev/null @@ -1,28 +0,0 @@ -version: "3.7" - -networks: - source-verify: - -x-project-base: &project-base - env_file: - - .env - restart: always - networks: - - source-verify - -services: - ui: - <<: *project-base - image: ghcr.io/hashgraph/hedera-sourcify:ui-${TAG} - container_name: ui-${TAG} - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost"] - interval: 30s - timeout: 10s - retries: 10 - volumes: - - type: bind - source: ./docker-config.json - target: /usr/share/nginx/html/config.json - ports: - - "${UI_EXTERNAL_PORT}:80" diff --git a/package.json b/package.json index 280186d1..0e14b4da 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "build:clean": "rm -rf node_modules/ packages/bytecode-utils/node_modules/ packages/contract-call-decoder/node_modules/ packages/lib-sourcify/node_modules/ && npm install && npx lerna bootstrap && npx lerna run build", "build:lerna": "lerna run build", "test": "TESTING=true DOTENV_CONFIG_PATH=./environments/.env c8 --reporter=none mocha -r dotenv/config --exit", - "test:monitor": "TESTING=true DOTENV_CONFIG_PATH=./environments/.env mocha test/monitor.js --exit", "test:server": "TESTING=true DOTENV_CONFIG_PATH=./environments/.env mocha -r dotenv/config test/server.js --exit", "test:hedera": "mocha test/test-server.js --exit", "test:chains": "TESTING=true mocha test/chains/chain-tests.js --reporter mochawesome --reporter-options reportDir=chain-tests-report,reportFilename=report --exit", @@ -22,13 +21,10 @@ "lint-fix": "eslint . 
--fix --ext .ts", "coverage": "node ./node_modules/c8/bin/c8.js --reporter=lcov --reporter=text-summary ./node_modules/tape/bin/tape ./test/index.js", "coveralls": "npm run coverage && node ./node_modules/coveralls/bin/coveralls.js { - const workflowDate = new Date(item.created_at); - return yesterday.toDateString() === workflowDate.toDateString(); - } -); -assert( - circleCIWorkflowsYesterdayItem !== undefined, - `There is no backup workflow from yesterday` -); - -const workflowId = circleCIWorkflowsYesterdayItem.id; - -// find the job ids of verification-e2e-sepolia and verification-e2e-goerli -const jobsWithArtifacts = [ - "verification-e2e-sepolia", - "verification-e2e-goerli", -]; -const circleCIJobsUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`; -console.log("Fetching jobs from: ", circleCIJobsUrl); -const circleCIJobsUrlResult = await fetch(circleCIJobsUrl); -const circleCIJobsUrlJson = await circleCIJobsUrlResult.json(); -const jobs = circleCIJobsUrlJson.items.filter((job) => - jobsWithArtifacts.includes(job.name) -); - -// for each job id, get the artifact and check its existence on S3 -let existence = false; -for (const job of jobs) { - console.log(`Checking job with name: ${job.name} and id: ${job.id}`); - const circleCIArtifactVerifiedContractUrl = `https://dl.circleci.com/private/output/job/${job.id}/artifacts/0/verified-contracts/saved.json`; - console.log("Fetching artifact from: ", circleCIArtifactVerifiedContractUrl); - const circleCIArtifactVerifiedContractResult = await fetch( - circleCIArtifactVerifiedContractUrl - ); - const circleCIArtifactVerifiedContractJson = - await circleCIArtifactVerifiedContractResult.json(); - const { deploymentAddress, deploymentChain } = - circleCIArtifactVerifiedContractJson; - - if (!deploymentAddress || !deploymentChain) { - throw new Error( - `Deployment address or chain not found in job ${job.id} with name ${job.name}. Deployment address: ${deploymentAddress}, Deployment chain: ${deploymentChain}` - ); - } - - try { - const s3Object = await bareBonesS3.send( - new GetObjectCommand({ - Key: `stable/repository/contracts/full_match/${deploymentChain}/${deploymentAddress}/metadata.json`, - Bucket: "sourcify-backup-s3", - }) - ); - - if (s3Object.ETag?.length > 0) { - existence = true; - break; - } - } catch (e) { - console.log(e); - console.log( - `not in backup: stable/repository/contracts/full_match/${deploymentChain}/${deploymentAddress}/metadata.json` - ); - } -} - -bareBonesS3.destroy(); -assert(existence, "Last nightly backup didn't work"); diff --git a/scripts/decrypt.sh b/scripts/decrypt.sh deleted file mode 100755 index fe3381e6..00000000 --- a/scripts/decrypt.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -gpg --yes --batch --passphrase=$SECRET_KEY ./environments/.env.secrets.gpg -gpg --yes --batch --passphrase=$SECRET_KEY ./services/ipfs/ipfs-stable.key.gpg -gpg --yes --batch --passphrase=$SECRET_KEY ./services/ipfs/ipfs-latest.key.gpg diff --git a/scripts/deploy.sh b/scripts/deploy.sh deleted file mode 100755 index 4bd67554..00000000 --- a/scripts/deploy.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -set -e - -if [ "$CIRCLE_BRANCH" == "staging" ]; then - TAG='latest' - REPO_PATH='/home/gather/staging/' - SERVER='-J source-verify@komputing.org gather@10.10.42.102' -elif [ "$CIRCLE_BRANCH" == "master" ]; then - TAG='stable' - REPO_PATH='/home/sourcify/production/' - SERVER='-J source-verify@komputing.org sourcify@10.10.42.7' -else - echo "Invalid branch $CIRCLE_BRANCH.
Check your config.yml" - exit 1 -fi - -curl "https://raw.githubusercontent.com/ethereum/source-verify/${CIRCLE_BRANCH}/.circleci/ssh.config" > ~/.ssh/config - -ssh $SERVER "\ - mkdir -p scripts && curl https://raw.githubusercontent.com/ethereum/source-verify/${CIRCLE_BRANCH}/scripts/setup.sh > scripts/setup.sh && chmod +x scripts/setup.sh && chown $USER:$USER ./scripts/setup.sh && \ - REPO_PATH='${REPO_PATH}' CIRCLE_BRANCH='${CIRCLE_BRANCH}' TAG='${TAG}' ./scripts/setup.sh" diff --git a/scripts/encrypt.sh b/scripts/encrypt.sh deleted file mode 100755 index 57487142..00000000 --- a/scripts/encrypt.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -gpg --yes --batch --passphrase=$SECRET_KEY -c ./environments/.env.secrets -gpg --yes --batch --passphrase=$SECRET_KEY -c ./services/ipfs/ipfs-stable.key -gpg --yes --batch --passphrase=$SECRET_KEY -c ./services/ipfs/ipfs-latest.key - diff --git a/scripts/find_replace.sh b/scripts/find_replace.sh deleted file mode 100755 index d3e42465..00000000 --- a/scripts/find_replace.sh +++ /dev/null @@ -1,86 +0,0 @@ -#!/bin/bash -set -e - -source ~/.profile - -cd scripts/ -gpg --yes --batch --passphrase=$SECRET_KEY ../environments/.env.secrets.gpg -gpg --yes --batch --passphrase=$SECRET_KEY ../services/ipfs/ipfs-stable.key.gpg -gpg --yes --batch --passphrase=$SECRET_KEY ../services/ipfs/ipfs-latest.key.gpg -source ../environments/.env.secrets - -TAG="$CIRCLE_BRANCH" - -if [ "$CIRCLE_BRANCH" == "staging" ]; then - TAG="latest" - INFURA_ID=$INFURA_ID_STAGING - ALCHEMY_ID=$ALCHEMY_ID_STAGING - IPFS_SECRET=$IPFS_SECRET_STAGING - PUBLIC_IP=$PUBLIC_IP_STAGING - LOCAL_IP=$LOCAL_IP_STAGING - SESSION_SECRET=$SESSION_SECRET_STAGING - ALCHEMY_ID_OPTIMISM=$ALCHEMY_ID_OPTIMISM_STAGING - ALCHEMY_ID_ARBITRUM=$ALCHEMY_ID_ARBITRUM_STAGING - CHAINSAFE_S3_ACCESS_KEY_ID=$CHAINSAFE_S3_ACCESS_KEY_ID_STAGING - CHAINSAFE_S3_SECRET_ACCESS_KEY=$CHAINSAFE_S3_SECRET_ACCESS_KEY_STAGING - ESTUARY_PINNING_SECRET=$ESTUARY_PINNING_SECRET_STAGING - WEB3_STORAGE_PINNING_SECRET=$WEB3_STORAGE_PINNING_SECRET_STAGING - CREATE2_CLIENT_TOKENS=$CREATE2_CLIENT_TOKENS_STAGING - GRAFANA_HTTP_USER=$GRAFANA_HTTP_USER_STAGING - GRAFANA_HTTP_PASS=$GRAFANA_HTTP_PASS_STAGING - ETHERSCAN_API_KEY=$ETHERSCAN_API_KEY_STAGING - ARBISCAN_API_KEY=$ARBISCAN_API_KEY_STAGING - POLYGONSCAN_API_KEY=$POLYGONSCAN_API_KEY_STAGING - BSCSCAN_API_KEY=$BSCSCAN_API_KEY_STAGING - SNOWTRACE_API_KEY=$SNOWTRACE_API_KEY_STAGING - CELOSCAN_API_KEY=$CELOSCAN_API_KEY_STAGING - MOONSCAN_MOONBEAM_API_KEY=$MOONSCAN_MOONBEAM_API_KEY_STAGING - MOONSCAN_MOONRIVER_API_KEY=$MOONSCAN_MOONRIVER_API_KEY_STAGING - BOBASCAN_API_KEY=$BOBASCAN_API_KEY_STAGING - GNOSISSCAN_API_KEY=$GNOSISSCAN_API_KEY_STAGING - OPTIMISMSCAN_API_KEY=$OPTIMISMSCAN_API_KEY_STAGING - CRONOSCAN_API_KEY=$CRONOSCAN_API_KEY_STAGING -fi - -if [ "$CIRCLE_BRANCH" == "master" ]; then - TAG="stable"; - INFURA_ID=$INFURA_ID_MASTER - ALCHEMY_ID=$ALCHEMY_ID_MASTER - IPFS_SECRET=$IPFS_SECRET_MASTER - PUBLIC_IP=$PUBLIC_IP_MASTER - LOCAL_IP=$LOCAL_IP_MASTER - SESSION_SECRET=$SESSION_SECRET_MASTER - ALCHEMY_ID_OPTIMISM=$ALCHEMY_ID_OPTIMISM_MASTER - ALCHEMY_ID_ARBITRUM=$ALCHEMY_ID_ARBITRUM_MASTER - CHAINSAFE_S3_ACCESS_KEY_ID=$CHAINSAFE_S3_ACCESS_KEY_ID_MASTER - CHAINSAFE_S3_SECRET_ACCESS_KEY=$CHAINSAFE_S3_SECRET_ACCESS_KEY_MASTER - ESTUARY_PINNING_SECRET=$ESTUARY_PINNING_SECRET_MASTER - WEB3_STORAGE_PINNING_SECRET=$WEB3_STORAGE_PINNING_SECRET_MASTER - CREATE2_CLIENT_TOKENS=$CREATE2_CLIENT_TOKENS_MASTER - GRAFANA_HTTP_USER=$GRAFANA_HTTP_USER_MASTER - GRAFANA_HTTP_PASS=$GRAFANA_HTTP_PASS_MASTER 
- ETHERSCAN_API_KEY=$ETHERSCAN_API_KEY_MASTER - ARBISCAN_API_KEY=$ARBISCAN_API_KEY_MASTER - POLYGONSCAN_API_KEY=$POLYGONSCAN_API_KEY_MASTER - BSCSCAN_API_KEY=$BSCSCAN_API_KEY_MASTER - SNOWTRACE_API_KEY=$SNOWTRACE_API_KEY_MASTER - CELOSCAN_API_KEY=$CELOSCAN_API_KEY_MASTER - MOONSCAN_MOONBEAM_API_KEY=$MOONSCAN_MOONBEAM_API_KEY_MASTER - MOONSCAN_MOONRIVER_API_KEY=$MOONSCAN_MOONRIVER_API_KEY_MASTER - BOBASCAN_API_KEY=$BOBASCAN_API_KEY_MASTER - GNOSISSCAN_API_KEY=$GNOSISSCAN_API_KEY_MASTER - OPTIMISMSCAN_API_KEY=$OPTIMISMSCAN_API_KEY_MASTER - CRONOSCAN_API_KEY=$CRONOSCAN_API_KEY_MASTER -fi - -for VAR_NAME in INFURA_ID ALCHEMY_ID CF_ACCESS_CLIENT_ID CF_ACCESS_CLIENT_SECRET AWS_S3_ACCESS_KEY_ID AWS_S3_SECRET_ACCESS_KEY IPFS_SECRET NPM_TOKEN PUBLIC_IP LOCAL_IP SESSION_SECRET ALCHEMY_ID_OPTIMISM ALCHEMY_ID_ARBITRUM CHAINSAFE_S3_ACCESS_KEY_ID CHAINSAFE_S3_SECRET_ACCESS_KEY ESTUARY_PINNING_SECRET WEB3_STORAGE_PINNING_SECRET CREATE2_CLIENT_TOKENS GRAFANA_HTTP_USER GRAFANA_HTTP_PASS ETHERSCAN_API_KEY ARBISCAN_API_KEY POLYGONSCAN_API_KEY BSCSCAN_API_KEY SNOWTRACE_API_KEY CELOSCAN_API_KEY MOONSCAN_MOONBEAM_API_KEY MOONSCAN_MOONRIVER_API_KEY BOBASCAN_API_KEY GNOSISSCAN_API_KEY OPTIMISMSCAN_API_KEY CRONOSCAN_API_KEY -do - echo "find_replace.sh: replacing $VAR_NAME" - VAR_VAL=$(eval "echo \${$VAR_NAME}") - # Use @ as delimiter instead of / as values may contain / but @ is unlikely - # sed on macOS has different syntax. Install "gsed" with brew install gnu-sed and use it in place of sed when developing on macOS - sed -i "s@${VAR_NAME}=xxx@${VAR_NAME}=${VAR_VAL}@g" ../environments/.env.$TAG -done - -cp ../environments/.env.$TAG ../environments/.env -rm ../environments/.env.secrets
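A quick note on the `eval` indirection in the loop above: it reads the variable whose name is stored in `$VAR_NAME`. Since these scripts already run under bash, the same lookup can also be written with bash's indirect expansion (a minimal sketch; the value shown is hypothetical):

```
VAR_NAME=INFURA_ID
INFURA_ID=abc123        # hypothetical value sourced from .env.secrets
VAR_VAL=${!VAR_NAME}    # indirect expansion, yields "abc123"
```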
diff --git a/scripts/hedera-reset-previewnet.sh b/scripts/hedera-reset-previewnet.sh deleted file mode 100755 index de9deadb..00000000 --- a/scripts/hedera-reset-previewnet.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -# -# Resets the contract verifications for previewnet by removing the corresponding directories -# in the repository. -# Assumes that the services (server, ui, repository) are shut down -# - -SCRIPTS_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -${SCRIPTS_PATH}/hedera-reset.sh previewnet \ No newline at end of file diff --git a/scripts/hedera-reset-testnet.sh b/scripts/hedera-reset-testnet.sh deleted file mode 100755 index a28c6638..00000000 --- a/scripts/hedera-reset-testnet.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -# -# Resets the contract verifications for testnet by removing the corresponding directories -# in the repository. -# Assumes that the services (server, ui, repository) are shut down -# - -SCRIPTS_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -${SCRIPTS_PATH}/hedera-reset.sh testnet \ No newline at end of file diff --git a/scripts/monitor_ci.js b/scripts/monitor_ci.js deleted file mode 100755 index c95a2384..00000000 --- a/scripts/monitor_ci.js +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env node - -/** - * Part of E2E Monitor test run for staging and master builds - * Script queries the repository to discover whether a contract - * published to {chainID} in CI has been picked up and saved by the - * monitor. - */ - -require("dotenv").config({ path: "environments/.env" }); -const assert = require("assert"); -const fetch = require("node-fetch"); -const util = require("util"); -const log = console.log; - -const chainID = parseInt(process.argv[2]); -const chainName = process.argv[3]; -if (!chainID || !chainName) { - log("Expected arguments: <chainID> <chainName>"); - process.exit(1); -} - -const artifact = require("../metacoin-source-verify/MetaCoin.json"); -const address = artifact.networks[chainID].address; - -async function main() { - const url = `${process.env.REPOSITORY_SERVER_URL}/contracts/full_match/${chainID}/${address}/metadata.json`; - - log(); - log(`>>>>>>>>>>>>>>>>>>>>`); - log(`Fetching: ${url} `); - log(`>>>>>>>>>>>>>>>>>>>>`); - log(); - - const res = await fetch(url); - const text = await res.text(); - - let metadata; - try { - metadata = JSON.parse(text); - } catch (err) { - throw new Error("Metadata not found in repository..."); - } - - assert(metadata.compiler.version !== undefined); - assert(metadata.language === "Solidity"); - - log(); - log(`>>>>>>>>`); - log(`Metadata`); - log(`>>>>>>>>`); - log(); - - log(util.inspect(metadata)); -} - -main() - .then(() => process.exit(0)) - .catch((err) => { - log(err); - process.exit(1); - }); diff --git a/scripts/monitor_ci.sh b/scripts/monitor_ci.sh deleted file mode 100755 index 0bfa5d26..00000000 --- a/scripts/monitor_ci.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -./scripts/find_replace.sh - -# Install E2E test -git clone https://github.com/sourcifyeth/metacoin-source-verify.git -cd metacoin-source-verify -npm ci - -# Publishes sources to IPFS and deploys contracts to Goerli or Sepolia -# Account key and Infura project ID are Circle CI env variable settings. -npm run deploy:$CHAIN_NAME || exit 1 - -echo "Waiting 2 mins" -# Give monitor a chance to detect and save. -sleep 120 -echo "Waited 2 mins" - -# Script which verifies repository write -cd .. -for i in `seq 1 20` -do - # Give monitor a chance to detect and save. - sleep 30 - # Script which verifies repository write - echo "Attempt ${i} of 20" - if (./scripts/monitor_ci.js $CHAIN_ID $CHAIN_NAME); then - echo "Test contract successfully verified!" - exit 0 - fi -done - -echo "Test contract not verified!" -exit 2 diff --git a/scripts/publish_to_npm.sh b/scripts/publish_to_npm.sh deleted file mode 100755 index f4b26bf7..00000000 --- a/scripts/publish_to_npm.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -BYTECODE_UTILS_LOCAL_VERSION=$(cat packages/bytecode-utils/package.json \ - | grep version \ - | head -1 \ - | awk -F: '{ print $2 }' \ - | sed 's/[",]//g' \ - | tr -d '[[:space:]]') - -BYTECODE_UTILS_NPM_VERSION=$(npm view @ethereum-sourcify/bytecode-utils dist-tags.latest) - -CONTRACT_CALL_DECODER_LOCAL_VERSION=$(cat packages/contract-call-decoder/package.json \ - | grep version \ - | head -1 \ - | awk -F: '{ print $2 }' \ - | sed 's/[",]//g' \ - | tr -d '[[:space:]]') - -CONTRACT_CALL_DECODER_NPM_VERSION=$(npm view @ethereum-sourcify/contract-call-decoder dist-tags.latest) - -LIB_SOURCIFY_LOCAL_VERSION=$(cat packages/lib-sourcify/package.json \ - | grep version \ - | head -1 \ - | awk -F: '{ print $2 }' \ - | sed 's/[",]//g' \ - | tr -d '[[:space:]]') - -LIB_SOURCIFY_NPM_VERSION=$(npm view @ethereum-sourcify/lib-sourcify dist-tags.latest) - -npm config set //registry.npmjs.org/:_authToken=${NPM_TOKEN} - -if [ $BYTECODE_UTILS_LOCAL_VERSION = $BYTECODE_UTILS_NPM_VERSION ]; then - echo "@ethereum-sourcify/bytecode-utils:" - echo "Latest npm version is equal to current package version. Bump the version to publish to npm." -else - npm publish packages/bytecode-utils/ --verbose --access=public -fi
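The grep/awk/sed pipelines above scrape the `version` field out of each package.json. For reference, an equivalent and somewhat sturdier lookup (a sketch, assuming `node` is on the PATH, which it is wherever these npm jobs run):

```
LOCAL=$(node -p "require('./packages/bytecode-utils/package.json').version")
REMOTE=$(npm view @ethereum-sourcify/bytecode-utils dist-tags.latest)
[ "$LOCAL" = "$REMOTE" ] || npm publish packages/bytecode-utils/ --access=public
```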
-if [ $CONTRACT_CALL_DECODER_LOCAL_VERSION = $CONTRACT_CALL_DECODER_NPM_VERSION ]; then - echo "@ethereum-sourcify/contract-call-decoder:" - echo "Latest npm version is equal to current package version. Bump the version to publish to npm." -else - npm publish packages/contract-call-decoder/ --verbose --access=public -fi - -if [ $LIB_SOURCIFY_LOCAL_VERSION = $LIB_SOURCIFY_NPM_VERSION ]; then - echo "@ethereum-sourcify/lib-sourcify:" - echo "Latest npm version is equal to current package version. Bump the version to publish to npm." -else - npm publish packages/lib-sourcify/ --verbose --access=public -fi \ No newline at end of file diff --git a/scripts/setup.sh b/scripts/setup.sh deleted file mode 100755 index d20fca3d..00000000 --- a/scripts/setup.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -set -e - -mkdir -p $REPO_PATH -cd $REPO_PATH - -if [[ ! -d sourcify ]]; then - git clone https://github.com/ethereum/sourcify.git sourcify - cd sourcify - git checkout ${CIRCLE_BRANCH} -else - cd sourcify - git fetch - git checkout ${CIRCLE_BRANCH} - git reset --hard origin/${CIRCLE_BRANCH} -fi - -COMPOSE_COMMAND="COMPOSE_PROJECT_NAME=${TAG} docker-compose -f ipfs.yaml -f monitor.yaml -f repository.yaml -f s3.yaml -f server.yaml -f ui.yaml -f grafana.yaml" - -TAG=$TAG ./scripts/find_replace.sh - -cd ./environments -source .env -mkdir -p $REPOSITORY_PATH -docker image prune -f -eval ${COMPOSE_COMMAND} pull -eval COMPOSE_HTTP_TIMEOUT=1200 ${COMPOSE_COMMAND} up -d diff --git a/scripts/test_new_chain_support.sh b/scripts/test_new_chain_support.sh deleted file mode 100755 index c5192b13..00000000 --- a/scripts/test_new_chain_support.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -# Script to run tests on new chain support requests. -# Runs for branches, i.e. pull requests, that are named add-chain-{chainId} - -# from https://stackoverflow.com/questions/55839004/circleci-regex-filtering-match-within-string - -NEW_CHAIN_REGEX='.*(add|update)-chain-(\d+)' - -# if CIRCLE_PR_NUMBER is NOT set (meaning it is not a PR -# from a forked repository), then CIRCLE_BRANCH will -# contain the real branch name -if [ -z "${CIRCLE_PR_NUMBER}" ]; then - REAL_BRANCH_NAME="${CIRCLE_BRANCH}" - -# if CIRCLE_PR_NUMBER is set, then we need to use it -# to fetch the real branch name -else - REAL_BRANCH_NAME=$(curl -s https://api.github.com/repos/"${CIRCLE_PROJECT_USERNAME}"/"${CIRCLE_PROJECT_REPONAME}"/pulls/"${CIRCLE_PR_NUMBER}" | jq -r '.head.ref') -fi - -echo "Real branch name: ${REAL_BRANCH_NAME}" -NEW_CHAIN_ID="$(echo "${REAL_BRANCH_NAME}" | sed -n "s/^.*add-chain-\([0-9]\+\).*$/\1/p")" - -# if no chain id could be extracted, this is not a new-chain -# branch, so there is nothing to test. -if [ -z "${NEW_CHAIN_ID}" ]; then - echo "Not testing since it's not a new chain PR." - -# if it is a new chain PR, test it -else - NEW_CHAIN_ID=${NEW_CHAIN_ID} npm run test:chains -fi \ No newline at end of file
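To make the extraction above concrete, this is how the `sed` expression behaves on two hypothetical branch names:

```
echo "add-chain-51234" | sed -n "s/^.*add-chain-\([0-9]\+\).*$/\1/p"   # prints 51234
echo "fix-readme-typo" | sed -n "s/^.*add-chain-\([0-9]\+\).*$/\1/p"   # prints nothing; tests are skipped
```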
diff --git a/scripts/updateChains.mjs b/scripts/updateChains.mjs deleted file mode 100644 index 11978ec3..00000000 --- a/scripts/updateChains.mjs +++ /dev/null @@ -1,9 +0,0 @@ -import { promises } from "fs"; - -const chainsUrl = "https://chainid.network/chains.json"; -const chainsPath = "src/chains.json"; - -const result = await fetch(chainsUrl); -const chainsList = await result.text(); - -await promises.writeFile(chainsPath, chainsList); diff --git a/services/ipfs/Dockerfile.ipfs b/services/ipfs/Dockerfile.ipfs deleted file mode 100644 index 047e2362..00000000 --- a/services/ipfs/Dockerfile.ipfs +++ /dev/null @@ -1,16 +0,0 @@ -FROM ubuntu:20.04 -RUN apt-get update && apt-get install -y cron curl - -RUN mkdir /sourcify -WORKDIR /sourcify - -RUN curl https://dist.ipfs.io/go-ipfs/v0.16.0/go-ipfs_v0.16.0_linux-amd64.tar.gz > go-ipfs.tar.gz && tar xvfz go-ipfs.tar.gz && cd go-ipfs && ./install.sh - -ADD publish.sh ./publish.sh -ADD init-config.sh ./init-config.sh -ADD entrypoint.sh ./entrypoint.sh -ADD cron.job ./cron.job - -# Terminate and auto-restart container if ipfs daemon crashes -HEALTHCHECK --interval=2m CMD curl http://localhost:8080 || ipfs shutdown && pkill entrypoint.sh -ENTRYPOINT ./entrypoint.sh diff --git a/services/ipfs/README.md b/services/ipfs/README.md deleted file mode 100644 index 1f7e29eb..00000000 --- a/services/ipfs/README.md +++ /dev/null @@ -1,12 +0,0 @@ -When testing, use the following docker run command to prevent reimporting the repository each time. Note that the first import is mandatory. - -Setting `--env DEBUG=true` will: - -- prevent adding the whole repository to ipfs (using the existing one, set with `-v /path_to_local_ipfs_folder:/root/.ipfs`) -- prevent using the private keys -- prevent running cron -- prevent using remote pinning services - -``` -docker run -it --rm --name sourcify_ipfs --env DEBUG=true -v /path_to_local_sourcify_repo:/repository/ -v /path_to_local_ipfs_folder:/root/.ipfs -p 5001:5001 -p 8080:8080 -p 4001:4001 ipfs_sourcify
``` diff --git a/services/ipfs/cron.job b/services/ipfs/cron.job deleted file mode 100644 index 642b22c6..00000000 --- a/services/ipfs/cron.job +++ /dev/null @@ -1,4 +0,0 @@ -SHELL=/bin/bash - -0 */6 * * * /sourcify/publish.sh >> /var/log/cron.log 2>&1 -# This extra line makes it a valid crontab diff --git a/services/ipfs/entrypoint.sh b/services/ipfs/entrypoint.sh deleted file mode 100755 index ab186be5..00000000 --- a/services/ipfs/entrypoint.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - - -if [ ! -f /root/.ipfs/config ] -then - echo "No config found. Initializing..." - bash ./init-config.sh -fi - - -ipfs daemon --enable-pubsub-experiment --enable-namesys-pubsub & - -# Wait for the daemon to initialize -echo "Sleeping 30 seconds" -sleep 30 -echo "Slept 30 seconds"
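The fixed 30-second sleep above is a heuristic; an alternative is to poll until the daemon actually answers. A minimal sketch (using `ipfs swarm peers`, which fails while the daemon is still offline):

```
until ipfs swarm peers >/dev/null 2>&1; do
  echo "Waiting for the ipfs daemon..."
  sleep 2
done
```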
New ipfs hash: $hash" - date - - # Remove the old /contracts in MFS - echo "Removing /contracts from MFS" - ipfs files rm -r /contracts - echo "Removed /contracts from MFS" - - # cp the repo under MFS - echo "Copying $hash to MFS at /contracts" - ipfs files cp -p /ipfs/$hash /contracts - echo "Copied $hash to MFS at /contracts" -fi - - -bash ./publish.sh - -# Write the TAG var to /etc/environment so that the crontab can pick up the variable -echo "TAG=$TAG" > /etc/environment - -if [ -z "$DEBUG" ] -then - crontab cron.job - cron -f -fi - -tail -f /dev/null diff --git a/services/ipfs/init-config.sh b/services/ipfs/init-config.sh deleted file mode 100644 index 189f406f..00000000 --- a/services/ipfs/init-config.sh +++ /dev/null @@ -1,36 +0,0 @@ -ipfs init --profile=badgerds -ipfs config Addresses.Gateway /ip4/0.0.0.0/tcp/8080 - -## Build announced address config according to https://docs.ipfs.io/how-to/configure-node/#addresses. Need to announce the public and local IPs in swarm manually since docker does not know these IPs. -ANNOUNCED_ADDRESSES='[' -if test -n "$PUBLIC_IP" -then - ANNOUNCED_ADDRESSES=''$ANNOUNCED_ADDRESSES'"/ip4/'$PUBLIC_IP'/tcp/'$IPFS_LIBP2P_EXTERNAL_PORT'","/ip4/'$PUBLIC_IP'/udp/'$IPFS_LIBP2P_EXTERNAL_PORT'/quic"' -fi - -if test -n "$LOCAL_IP" -then - if test -n "$PUBLIC_IP" # Add comma if there are addresses in the array already - then - ANNOUNCED_ADDRESSES=$ANNOUNCED_ADDRESSES',' - fi - ANNOUNCED_ADDRESSES=''$ANNOUNCED_ADDRESSES'"/ip4/'$LOCAL_IP'/tcp/'$IPFS_LIBP2P_EXTERNAL_PORT'","/ip4/'$LOCAL_IP'/udp/'$IPFS_LIBP2P_EXTERNAL_PORT'/quic"' -fi - -ANNOUNCED_ADDRESSES=$ANNOUNCED_ADDRESSES']' - -ipfs config Addresses.Announce $ANNOUNCED_ADDRESSES --json -ipfs config --json Experimental.AcceleratedDHTClient true - -# Allow WebUI to be accesible from host -ipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["*"]' -ipfs config --json Addresses.API '["/ip4/0.0.0.0/tcp/5001"]' - -if [ -z "$DEBUG" ] -then - # Add remote pinning services - ipfs pin remote service add estuary https://api.estuary.tech/pinning $ESTUARY_PINNING_SECRET - ipfs pin remote service add web3.storage https://api.web3.storage/ $WEB3_STORAGE_PINNING_SECRET - - ipfs key import main /sourcify/ipfs-${TAG}.key -fi \ No newline at end of file diff --git a/services/ipfs/ipfs-latest.key.gpg b/services/ipfs/ipfs-latest.key.gpg deleted file mode 100644 index 300f589f117e17c2db34aec9ac0a6f9231c33716..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 156 zcmV;N0Av4*4Fm}T0yx`R-c}elr2Eo~0fzKV@ynKpXLF2p4xFec__rC#>I-eznr5Fy z3U&$TanO0jB7aK))Ez@OA5ZwwO}qVxJF{iX%>I4-%yL?7!ghD$nr~5e_$hXwBCj1l zOT0%tEM=uLV@X}jSVh-@&>i=Tcp5tm8|Ph&FlO#%oD$^ep^yNRE0yxF=o1dv;M{rs K`JHL_9L_k>z)ds& diff --git a/services/ipfs/ipfs-stable.key.gpg b/services/ipfs/ipfs-stable.key.gpg deleted file mode 100644 index 4f765d14..00000000 --- a/services/ipfs/ipfs-stable.key.gpg +++ /dev/null @@ -1 +0,0 @@ -  ی&G횔:ҋl?~(8HiyR;a%#i c.9Zw"V^j-i xlj 6l\_H#:Tݩ>suλgnV:QqSS}Z$V=fuX \ No newline at end of file diff --git a/services/ipfs/publish.sh b/services/ipfs/publish.sh deleted file mode 100755 index 0e500457..00000000 --- a/services/ipfs/publish.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash -# Avoid cron job ipfs command not found. -PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -# Update the contract stats. 
-date -echo "Started find in repository for stats" -REPOSITORY_PATH="/repository" -CHAINS=$(find $REPOSITORY_PATH/contracts/full_match/ -mindepth 1 -maxdepth 1 -type d | rev | cut -d "/" -f1 | rev) - -OUTPUT="{ " -for chainId in ${CHAINS}; do - OUTPUT="$OUTPUT \"$chainId\": {" - OUTPUT="$OUTPUT \"full_match\": $(find $REPOSITORY_PATH/contracts/full_match/$chainId/ -mindepth 1 -maxdepth 1 -type d | wc -l)," - OUTPUT="$OUTPUT \"partial_match\": $(find $REPOSITORY_PATH/contracts/partial_match/$chainId/ -mindepth 1 -maxdepth 1 -type d | wc -l)" - - if [[ $chainId == $(echo $CHAINS | rev | cut -d " " -f1 | rev) ]] - then - OUTPUT="$OUTPUT }" - else - OUTPUT="$OUTPUT }," - fi - -done -OUTPUT="$OUTPUT}" - -echo "Finished find in repository for stats" -echo $OUTPUT > $REPOSITORY_PATH/stats.json -date - -# Update the new manifest and stats in MFS. -manifestHash=$(ipfs add -Q /repository/manifest.json) -statsHash=$(ipfs add -Q /repository/stats.json) -# rm old files from MFS -ipfs files rm /manifest.json -ipfs files rm /stats.json -# add new manifest and stats -ipfs files cp -p /ipfs/$manifestHash /manifest.json -ipfs files cp -p /ipfs/$statsHash /stats.json - -# Publish the new root hash -rootHash=$(ipfs files stat / | head -n 1) - -echo "Publishing rootHash $rootHash under ipns key" -ipfs -D name publish --key=main $rootHash -echo "Published rootHash $rootHash under ipns key" - -timestamp=$(date -u +"%Y-%m-%dT%H:%MZ") -pinName=sourcify-$TAG-$timestamp - -if [ -z "$DEBUG" ] -then - echo "Pinning to remote services" - ipfs pin remote add --service=estuary $rootHash --background --name=$pinName - ipfs pin remote add --service=web3.storage $rootHash --background --name=$pinName - echo "Pinned to remote services (running in background)" -fi \ No newline at end of file diff --git a/services/s3sync/Dockerfile.s3 b/services/s3sync/Dockerfile.s3 deleted file mode 100644 index fe886377..00000000 --- a/services/s3sync/Dockerfile.s3 +++ /dev/null @@ -1,10 +0,0 @@ -FROM ubuntu:18.04 -WORKDIR /app -RUN apt-get update && apt-get install -y cron curl unzip -RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" -RUN unzip awscliv2.zip -RUN ./aws/install -ADD ./login_sync_s3.sh ./login_sync_s3.sh -ADD ./entrypoint.sh ./entrypoint.sh -RUN chmod +x ./entrypoint.sh -ENTRYPOINT ./entrypoint.sh diff --git a/services/s3sync/entrypoint.sh b/services/s3sync/entrypoint.sh deleted file mode 100644 index e69b29ac..00000000 --- a/services/s3sync/entrypoint.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# Start the run-once job. -echo "Docker container has been started" - -# Set up a cron schedule.
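One detail worth noting before the crontab entry below: cron starts jobs with an almost empty environment, which is why every credential is inlined into the schedule line itself rather than exported here. A minimal sketch of the same pattern (FOO is a hypothetical variable):

```
echo "0 3 * * * FOO=bar /app/login_sync_s3.sh" > scheduler.txt
crontab scheduler.txt
```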
-echo "0 3 * * * AWS_S3_ACCESS_KEY_ID=$AWS_S3_ACCESS_KEY_ID AWS_S3_SECRET_ACCESS_KEY=$AWS_S3_SECRET_ACCESS_KEY CHAINSAFE_S3_ACCESS_KEY_ID=$CHAINSAFE_S3_ACCESS_KEY_ID CHAINSAFE_S3_SECRET_ACCESS_KEY=$CHAINSAFE_S3_SECRET_ACCESS_KEY BUCKET_NAME=$BUCKET_NAME TAG=$TAG /app/login_sync_s3.sh >> /var/log/cron.log 2>&1 -# This extra line makes it a valid cron" > scheduler.txt - -mkdir /app/logs - -crontab scheduler.txt -cron -f diff --git a/services/s3sync/login_sync_s3.sh b/services/s3sync/login_sync_s3.sh deleted file mode 100755 index 58b0d207..00000000 --- a/services/s3sync/login_sync_s3.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -/usr/local/bin/aws --version -/usr/local/bin/aws configure set aws_access_key_id $AWS_S3_ACCESS_KEY_ID -/usr/local/bin/aws configure set aws_secret_access_key $AWS_S3_SECRET_ACCESS_KEY -/usr/local/bin/aws configure set s3.max_concurrent_requests 10 - -echo "$(date) Syncing AWS at $BUCKET_NAME/$TAG" -/usr/local/bin/aws s3 sync --quiet /app/repository $BUCKET_NAME/$TAG -echo "$(date) Sync AWS complete" - -# Sync to Chainsafe Storage S3 -# /usr/local/bin/aws configure set aws_access_key_id $CHAINSAFE_S3_ACCESS_KEY_ID -# /usr/local/bin/aws configure set aws_secret_access_key $CHAINSAFE_S3_SECRET_ACCESS_KEY -# /usr/local/bin/aws configure set s3.max_concurrent_requests 2 # Chainsafe Storage S3 concurrency is limited - -# echo "$(date) Syncing Chainsafe S3 at $BUCKET_NAME" -# /usr/local/bin/aws s3 sync /app/repository $BUCKET_NAME/$TAG --endpoint-url https://buckets.chainsafe.io > "/app/logs/s3sync-chainsafe-$(date '+%Y-%m-%dZ%H:%M').log" 2>&1 -# echo "$(date) Syncing Chainsafe S3 complete" diff --git a/src/Dockerfile.monitor b/src/Dockerfile.monitor deleted file mode 100644 index b5676e83..00000000 --- a/src/Dockerfile.monitor +++ /dev/null @@ -1,12 +0,0 @@ -FROM node:16 -WORKDIR /home/app - -# Install puppeteer dependencies. -RUN apt-get update && apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget - -COPY packages ./packages -COPY src ./src -COPY *.json ./ -RUN npm install && npx lerna bootstrap --ci && npx lerna run build - -CMD ["npm", "run", "monitor:start"] diff --git a/src/monitor/gateway.ts b/src/monitor/gateway.ts deleted file mode 100644 index 9516b34a..00000000 --- a/src/monitor/gateway.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { SourceOrigin } from "./util"; - -export declare interface IGateway { - worksWith: (origin: SourceOrigin) => boolean; - createUrl: (fetchId: string) => string; - createFallbackUrl: (fetchId: string) => string | undefined; - baseUrl: string; - fallbackUrl?: string; -} - -export class SimpleGateway implements IGateway { - private origins: SourceOrigin[]; - baseUrl: string; - fallbackUrl: string | undefined; // A backup gateway in case the local ipfs node fails. 
- - constructor(origins: SourceOrigin[], baseUrl: string, fallbackUrl?: string) { - this.origins = origins; - this.baseUrl = baseUrl; - if (fallbackUrl) this.fallbackUrl = fallbackUrl; - } - - worksWith(origin: SourceOrigin): boolean { - return this.origins.includes(origin); - } - - createUrl(fetchId: string): string { - return this.baseUrl + fetchId; - } - - createFallbackUrl(fetchId: string): string | undefined { - return this.fallbackUrl && this.fallbackUrl + fetchId; - } -} diff --git a/src/monitor/monitor.ts b/src/monitor/monitor.ts deleted file mode 100755 index c563862f..00000000 --- a/src/monitor/monitor.ts +++ /dev/null @@ -1,328 +0,0 @@ -import { SourceAddress } from "./util"; -import { TransactionResponse, getCreateAddress } from "ethers"; -import SourceFetcher from "./source-fetcher"; -import assert from "assert"; -import { EventEmitter } from "stream"; -import { decode as bytecodeDecode } from "@ethereum-sourcify/bytecode-utils"; -import { SourcifyEventManager } from "../common/SourcifyEventManager/SourcifyEventManager"; -import { - CheckedContract, - SourcifyChain, -} from "@ethereum-sourcify/lib-sourcify"; -import { services } from "../server/services/services"; -import { IRepositoryService } from "../server/services/RepositoryService"; -import { IVerificationService } from "../server/services/VerificationService"; -import { monitoredChainArray } from "../sourcify-chains"; -import { logger } from "../common/loggerLoki"; -import "../common/SourcifyEventManager/listeners/logger"; - -const BLOCK_PAUSE_FACTOR = - parseFloat(process.env.BLOCK_PAUSE_FACTOR || "") || 1.1; -assert(BLOCK_PAUSE_FACTOR > 1); -const BLOCK_PAUSE_UPPER_LIMIT = - parseInt(process.env.BLOCK_PAUSE_UPPER_LIMIT || "") || 30 * 1000; // default: 30 seconds -const BLOCK_PAUSE_LOWER_LIMIT = - parseInt(process.env.BLOCK_PAUSE_LOWER_LIMIT || "") || 0.5 * 1000; // default: 0.5 seconds - -function createsContract(tx: TransactionResponse): boolean { - return !tx.to; -} - -/** - * A monitor that periodically checks for new contracts on a single chain. - */ -class ChainMonitor extends EventEmitter { - private sourcifyChain: SourcifyChain; - private sourceFetcher: SourceFetcher; - private verificationService: IVerificationService; - private repositoryService: IRepositoryService; - private running: boolean; - - private getBytecodeRetryPause: number; - private getBlockPause: number; - private initialGetBytecodeTries: number; - - constructor( - sourcifyChain: SourcifyChain, - sourceFetcher: SourceFetcher, - verificationService: IVerificationService, - repositoryService: IRepositoryService - ) { - super(); - this.sourcifyChain = sourcifyChain; - this.sourceFetcher = sourceFetcher; - this.verificationService = verificationService; - this.repositoryService = repositoryService; - this.running = false; - - this.getBytecodeRetryPause = - parseInt(process.env.GET_BYTECODE_RETRY_PAUSE || "") || 5 * 1000; - this.getBlockPause = - parseInt(process.env.GET_BLOCK_PAUSE || "") || 10 * 1000; - this.initialGetBytecodeTries = - parseInt(process.env.INITIAL_GET_BYTECODE_TRIES || "") || 3; - } - - start = async (): Promise<void> => { - this.running = true; - const rawStartBlock = - process.env[`MONITOR_START_${this.sourcifyChain.chainId}`]; - - try { - const lastBlockNumber = await this.sourcifyChain.getBlockNumber(); - const startBlock = - rawStartBlock !== undefined ?
parseInt(rawStartBlock) : lastBlockNumber; - - SourcifyEventManager.trigger("Monitor.Started", { - chainId: this.sourcifyChain.chainId.toString(), - lastBlockNumber, - startBlock, - }); - this.processBlock(startBlock); - } catch (err) { - SourcifyEventManager.trigger("Monitor.Error.CantStart", { - chainId: this.sourcifyChain.chainId.toString(), - message: "Couldn't find a working RPC node.", - }); - } - }; - - /** - * Stops the monitor after executing all pending requests. - */ - stop = (): void => { - SourcifyEventManager.trigger( - "Monitor.Stopped", - this.sourcifyChain.chainId.toString() - ); - this.running = false; - }; - - private processBlock = (blockNumber: number) => { - this.sourcifyChain - .getBlock(blockNumber, true) - .then((block) => { - if (!block) { - this.adaptBlockPause("increase"); - return; - } - - this.adaptBlockPause("decrease"); - - SourcifyEventManager.trigger("Monitor.ProcessingBlock", { - blockNumber, - chainId: this.sourcifyChain.chainId.toString(), - getBlockPause: this.getBlockPause, - }); - - for (const tx of block.prefetchedTransactions) { - if (createsContract(tx)) { - const address = getCreateAddress(tx); - if (this.isVerified(address)) { - SourcifyEventManager.trigger("Monitor.AlreadyVerified", { - address, - chainId: this.sourcifyChain.chainId.toString(), - }); - this.emit( - "contract-already-verified", - this.sourcifyChain.chainId, - address - ); - } else { - SourcifyEventManager.trigger("Monitor.NewContract", { - address, - chainId: this.sourcifyChain.chainId.toString(), - }); - this.processBytecode( - tx.hash, - address, - this.initialGetBytecodeTries - ); - } - } - } - - blockNumber++; - }) - .catch((err) => { - SourcifyEventManager.trigger("Monitor.Error.ProcessingBlock", { - message: err.message, - stack: err.stack, - chainId: this.sourcifyChain.chainId.toString(), - blockNumber, - }); - }) - .finally(() => { - this.mySetTimeout(this.processBlock, this.getBlockPause, blockNumber); - }); - }; - - private isVerified(address: string): boolean { - const foundArr = this.repositoryService.checkByChainAndAddress( - address, - this.sourcifyChain.chainId.toString() - ); - return !!foundArr.length; - } - - private adaptBlockPause = (operation: "increase" | "decrease") => { - const factor = - operation === "increase" ? 
BLOCK_PAUSE_FACTOR : 1 / BLOCK_PAUSE_FACTOR; - this.getBlockPause *= factor; - this.getBlockPause = Math.min(this.getBlockPause, BLOCK_PAUSE_UPPER_LIMIT); - this.getBlockPause = Math.max(this.getBlockPause, BLOCK_PAUSE_LOWER_LIMIT); - }; - - private processBytecode = ( - creatorTxHash: string, - address: string, - retriesLeft: number - ): void => { - if (retriesLeft-- <= 0) { - return; - } - - this.sourcifyChain - .getBytecode(address) - .then((bytecode) => { - if (bytecode === "0x") { - this.mySetTimeout( - this.processBytecode, - this.getBytecodeRetryPause, - creatorTxHash, - address, - retriesLeft - ); - return; - } - - try { - const cborData = bytecodeDecode(bytecode); - const metadataAddress = SourceAddress.fromCborData(cborData); - this.sourceFetcher.assemble( - metadataAddress, - (contract: CheckedContract) => { - this.verifyAndStore(contract, address, creatorTxHash); - } - ); - } catch (err: any) { - SourcifyEventManager.trigger("Monitor.Error.ProcessingBytecode", { - message: err.message, - stack: err.stack, - chainId: this.sourcifyChain.chainId.toString(), - address, - }); - } - }) - .catch((err) => { - SourcifyEventManager.trigger("Monitor.Error.GettingBytecode", { - message: err.message, - stack: err.stack, - chainId: this.sourcifyChain.chainId.toString(), - address, - }); - this.mySetTimeout( - this.processBytecode, - this.getBytecodeRetryPause, - creatorTxHash, - address, - retriesLeft - ); - }); - }; - - private verifyAndStore = async ( - contract: CheckedContract, - address: string, - creatorTxHash: string - ) => { - try { - const match = await this.verificationService.verifyDeployed( - contract, - this.sourcifyChain.chainId.toString(), - address, - /* undefined, */ - creatorTxHash - ); - await this.repositoryService.storeMatch(contract, match); - this.emit( - "contract-verified-successfully", - this.sourcifyChain.chainId, - address - ); - } catch (err: any) { - SourcifyEventManager.trigger("Monitor.Error.VerifyError", { - message: err.message, - stack: err.stack, - chainId: this.sourcifyChain.chainId.toString(), - address, - }); - } - }; - - private mySetTimeout = ( - handler: TimerHandler, - timeout: number, - ...args: any[] - ) => { - if (this.running) { - setTimeout(handler, timeout, ...args); - } - }; -} - -/** - * A monitor that periodically checks for new contracts on designated chains. - */ -export default class Monitor extends EventEmitter { - private chainMonitors: ChainMonitor[]; - private sourceFetcher = new SourceFetcher(); - - constructor(chainsToMonitor?: SourcifyChain[]) { - super(); - chainsToMonitor = chainsToMonitor?.length - ? chainsToMonitor - : monitoredChainArray; // default to all monitored chains - this.chainMonitors = chainsToMonitor.map( - (sourcifyChain) => - new ChainMonitor( - sourcifyChain, - this.sourceFetcher, - services.verification, - services.repository - ) - ); - this.chainMonitors.forEach((cm) => { - cm.on("contract-verified-successfully", (chainId, address) => { - this.emit("contract-verified-successfully", chainId, address); - }); - cm.on("contract-already-verified", (chainId, address) => { - this.emit("contract-already-verified", chainId, address); - }); - }); - } - - /** - * Starts the monitor on all the designated chains. - */ - start = async (): Promise<void> => { - const promises = []; - for (const cm of this.chainMonitors) { - promises.push(cm.start()); - } - await Promise.all(promises); - }; - - /** - * Stops the monitor after executing all the pending requests.
- */ - stop = (): void => { - this.chainMonitors.forEach((cm) => cm.stop()); - this.sourceFetcher.stop(); - }; -} - -if (require.main === module) { - const monitor = new Monitor(); - monitor.start(); -} diff --git a/src/monitor/pending-contract.ts b/src/monitor/pending-contract.ts deleted file mode 100644 index 5d3f9003..00000000 --- a/src/monitor/pending-contract.ts +++ /dev/null @@ -1,107 +0,0 @@ -import SourceFetcher from "./source-fetcher"; -import { SourceAddress } from "./util"; -import { - CheckedContract, - isEmpty, - Metadata, - StringMap, -} from "@ethereum-sourcify/lib-sourcify"; -import { SourcifyEventManager } from "../common/SourcifyEventManager/SourcifyEventManager"; -import { id as keccak256str } from "ethers"; - -type PendingSource = { - keccak256: string; - content?: string; - urls: string[]; - name: string; -}; -interface PendingSourceMap { - [keccak256: string]: PendingSource; -} - -export default class PendingContract { - private metadata: Metadata | undefined; - private pendingSources: PendingSourceMap = {}; - private fetchedSources: StringMap = {}; - private sourceFetcher: SourceFetcher; - private callback: (contract: CheckedContract) => void; - - constructor( - sourceFetcher: SourceFetcher, - callback: (checkedContract: CheckedContract) => void - ) { - this.sourceFetcher = sourceFetcher; - this.callback = callback; - } - - /** - * Assembles this contract by first fetching its metadata and then fetching all the sources listed in the metadata. - * - * @param metadataAddress an object representing the location of the contract metadata - */ - assemble(metadataAddress: SourceAddress) { - this.sourceFetcher.subscribe(metadataAddress, this.addMetadata); - } - - private addMetadata = (rawMetadata: string) => { - this.metadata = JSON.parse(rawMetadata) as Metadata; - - for (const name in this.metadata.sources) { - const source = JSON.parse(JSON.stringify(this.metadata.sources[name])); - source.name = name; - - if (source.content) { - this.fetchedSources[name] = source.content; - continue; - } else if (!source.keccak256) { - SourcifyEventManager.trigger("Monitor.Error", { - message: `Source ${name} has no keccak256 nor content`, - }); - break; - } - this.pendingSources[source.keccak256] = source; - - const sourceAddresses: SourceAddress[] = []; - for (const url of source.urls) { - const sourceAddress = SourceAddress.fromUrl(url); - if (!sourceAddress) { - SourcifyEventManager.trigger("Monitor.Error", { - message: `Could not determine source file location for ${name} at ${url}`, - }); - continue; - } - sourceAddresses.push(sourceAddress); - - this.sourceFetcher.subscribe(sourceAddress, (sourceContent: string) => { - this.addFetchedSource(sourceContent); - // once source is resolved from one endpoint, others don't have to be pinged anymore, so delete them - for (const deletableSourceAddress of sourceAddresses) { - this.sourceFetcher.unsubscribe(deletableSourceAddress); - } - }); - } - } - - if (isEmpty(this.pendingSources)) { - const contract = new CheckedContract(this.metadata, this.fetchedSources); - this.callback(contract); - } - }; - - private addFetchedSource = (sourceContent: string) => { - const hash = keccak256str(sourceContent); - const source = this.pendingSources[hash]; - - if (!source || source.name in this.fetchedSources) { - return; - } - - delete this.pendingSources[hash]; - this.fetchedSources[source.name] = sourceContent; - - if (isEmpty(this.pendingSources) && this.metadata) { - const contract = new CheckedContract(this.metadata, this.fetchedSources); - 
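// All pending sources have been fetched at this point; hand the assembled contract to the monitor's callback.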
this.callback(contract); - } - }; -} diff --git a/src/monitor/source-fetcher.ts b/src/monitor/source-fetcher.ts deleted file mode 100644 index 1dfab0ed..00000000 --- a/src/monitor/source-fetcher.ts +++ /dev/null @@ -1,302 +0,0 @@ -import { CheckedContract } from "@ethereum-sourcify/lib-sourcify"; -import { StatusCodes } from "http-status-codes"; -import nodeFetch from "node-fetch"; -import { SourcifyEventManager } from "../common/SourcifyEventManager/SourcifyEventManager"; -import { IGateway, SimpleGateway } from "./gateway"; -import PendingContract from "./pending-contract"; -import { SourceAddress, FetchedFileCallback } from "./util"; - -const STARTING_INDEX = 0; -const NO_PAUSE = 0; - -class Subscription { - sourceAddress: SourceAddress; - fetchUrl: string; - fallbackFetchUrl: string | undefined; - beingProcessed = false; - subscribers: Array<FetchedFileCallback> = []; - - constructor( - sourceAddress: SourceAddress, - fetchUrl: string, - fallbackFetchUrl?: string - ) { - this.sourceAddress = sourceAddress; - this.fetchUrl = fetchUrl; - this.fallbackFetchUrl = fallbackFetchUrl; - } - - useFallbackUrl() { - this.fetchUrl = this.fallbackFetchUrl || this.fetchUrl; - } -} - -declare interface SubscriptionMap { - [hash: string]: Subscription; -} - -declare interface TimestampMap { - [hash: string]: Date; -} - -/** - * A fetcher of contract source files (metadata and Solidity). - * Allows assembling a contract (collecting its sources) from the address of its metadata. - * Allows subscribing to individual sources. - */ -export default class SourceFetcher { - gatewayFetchers = [ - new GatewayFetcher( - new SimpleGateway( - ["ipfs"], - process.env.IPFS_GATEWAY || "https://ipfs.io/ipfs/", - "https://cloudflare-ipfs.com/ipfs/" - ) - ), - new GatewayFetcher( - new SimpleGateway( - ["bzzr0", "bzzr1"], - "https://swarm-gateways.net/bzz-raw:/" - ) - ), - ]; - - /** - * Tells the fetcher not to make new requests. Doesn't affect pending requests. - */ - stop(): void { - this.gatewayFetchers.forEach((gatewayFetcher) => gatewayFetcher.stop()); - } - - private findGatewayFetcher(sourceAddress: SourceAddress) { - for (const gatewayFetcher of this.gatewayFetchers) { - if (gatewayFetcher.worksWith(sourceAddress)) { - return gatewayFetcher; - } - } - - throw new Error(`Gateway not found for ${sourceAddress.origin}`); - } - - /** - * Fetches the requested source and executes the callback on the fetched content. - * - * @param sourceAddress an object representing the location of the source file - * @param callback the callback to be called on the fetched content - */ - subscribe(sourceAddress: SourceAddress, callback: FetchedFileCallback): void { - const gatewayFetcher = this.findGatewayFetcher(sourceAddress); - gatewayFetcher.subscribe(sourceAddress, callback); - } - - /** - * Stop fetching the source specified by the provided sourceAddress. - * - * @param sourceAddress - */ - unsubscribe(sourceAddress: SourceAddress): void { - const gatewayFetcher = this.findGatewayFetcher(sourceAddress); - gatewayFetcher.unsubscribe(sourceAddress); - } - - /** - * Begins the process of assembling a contract's sources. This is done by fetching the metadata from the address provided. - * After assembling the contract, the provided callback is called.
- * - * @param metadataAddress an object representing the location of the contract metadata - * @param callback the callback to be called on the contract once it is assembled - */ - assemble( - metadataAddress: SourceAddress, - callback: (contract: CheckedContract) => void - ) { - const contract = new PendingContract(this, callback); - contract.assemble(metadataAddress); - } -} - -class GatewayFetcher { - private subscriptions: SubscriptionMap = {}; - private timestamps: TimestampMap = {}; - private fileCounter = 0; - private subscriptionCounter = 0; - private running = true; - - private fetchTimeout: number; // when to terminate a request - private fetchPause: number; // how much time to wait between two requests - private cleanupTime: number; // how much time has to pass before a source is forgotten - - private gateway: IGateway; - - constructor(gateway: IGateway) { - this.gateway = gateway; - this.fetchTimeout = - parseInt(process.env.MONITOR_FETCH_TIMEOUT || "") || 5 * 60 * 1000; - this.fetchPause = - parseInt(process.env.MONITOR_FETCH_PAUSE || "") || 1 * 1000; - this.cleanupTime = - parseInt(process.env.MONITOR_CLEANUP_PERIOD || "") || 30 * 60 * 1000; - this.fetch([], STARTING_INDEX); - } - - stop(): void { - this.running = false; - } - - private fetch = (sourceHashes: string[], index: number): void => { - if (index >= sourceHashes.length) { - const newSourceHashes = Object.keys(this.subscriptions); // make a copy so that subscriptions can be freely cleared if necessary - this.mySetTimeout(this.fetch, NO_PAUSE, newSourceHashes, STARTING_INDEX); - return; - } - - const sourceHash = sourceHashes[index]; - - if (this.isTimeUp(sourceHash)) { - this.cleanup(sourceHash); - } - - const subscription = this.subscriptions[sourceHash]; - if (!subscription || subscription.beingProcessed) { - this.mySetTimeout(this.fetch, NO_PAUSE, sourceHashes, index + 1); - return; - } - - subscription.beingProcessed = true; - nodeFetch(subscription.fetchUrl, { timeout: this.fetchTimeout }) - .then((resp) => { - resp.text().then((text) => { - if (resp.status === StatusCodes.OK) { - this.notifySubscribers(sourceHash, text); - } - }); - }) - .catch((err) => { - if (!subscription.fallbackFetchUrl) { - return Promise.resolve(); - } - SourcifyEventManager.trigger("SourceFetcher.UsingFallback", { - fetchUrl: subscription.fetchUrl, - fallbackUrl: subscription.fallbackFetchUrl, - }); - // fall back to external ipfs gateway - subscription.useFallbackUrl(); - - return nodeFetch(subscription.fetchUrl, { - timeout: this.fetchTimeout, - }).then((resp) => { - resp.text().then((text) => { - if (resp.status === StatusCodes.OK) { - this.notifySubscribers(sourceHash, text); - } - }); - }); - }) - .catch((err) => - SourcifyEventManager.trigger("SourceFetcher.FetchFailed", { - fetchUrl: subscription.fetchUrl, - sourceHash, - }) - ) - .finally(() => { - subscription.beingProcessed = false; - }); - - this.mySetTimeout(this.fetch, this.fetchPause, sourceHashes, index + 1); - }; - - private mySetTimeout = ( - handler: TimerHandler, - timeout: number, - ...args: any[] - ) => { - if (this.running) { - setTimeout(handler, timeout, ...args); - } - }; - - private notifySubscribers(id: string, file: string) { - if (!(id in this.subscriptions)) { - return; - } - - const subscription = this.subscriptions[id]; - this.cleanup(id); - - SourcifyEventManager.trigger("SourceFetcher.FetchingSuccessful", { - fetchUrl: subscription.fetchUrl, - id, - subscriberCount: subscription.subscribers.length, - }); - - subscription.subscribers.forEach((callback) 
=> callback(file)); - } - - worksWith(sourceAddress: SourceAddress): boolean { - return this.gateway.worksWith(sourceAddress.origin); - } - - subscribe(sourceAddress: SourceAddress, callback: FetchedFileCallback): void { - const sourceHash = sourceAddress.getSourceHash(); - const fetchUrl = this.gateway.createUrl(sourceAddress.id); - let fallbackFetchUrl; - if (this.gateway.fallbackUrl) - fallbackFetchUrl = this.gateway.createFallbackUrl(sourceAddress.id); - if (!(sourceHash in this.subscriptions)) { - this.subscriptions[sourceHash] = new Subscription( - sourceAddress, - fetchUrl, - fallbackFetchUrl - ); - this.fileCounter++; - } - - this.timestamps[sourceHash] = new Date(); - this.subscriptions[sourceHash].subscribers.push(callback); - - this.subscriptionCounter++; - SourcifyEventManager.trigger("SourceFetcher.NewSubscription", { - fetchUrl: this.subscriptions[sourceHash].fetchUrl, - sourceHash, - filesPending: this.fileCounter, - subscriptions: this.subscriptionCounter, - }); - } - - unsubscribe(sourceAddress: SourceAddress): void { - const sourceHash = sourceAddress.getSourceHash(); - this.cleanup(sourceHash); - } - - private cleanup(sourceHash: string): void { - const subscription = this.subscriptions[sourceHash]; - if (!subscription) { - return; - } - const fetchUrl = subscription.fetchUrl; - - const subscribers = Object.keys(subscription.subscribers); - const subscriptionsDelta = subscribers.length; - delete this.subscriptions[sourceHash]; - - delete this.timestamps[sourceHash]; - - this.fileCounter--; - this.subscriptionCounter -= subscriptionsDelta; - SourcifyEventManager.trigger("SourceFetcher.Cleanup", { - fetchUrl: fetchUrl, - sourceHash, - filesPending: this.fileCounter, - subscriptions: this.subscriptionCounter, - }); - } - - private isTimeUp(sourceHash: string): boolean { - const subscription = this.subscriptions[sourceHash]; - if (!subscription || subscription.beingProcessed) { - return false; - } - const timestamp = this.timestamps[sourceHash]; - return timestamp && timestamp.getTime() + this.cleanupTime < Date.now(); - } -} diff --git a/src/monitor/util.ts b/src/monitor/util.ts deleted file mode 100644 index 00a8fb33..00000000 --- a/src/monitor/util.ts +++ /dev/null @@ -1,54 +0,0 @@ -export type SourceOrigin = "ipfs" | "bzzr1" | "bzzr0"; - -export type FetchedFileCallback = (fetchedFile: string) => any; - -interface Prefix { - regex: RegExp; - origin: SourceOrigin; -} - -const PREFIXES: Prefix[] = [ - { origin: "ipfs", regex: /dweb:\/ipfs\/{1,2}/ }, - { origin: "bzzr1", regex: /bzz-raw:\/{1,2}/ }, -]; - -const CBOR_SOURCES: SourceOrigin[] = ["ipfs", "bzzr0", "bzzr1"]; - -export class SourceAddress { - origin: SourceOrigin; - id: string; - - constructor(origin: SourceOrigin, id: string) { - this.origin = origin; - this.id = id; - } - - /** - * @returns a unique identifier of this source address of format ipfs-QmawU3NM1WNWkBauRudYCiFvuFE1tTLHB98akyBvb9UWwA - */ - getSourceHash(): string { - return this.origin + "-" + this.id; - } - - static fromUrl(url: string): SourceAddress | null { - for (const prefix of PREFIXES) { - const hash = url.replace(prefix.regex, ""); - if (hash !== url) { - return new SourceAddress(prefix.origin, hash); - } - } - return null; - } - - static fromCborData(cborData: any): SourceAddress { - for (const cborSource of CBOR_SOURCES) { - const metadataId = cborData[cborSource]; - if (metadataId) { - return new SourceAddress(cborSource, metadataId); - } - } - - const msg = `Unsupported metadata file format: ${Object.keys(cborData)}`; - throw new 
Error(msg); - } -} diff --git a/test/monitor.js b/test/monitor.js deleted file mode 100644 index 421da6f6..00000000 --- a/test/monitor.js +++ /dev/null @@ -1,275 +0,0 @@ -process.env.TESTING = "true"; -process.env.MOCK_REPOSITORY = "./mockRepository"; -process.env.IPFS_GATEWAY = "https://ipfs.io/ipfs/"; -const GANACHE_PORT = 8545; -const ContractWrapper = require("./helpers/ContractWrapper"); -const ganache = require("ganache"); -const ipfs = require("ipfs-core"); -const rimraf = require("rimraf"); -const chai = require("chai"); -const Monitor = require("../dist/monitor/monitor").default; -const { waitSecs } = require("./helpers/helpers"); -const fs = require("fs"); -const path = require("path"); -const { - id: keccak256str, - JsonRpcProvider, - getCreateAddress, - Network, -} = require("ethers"); -const { EventEmitter } = require("stream"); -const { LOCAL_CHAINS } = require("../dist/sourcify-chains"); - -class MonitorWrapper extends EventEmitter { - constructor() { - super(); - this.repository = process.env.MOCK_REPOSITORY; - this.monitor = new Monitor(LOCAL_CHAINS.slice(0, 1)); // Ganache - this.monitor.on("contract-verified-successfully", (chainId, address) => { - this.emit("contract-verified-successfully", chainId, address); - }); - this.monitor.on("contract-already-verified", (chainId, address) => { - this.emit("contract-already-verified", chainId, address); - }); - this.chainId = this.monitor.chainMonitors[0].sourcifyChain.chainId; - } - - async start(startBlock) { - const envVar = `MONITOR_START_${this.chainId}`; - this.envVarStash = process.env[envVar]; - if (startBlock !== undefined) { - process.env[envVar] = startBlock; - } - await this.monitor.start(); - } - - stop() { - const envVar = `MONITOR_START_${this.chainId}`; - delete process.env[envVar]; - if (this.envVarStash) { - process.env[envVar] = this.envVarStash; - } - this.monitor.stop(); - rimraf.sync(this.repository); - } - - getPathPrefix(address) { - return path.join( - this.repository, - "contracts", - "full_match", - this.chainId.toString(), - address - ); - } - - getAddressMetadataPath(address) { - const pathPrefix = this.getPathPrefix(address); - return path.join(pathPrefix, "metadata.json"); - } - - assertFilesNotStored(address, contractWrapper, expectedMtime) { - const addressMetadataPath = this.getAddressMetadataPath(address); - assertEqualityFromPath(contractWrapper.metadata, addressMetadataPath, { - expectedMtime, - isJson: true, - }); - } - - assertFilesStored(address, contractWrapper) { - console.log(`Started assertions for ${address}`); - const pathPrefix = this.getPathPrefix(address); - const addressMetadataPath = this.getAddressMetadataPath(address); - - const metadata = contractWrapper.metadata; - assertEqualityFromPath(metadata, addressMetadataPath, { isJson: true }); - - for (const sourceName in metadata.sources) { - const source = metadata.sources[sourceName]; - const sourcePath = path.join(pathPrefix, "sources", sourceName); - const savedSource = fs.readFileSync(sourcePath).toString(); - const savedSourceHash = keccak256str(savedSource); - const originalSourceHash = - source.keccak256 || keccak256str(source.content); - chai - .expect(savedSourceHash, "sourceHash comparison") - .to.equal(originalSourceHash); - } - } - - /** - * Used for writing (dummy) metadata independent of monitor's work. 
- * @param {string} address - * @param {*} metadata - * @returns ctime of written metadata - */ - writeMetadata(address, metadata) { - const addressMetadataPath = this.getAddressMetadataPath(address); - fs.mkdirSync(path.dirname(addressMetadataPath), { recursive: true }); - fs.writeFileSync(addressMetadataPath, metadata); - return fs.statSync(addressMetadataPath).ctime; - } -} - -function assertEqualityFromPath(obj1, obj2path, options = {}) { - const obj2raw = fs.readFileSync(obj2path).toString(); - const obj2 = options.isJson ? JSON.parse(obj2raw) : obj2raw; - chai.expect(obj1, `assertFromPath: ${obj2path}`).to.deep.equal(obj2); - if (options.expectedMtime) { - const actualMtime = fs.statSync(obj2path).mtime; - chai.expect(actualMtime).to.deep.equal(options.expectedMtime); - } -} - -describe("Monitor", function () { - this.timeout(60 * 1000); - let ganacheServer; - - const contractWrappers = { - simpleWithImport: new ContractWrapper( - require("./sources/pass/simpleWithImport.js"), - { metadata: true, sources: true } - ), - simpleLiteral: new ContractWrapper( - require("./sources/pass/simple.literal.js"), - { - metadata: true, - } - ), - withImmutables: new ContractWrapper( - require("./sources/pass/withImmutables.js"), - { metadata: true, sources: true }, - [2] - ), - withoutMetadataHash: new ContractWrapper( - require("./sources/pass/withoutMetadataHash.js"), - { metadata: true, sources: true } - ), - }; - - let ipfsNode; - let signer; - let account; - - before(async function () { - ipfsNode = await ipfs.create({ offline: true, silent: true }); - console.log("Initialized ipfs test node"); - - for (const contractName in contractWrappers) { - await contractWrappers[contractName].publish(ipfsNode); - } - }); - - beforeEach(async () => { - ganacheServer = ganache.server({ - wallet: { totalAccounts: 5 }, - chain: { chainId: 1337, networkId: 1337 }, - }); - await ganacheServer.listen(GANACHE_PORT); - console.log("Started ganache local server at port " + GANACHE_PORT); - const sourcifyChainGanache = LOCAL_CHAINS[0]; - const ethersNetwork = new Network( - sourcifyChainGanache.rpc[0], - sourcifyChainGanache.chainId - ); - signer = await new JsonRpcProvider( - `http://localhost:${GANACHE_PORT}`, - ethersNetwork, - { staticNetwork: ethersNetwork } - ).getSigner(); - - account = await signer.getAddress(); - console.log("Initialized provider with signer account " + account); - }); - - afterEach(async () => { - await ganacheServer.close(); - ganacheServer = null; - signer = null; - account = null; - }); - - const GENERATION_SECS = 10; // waiting for extra blocks to be generated - - const sourcifyContract = (contractWrapper, done) => { - const monitorWrapper = new MonitorWrapper(); - monitorWrapper.start().then(() => { - console.log("Started monitor for chainId: " + monitorWrapper.chainId); - let address; - - monitorWrapper.on("contract-verified-successfully", () => { - monitorWrapper.assertFilesStored(address, contractWrapper); - monitorWrapper.stop(); - done(); - }); - - contractWrapper.deploy(signer).then((addr) => (address = addr)); - }); - }; - - it("should sourcify the deployed contract", function (done) { - sourcifyContract(contractWrappers.simpleWithImport, done); - }); - - it("should sourcify if metadata provides only literal content", function (done) { - sourcifyContract(contractWrappers.simpleLiteral, done); - }); - - it("should sourcify a contract with immutables", function (done) { - sourcifyContract(contractWrappers.withImmutables, done); - }); - - it("should not resourcify if already 
sourcified", function (done) { - const contract = contractWrappers.simpleWithImport; - const monitorWrapper = new MonitorWrapper(); - const from = account; - const calculatedAddress = getCreateAddress({ - from, - nonce: 0, - }); - const metadataBirthtime = monitorWrapper.writeMetadata( - calculatedAddress, - contract.rawMetadata - ); - - monitorWrapper.start().then(() => { - let deployedAddress; - - monitorWrapper.on("contract-already-verified", () => { - monitorWrapper.assertFilesNotStored( - deployedAddress, - contract, - metadataBirthtime - ); - monitorWrapper.stop(); - done(); - }); - contract.deploy(signer).then((addr) => { - deployedAddress = addr; - chai.expect(calculatedAddress).to.deep.equal(deployedAddress); - }); - }); - }); - - it("should sourcify the deployed contract after being started with a delay", function (done) { - const contract = contractWrappers.simpleWithImport; - contract.deploy(signer).then((address) => { - signer.provider.getBlockNumber().then((currentBlockNumber) => { - waitSecs(GENERATION_SECS).then(() => { - const monitorWrapper = new MonitorWrapper(); - monitorWrapper.start(currentBlockNumber - 1).then(() => { - monitorWrapper.on("contract-verified-successfully", () => { - monitorWrapper.assertFilesStored(address, contract); - monitorWrapper.stop(); - done(); - }); - }); - }); - }); - }); - }); - - after(async function () { - await ipfsNode.stop(); - }); -}); diff --git a/tslint.json b/tslint.json deleted file mode 100644 index b57583ad..00000000 --- a/tslint.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "typestrict", - "rules": { - "no-debugger": true, - "mocha-avoid-only": true, - "prefer-const": true, - "no-var-keyword": true, - "no-commented-code": { - "severity": "warning" - }, - "no-use-before-declare": false - } -} diff --git a/ui/.env.development b/ui/.env.development deleted file mode 100644 index 280381a5..00000000 --- a/ui/.env.development +++ /dev/null @@ -1,4 +0,0 @@ -REACT_APP_SERVER_URL=http://localhost:5555 -REACT_APP_REPOSITORY_SERVER_URL=http://localhost:10000 -REACT_APP_IPNS=repo.staging.sourcify.dev -REACT_APP_TAG=latest \ No newline at end of file diff --git a/ui/.env.production b/ui/.env.production deleted file mode 100644 index a914a122..00000000 --- a/ui/.env.production +++ /dev/null @@ -1,4 +0,0 @@ -REACT_APP_SERVER_URL=${SERVER_URL} -REACT_APP_REPOSITORY_SERVER_URL=${REPOSITORY_SERVER_URL} -REACT_APP_IPNS=${IPNS} -REACT_APP_TAG=${TAG} \ No newline at end of file diff --git a/ui/.gitignore b/ui/.gitignore index 4d29575d..33071c6b 100644 --- a/ui/.gitignore +++ b/ui/.gitignore @@ -13,10 +13,6 @@ # misc .DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local npm-debug.log* yarn-debug.log* diff --git a/ui/.vscode/settings.json b/ui/.vscode/settings.json deleted file mode 100644 index 23fd35f0..00000000 --- a/ui/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "editor.formatOnSave": true -} \ No newline at end of file