Commit

Update to DipDup 7.2
droserasprout committed Nov 25, 2023
1 parent f7f377c commit 5ad675e
Showing 21 changed files with 212 additions and 169 deletions.
2 changes: 1 addition & 1 deletion api/tzprofiles/.dockerignore
@@ -4,7 +4,7 @@
# Add metadata and build files
!tzprofiles
!pyproject.toml
!pdm.lock
!*.lock
!README.md

# Add Python code
2 changes: 1 addition & 1 deletion api/tzprofiles/.gitignore
@@ -10,7 +10,7 @@
!**/Dockerfile
!**/Makefile
!**/pyproject.toml
!**/pdm.lock
!**/*.lock
!**/README.md
!**/.keep

46 changes: 46 additions & 0 deletions api/tzprofiles/Makefile
@@ -0,0 +1,46 @@
.ONESHELL:
.PHONY: $(MAKECMDGOALS)
MAKEFLAGS += --no-print-directory
##
## 🚧 DipDup developer tools
##
PACKAGE=tzprofiles
TAG=latest
COMPOSE=deploy/compose.yaml

help: ## Show this help (default)
@grep -Fh "##" $(MAKEFILE_LIST) | grep -Fv grep -F | sed -e 's/\\$$//' | sed -e 's/##//'

all: ## Run an entire CI pipeline
make format lint

format: ## Format with all tools
make black

lint: ## Lint with all tools
make ruff mypy

##

black: ## Format with black
black .

ruff: ## Lint with ruff
ruff check --fix .

mypy: ## Lint with mypy
mypy --no-incremental --exclude ${PACKAGE} .

##

image: ## Build Docker image
docker buildx build . -t ${PACKAGE}:${TAG}

up: ## Run Compose stack
docker-compose -f ${COMPOSE} up -d --build
docker-compose -f ${COMPOSE} logs -f

down: ## Stop Compose stack
docker-compose -f ${COMPOSE} down

##
35 changes: 24 additions & 11 deletions api/tzprofiles/README.md
@@ -6,35 +6,48 @@ DipDup indexer for Tezos Profiles

This project is based on [DipDup](https://dipdup.io), a framework for building featureful dapps.

You need a Linux/macOS system with Python 3.11 installed. Use our installer for easy setup:
You need a Linux/macOS system with Python 3.11 installed. To install DipDup with pipx for the current user:

```bash
```shell
curl -Lsf https://dipdup.io/install.py | python3
```

See the [Installation](https://dipdup.io/docs/installation) page for all options.

## Usage

Run the indexer in-memory:
Run the indexer in memory:

```bash
```shell
dipdup run
```

Store data in SQLite database:

```bash
dipdup -c . -c configs/dipdup.sqlite.yml run
```shell
dipdup -c . -c configs/dipdup.sqlite.yaml run
```

Or spawn a docker-compose stack:
Or spawn a Compose stack with PostgreSQL and Hasura:

```bash
cp deploy/.env.default deploy/.env
# Edit .env before running
docker-compose -f deploy/compose.yaml up
```shell
cd deploy
cp .env.default .env
# Edit .env file before running
docker-compose up
```

## Development setup

To set up the development environment:

```shell
pdm install
$(pdm venv activate)
```

Run `make all` to run the full CI check or `make help` to see other available commands.

## Usage

For now, only a GraphQL API is available at `/v1/graphql` (to be used with
4 changes: 1 addition & 3 deletions api/tzprofiles/configs/dipdup.compose.yaml
@@ -17,6 +17,4 @@ sentry:
environment: ${SENTRY_ENVIRONMENT:-""}

prometheus:
host: 0.0.0.0

logging: ${LOGLEVEL:-INFO}
host: 0.0.0.0
4 changes: 1 addition & 3 deletions api/tzprofiles/configs/dipdup.sqlite.yaml
@@ -1,5 +1,3 @@
database:
kind: sqlite
path: ${SQLITE_PATH:-/tmp/tzprofiles.sqlite}

logging: ${LOGLEVEL:-INFO}
path: ${SQLITE_PATH:-/tmp/tzprofiles.sqlite}
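Note: the `${VAR:-default}` placeholders in these configs use shell-style defaulting syntax, so the environment variable wins when it is set and non-empty, and the literal default is used otherwise. A rough Python illustration of that resolution rule (this is only a sketch, not DipDup's actual implementation):

```python
import os
import re

def expand_defaults(value: str) -> str:
    """Expand ${VAR:-default} placeholders, shell-style: use the env var
    when it is set and non-empty, otherwise fall back to the default."""
    pattern = re.compile(r'\$\{(\w+)(?::-([^}]*))?\}')
    return pattern.sub(lambda m: os.environ.get(m.group(1)) or (m.group(2) or ''), value)

# With SQLITE_PATH unset, the default written in the config is used.
print(expand_defaults('${SQLITE_PATH:-/tmp/tzprofiles.sqlite}'))  # /tmp/tzprofiles.sqlite
```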
4 changes: 1 addition & 3 deletions api/tzprofiles/configs/dipdup.swarm.yaml
@@ -17,6 +17,4 @@ sentry:
environment: ${SENTRY_ENVIRONMENT:-""}

prometheus:
host: 0.0.0.0

logging: ${LOGLEVEL:-INFO}
host: 0.0.0.0
5 changes: 4 additions & 1 deletion api/tzprofiles/configs/replay.yaml
@@ -1,4 +1,6 @@
# Run `dipdup new --replay configs/replay.yaml` to generate new project from this replay
# To refresh existing project run `dipdup init --base --force` after modifying this file.
# To generate a new project from this replay run `dipdup new --replay <path_to_file>`.
#
spec_version: 2.0
replay:
dipdup_version: 7
@@ -13,3 +15,4 @@ replay:
postgres_data_path: /var/lib/postgresql/data
hasura_image: hasura/graphql-engine:latest
line_length: 120
package_manager: pdm
1 change: 0 additions & 1 deletion api/tzprofiles/deploy/.env.default
@@ -3,7 +3,6 @@
#
HASURA_HOST=hasura
HASURA_SECRET=
LOGLEVEL=INFO
POSTGRES_DB=dipdup
POSTGRES_HOST=db
POSTGRES_PASSWORD=
1 change: 0 additions & 1 deletion api/tzprofiles/deploy/sqlite.env.default
@@ -1,5 +1,4 @@
# This env file was generated automatically by DipDup. Do not edit it!
# Create a copy with .env extension, fill it with your values and run DipDup with `--env-file` option.
#
LOGLEVEL=INFO
SQLITE_PATH=/tmp/tzprofiles.sqlite
1 change: 0 additions & 1 deletion api/tzprofiles/deploy/swarm.env.default
@@ -3,7 +3,6 @@
#
HASURA_HOST=tzprofiles_hasura
HASURA_SECRET=
LOGLEVEL=INFO
POSTGRES_DB=dipdup
POSTGRES_HOST=tzprofiles_db
POSTGRES_PASSWORD=
113 changes: 56 additions & 57 deletions api/tzprofiles/handlers/__init__.py
@@ -22,7 +22,7 @@
ClaimList = list[storage.Claim] | list[old_storage.Claim]


KEPLER_ENDPOINT = os.getenv("KEPLER_ENDPOINT", "https://kepler.tzprofiles.com/")
KEPLER_ENDPOINT = os.getenv('KEPLER_ENDPOINT', 'https://kepler.tzprofiles.com/')

LOGGER = None

@@ -33,17 +33,17 @@ def set_logger(logger):


class Credential(Enum):
BASIC_PROFILE = "BasicProfile"
TWITTER = "TwitterVerification"
DNS = "DnsVerification"
DISCORD = "DiscordVerification"
GITHUB = "GitHubVerification"
ETHEREUM = "EthereumAddressControl"
ETHEREUM_OLD = "EthereumControl"
BASIC_PROFILE = 'BasicProfile'
TWITTER = 'TwitterVerification'
DNS = 'DnsVerification'
DISCORD = 'DiscordVerification'
GITHUB = 'GitHubVerification'
ETHEREUM = 'EthereumAddressControl'
ETHEREUM_OLD = 'EthereumControl'


class FailedChecksum(ValueError):
def __init__(self, vc, expected_checksum, resulted_checksum, message="Failed checksum"):
def __init__(self, vc, expected_checksum, resulted_checksum, message='Failed checksum'):
self.vc = vc
self.expected_checksum = expected_checksum
self.resulted_checksum = resulted_checksum
@@ -66,7 +66,7 @@ def __init__(self, vc, message):


class UnknownCredential(ValueError):
def __init__(self, vc, message="Unknown credential type"):
def __init__(self, vc, message='Unknown credential type'):
self.vc = vc
self.message = message
super().__init__(self.message, vc)
@@ -85,15 +85,15 @@ def __init__(self, message="Credential doesn't exist in Kepler anymore"):
after=after_log(LOGGER, logging.WARNING), # type: ignore[arg-type]
)
async def retrieve_claim(kepler_link: str) -> str:
orbit_id, file_hash = tuple(kepler_link.replace("kepler://", "").replace('v0:', '').split("/"))
url = urljoin(KEPLER_ENDPOINT, orbit_id + "/" + file_hash)
orbit_id, file_hash = tuple(kepler_link.replace('kepler://', '').replace('v0:', '').split('/'))
url = urljoin(KEPLER_ENDPOINT, orbit_id + '/' + file_hash)
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
if response.status not in range(200, 430):
response.raise_for_status()
elif response.status == 404:
raise DeletedCredential()
return (await response.text(encoding="utf-8")).strip()
return (await response.text(encoding='utf-8')).strip()


@retry(
@@ -111,64 +111,63 @@ async def resolve_claim(kepler_link: str, checksum: str) -> dict[str, Any]:
if hash != checksum:
raise FailedChecksum(claim, checksum, hash)

verify_result = await verify_credential(claim, "{}")
errors = json.loads(verify_result)["errors"]
verify_result = await verify_credential(claim, '{}')
errors = json.loads(verify_result)['errors']
if len(errors) > 0:
raise FailedVerification(claim, str(errors))

claim_json = json.loads(claim)
return claim_json
return json.loads(claim)


def validate_vc(vc: dict[str, Any], address: str) -> None:
if Credential.BASIC_PROFILE.value in vc["type"]:
if vc["credentialSubject"]["id"] != "did:pkh:tz:" + address:
if Credential.BASIC_PROFILE.value in vc['type']:
if vc['credentialSubject']['id'] != 'did:pkh:tz:' + address:
raise Spoof(vc, "Credential subject is not the profile's owner")
if vc["issuer"] != "did:pkh:tz:" + address:
raise Spoof(vc, "Issuer should be profile's owner: " + vc["issuer"])
if vc['issuer'] != 'did:pkh:tz:' + address:
raise Spoof(vc, "Issuer should be profile's owner: " + vc['issuer'])
elif (
Credential.TWITTER.value in vc["type"]
or Credential.DNS.value in vc["type"]
or Credential.DISCORD.value in vc["type"]
or Credential.GITHUB.value in vc["type"]
Credential.TWITTER.value in vc['type']
or Credential.DNS.value in vc['type']
or Credential.DISCORD.value in vc['type']
or Credential.GITHUB.value in vc['type']
):
if vc["credentialSubject"]["id"] != "did:pkh:tz:" + address:
if vc['credentialSubject']['id'] != 'did:pkh:tz:' + address:
raise Spoof(vc, "Credential subject is not the profile's owner")
if vc["issuer"] != "did:web:tzprofiles.com":
raise Spoof(vc, "Untrusted issuer: " + vc["issuer"])
elif Credential.ETHEREUM.value in vc["type"]:
if vc["credentialSubject"]["sameAs"] != address:
if vc['issuer'] != 'did:web:tzprofiles.com':
raise Spoof(vc, 'Untrusted issuer: ' + vc['issuer'])
elif Credential.ETHEREUM.value in vc['type']:
if vc['credentialSubject']['sameAs'] != address:
raise Spoof(vc, "Credential subject sameAs is not the profile's owner")
if vc["issuer"] != "did:pkh:eth:" + vc["credentialSubject"]["address"]:
raise Spoof(vc, "Issuer should be the credential subject")
elif Credential.ETHEREUM_OLD.value in vc["type"]:
if vc["credentialSubject"]["sameAs"] != address:
if vc['issuer'] != 'did:pkh:eth:' + vc['credentialSubject']['address']:
raise Spoof(vc, 'Issuer should be the credential subject')
elif Credential.ETHEREUM_OLD.value in vc['type']:
if vc['credentialSubject']['sameAs'] != address:
raise Spoof(vc, "Credential subject sameAs is not the profile's owner")
if vc["issuer"] != "did:pkh:eth:" + vc["credentialSubject"]["wallet"]:
raise Spoof(vc, "Issuer should be the credential subject")
if vc['issuer'] != 'did:pkh:eth:' + vc['credentialSubject']['wallet']:
raise Spoof(vc, 'Issuer should be the credential subject')
else:
raise UnknownCredential(vc)


def parse_claim(vc: dict[str, Any], profile: TZProfile) -> None:
try:
if Credential.BASIC_PROFILE.value in vc["type"]:
profile.alias = vc["credentialSubject"].get("alias", None)
profile.description = vc["credentialSubject"].get("description", None)
profile.logo = vc["credentialSubject"].get("logo", None)
profile.website = vc["credentialSubject"].get("website", None)
elif Credential.TWITTER.value in vc["type"]:
profile.twitter = vc["evidence"]["handle"]
elif Credential.DNS.value in vc["type"]:
profile.domain_name = vc["credentialSubject"]["sameAs"].replace("dns:", "")
elif Credential.DISCORD.value in vc["type"]:
profile.discord = vc["evidence"]["handle"]
elif Credential.GITHUB.value in vc["type"]:
profile.github = vc["evidence"]["handle"]
elif Credential.ETHEREUM.value in vc["type"]:
profile.ethereum = vc["credentialSubject"]["address"]
elif Credential.ETHEREUM_OLD.value in vc["type"]:
profile.ethereum = vc["credentialSubject"]["wallet"]
if Credential.BASIC_PROFILE.value in vc['type']:
profile.alias = vc['credentialSubject'].get('alias', None)
profile.description = vc['credentialSubject'].get('description', None)
profile.logo = vc['credentialSubject'].get('logo', None)
profile.website = vc['credentialSubject'].get('website', None)
elif Credential.TWITTER.value in vc['type']:
profile.twitter = vc['evidence']['handle']
elif Credential.DNS.value in vc['type']:
profile.domain_name = vc['credentialSubject']['sameAs'].replace('dns:', '')
elif Credential.DISCORD.value in vc['type']:
profile.discord = vc['evidence']['handle']
elif Credential.GITHUB.value in vc['type']:
profile.github = vc['evidence']['handle']
elif Credential.ETHEREUM.value in vc['type']:
profile.ethereum = vc['credentialSubject']['address']
elif Credential.ETHEREUM_OLD.value in vc['type']:
profile.ethereum = vc['credentialSubject']['wallet']
except Exception as e:
logging.exception(e)

@@ -183,22 +182,22 @@ async def save_claims(profile: TZProfile, claims: ClaimList) -> None:
async def resolve_profile(profile: TZProfile) -> None:
for claim in profile.unprocessed_claims:
string_0, string_1, bytes_ = claim
if string_1 != "VerifiableCredential":
if string_1 != 'VerifiableCredential':
continue
try:
vc = await resolve_claim(string_0, bytes_)
assert profile.account is not None
validate_vc(vc, profile.account)
profile.valid_claims += [(string_0, json.dumps(vc), "VerifiableCredential")]
profile.valid_claims += [(string_0, json.dumps(vc), 'VerifiableCredential')]
parse_claim(vc, profile)
except DeletedCredential:
pass
except (FailedChecksum, FailedVerification, Spoof, UnknownCredential) as e:
logging.exception(e)
if isinstance(e.vc, str):
profile.invalid_claims += [(string_0, e.vc, "VerifiableCredential")]
profile.invalid_claims += [(string_0, e.vc, 'VerifiableCredential')]
else:
profile.invalid_claims += [(string_0, json.dumps(e.vc), "VerifiableCredential")]
profile.invalid_claims += [(string_0, json.dumps(e.vc), 'VerifiableCredential')]
except Exception as e:
logging.exception(e)
profile.failed = True # type: ignore
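As a quick reference for how the handlers above fit together: `validate_vc` enforces the issuer/subject rules for each credential type, and `parse_claim` copies the verified fields onto the profile. A minimal usage sketch, not part of this commit — the address and credential are made up, the import path is assumed, and a `SimpleNamespace` stands in for the real `TZProfile` model:

```python
# Hypothetical usage sketch for validate_vc / parse_claim; not part of this commit.
from types import SimpleNamespace

from handlers import parse_claim, validate_vc  # assumed import path

owner = 'tz1ExampleOwnerAddress'  # placeholder, not a real account
basic_profile_vc = {
    'type': ['VerifiableCredential', 'BasicProfile'],
    # For BasicProfile, both issuer and credentialSubject.id must be the owner's DID.
    'issuer': f'did:pkh:tz:{owner}',
    'credentialSubject': {
        'id': f'did:pkh:tz:{owner}',
        'alias': 'Alice',
        'description': 'Example profile',
    },
}

validate_vc(basic_profile_vc, owner)  # raises Spoof/UnknownCredential on mismatch

profile = SimpleNamespace(alias=None, description=None, logo=None, website=None)
parse_claim(basic_profile_vc, profile)  # type: ignore[arg-type]
print(profile.alias, profile.description)  # Alice Example profile
```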
2 changes: 1 addition & 1 deletion api/tzprofiles/handlers/on_origination.py
@@ -15,6 +15,6 @@ async def on_origination(

profile, _ = await models.TZProfile.get_or_create(
account=tzprofile_origination.storage.owner,
defaults={"contract": contract},
defaults={'contract': contract},
)
await save_claims(profile, tzprofile_origination.storage.claims)
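`on_origination` uses `get_or_create` so that repeated originations for the same owner update a single row rather than creating duplicates. The fields written by `parse_claim` and `resolve_profile` suggest roughly the following model shape; this is a hedged sketch only — the project's actual `models` module is not shown in this commit, and the DipDup/ORM field choices below are assumptions:

```python
# Rough sketch of the TZProfile model implied by the handlers; assumptions only.
from dipdup import fields          # DipDup 7 convention: import ORM fields from dipdup
from dipdup.models import Model


class TZProfile(Model):
    account = fields.CharField(max_length=36, pk=True)  # tz address, assumed primary key
    contract = fields.CharField(max_length=36)          # contract alias or address; type assumed

    # Fields populated by parse_claim()
    alias = fields.TextField(null=True)
    description = fields.TextField(null=True)
    logo = fields.TextField(null=True)
    website = fields.TextField(null=True)
    twitter = fields.TextField(null=True)
    domain_name = fields.TextField(null=True)
    discord = fields.TextField(null=True)
    github = fields.TextField(null=True)
    ethereum = fields.TextField(null=True)

    # Claim bookkeeping used by resolve_profile(); exact field types are a guess
    unprocessed_claims = fields.JSONField(default=list)
    valid_claims = fields.JSONField(default=list)
    invalid_claims = fields.JSONField(default=list)
    failed = fields.BooleanField(default=False)
```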
(Diffs for the remaining changed files are not shown.)