Commit
fix: formatting
IsabellaSmallcombe committed Aug 23, 2023
1 parent 8dfd7d2 commit af3480a
Showing 3 changed files with 50 additions and 18 deletions.
2 changes: 1 addition & 1 deletion offchain/metadata/adapters/arweave.py
@@ -39,7 +39,7 @@ def __init__(
self.secret = secret
self.timeout = timeout
super().__init__(*args, **kwargs)

def parse_ar_url(self, url: str) -> str:
parsed = parse_url(url)
if parsed.scheme == "ar":
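
Only the head of parse_ar_url is visible in this hunk. For context, here is a minimal sketch of the ar:// rewrite the method appears to perform; the gateway host and the urllib3 import are assumptions, not taken from this diff.

# Hypothetical sketch of an ar:// -> HTTP gateway rewrite; the real method
# body is collapsed in this diff. ARWEAVE_GATEWAY is an assumed default.
from urllib3.util import parse_url

ARWEAVE_GATEWAY = "https://arweave.net"

def parse_ar_url(url: str) -> str:
    """Rewrite ar://<id> URIs to a gateway URL; pass other URLs through."""
    parsed = parse_url(url)
    if parsed.scheme == "ar":
        return f"{ARWEAVE_GATEWAY}/{parsed.host or ''}{parsed.path or ''}"
    return url
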
63 changes: 46 additions & 17 deletions offchain/metadata/pipelines/metadata_pipeline.py
@@ -47,7 +47,9 @@
]

DEFAULT_PARSERS = (
-    ParserRegistry.get_all_collection_parsers() + ParserRegistry.get_all_schema_parsers() + [DefaultCatchallParser]
+    ParserRegistry.get_all_collection_parsers()
+    + ParserRegistry.get_all_schema_parsers()
+    + [DefaultCatchallParser]
)


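The wrapped expression also makes the ordering contract easier to see: the pipeline tries parsers in list order and, when no selector function is supplied, returns the first successful parse (see fetch_token_metadata below), so the catch-all parser must stay last. Schematically, with the comments as editorial annotations:

DEFAULT_PARSERS = (
    ParserRegistry.get_all_collection_parsers()  # most specific: per-collection
    + ParserRegistry.get_all_schema_parsers()    # standard metadata schemas
    + [DefaultCatchallParser]                    # fallback, must remain last
)
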
@@ -78,12 +80,15 @@ def __init__(
adapter_configs = DEFAULT_ADAPTER_CONFIGS
for adapter_config in adapter_configs:
self.mount_adapter(
-                adapter=adapter_config.adapter_cls(host_prefixes=adapter_config.host_prefixes, **adapter_config.kwargs),
+                adapter=adapter_config.adapter_cls(
+                    host_prefixes=adapter_config.host_prefixes, **adapter_config.kwargs
+                ),
url_prefixes=adapter_config.mount_prefixes,
)
if parsers is None:
parsers = [
-                parser_cls(fetcher=self.fetcher, contract_caller=self.contract_caller) for parser_cls in DEFAULT_PARSERS
+                parser_cls(fetcher=self.fetcher, contract_caller=self.contract_caller)
+                for parser_cls in DEFAULT_PARSERS
]
self.parsers = parsers

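For orientation, a minimal construction sketch follows. The import path mirrors the file path in this diff; everything else is the default behavior described by the __init__ hunk above (mount DEFAULT_ADAPTER_CONFIGS, build DEFAULT_PARSERS when none are passed).

# Minimal sketch: constructing the pipeline with all defaults.
from offchain.metadata.pipelines.metadata_pipeline import MetadataPipeline

pipeline = MetadataPipeline()  # mounts default adapters, builds default parsers
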
@@ -103,7 +108,9 @@ def mount_adapter(
for prefix in url_prefixes:
self.fetcher.register_adapter(adapter, prefix)

-    def fetch_token_uri(self, token: Token, function_signature: str = "tokenURI(uint256)") -> Optional[str]:
+    def fetch_token_uri(
+        self, token: Token, function_signature: str = "tokenURI(uint256)"
+    ) -> Optional[str]:
"""Given a token, fetch the token uri from the contract using a specified function signature.
Args:
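
The docstring is truncated here by a collapsed region. A hedged example of calling this method, where the Token import path and constructor fields are assumptions about the model rather than something shown in this diff:

# Hypothetical call; Token fields are assumed, and the function_signature
# default ("tokenURI(uint256)") comes from the signature above.
from offchain.metadata.models.token import Token  # assumed import path

token = Token(
    collection_address="0x0000000000000000000000000000000000000000",  # placeholder
    token_id=1,
)
uri = pipeline.fetch_token_uri(token)  # pipeline from the sketch above
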
@@ -149,7 +156,9 @@ def fetch_token_metadata(
error_message = f"({token.chain_identifier}-{token.collection_address}-{token.token_id}) Failed to fetch token uri. {str(e)}" # noqa: E501
logger.error(error_message)
possible_metadatas_or_errors.append(
-                MetadataProcessingError.from_token_and_error(token=token, e=Exception(error_message))
+                MetadataProcessingError.from_token_and_error(
+                    token=token, e=Exception(error_message)
+                )
)

raw_data = None
@@ -162,20 +171,26 @@
error_message = f"({token.chain_identifier}-{token.collection_address}-{token.token_id}) Failed to parse token uri: {token.uri}. {str(e)}" # noqa: E501
logger.error(error_message)
possible_metadatas_or_errors.append(
-                MetadataProcessingError.from_token_and_error(token=token, e=Exception(error_message))
+                MetadataProcessingError.from_token_and_error(
+                    token=token, e=Exception(error_message)
+                )
)

for parser in self.parsers:
if not parser.should_parse_token(token=token, raw_data=raw_data):
continue
try:
-                metadata_or_error = parser.parse_metadata(token=token, raw_data=raw_data)
+                metadata_or_error = parser.parse_metadata(
+                    token=token, raw_data=raw_data
+                )
if isinstance(metadata_or_error, Metadata):
metadata_or_error.standard = parser._METADATA_STANDARD
if metadata_selector_fn is None:
return metadata_or_error
except Exception as e:
-                metadata_or_error = MetadataProcessingError.from_token_and_error(token=token, e=e)
+                metadata_or_error = MetadataProcessingError.from_token_and_error(
+                    token=token, e=e
+                )
possible_metadatas_or_errors.append(metadata_or_error)
if len(possible_metadatas_or_errors) == 0:
possible_metadatas_or_errors.append(
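
(The statement above is truncated by a collapsed region.) As the loop shows, every parsed Metadata or MetadataProcessingError is accumulated; with no selector function the first successful parse is returned immediately, otherwise the full candidate list goes to the selector. A hedged sketch of such a selector, with the Metadata import path as an assumption:

# Hypothetical selector: receives the full candidate list, prefers the
# first successfully parsed Metadata, and falls back to the first error.
from offchain.metadata.models.metadata import Metadata  # assumed import path

def prefer_first_metadata(candidates):
    for candidate in candidates:
        if isinstance(candidate, Metadata):
            return candidate
    return candidates[0]

# Passed positionally, mirroring how run() forwards its selector below.
result = pipeline.fetch_token_metadata(token, prefer_first_metadata)
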
@@ -218,7 +233,9 @@ async def async_fetch_token_metadata(
error_message = f"({token.chain_identifier}-{token.collection_address}-{token.token_id}) Failed to fetch token uri. {str(e)}" # noqa: E501
logger.error(error_message)
possible_metadatas_or_errors.append(
-                MetadataProcessingError.from_token_and_error(token=token, e=Exception(error_message))
+                MetadataProcessingError.from_token_and_error(
+                    token=token, e=Exception(error_message)
+                )
)

raw_data = None
@@ -231,20 +248,26 @@
error_message = f"({token.chain_identifier}-{token.collection_address}-{token.token_id}) Failed to parse token uri: {token.uri}. {str(e)}" # noqa: E501
logger.error(error_message)
possible_metadatas_or_errors.append(
-                MetadataProcessingError.from_token_and_error(token=token, e=Exception(error_message))
+                MetadataProcessingError.from_token_and_error(
+                    token=token, e=Exception(error_message)
+                )
)

for parser in self.parsers:
if not parser.should_parse_token(token=token, raw_data=raw_data):
continue
try:
-                metadata_or_error = parser.parse_metadata(token=token, raw_data=raw_data)
+                metadata_or_error = parser.parse_metadata(
+                    token=token, raw_data=raw_data
+                )
if isinstance(metadata_or_error, Metadata):
metadata_or_error.standard = parser._METADATA_STANDARD
if metadata_selector_fn is None:
return metadata_or_error
except Exception as e:
-                metadata_or_error = MetadataProcessingError.from_token_and_error(token=token, e=e)
+                metadata_or_error = MetadataProcessingError.from_token_and_error(
+                    token=token, e=e
+                )
possible_metadatas_or_errors.append(metadata_or_error)
if len(possible_metadatas_or_errors) == 0:
possible_metadatas_or_errors.append(
@@ -260,7 +283,6 @@ async def async_fetch_token_metadata(
return metadata_selector_fn(possible_metadatas_or_errors)
return possible_metadatas_or_errors[0]

-
def run(
self,
tokens: list[Token],
@@ -286,12 +308,16 @@ def run(
return []

if parallelize:
-            metadatas_or_errors = batched_parmap(lambda t: self.fetch_token_metadata(t, select_metadata_fn), tokens, 15)
+            metadatas_or_errors = batched_parmap(
+                lambda t: self.fetch_token_metadata(t, select_metadata_fn), tokens, 15
+            )
else:
-            metadatas_or_errors = list(map(lambda t: self.fetch_token_metadata(t, select_metadata_fn), tokens))
+            metadatas_or_errors = list(
+                map(lambda t: self.fetch_token_metadata(t, select_metadata_fn), tokens)
+            )

return metadatas_or_errors

async def async_run(
self,
tokens: list[Token],
@@ -312,7 +338,10 @@ async def async_run(
"""
if len(tokens) == 0:
return []
-        tasks = [self.async_fetch_token_metadata(token, select_metadata_fn) for token in tokens]
+        tasks = [
+            self.async_fetch_token_metadata(token, select_metadata_fn)
+            for token in tokens
+        ]

metadatas_or_errors = await asyncio.gather(*tasks)
return metadatas_or_errors
3 changes: 3 additions & 0 deletions tests/metadata/fetchers/test_metadata_fetcher.py
@@ -20,6 +20,7 @@ async def test_gen_fetch_data_adapter():
)
assert content is not None

+
@pytest.mark.asyncio
async def test_gen_fetch_ipfs_adapter():
fetcher = MetadataFetcher()
@@ -28,6 +29,7 @@ async def test_gen_fetch_ipfs_adapter():
)
assert content is not None

+
@pytest.mark.asyncio
async def test_gen_fetch_arweave_adapter():
fetcher = MetadataFetcher()
@@ -36,6 +38,7 @@ async def test_gen_fetch_arweave_adapter():
)
assert content is not None

+
@pytest.mark.asyncio
async def test_gen_fetch_base_adapter():
fetcher = MetadataFetcher()
