Commit 9143180

Merge pull request #1283 from skellet0r/fix/verification
fix: etherscan verification w/ new flattener
skellet0r authored Oct 12, 2021
2 parents cabaa2a + dcd10f4 commit 9143180
Showing 8 changed files with 311 additions and 481 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased](https://github.com/eth-brownie/brownie)
### Added
- Arguments from the command line can now be passed to brownie scripts. ([#398](https://github.com/eth-brownie/brownie/issues/398))
- Fix etherscan verification w/ new solidity flattener ([#1283](https://github.com/eth-brownie/brownie/pull/1283))

## [1.16.4](https://github.com/eth-brownie/brownie/tree/v1.16.4) - 2021-09-21
### Added
142 changes: 31 additions & 111 deletions brownie/network/contract.py
@@ -1,11 +1,11 @@
#!/usr/bin/python3

import io
import json
import os
import re
import time
import warnings
from collections import defaultdict
from pathlib import Path
from textwrap import TextWrapper
from threading import get_ident # noqa
@@ -14,7 +14,6 @@

import eth_abi
import requests
import solcast
import solcx
from eth_utils import remove_0x_prefix
from hexbytes import HexBytes
@@ -40,9 +39,9 @@
)
from brownie.project import compiler, ethpm
from brownie.project.compiler.solidity import SOLIDITY_ERROR_CODES
from brownie.project.flattener import Flattener
from brownie.typing import AccountsType, TransactionReceiptType
from brownie.utils import color
from brownie.utils.toposort import toposort_flatten

from . import accounts, chain
from .event import _add_deployment_topics, _get_topics
@@ -154,6 +153,10 @@ def __init__(self, project: Any, build: Dict) -> None:
self.deploy = ContractConstructor(self, self._name)
_revert_register(self)

# creating this on init interferes with the tests,
# so instead it is created lazily when first requested, but is still defined here
self._flattener: Flattener = None # type: ignore

def __iter__(self) -> Iterator:
return iter(self._contracts)

@@ -258,118 +261,35 @@ def get_verification_info(self) -> Dict:
"for vyper contracts. You need to verify the source manually"
)
elif language == "Solidity":
# Scan the AST tree for needed information
nodes_source = [
{"node": solcast.from_ast(self._build["ast"]), "src": self._build["source"]}
]
for name in self._build["dependencies"]:
build_json = self._project._build.get(name)
if "ast" in build_json:
nodes_source.append(
{"node": solcast.from_ast(build_json["ast"]), "src": build_json["source"]}
if self._flattener is None:
source_fp = (
Path(self._project._path)
.joinpath(self._build["sourcePath"])
.resolve()
.as_posix()
)
config = self._project._compiler_config
remaps = dict(
map(
lambda s: s.split("=", 1),
compiler._get_solc_remappings(config["solc"]["remappings"]),
)
)
compiler_settings = {
"evmVersion": self._build["compiler"]["evm_version"],
"optimizer": config["solc"]["optimizer"],
}
self._flattener = Flattener(source_fp, self._name, remaps, compiler_settings)

pragma_statements = set()
global_structs = set()
global_enums = set()
import_aliases: Dict = defaultdict(list)
for n, src in [ns.values() for ns in nodes_source]:
for pragma in n.children(filters={"nodeType": "PragmaDirective"}):
pragma_statements.add(src[slice(*pragma.offset)])

for enum in n.children(filters={"nodeType": "EnumDefinition"}):
if enum.parent() == n:
# parent == source node -> global enum
global_enums.add(src[slice(*enum.offset)])

for struct in n.children(filters={"nodeType": "StructDefinition"}):
if struct.parent() == n:
# parent == source node -> global struct
global_structs.add(src[(slice(*struct.offset))])

for imp in n.children(filters={"nodeType": "ImportDirective"}):
if isinstance(imp.get("symbolAliases"), list):
for symbol_alias in imp.get("symbolAliases"):
if symbol_alias["local"] is not None:
import_aliases[imp.get("absolutePath")].append(
symbol_alias["local"],
)

abiencoder_str = ""
for pragma in ("pragma experimental ABIEncoderV2;", "pragma abicoder v2;"):
if pragma in pragma_statements:
abiencoder_str = f"{abiencoder_str}\n{pragma}"

# build dependency tree
dependency_tree: Dict = defaultdict(set)
dependency_tree["__root_node__"] = set(self._build["dependencies"])
for name in self._build["dependencies"]:
build_json = self._project._build.get(name)
if "dependencies" in build_json:
dependency_tree[name].update(build_json["dependencies"])

# sort dependencies, process them and insert them into the flattened file
flattened_source = ""
for name in toposort_flatten(dependency_tree):
if name == "__root_node__":
continue
build_json = self._project._build.get(name)
offset = build_json["offset"]
contract_name = build_json["contractName"]
source = self._slice_source(build_json["source"], offset)
# Check for import aliases and duplicate the contract with different name
if "sourcePath" in build_json:
for alias in import_aliases[build_json["sourcePath"]]:
# slice to contract definition and replace contract name
a_source = build_json["source"][offset[0] :]
a_source = re.sub(
rf"^(abstract)?(\s*)({build_json['type']})(\s+)({contract_name})",
rf"\1\2\3\4{alias}",
a_source,
)
# restore source, adjust offsets and slice source
a_source = f"{build_json['source'][:offset[0]]}{a_source}"
a_offset = [offset[0], offset[1] + (len(alias) - len(contract_name))]
a_source = self._slice_source(a_source, a_offset)
# add alias source to flattened file
a_name = f"{name} (Alias import as {alias})"
flattened_source = f"{flattened_source}\n\n// Part: {a_name}\n\n{a_source}"

flattened_source = f"{flattened_source}\n\n// Part: {name}\n\n{source}"

# Top level contract, defines compiler and license
build_json = self._build
version = build_json["compiler"]["version"]
version_short = re.findall(r"^[^+]+", version)[0]
offset = build_json["offset"]
source = self._slice_source(build_json["source"], offset)
file_name = Path(build_json["sourcePath"]).parts[-1]
licenses = re.findall(
r"SPDX-License-Identifier:(.*)\n", build_json["source"][: offset[0]]
)
license_identifier = licenses[0].strip() if len(licenses) >= 1 else "NONE"

# combine to final flattened source
lb = "\n"
is_global = len(global_enums) + len(global_structs) > 0
global_str = "// Global Enums and Structs\n\n" if is_global else ""
enum_structs = f"{lb.join(global_enums)}\n\n{lb.join(global_structs)}"
flattened_source = (
f"// SPDX-License-Identifier: {license_identifier}\n\n"
f"pragma solidity {version_short};"
f"{abiencoder_str}\n\n{global_str}"
f"{enum_structs if is_global else ''}"
f"{flattened_source}\n\n"
f"// File: {file_name}\n\n{source}\n"
)

return {
"flattened_source": flattened_source,
"standard_json_input": self._flattener.standard_input_json,
"contract_name": build_json["contractName"],
"compiler_version": version,
"compiler_version": build_json["compiler"]["version"],
"optimizer_enabled": build_json["compiler"]["optimizer"]["enabled"],
"optimizer_runs": build_json["compiler"]["optimizer"]["runs"],
"license_identifier": license_identifier,
"license_identifier": self._flattener.license,
"bytecode_len": len(build_json["bytecode"]),
}
else:
@@ -407,7 +327,7 @@ def publish_source(self, contract: Any, silent: bool = False) -> bool:

address = _resolve_address(contract.address)

# Get flattened source code and contract/compiler information
# Get source code and contract/compiler information
contract_info = self.get_verification_info()

# Select matching license code (https://etherscan.io/contract-license-types)
@@ -483,9 +403,9 @@ def publish_source(self, contract: Any, silent: bool = False) -> bool:
"module": "contract",
"action": "verifysourcecode",
"contractaddress": address,
"sourceCode": contract_info["flattened_source"],
"codeformat": "solidity-single-file",
"contractname": contract_info["contract_name"],
"sourceCode": io.StringIO(json.dumps(self._flattener.standard_input_json)),
"codeformat": "solidity-standard-json-input",
"contractname": f"{self._flattener.contract_file}:{self._flattener.contract_name}",
"compilerversion": f"v{contract_info['compiler_version']}",
"optimizationUsed": 1 if contract_info["optimizer_enabled"] else 0,
"runs": contract_info["optimizer_runs"],
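
For context, publish_source now submits the compiler's standard JSON input rather than a flattened single file. A minimal sketch of that request shape follows; the endpoint URL, API key, and helper name are illustrative assumptions, not part of this commit:

import json

import requests


def verify_standard_json(address, contract_file, contract_name, input_json, compiler_version):
    # placeholder endpoint and API key; the real implementation resolves these elsewhere
    params = {
        "apikey": "YOUR_ETHERSCAN_API_KEY",
        "module": "contract",
        "action": "verifysourcecode",
        "contractaddress": address,
        "sourceCode": json.dumps(input_json),
        "codeformat": "solidity-standard-json-input",
        "contractname": f"{contract_file}:{contract_name}",
        "compilerversion": f"v{compiler_version}",
    }
    response = requests.post("https://api.etherscan.io/api", data=params)
    response.raise_for_status()
    # on success Etherscan returns a GUID that can later be polled for verification status
    return response.json()["result"]
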
139 changes: 139 additions & 0 deletions brownie/project/flattener.py
@@ -0,0 +1,139 @@
import re
from collections import defaultdict
from pathlib import Path
from typing import DefaultDict, Dict, Set

from brownie.utils.toposort import toposort_flatten

# Pattern matching the Solidity `import-directive`, capturing the path component
# https://docs.soliditylang.org/en/latest/grammar.html#a4.SolidityParser.importDirective
IMPORT_PATTERN = re.compile(r"(?<=\n)?import(?P<prefix>.*)\"(?P<path>.*)\"(?P<suffix>.*)(?=\n)")
PRAGMA_PATTERN = re.compile(r"^pragma.*;$", re.MULTILINE)
LICENSE_PATTERN = re.compile(r"^// SPDX-License-Identifier: (.*)$", re.MULTILINE)


class Flattener:
"""Brownie's Robust Solidity Flattener."""

def __init__(
self, primary_source_fp: str, contract_name: str, remappings: dict, compiler_settings: dict
) -> None:
self.sources: Dict[str, str] = {}
self.dependencies: DefaultDict[str, Set[str]] = defaultdict(set)
self.compiler_settings = compiler_settings
self.contract_name = contract_name
self.contract_file = Path(primary_source_fp).name
self.remappings = remappings

self.traverse(primary_source_fp)

license_search = LICENSE_PATTERN.search(self.sources[Path(primary_source_fp).name])
self.license = license_search.group(1) if license_search else "NONE"

def traverse(self, fp: str) -> None:
"""Traverse a contract source files dependencies.
Files are read in, import statement path components are substituted for their absolute
path, and the modified source is saved along with it's dependencies.
Args:
fp: The contract source file to traverse, if it's already been traversed, return early.
"""
# if already traversed file, return early
fp_obj = Path(fp)
if fp_obj.name in self.sources:
return

# read in the source file
source = fp_obj.read_text()

# path sanitization lambda fn
sanitize = lambda path: self.make_import_absolute( # noqa: E731
self.remap_import(path), fp_obj.parent.as_posix()
)
# replacement function for re.sub, we just sanitize the path
repl = ( # noqa: E731
lambda m: f'import{m.group("prefix")}'
+ f'"{Path(sanitize(m.group("path"))).name}"'
+ f'{m.group("suffix")}'
)

self.sources[fp_obj.name] = IMPORT_PATTERN.sub(repl, source)
if fp_obj.name not in self.dependencies:
self.dependencies[fp_obj.name] = set()

# traverse dependency files - can circular imports happen?
for m in IMPORT_PATTERN.finditer(source):
import_path = sanitize(m.group("path"))
self.dependencies[fp_obj.name].add(Path(import_path).name)
self.traverse(import_path)

@property
def flattened_source(self) -> str:
"""The flattened source code for use verifying."""
# all source files in the correct order for concatenation
sources = [self.sources[k] for k in toposort_flatten(self.dependencies)]
# all pragma statements; the license is already captured and the compiler
# version is known from the build info
pragmas = set((match.strip() for src in sources for match in PRAGMA_PATTERN.findall(src)))
# now go through and remove all imports, pragmas, and license identifiers
wipe = lambda src: PRAGMA_PATTERN.sub( # noqa: E731
"", LICENSE_PATTERN.sub("", IMPORT_PATTERN.sub("", src))
)

sources = [
f"// File: {file}\n\n{wipe(src)}"
for src, file in zip(sources, toposort_flatten(self.dependencies))
]

flat = (
"\n".join([pragma for pragma in pragmas if "pragma solidity" not in pragma])
+ "\n\n"
+ "\n".join(sources)
)
# collapse the extraneous blank lines left behind by the removals above
return re.sub(r"\n{3,}", "\n\n", flat)

@property
def standard_input_json(self) -> Dict:
"""Useful for etherscan verification via solidity-standard-json-input mode.
Sadly programmatic upload of this isn't available at the moment (2021-10-11)
"""
return {
"language": "Solidity",
"sources": {k: {"content": v} for k, v in self.sources.items()},
"settings": self.compiler_settings,
}

def remap_import(self, import_path: str) -> str:
"""Remap imports in a solidity source file.
Args:
import_path: The path component of an import directive from a solidity source file.
Returns:
str: The import path string correctly remapped.
"""
for k, v in self.remappings.items():
if import_path.startswith(k):
return import_path.replace(k, v, 1)
return import_path

@staticmethod
def make_import_absolute(import_path: str, source_file_dir: str) -> str:
"""Make an import path absolute, if it is not already.
Args:
import_path: The path component of an import directive (should already be remapped).
source_file_dir: The parent directory of the source file where the import path appears.
Returns:
str: The import path string in absolute form.
"""
path: Path = Path(import_path)
if path.is_absolute():
return path.as_posix()

return (Path(source_file_dir) / path).resolve().as_posix()
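
Putting the new class together, a hypothetical usage sketch follows; the contract path, remappings, and compiler settings are made-up examples rather than values from this commit:

from brownie.project.flattener import Flattener

# illustrative inputs; get_verification_info() derives the real ones from the
# project's build artifacts and compiler config, as shown in contract.py above
flattener = Flattener(
    primary_source_fp="contracts/Token.sol",  # hypothetical contract path
    contract_name="Token",
    remappings={"@openzeppelin/": "lib/openzeppelin-contracts/"},
    compiler_settings={"evmVersion": "istanbul", "optimizer": {"enabled": True, "runs": 200}},
)

print(flattener.license)           # SPDX identifier from the primary source, or "NONE"
print(flattener.flattened_source)  # dependency sources concatenated in topological order
payload = flattener.standard_input_json  # {"language": "Solidity", "sources": {...}, "settings": {...}}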