//! Support for compiling [foundry_compilers::Project]
use crate::{compact_to_contract, glob::GlobMatcher, term::SpinnerReporter, TestFunctionExt};
use comfy_table::{presets::ASCII_MARKDOWN, Attribute, Cell, Color, Table};
use eyre::{Context, Result};
use foundry_block_explorers::contract::Metadata;
use foundry_compilers::{
artifacts::{BytecodeObject, CompactContractBytecode, ContractBytecodeSome},
remappings::Remapping,
report::{BasicStdoutReporter, NoReporter, Report},
Artifact, ArtifactId, FileFilter, Graph, Project, ProjectCompileOutput, ProjectPathsConfig,
Solc, SolcConfig,
};
use rustc_hash::FxHashMap;
use std::{
collections::{BTreeMap, HashMap},
convert::Infallible,
fmt::Display,
io::IsTerminal,
path::{Path, PathBuf},
result,
str::FromStr,
time::Instant,
};
/// Builder type to configure how to compile a project.
///
/// This is merely a wrapper for [`Project::compile()`] which also prints to stdout depending on its
/// settings.
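///
/// # Example
///
/// A minimal sketch of the builder, assuming a `foundry_config::Config` can be loaded for the
/// current project:
///
/// ```ignore
/// use foundry_common::compile::ProjectCompiler;
/// let config = foundry_config::Config::load();
/// let project = config.project().unwrap();
/// let output = ProjectCompiler::new()
///     .print_names(true)
///     .print_sizes(true)
///     .compile(&project)
///     .unwrap();
/// ```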
#[must_use = "ProjectCompiler does nothing unless you call a `compile*` method"]
pub struct ProjectCompiler {
/// Whether we are going to verify the contracts after compilation.
verify: Option<bool>,
/// Whether to also print contract names.
print_names: Option<bool>,
/// Whether to also print contract sizes.
print_sizes: Option<bool>,
/// Whether to print anything at all. Overrides other `print` options.
quiet: Option<bool>,
/// Whether to bail on compiler errors.
bail: Option<bool>,
/// Files to exclude.
filter: Option<Box<dyn FileFilter>>,
/// Extra files to include that are not necessarily in the project's source dir.
files: Vec<PathBuf>,
}
impl Default for ProjectCompiler {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl ProjectCompiler {
/// Create a new builder with the default settings.
#[inline]
pub fn new() -> Self {
Self {
verify: None,
print_names: None,
print_sizes: None,
quiet: Some(crate::shell::verbosity().is_silent()),
bail: None,
filter: None,
files: Vec::new(),
}
}
/// Sets whether we are going to verify the contracts after compilation.
#[inline]
pub fn verify(mut self, yes: bool) -> Self {
self.verify = Some(yes);
self
}
/// Sets whether to print contract names.
#[inline]
pub fn print_names(mut self, yes: bool) -> Self {
self.print_names = Some(yes);
self
}
/// Sets whether to print contract sizes.
#[inline]
pub fn print_sizes(mut self, yes: bool) -> Self {
self.print_sizes = Some(yes);
self
}
/// Sets whether to print anything at all. Overrides other `print` options.
#[inline]
#[doc(alias = "silent")]
pub fn quiet(mut self, yes: bool) -> Self {
self.quiet = Some(yes);
self
}
/// Does not print anything at all if `maybe` is `true`. Overrides other `print` options.
#[inline]
pub fn quiet_if(mut self, maybe: bool) -> Self {
if maybe {
self.quiet = Some(true);
}
self
}
/// Sets whether to bail on compiler errors.
#[inline]
pub fn bail(mut self, yes: bool) -> Self {
self.bail = Some(yes);
self
}
/// Sets the filter to use.
#[inline]
pub fn filter(mut self, filter: Box<dyn FileFilter>) -> Self {
self.filter = Some(filter);
self
}
/// Sets extra files to include that are not necessarily in the project's source dir.
#[inline]
pub fn files(mut self, files: impl IntoIterator<Item = PathBuf>) -> Self {
self.files.extend(files);
self
}
/// Compiles the project.
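///
/// # Example
///
/// A minimal sketch, assuming the project was loaded from a `foundry_config::Config`; the extra
/// file path is made up for illustration:
///
/// ```ignore
/// use foundry_common::compile::ProjectCompiler;
/// let config = foundry_config::Config::load();
/// let project = config.project().unwrap();
/// let output = ProjectCompiler::new()
///     .files(["path/to/Extra.sol".into()])
///     .compile(&project)
///     .unwrap();
/// ```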
pub fn compile(mut self, project: &Project) -> Result<ProjectCompileOutput> {
// TODO: Avoid process::exit
if !project.paths.has_input_files() && self.files.is_empty() {
println!("Nothing to compile");
// nothing to do here
std::process::exit(0);
}
// Taking is fine since we don't need these in `compile_with`.
let filter = std::mem::take(&mut self.filter);
let files = std::mem::take(&mut self.files);
self.compile_with(|| {
if !files.is_empty() {
project.compile_files(files)
} else if let Some(filter) = filter {
project.compile_sparse(filter)
} else {
project.compile()
}
.map_err(Into::into)
})
}
/// Compiles the project with the given closure
///
/// # Example
///
/// ```ignore
/// use foundry_common::compile::ProjectCompiler;
/// let config = foundry_config::Config::load();
/// let prj = config.project().unwrap();
/// ProjectCompiler::new().compile_with(|| Ok(prj.compile()?)).unwrap();
/// ```
#[instrument(target = "forge::compile", skip_all)]
fn compile_with<F>(self, f: F) -> Result<ProjectCompileOutput>
where
F: FnOnce() -> Result<ProjectCompileOutput>,
{
let quiet = self.quiet.unwrap_or(false);
let bail = self.bail.unwrap_or(true);
#[allow(clippy::collapsible_else_if)]
let reporter = if quiet {
Report::new(NoReporter::default())
} else {
if std::io::stdout().is_terminal() {
Report::new(SpinnerReporter::spawn())
} else {
Report::new(BasicStdoutReporter::default())
}
};
let output = foundry_compilers::report::with_scoped(&reporter, || {
tracing::debug!("compiling project");
let timer = Instant::now();
let r = f();
let elapsed = timer.elapsed();
tracing::debug!("finished compiling in {:.3}s", elapsed.as_secs_f64());
r
})?;
// need to drop the reporter here, so that the spinner terminates
drop(reporter);
if bail && output.has_compiler_errors() {
eyre::bail!("{output}")
}
if !quiet {
if output.is_unchanged() {
println!("No files changed, compilation skipped");
} else {
// print the compiler output / warnings
println!("{output}");
}
self.handle_output(&output);
}
Ok(output)
}
/// If configured, this will print sizes or names
fn handle_output(&self, output: &ProjectCompileOutput) {
let print_names = self.print_names.unwrap_or(false);
let print_sizes = self.print_sizes.unwrap_or(false);
// print any sizes or names
if print_names {
let mut artifacts: BTreeMap<_, Vec<_>> = BTreeMap::new();
for (name, (_, version)) in output.versioned_artifacts() {
artifacts.entry(version).or_default().push(name);
}
for (version, names) in artifacts {
println!(
" compiler version: {}.{}.{}",
version.major, version.minor, version.patch
);
for name in names {
println!(" - {name}");
}
}
}
if print_sizes {
// add extra newline if names were already printed
if print_names {
println!();
}
let mut size_report = SizeReport { contracts: BTreeMap::new() };
let artifacts: BTreeMap<_, _> = output.artifacts().collect();
for (name, artifact) in artifacts {
let size = deployed_contract_size(artifact).unwrap_or_default();
let dev_functions =
artifact.abi.as_ref().map(|abi| abi.functions()).into_iter().flatten().filter(
|func| {
func.name.is_test() ||
func.name.eq("IS_TEST") ||
func.name.eq("IS_SCRIPT")
},
);
let is_dev_contract = dev_functions.count() > 0;
size_report.contracts.insert(name, ContractInfo { size, is_dev_contract });
}
println!("{size_report}");
// TODO: avoid process::exit
// exit with error if any contract exceeds the size limit, excluding test contracts.
if size_report.exceeds_size_limit() {
std::process::exit(1);
}
}
}
}
/// Contract source code and bytecode.
#[derive(Clone, Debug, Default)]
pub struct ContractSources {
/// Map over artifacts' contract names -> vector of file IDs
pub ids_by_name: HashMap<String, Vec<u32>>,
/// Map over file_id -> (source code, contract)
pub sources_by_id: FxHashMap<u32, (String, ContractBytecodeSome)>,
}
impl ContractSources {
/// Collects the contract sources and artifacts from the project compile output.
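///
/// # Example
///
/// A minimal sketch, assuming a finished compile `output` and its `project` are in scope; the
/// contract name is made up:
///
/// ```ignore
/// let sources = ContractSources::from_project_output(&output, &project.paths.root).unwrap();
/// if let Some(entries) = sources.get_sources("Counter") {
///     for (file_id, (source, _bytecode)) in entries {
///         println!("Counter is defined in file {file_id} ({} bytes of source)", source.len());
///     }
/// }
/// ```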
pub fn from_project_output(
output: &ProjectCompileOutput,
root: &Path,
) -> Result<ContractSources> {
let mut sources = ContractSources::default();
for (id, artifact) in output.artifact_ids() {
if let Some(file_id) = artifact.id {
let abs_path = root.join(&id.source);
let source_code = std::fs::read_to_string(abs_path).wrap_err_with(|| {
format!("failed to read artifact source file for `{}`", id.identifier())
})?;
let compact = CompactContractBytecode {
abi: artifact.abi.clone(),
bytecode: artifact.bytecode.clone(),
deployed_bytecode: artifact.deployed_bytecode.clone(),
};
let contract = compact_to_contract(compact)?;
sources.insert(&id, file_id, source_code, contract);
} else {
warn!(id = id.identifier(), "source not found");
}
}
Ok(sources)
}
/// Inserts a contract into the sources.
pub fn insert(
&mut self,
artifact_id: &ArtifactId,
file_id: u32,
source: String,
bytecode: ContractBytecodeSome,
) {
self.ids_by_name.entry(artifact_id.name.clone()).or_default().push(file_id);
self.sources_by_id.insert(file_id, (source, bytecode));
}
/// Returns the source for a contract by file ID.
pub fn get(&self, id: u32) -> Option<&(String, ContractBytecodeSome)> {
self.sources_by_id.get(&id)
}
/// Returns all sources for a contract by name.
pub fn get_sources(
&self,
name: &str,
) -> Option<impl Iterator<Item = (u32, &(String, ContractBytecodeSome))>> {
self.ids_by_name
.get(name)
.map(|ids| ids.iter().filter_map(|id| Some((*id, self.sources_by_id.get(id)?))))
}
/// Returns all (name, source) pairs.
pub fn entries(&self) -> impl Iterator<Item = (String, &(String, ContractBytecodeSome))> {
self.ids_by_name.iter().flat_map(|(name, ids)| {
ids.iter().filter_map(|id| self.sources_by_id.get(id).map(|s| (name.clone(), s)))
})
}
}
/// The maximum size of a deployed contract's runtime code, in bytes, as defined by EIP-170.
/// <https://eips.ethereum.org/EIPS/eip-170>
const CONTRACT_SIZE_LIMIT: usize = 24576;
/// Contracts with info about their size
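///
/// # Example
///
/// A minimal sketch of building and printing a report by hand; the contract name and size are
/// made up:
///
/// ```ignore
/// use std::collections::BTreeMap;
/// let mut contracts = BTreeMap::new();
/// contracts.insert("Counter".to_string(), ContractInfo { size: 1_234, is_dev_contract: false });
/// let report = SizeReport { contracts };
/// assert!(!report.exceeds_size_limit());
/// println!("{report}");
/// ```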
pub struct SizeReport {
/// `contract name -> info`
pub contracts: BTreeMap<String, ContractInfo>,
}
impl SizeReport {
/// Returns the size of the largest contract, excluding test contracts.
pub fn max_size(&self) -> usize {
let mut max_size = 0;
for contract in self.contracts.values() {
if !contract.is_dev_contract && contract.size > max_size {
max_size = contract.size;
}
}
max_size
}
/// Returns true if any contract exceeds the size limit, excluding test contracts.
pub fn exceeds_size_limit(&self) -> bool {
self.max_size() > CONTRACT_SIZE_LIMIT
}
}
impl Display for SizeReport {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
let mut table = Table::new();
table.load_preset(ASCII_MARKDOWN);
table.set_header([
Cell::new("Contract").add_attribute(Attribute::Bold).fg(Color::Blue),
Cell::new("Size (kB)").add_attribute(Attribute::Bold).fg(Color::Blue),
Cell::new("Margin (kB)").add_attribute(Attribute::Bold).fg(Color::Blue),
]);
let contracts = self.contracts.iter().filter(|(_, c)| !c.is_dev_contract && c.size > 0);
for (name, contract) in contracts {
let margin = CONTRACT_SIZE_LIMIT as isize - contract.size as isize;
let color = match contract.size {
0..=17999 => Color::Reset,
18000..=CONTRACT_SIZE_LIMIT => Color::Yellow,
_ => Color::Red,
};
table.add_row([
Cell::new(name).fg(color),
Cell::new(contract.size as f64 / 1000.0).fg(color),
Cell::new(margin as f64 / 1000.0).fg(color),
]);
}
writeln!(f, "{table}")?;
Ok(())
}
}
/// Returns the size of the deployed contract
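///
/// # Example
///
/// A minimal sketch, assuming compiled artifacts are available from a [`ProjectCompileOutput`]
/// named `output`:
///
/// ```ignore
/// for (name, artifact) in output.artifacts() {
///     let size = deployed_contract_size(artifact).unwrap_or_default();
///     println!("{name}: {size} bytes");
/// }
/// ```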
pub fn deployed_contract_size<T: Artifact>(artifact: &T) -> Option<usize> {
let bytecode = artifact.get_deployed_bytecode_object()?;
let size = match bytecode.as_ref() {
BytecodeObject::Bytecode(bytes) => bytes.len(),
BytecodeObject::Unlinked(unlinked) => {
// we don't need to account for placeholders here, because library placeholders take up
// 40 characters: `__$<library hash>$__`, which is the same as a 20-byte address in hex.
let mut size = unlinked.as_bytes().len();
if unlinked.starts_with("0x") {
size -= 2;
}
// hex -> bytes
size / 2
}
};
Some(size)
}
/// How big the contract is and whether it is a dev contract where size limits can be neglected
#[derive(Clone, Copy, Debug)]
pub struct ContractInfo {
/// size of the contract in bytes
pub size: usize,
/// A development contract is either a Script or a Test contract.
pub is_dev_contract: bool,
}
/// Compiles the target file path.
///
/// If `quiet`, no solc-related output will be emitted to stdout.
///
/// If `verify` is set and the target is a standalone script rather than part of a project, an
/// error is returned, since verification is only allowed for projects.
///
/// **Note:** this expects the `target_path` to be absolute
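///
/// # Example
///
/// A minimal sketch, assuming an absolute path to a script inside the project; the path is made
/// up for illustration:
///
/// ```ignore
/// use foundry_common::compile::compile_target_with_filter;
/// let config = foundry_config::Config::load();
/// let project = config.project().unwrap();
/// let target = std::path::Path::new("/abs/path/to/script/Deploy.s.sol");
/// let output = compile_target_with_filter(target, &project, false, false, vec![]).unwrap();
/// ```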
pub fn compile_target_with_filter(
target_path: &Path,
project: &Project,
quiet: bool,
verify: bool,
skip: Vec<SkipBuildFilter>,
) -> Result<ProjectCompileOutput> {
let graph = Graph::resolve(&project.paths)?;
// Checking if it's a standalone script, or part of a project.
let mut compiler = ProjectCompiler::new().quiet(quiet);
if !skip.is_empty() {
compiler = compiler.filter(Box::new(SkipBuildFilters::new(skip)?));
}
if !graph.files().contains_key(target_path) {
if verify {
eyre::bail!("You can only verify deployments from inside a project! Make sure it exists with `forge tree`.");
}
compiler = compiler.files([target_path.into()]);
}
compiler.compile(project)
}
/// Compiles an Etherscan source from metadata by creating a project.
/// Returns the [`ArtifactId`], the file ID, and the bytecode.
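///
/// # Example
///
/// A minimal sketch, assuming contract [`Metadata`] has already been fetched from a block
/// explorer (e.g. via `foundry_block_explorers`):
///
/// ```ignore
/// let (artifact_id, file_id, bytecode) = compile_from_source(&metadata).await.unwrap();
/// println!("compiled {} (file id {file_id})", artifact_id.name);
/// ```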
pub async fn compile_from_source(
metadata: &Metadata,
) -> Result<(ArtifactId, u32, ContractBytecodeSome)> {
let root = tempfile::tempdir()?;
let root_path = root.path();
let project = etherscan_project(metadata, root_path)?;
let project_output = project.compile()?;
if project_output.has_compiler_errors() {
eyre::bail!("{project_output}")
}
let (artifact_id, file_id, contract) = project_output
.into_artifacts()
.find(|(artifact_id, _)| artifact_id.name == metadata.contract_name)
.map(|(aid, art)| {
(aid, art.source_file().expect("no source file").id, art.into_contract_bytecode())
})
.ok_or_else(|| {
eyre::eyre!(
"Unable to find bytecode in compiled output for contract: {}",
metadata.contract_name
)
})?;
let bytecode = compact_to_contract(contract)?;
root.close()?;
Ok((artifact_id, file_id, bytecode))
}
/// Creates a [Project] from an Etherscan source.
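///
/// # Example
///
/// A minimal sketch, assuming fetched [`Metadata`] and a scratch directory to write the sources
/// into:
///
/// ```ignore
/// let dir = tempfile::tempdir().unwrap();
/// let project = etherscan_project(&metadata, dir.path()).unwrap();
/// let output = project.compile().unwrap();
/// ```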
pub fn etherscan_project(metadata: &Metadata, target_path: impl AsRef<Path>) -> Result<Project> {
let target_path = dunce::canonicalize(target_path.as_ref())?;
let sources_path = target_path.join(&metadata.contract_name);
metadata.source_tree().write_to(&target_path)?;
let mut settings = metadata.source_code.settings()?.unwrap_or_default();
// make remappings absolute with our root
for remapping in settings.remappings.iter_mut() {
let new_path = sources_path.join(remapping.path.trim_start_matches('/'));
remapping.path = new_path.display().to_string();
}
// add missing remappings
if !settings.remappings.iter().any(|remapping| remapping.name.starts_with("@openzeppelin/")) {
let oz = Remapping {
context: None,
name: "@openzeppelin/".into(),
path: sources_path.join("@openzeppelin").display().to_string(),
};
settings.remappings.push(oz);
}
// root/
//   ContractName/
//     [source code]
let paths = ProjectPathsConfig::builder()
.sources(sources_path.clone())
.remappings(settings.remappings.clone())
.build_with_root(sources_path);
let v = metadata.compiler_version()?;
let v = format!("{}.{}.{}", v.major, v.minor, v.patch);
let solc = Solc::find_or_install_svm_version(v)?;
Ok(Project::builder()
.solc_config(SolcConfig::builder().settings(settings).build())
.no_auto_detect()
.paths(paths)
.solc(solc)
.ephemeral()
.no_artifacts()
.build()?)
}
/// Bundles multiple `SkipBuildFilter` into a single `FileFilter`
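///
/// # Example
///
/// A minimal sketch of building a filter set and checking a path against it:
///
/// ```ignore
/// use foundry_compilers::FileFilter;
/// let filters = SkipBuildFilters::new([SkipBuildFilter::Tests, SkipBuildFilter::Scripts]).unwrap();
/// // `Counter.sol` is neither a test nor a script, so it is kept.
/// assert!(filters.is_match(std::path::Path::new("src/Counter.sol")));
/// ```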
#[derive(Clone, Debug)]
pub struct SkipBuildFilters(Vec<GlobMatcher>);
impl FileFilter for SkipBuildFilters {
/// Only returns a match if _no_ exclusion filter matches
fn is_match(&self, file: &Path) -> bool {
self.0.iter().all(|matcher| is_match_exclude(matcher, file))
}
}
impl SkipBuildFilters {
/// Creates a new `SkipBuildFilters` from multiple `SkipBuildFilter`.
pub fn new(matchers: impl IntoIterator<Item = SkipBuildFilter>) -> Result<Self> {
matchers.into_iter().map(|m| m.compile()).collect::<Result<_>>().map(Self)
}
}
/// A filter that excludes matching contracts from the build
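///
/// # Example
///
/// A minimal sketch of parsing filters from CLI-style strings via [`FromStr`]:
///
/// ```ignore
/// assert_eq!("test".parse::<SkipBuildFilter>(), Ok(SkipBuildFilter::Tests));
/// assert_eq!("script".parse::<SkipBuildFilter>(), Ok(SkipBuildFilter::Scripts));
/// assert_eq!("Foo.sol".parse::<SkipBuildFilter>(), Ok(SkipBuildFilter::Custom("Foo.sol".into())));
/// ```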
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SkipBuildFilter {
/// Exclude all `.t.sol` contracts
Tests,
/// Exclude all `.s.sol` contracts
Scripts,
/// Exclude if the file matches
Custom(String),
}
impl SkipBuildFilter {
fn new(s: &str) -> Self {
match s {
"test" | "tests" => SkipBuildFilter::Tests,
"script" | "scripts" => SkipBuildFilter::Scripts,
s => SkipBuildFilter::Custom(s.to_string()),
}
}
/// Returns the pattern to match against a file
fn file_pattern(&self) -> &str {
match self {
SkipBuildFilter::Tests => ".t.sol",
SkipBuildFilter::Scripts => ".s.sol",
SkipBuildFilter::Custom(s) => s.as_str(),
}
}
fn compile(&self) -> Result<GlobMatcher> {
self.file_pattern().parse().map_err(Into::into)
}
}
impl FromStr for SkipBuildFilter {
type Err = Infallible;
fn from_str(s: &str) -> result::Result<Self, Self::Err> {
Ok(Self::new(s))
}
}
/// Matches the file only if the filter does not apply.
///
/// This returns the inverse of `file.name.contains(pattern) || matcher.is_match(file)`.
fn is_match_exclude(matcher: &GlobMatcher, path: &Path) -> bool {
fn is_match(matcher: &GlobMatcher, path: &Path) -> Option<bool> {
let file_name = path.file_name()?.to_str()?;
Some(file_name.contains(matcher.as_str()) || matcher.is_match(path))
}
!is_match(matcher, path).unwrap_or_default()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_build_filter() {
let tests = SkipBuildFilter::Tests.compile().unwrap();
let scripts = SkipBuildFilter::Scripts.compile().unwrap();
let custom = |s: &str| SkipBuildFilter::Custom(s.to_string()).compile().unwrap();
let file = Path::new("A.t.sol");
assert!(!is_match_exclude(&tests, file));
assert!(is_match_exclude(&scripts, file));
assert!(!is_match_exclude(&custom("A.t"), file));
let file = Path::new("A.s.sol");
assert!(is_match_exclude(&tests, file));
assert!(!is_match_exclude(&scripts, file));
assert!(!is_match_exclude(&custom("A.s"), file));
let file = Path::new("/home/test/Foo.sol");
assert!(!is_match_exclude(&custom("*/test/**"), file));
let file = Path::new("/home/script/Contract.sol");
assert!(!is_match_exclude(&custom("*/script/**"), file));
}
}