Skip to content

Commit

Permalink
Merge branch 'master' into rk/directed-graph-ssa-check
Browse files Browse the repository at this point in the history
  • Loading branch information
rkarabut authored Dec 3, 2024
2 parents 4c20131 + 304403f commit e85ca35
Show file tree
Hide file tree
Showing 66 changed files with 1,749 additions and 737 deletions.
88 changes: 88 additions & 0 deletions .github/workflows/memory_report.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# CI workflow: builds the `nargo` binary once, then runs the repo's
# memory_report.sh harness and posts the resulting peak-memory report
# as a sticky PR comment.
#
# NOTE(review): this file was recovered from a scrape that stripped all
# YAML indentation; the nesting below is reconstructed from the key/step
# structure, which is unambiguous for this workflow shape.
name: Report Peak Memory

on:
  push:
    branches:
      - master
  pull_request:

jobs:
  # Job 1: compile nargo in release mode and publish it as an artifact
  # so the report job doesn't need a Rust toolchain.
  build-nargo:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64-unknown-linux-gnu]

    steps:
      - name: Checkout Noir repo
        uses: actions/checkout@v4

      - name: Setup toolchain
        # NOTE(review): the toolchain version here was scrubbed to
        # "[email protected]" by the page scraper (email-redaction artifact).
        # Restore the pinned version from the repository before use —
        # the action ref should be dtolnay/rust-toolchain@<version>.
        uses: dtolnay/[email protected]

      - uses: Swatinem/rust-cache@v2
        with:
          key: ${{ matrix.target }}
          cache-on-failure: true
          # Don't persist caches from merge-queue builds.
          save-if: ${{ github.event_name != 'merge_group' }}

      - name: Build Nargo
        run: cargo build --package nargo_cli --release

      - name: Package artifacts
        run: |
          mkdir dist
          cp ./target/release/nargo ./dist/nargo

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: nargo
          path: ./dist/*
          # Short retention: the binary is only needed by the job below.
          retention-days: 3

  # Job 2: download the prebuilt nargo, generate the memory report, and
  # publish it on the pull request.
  generate_memory_report:
    needs: [build-nargo]
    runs-on: ubuntu-latest
    permissions:
      # Required by the sticky-comment action to post on the PR.
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Download nargo binary
        uses: actions/download-artifact@v4
        with:
          name: nargo
          path: ./nargo

      - name: Set nargo on PATH
        run: |
          nargo_binary="${{ github.workspace }}/nargo/nargo"
          chmod +x $nargo_binary
          echo "$(dirname $nargo_binary)" >> $GITHUB_PATH
          export PATH="$PATH:$(dirname $nargo_binary)"
          nargo -V

      - name: Generate Memory report
        working-directory: ./test_programs
        run: |
          chmod +x memory_report.sh
          ./memory_report.sh
          mv memory_report.json ../memory_report.json

      - name: Parse memory report
        id: memory_report
        # Pinned to a full commit SHA (good practice for third-party actions).
        uses: noir-lang/noir-bench-report@ccb0d806a91d3bd86dba0ba3d580a814eed5673c
        with:
          report: memory_report.json
          header: |
            # Memory Report
          memory_report: true

      - name: Add memory report to sticky comment
        if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
        uses: marocchino/sticky-pull-request-comment@v2
        with:
          header: memory
          message: ${{ steps.memory_report.outputs.markdown }}
30 changes: 23 additions & 7 deletions .github/workflows/test-js-packages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -519,12 +519,25 @@ jobs:
fail-fast: false
matrix:
project:
# Disabled as these are currently failing with many visibility errors
- { repo: AztecProtocol/aztec-nr, path: ./ }
- { repo: noir-lang/ec, path: ./ }
- { repo: noir-lang/eddsa, path: ./ }
- { repo: noir-lang/mimc, path: ./ }
- { repo: noir-lang/noir_sort, path: ./ }
- { repo: noir-lang/noir-edwards, path: ./ }
- { repo: noir-lang/noir-bignum, path: ./ }
- { repo: noir-lang/noir_bigcurve, path: ./ }
- { repo: noir-lang/noir_base64, path: ./ }
- { repo: noir-lang/noir_string_search, path: ./ }
- { repo: noir-lang/sparse_array, path: ./ }
- { repo: noir-lang/noir_rsa, path: ./lib }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts }
# Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml`
#- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits }
- { repo: noir-lang/noir-edwards, path: ./, ref: 3188ea74fe3b059219a2ea87899589c266256d74 }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types }

name: Check external repo - ${{ matrix.project.repo }}
steps:
- name: Checkout
Expand Down Expand Up @@ -554,9 +567,12 @@ jobs:
# Github actions seems to not expand "**" in globs by default.
shopt -s globstar
sed -i '/^compiler_version/d' ./**/Nargo.toml
- name: Run nargo check
- name: Run nargo test
working-directory: ./test-repo/${{ matrix.project.path }}
run: nargo check
run: nargo test --silence-warnings
env:
NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true

# This is a job which depends on all test jobs and reports the overall status.
# This allows us to add/remove test jobs without having to update the required workflows.
Expand Down
18 changes: 16 additions & 2 deletions compiler/noirc_driver/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use noirc_abi::{AbiParameter, AbiType, AbiValue};
use noirc_errors::{CustomDiagnostic, FileDiagnostic};
use noirc_evaluator::create_program;
use noirc_evaluator::errors::RuntimeError;
use noirc_evaluator::ssa::SsaProgramArtifact;
use noirc_evaluator::ssa::{SsaLogging, SsaProgramArtifact};
use noirc_frontend::debug::build_debug_crate_file;
use noirc_frontend::hir::def_map::{Contract, CrateDefMap};
use noirc_frontend::hir::Context;
Expand Down Expand Up @@ -70,6 +70,11 @@ pub struct CompileOptions {
#[arg(long, hide = true)]
pub show_ssa: bool,

/// Only show SSA passes whose name contains the provided string.
/// This setting takes precedence over `show_ssa` if it's not empty.
#[arg(long, hide = true)]
pub show_ssa_pass_name: Option<String>,

/// Emit the unoptimized SSA IR to file.
/// The IR will be dumped into the workspace target directory,
/// under `[compiled-package].ssa.json`.
Expand Down Expand Up @@ -591,7 +596,16 @@ pub fn compile_no_check(
}
let return_visibility = program.return_visibility;
let ssa_evaluator_options = noirc_evaluator::ssa::SsaEvaluatorOptions {
enable_ssa_logging: options.show_ssa,
ssa_logging: match &options.show_ssa_pass_name {
Some(string) => SsaLogging::Contains(string.clone()),
None => {
if options.show_ssa {
SsaLogging::All
} else {
SsaLogging::None
}
}
},
enable_brillig_logging: options.show_brillig,
force_brillig_output: options.force_brillig,
print_codegen_timings: options.benchmark_codegen,
Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/acir/acir_variable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ impl<'a> From<&'a SsaType> for AcirType {
SsaType::Numeric(numeric_type) => AcirType::NumericType(*numeric_type),
SsaType::Array(elements, size) => {
let elements = elements.iter().map(|e| e.into()).collect();
AcirType::Array(elements, *size)
AcirType::Array(elements, *size as usize)
}
_ => unreachable!("The type {value} cannot be represented in ACIR"),
}
Expand Down
27 changes: 12 additions & 15 deletions compiler/noirc_evaluator/src/acir/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -571,7 +571,7 @@ impl<'a> Context<'a> {
AcirValue::Array(_) => {
let block_id = self.block_id(param_id);
let len = if matches!(typ, Type::Array(_, _)) {
typ.flattened_size()
typ.flattened_size() as usize
} else {
return Err(InternalError::Unexpected {
expected: "Block params should be an array".to_owned(),
Expand Down Expand Up @@ -816,7 +816,9 @@ impl<'a> Context<'a> {
let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg));
let output_count = result_ids
.iter()
.map(|result_id| dfg.type_of_value(*result_id).flattened_size())
.map(|result_id| {
dfg.type_of_value(*result_id).flattened_size() as usize
})
.sum();

let Some(acir_function_id) =
Expand Down Expand Up @@ -948,7 +950,7 @@ impl<'a> Context<'a> {
let block_id = self.block_id(&array_id);
let array_typ = dfg.type_of_value(array_id);
let len = if matches!(array_typ, Type::Array(_, _)) {
array_typ.flattened_size()
array_typ.flattened_size() as usize
} else {
Self::flattened_value_size(&output)
};
Expand Down Expand Up @@ -1444,7 +1446,7 @@ impl<'a> Context<'a> {
// a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA.
let array_typ = dfg.type_of_value(array);
let array_len = if !array_typ.contains_slice_element() {
array_typ.flattened_size()
array_typ.flattened_size() as usize
} else {
self.flattened_slice_size(array, dfg)
};
Expand Down Expand Up @@ -1539,7 +1541,7 @@ impl<'a> Context<'a> {
let value = self.convert_value(array, dfg);
let array_typ = dfg.type_of_value(array);
let len = if !array_typ.contains_slice_element() {
array_typ.flattened_size()
array_typ.flattened_size() as usize
} else {
self.flattened_slice_size(array, dfg)
};
Expand Down Expand Up @@ -1810,7 +1812,7 @@ impl<'a> Context<'a> {

return_values
.iter()
.fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size())
.fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size() as usize)
}

/// Converts an SSA terminator's return values into their ACIR representations
Expand Down Expand Up @@ -2156,7 +2158,7 @@ impl<'a> Context<'a> {
let inputs = vecmap(&arguments_no_slice_len, |arg| self.convert_value(*arg, dfg));

let output_count = result_ids.iter().fold(0usize, |sum, result_id| {
sum + dfg.try_get_array_length(*result_id).unwrap_or(1)
sum + dfg.try_get_array_length(*result_id).unwrap_or(1) as usize
});

let vars = self.acir_context.black_box_function(black_box, inputs, output_count)?;
Expand All @@ -2180,7 +2182,7 @@ impl<'a> Context<'a> {
endian,
field,
radix,
array_length as u32,
array_length,
result_type[0].clone().into(),
)
.map(|array| vec![array])
Expand All @@ -2194,12 +2196,7 @@ impl<'a> Context<'a> {
};

self.acir_context
.bit_decompose(
endian,
field,
array_length as u32,
result_type[0].clone().into(),
)
.bit_decompose(endian, field, array_length, result_type[0].clone().into())
.map(|array| vec![array])
}
Intrinsic::ArrayLen => {
Expand All @@ -2220,7 +2217,7 @@ impl<'a> Context<'a> {
let acir_value = self.convert_value(slice_contents, dfg);

let array_len = if !slice_typ.contains_slice_element() {
slice_typ.flattened_size()
slice_typ.flattened_size() as usize
} else {
self.flattened_slice_size(slice_contents, dfg)
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1823,7 +1823,7 @@ impl<'block> BrilligBlock<'block> {
Type::Array(_, nested_size) => {
let inner_array = BrilligArray {
pointer: self.brillig_context.allocate_register(),
size: *nested_size,
size: *nested_size as usize,
};
self.allocate_foreign_call_result_array(element_type, inner_array);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ pub(crate) fn allocate_value<F, Registers: RegisterAllocator>(
}
Type::Array(item_typ, elem_count) => BrilligVariable::BrilligArray(BrilligArray {
pointer: brillig_context.allocate_register(),
size: compute_array_length(&item_typ, elem_count),
size: compute_array_length(&item_typ, elem_count as usize),
}),
Type::Slice(_) => BrilligVariable::BrilligVector(BrilligVector {
pointer: brillig_context.allocate_register(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ impl FunctionContext {
vecmap(item_type.iter(), |item_typ| {
FunctionContext::ssa_type_to_parameter(item_typ)
}),
*size,
*size as usize,
),
Type::Slice(_) => {
panic!("ICE: Slice parameters cannot be derived from type information")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ pub(crate) fn type_to_heap_value_type(typ: &Type) -> HeapValueType {
),
Type::Array(elem_type, size) => HeapValueType::Array {
value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(),
size: typ.element_size() * size,
size: typ.element_size() * *size as usize,
},
Type::Slice(elem_type) => HeapValueType::Vector {
value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(),
Expand Down
Loading

0 comments on commit e85ca35

Please sign in to comment.