diff --git a/noir/Earthfile b/noir/Earthfile index 848dedcc0beb..9337b11452a4 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -56,6 +56,9 @@ examples: WORKDIR noir-repo/examples/codegen-verifier RUN ./test.sh + WORKDIR ../prove_and_verify + RUN ./test.sh + format: FROM +nargo ENV PATH=$PATH:/usr/src/noir-repo/target/release diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 859579c077f3..08e646f871ea 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -455,7 +455,6 @@ dependencies = [ name = "backend-interface" version = "0.29.0" dependencies = [ - "acvm", "bb_abstraction_leaks", "build-target", "const_format", @@ -2858,7 +2857,6 @@ dependencies = [ "dap", "dirs", "fm", - "hex", "iai", "iter-extended", "nargo", diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md index 743c4d8d6348..3d74d052e163 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md @@ -94,9 +94,9 @@ Two additional files would be generated in your project directory: _Prover.toml_ houses input values, and _Verifier.toml_ houses public values. -## Prove Our Noir Program +## Execute Our Noir Program -Now that the project is set up, we can create a proof of correct execution of our Noir program. +Now that the project is set up, we can execute our Noir program. Fill in input values for execution in the _Prover.toml_ file. For example: @@ -105,37 +105,50 @@ x = "1" y = "2" ``` -Prove the valid execution of your Noir program: +Execute your Noir program: ```sh -nargo prove +nargo execute witness-name ``` -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`.proof`, where the project name is defined in Nargo.toml. +The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`. 
-The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): +The _Verifier.toml_ file would also be updated with the public values computed from program execution (in this case the value of `y`): ```toml y = "0x0000000000000000000000000000000000000000000000000000000000000002" ``` -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. +> **Note:** Fields in _Verifier.toml_ are outputted as 32-byte hex values. + +## Prove Our Noir Program + +:::info + +Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md). + +::: + +Prove the valid execution of your Noir program using `bb`: + +```sh +bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof +``` + +A new file called `proof` will be generated in your project directory, containing the generated proof for your program. ## Verify Our Noir Program -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. +Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file. Verify your proof by running: ```sh -nargo verify +bb write_vk -b ./target/hello_world.json -o ./target/vk +bb verify -k ./target/vk -p ./proof ``` -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. +The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! 
diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md index 6160a102c6c9..b6aa9eab7806 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -92,20 +92,15 @@ fn main(x : Field, y : Field) { } ``` -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when verifying the proof. The prover supplies the values for `x` and `y` in the _Prover.toml_ file. -As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is constrained by the proof of the execution of said program (i.e. if the condition was not met, the verifier would reject the proof as an invalid proof). ### Prover.toml -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). +The _Prover.toml_ file is a file which the prover uses to supply the inputs to the Noir program (both private and public). 
In our hello world program the _Prover.toml_ file looks like this: @@ -114,12 +109,9 @@ x = "1" y = "2" ``` -When the command `nargo prove` is executed, two processes happen: +When the command `nargo execute` is executed, nargo will execute the Noir program using the inputs specified in `Prover.toml`, aborting if it finds that these do not satisfy the constraints defined by `main`. In this example, `x` and `y` must satisfy the inequality constraint `assert(x != y)`. -1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, - is not equal. This inequality constraint is due to the line `assert(x != y)`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. +If an output name is specified such as `nargo execute foo`, the witness generated by this execution will be written to `./target/foo.gz`. This can then be used to generate a proof of the execution. #### Arrays of Structs @@ -155,45 +147,18 @@ baz = 2 #### Custom toml files -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. +You can specify a `toml` file with a different name to use for execution by using the `--prover-name` or `-p` flags. 
-This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the default **Prover.toml** and generates the witness and saves it at `./target/foo.gz`: ```bash -nargo prove +nargo execute foo ``` -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the custom **OtherProver.toml** and generates the witness and saves it at `./target/bar.gz`: ```bash -nargo prove -p OtherProver +nargo execute -p OtherProver bar ``` -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs, usually from external sources, and -verify the validity of the proof against it. - -Take a private asset transfer as an example: - -A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. 
If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md index 8cf8035a5c4f..5f427f1e23fa 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md @@ -177,7 +177,7 @@ interface ForeignCallResult { ## Step 3 - Usage with Nargo -Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test` and `nargo execute` commands by passing a value to `--oracle-resolver`. For example: ```bash nargo test --oracle-resolver http://localhost:5555 @@ -203,7 +203,7 @@ As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_j Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? -You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. +You don't technically have to, but then how would you run `nargo test`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. 
::: diff --git a/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md b/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md index 36f4ab765b99..1bc1c9e256e1 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-solidity-verifier.md @@ -131,11 +131,25 @@ To verify a proof using the Solidity verifier contract, we call the `verify` function function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) ``` -When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings: +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. Note that the public inputs must be passed in separately to the rest of the proof so we must split the proof as returned from `bb`. +First generate a proof with `bb` at the location `./proof` using the steps in [get started](../getting_started/hello_noir/index.md), this proof is in a binary format but we want to convert it into a hex string to pass into Remix, this can be done with the following commands: + +```bash +# This value must be changed to match the number of public inputs (including return values!) in your program. 
+NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=$((32 * NUM_PUBLIC_INPUTS)) +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$((PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n') + +echo "Public inputs:" +echo $HEX_PUBLIC_INPUTS + +echo "Proof:" +echo "0x$HEX_PROOF" ``` -0x...... , [0x0000.....02] -``` + + +Remix expects that the public inputs will be split into an array of `bytes32` values so `HEX_PUBLIC_INPUTS` needs to be split up and prefixed with `0x` accordingly. You may notice that the public inputs match up with the values which are written in the `Verifier.toml` file so we can also copy the public input values from `Verifier.toml` which are already split up. Take care to ensure that the order of the public inputs aren't changed. A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): @@ -152,11 +166,9 @@ function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 null :::info[Return Values] -A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in -Noir. +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in Noir. -Under the hood, the return value is passed as an input to the circuit and is checked at the end of -the circuit program. +Under the hood, the return value is passed as an input to the circuit and is checked at the end of the circuit program. For example, if you have Noir program like this: @@ -170,11 +182,11 @@ fn main( ) -> pub Field
+the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo execute`. Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. -In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. ::: diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index 6b2d37739125..c14fffa71743 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -115,7 +115,7 @@ y = "1" Would result in: ``` -$ nargo prove +$ nargo execute error: Assertion failed: 'attempt to add with overflow' ┌─ ~/src/main.nr:9:13 │ diff --git a/noir/noir-repo/examples/prove_and_verify/Nargo.toml b/noir/noir-repo/examples/prove_and_verify/Nargo.toml new file mode 100644 index 000000000000..2b367f30dbc1 --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/Prover.toml b/noir/noir-repo/examples/prove_and_verify/Prover.toml new file mode 100644 index 000000000000..8c12ebba6cf7 --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "2" diff --git a/noir/noir-repo/examples/prove_and_verify/proofs/proof b/noir/noir-repo/examples/prove_and_verify/proofs/proof new file mode 100644 index 000000000000..01d5ad276865 Binary files /dev/null and b/noir/noir-repo/examples/prove_and_verify/proofs/proof differ diff --git 
a/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh b/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh new file mode 100755 index 000000000000..01ee6c70738d --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/prove_and_verify.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo execute witness + +# TODO: `bb` should create `proofs` directory if it doesn't exist. +mkdir -p proofs +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz + +# TODO: backend should automatically generate vk if necessary. +$BACKEND write_vk -b ./target/hello_world.json +$BACKEND verify -k ./target/vk -p ./proofs/proof \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/src/main.nr b/noir/noir-repo/examples/prove_and_verify/src/main.nr new file mode 100644 index 000000000000..baef0c3786aa --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/noir/noir-repo/examples/prove_and_verify/test.sh b/noir/noir-repo/examples/prove_and_verify/test.sh new file mode 100755 index 000000000000..a8ae3cca1320 --- /dev/null +++ b/noir/noir-repo/examples/prove_and_verify/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. 
+ +BACKEND=${BACKEND:-bb} + +rm -rf ./target ./proofs + +./prove_and_verify.sh \ No newline at end of file diff --git a/noir/noir-repo/tooling/backend_interface/Cargo.toml b/noir/noir-repo/tooling/backend_interface/Cargo.toml index b731c138c7db..32bf775ca20e 100644 --- a/noir/noir-repo/tooling/backend_interface/Cargo.toml +++ b/noir/noir-repo/tooling/backend_interface/Cargo.toml @@ -10,7 +10,6 @@ license.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -acvm.workspace = true dirs.workspace = true thiserror.workspace = true serde.workspace = true diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs index ba084165e729..071fe042d728 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs @@ -1,20 +1,10 @@ // Reference: https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/main.cpp mod gates; -mod proof_as_fields; -mod prove; -mod verify; mod version; -mod vk_as_fields; -mod write_vk; pub(crate) use gates::GatesCommand; -pub(crate) use proof_as_fields::ProofAsFieldsCommand; -pub(crate) use prove::ProveCommand; -pub(crate) use verify::VerifyCommand; pub(crate) use version::VersionCommand; -pub(crate) use vk_as_fields::VkAsFieldsCommand; -pub(crate) use write_vk::WriteVkCommand; pub(crate) use gates::CircuitReport; diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs b/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs deleted file mode 100644 index 7eb1c1ef35c4..000000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/proof_as_fields.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// `ProofAsFieldsCommand` will call the barretenberg 
binary -/// to split a proof into a representation as [`FieldElement`]s. -pub(crate) struct ProofAsFieldsCommand { - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ProofAsFieldsCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("proof_as_fields") - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs b/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs deleted file mode 100644 index 30a27048b480..000000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/prove.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// ProveCommand will call the barretenberg binary -/// to create a proof, given the witness and the bytecode. -/// -/// Note:Internally barretenberg will create and discard the -/// proving key, so this is not returned. -/// -/// The proof will be written to the specified output file. 
-pub(crate) struct ProveCommand { - pub(crate) crs_path: PathBuf, - pub(crate) artifact_path: PathBuf, - pub(crate) witness_path: PathBuf, -} - -impl ProveCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("prove") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.artifact_path) - .arg("-w") - .arg(self.witness_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - Ok(output.stdout) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn prove_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - - std::fs::File::create(&artifact_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let crs_path = backend.backend_directory(); - let prove_command = ProveCommand { crs_path, artifact_path, witness_path }; - - let proof = prove_command.run(backend.binary_path())?; - assert_eq!(proof, "proof".as_bytes()); - drop(temp_directory); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs b/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs deleted file mode 100644 index beea4bbec7d0..000000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/verify.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -/// VerifyCommand will call the barretenberg binary -/// to verify a proof -pub(crate) struct VerifyCommand { - pub(crate) crs_path: PathBuf, - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: 
PathBuf, -} - -impl VerifyCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("verify") - .arg("-c") - .arg(self.crs_path) - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path); - - let output = command.output()?; - - // We currently do not distinguish between an invalid proof and an error inside the backend. - Ok(output.status.success()) - } -} - -#[test] -fn verify_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - use super::{ProveCommand, WriteVkCommand}; - use crate::proof_system::write_to_file; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let artifact_path = temp_directory_path.join("acir.json"); - let witness_path = temp_directory_path.join("witness.tr"); - let proof_path = temp_directory_path.join("1_mul.proof"); - let vk_path_output = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&artifact_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { - artifact_path: artifact_path.clone(), - crs_path: crs_path.clone(), - vk_path_output: vk_path_output.clone(), - }; - - write_vk_command.run(backend.binary_path())?; - - let prove_command = ProveCommand { crs_path: crs_path.clone(), artifact_path, witness_path }; - let proof = prove_command.run(backend.binary_path())?; - - write_to_file(&proof, &proof_path); - - let verify_command = VerifyCommand { crs_path, proof_path, vk_path: vk_path_output }; - - let verified = verify_command.run(backend.binary_path())?; - assert!(verified); - - drop(temp_directory); - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs 
b/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs deleted file mode 100644 index 1b0212241c41..000000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/vk_as_fields.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VkAsFieldsCommand will call the barretenberg binary -/// to split a verification key into a representation as [`FieldElement`]s. -/// -/// The hash of the verification key will also be returned. -pub(crate) struct VkAsFieldsCommand { - pub(crate) vk_path: PathBuf, -} - -impl VkAsFieldsCommand { - pub(crate) fn run( - self, - binary_path: &Path, - ) -> Result<(FieldElement, Vec), BackendError> { - let mut command = std::process::Command::new(binary_path); - - command.arg("vk_as_fields").arg("-k").arg(self.vk_path).arg("-o").arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - let mut fields: Vec = serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string()))?; - - // The first element of this vector is the hash of the verification key, we want to split that off. 
- let hash = fields.remove(0); - Ok((hash, fields)) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs index 49fd57c968f0..4c6090f19a44 100644 --- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs +++ b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs @@ -1,16 +1,6 @@ -use std::io::Write; -use std::path::Path; -use std::{fs::File, path::PathBuf}; +use std::path::PathBuf; -use acvm::acir::native_types::{WitnessMap, WitnessStack}; -use acvm::FieldElement; -use tempfile::tempdir; -use tracing::warn; - -use crate::cli::{ - CircuitReport, GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, - VkAsFieldsCommand, WriteVkCommand, -}; +use crate::cli::{CircuitReport, GatesCommand}; use crate::{Backend, BackendError}; impl Backend { @@ -23,122 +13,4 @@ impl Backend { GatesCommand { crs_path: self.crs_directory(), artifact_path }.run(binary_path) } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn prove( - &self, - artifact_path: PathBuf, - witness_stack: WitnessStack, - num_public_inputs: u32, - ) -> Result, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the witness - let serialized_witnesses: Vec = - witness_stack.try_into().expect("could not serialize witness map"); - let witness_path = temp_directory.join("witness").with_extension("tr"); - write_to_file(&serialized_witnesses, &witness_path); - - // Create proof and store it in the specified path - let proof_with_public_inputs = - ProveCommand { crs_path: self.crs_directory(), artifact_path, witness_path } - .run(binary_path)?; - - let proof = 
bb_abstraction_leaks::remove_public_inputs( - // TODO(https://github.com/noir-lang/noir/issues/4428) - num_public_inputs as usize, - &proof_with_public_inputs, - ); - Ok(proof) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn verify( - &self, - proof: &[u8], - public_inputs: WitnessMap, - artifact_path: PathBuf, - ) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the proof - let proof_with_public_inputs = - bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - artifact_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Verify the proof - VerifyCommand { crs_path: self.crs_directory(), proof_path, vk_path }.run(binary_path) - } - - pub fn get_intermediate_proof_artifacts( - &self, - artifact_path: PathBuf, - proof: &[u8], - public_inputs: WitnessMap, - ) -> Result<(Vec, FieldElement, Vec), BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - artifact_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Create a temporary file for the proof - - let proof_with_public_inputs = - 
bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Now ready to generate intermediate artifacts. - - let proof_as_fields = - ProofAsFieldsCommand { proof_path, vk_path: vk_path.clone() }.run(binary_path)?; - - let (vk_hash, vk_as_fields) = VkAsFieldsCommand { vk_path }.run(binary_path)?; - - Ok((proof_as_fields, vk_hash, vk_as_fields)) - } -} - -pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { - let display = path.display(); - - let mut file = match File::create(path) { - Err(why) => panic!("couldn't create {display}: {why}"), - Ok(file) => file, - }; - - match file.write_all(bytes) { - Err(why) => panic!("couldn't write to {display}: {why}"), - Ok(_) => display.to_string(), - } } diff --git a/noir/noir-repo/tooling/nargo/src/constants.rs b/noir/noir-repo/tooling/nargo/src/constants.rs index 0b50d61fe376..1048d86fcd7f 100644 --- a/noir/noir-repo/tooling/nargo/src/constants.rs +++ b/noir/noir-repo/tooling/nargo/src/constants.rs @@ -13,8 +13,6 @@ pub const EXPORT_DIR: &str = "export"; // Files /// The file from which Nargo pulls prover inputs pub const PROVER_INPUT_FILE: &str = "Prover"; -/// The file from which Nargo pulls verifier inputs -pub const VERIFIER_INPUT_FILE: &str = "Verifier"; /// The package definition file for a Noir project. 
pub const PKG_FILE: &str = "Nargo.toml"; diff --git a/noir/noir-repo/tooling/nargo/src/package.rs b/noir/noir-repo/tooling/nargo/src/package.rs index ecbf35852107..44f0a3504f7d 100644 --- a/noir/noir-repo/tooling/nargo/src/package.rs +++ b/noir/noir-repo/tooling/nargo/src/package.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, fmt::Display, path::PathBuf}; use noirc_frontend::graph::CrateName; -use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use crate::constants::PROVER_INPUT_FILE; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PackageType { @@ -59,11 +59,6 @@ impl Package { // For now it is hard-coded to be toml. self.root_dir.join(format!("{PROVER_INPUT_FILE}.toml")) } - pub fn verifier_input_path(&self) -> PathBuf { - // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths - // For now it is hard-coded to be toml. - self.root_dir.join(format!("{VERIFIER_INPUT_FILE}.toml")) - } pub fn is_binary(&self) -> bool { self.package_type == PackageType::Binary diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml index c20be037e621..b034bf371510 100644 --- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml @@ -43,7 +43,6 @@ thiserror.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true -hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" diff --git a/noir/noir-repo/tooling/nargo_cli/src/backends.rs b/noir/noir-repo/tooling/nargo_cli/src/backends.rs deleted file mode 100644 index 2b3e9d8861f4..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/backends.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::PathBuf; - -use backend_interface::backends_directory; -pub(crate) use backend_interface::Backend; - -fn active_backend_file_path() -> PathBuf { - 
backends_directory().join(".selected_backend") -} - -pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; - -pub(crate) fn clear_active_backend() { - let active_backend_file = active_backend_file_path(); - if active_backend_file.is_file() { - std::fs::remove_file(active_backend_file_path()) - .expect("should delete active backend file"); - } -} - -pub(crate) fn set_active_backend(backend_name: &str) { - let active_backend_file = active_backend_file_path(); - let backends_directory = - active_backend_file.parent().expect("active backend file should have parent"); - - std::fs::create_dir_all(backends_directory).expect("Could not create backends directory"); - std::fs::write(active_backend_file, backend_name.as_bytes()) - .expect("Could not write to active backend file"); -} - -pub(crate) fn get_active_backend() -> String { - let active_backend_file = active_backend_file_path(); - - if !active_backend_file.is_file() { - set_active_backend(ACVM_BACKEND_BARRETENBERG); - return ACVM_BACKEND_BARRETENBERG.to_string(); - } - - std::fs::read_to_string(active_backend_file).expect("Could not read active backend file") -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs deleted file mode 100644 index 5aba00764d31..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs +++ /dev/null @@ -1,13 +0,0 @@ -use clap::Args; - -use crate::{backends::get_active_backend, errors::CliError}; - -/// Prints the name of the currently active backend -#[derive(Debug, Clone, Args)] -pub(crate) struct CurrentCommand; - -pub(crate) fn run(_args: CurrentCommand) -> Result<(), CliError> { - println!("{}", get_active_backend()); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs deleted file mode 100644 index 974db9ff7f5a..000000000000 --- 
a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs +++ /dev/null @@ -1,30 +0,0 @@ -use clap::Args; - -use backend_interface::{backends_directory, download_backend}; - -use crate::errors::{BackendError, CliError}; - -use super::ls_cmd::get_available_backends; - -/// Install a new backend from a URL. -#[derive(Debug, Clone, Args)] -pub(crate) struct InstallCommand { - /// The name of the backend to install. - backend: String, - - /// The URL from which to download the backend. - url: String, -} - -pub(crate) fn run(args: InstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if installed_backends.contains(&args.backend) { - return Err(BackendError::AlreadyInstalled(args.backend).into()); - } - - download_backend(&args.url, &backends_directory().join(args.backend).join("backend_binary")) - .map_err(BackendError::from)?; - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs deleted file mode 100644 index da37b104d656..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs +++ /dev/null @@ -1,34 +0,0 @@ -use backend_interface::backends_directory; -use clap::Args; - -use crate::errors::CliError; - -/// Prints the list of currently installed backends -#[derive(Debug, Clone, Args)] -pub(crate) struct LsCommand; - -pub(crate) fn run(_args: LsCommand) -> Result<(), CliError> { - for backend in get_available_backends() { - println!("{backend}"); - } - - Ok(()) -} - -pub(super) fn get_available_backends() -> Vec { - let backend_directory_contents = std::fs::read_dir(backends_directory()) - .expect("Could not read backends directory contents"); - - // TODO: Highlight the currently active backend. 
- backend_directory_contents - .into_iter() - .filter_map(|entry| { - let path = entry.ok()?.path(); - if path.is_dir() { - path.file_name().map(|name| name.to_string_lossy().to_string()) - } else { - None - } - }) - .collect() -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs deleted file mode 100644 index 985dbbdb934e..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/mod.rs +++ /dev/null @@ -1,41 +0,0 @@ -use clap::{Args, Subcommand}; - -use crate::errors::CliError; - -mod current_cmd; -mod install_cmd; -mod ls_cmd; -mod uninstall_cmd; -mod use_cmd; - -#[non_exhaustive] -#[derive(Args, Clone, Debug)] -/// Install and select custom backends used to generate and verify proofs. -pub(crate) struct BackendCommand { - #[command(subcommand)] - command: BackendCommands, -} - -#[non_exhaustive] -#[derive(Subcommand, Clone, Debug)] -pub(crate) enum BackendCommands { - Current(current_cmd::CurrentCommand), - Ls(ls_cmd::LsCommand), - Use(use_cmd::UseCommand), - Install(install_cmd::InstallCommand), - Uninstall(uninstall_cmd::UninstallCommand), -} - -pub(crate) fn run(cmd: BackendCommand) -> Result<(), CliError> { - let BackendCommand { command } = cmd; - - match command { - BackendCommands::Current(args) => current_cmd::run(args), - BackendCommands::Ls(args) => ls_cmd::run(args), - BackendCommands::Use(args) => use_cmd::run(args), - BackendCommands::Install(args) => install_cmd::run(args), - BackendCommands::Uninstall(args) => uninstall_cmd::run(args), - }?; - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs deleted file mode 100644 index 7497f1bc2f6b..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs +++ /dev/null @@ -1,59 +0,0 @@ -use clap::Args; - -use backend_interface::backends_directory; - -use crate::{ - 
backends::{ - clear_active_backend, get_active_backend, set_active_backend, ACVM_BACKEND_BARRETENBERG, - }, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Uninstalls a backend -#[derive(Debug, Clone, Args)] -pub(crate) struct UninstallCommand { - /// The name of the backend to uninstall. - backend: String, -} - -pub(crate) fn run(args: UninstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if !installed_backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - let active_backend = get_active_backend(); - - // Handle the case where we're uninstalling the currently active backend. - if active_backend == args.backend { - let barretenberg_is_installed = - installed_backends.iter().any(|backend_name| backend_name == ACVM_BACKEND_BARRETENBERG); - - let new_active_backend = - if args.backend != ACVM_BACKEND_BARRETENBERG && barretenberg_is_installed { - // Prefer switching to barretenberg if possible. - Some(ACVM_BACKEND_BARRETENBERG) - } else { - // Otherwise pick the first backend which isn't being uninstalled. - installed_backends - .iter() - .find(|&backend_name| backend_name != &args.backend) - .map(|name| name.as_str()) - }; - - if let Some(backend) = new_active_backend { - set_active_backend(backend); - } else { - // We've deleted the last backend. Clear the active backend file to be recreated once we install a new one. 
- clear_active_backend(); - } - } - - std::fs::remove_dir_all(backends_directory().join(args.backend)) - .expect("backend directory should be deleted"); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs deleted file mode 100644 index 66a129c21489..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs +++ /dev/null @@ -1,26 +0,0 @@ -use clap::Args; - -use crate::{ - backends::set_active_backend, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Select the backend to use -#[derive(Debug, Clone, Args)] -pub(crate) struct UseCommand { - backend: String, -} - -pub(crate) fn run(args: UseCommand) -> Result<(), CliError> { - let backends = get_available_backends(); - - if !backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - set_active_backend(&args.backend); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index d5313d960760..e2e1f147b902 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -95,13 +95,11 @@ fn check_package( Ok(false) } else { // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files - if let Some((parameters, return_type)) = compute_function_abi(&context, &crate_id) { + if let Some((parameters, _)) = compute_function_abi(&context, &crate_id) { let path_to_prover_input = package.prover_input_path(); - let path_to_verifier_input = package.verifier_input_path(); // Before writing the file, check if it exists and whether overwrite is set let should_write_prover = !path_to_prover_input.exists() || allow_overwrite; - let should_write_verifier = !path_to_verifier_input.exists() || allow_overwrite; if should_write_prover 
{ let prover_toml = create_input_toml_template(parameters.clone(), None); @@ -110,19 +108,7 @@ fn check_package( eprintln!("Note: Prover.toml already exists. Use --overwrite to force overwrite."); } - if should_write_verifier { - let public_inputs = - parameters.into_iter().filter(|param| param.is_public()).collect(); - - let verifier_toml = create_input_toml_template(public_inputs, return_type); - write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); - } else { - eprintln!( - "Note: Verifier.toml already exists. Use --overwrite to force overwrite." - ); - } - - let any_file_written = should_write_prover || should_write_verifier; + let any_file_written = should_write_prover; Ok(any_file_written) } else { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs index bd038c51ad5a..dee9a00507c7 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/inputs.rs @@ -6,8 +6,6 @@ use std::{collections::BTreeMap, path::Path}; use crate::errors::FilesystemError; -use super::write_to_file; - /// Returns the circuit's parameters and its return value, if one exists. /// # Examples /// @@ -36,99 +34,3 @@ pub(crate) fn read_inputs_from_file>( Ok((input_map, return_value)) } - -pub(crate) fn write_inputs_to_file>( - input_map: &InputMap, - return_value: &Option, - abi: &Abi, - path: P, - file_name: &str, - format: Format, -) -> Result<(), FilesystemError> { - let file_path = path.as_ref().join(file_name).with_extension(format.ext()); - - // We must insert the return value into the `InputMap` in order for it to be written to file. - let serialized_output = match return_value { - // Parameters and return values are kept separate except for when they're being written to file. - // As a result, we don't want to modify the original map and must clone it before insertion. 
- Some(return_value) => { - let mut input_map = input_map.clone(); - input_map.insert(MAIN_RETURN_NAME.to_owned(), return_value.clone()); - format.serialize(&input_map, abi)? - } - // If no return value exists, then we can serialize the original map directly. - None => format.serialize(input_map, abi)?, - }; - - write_to_file(serialized_output.as_bytes(), &file_path); - - Ok(()) -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeMap, vec}; - - use acvm::FieldElement; - use nargo::constants::VERIFIER_INPUT_FILE; - use noirc_abi::{ - input_parser::{Format, InputValue}, - Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, - }; - use tempfile::TempDir; - - use super::{read_inputs_from_file, write_inputs_to_file}; - - #[test] - fn write_and_read_recovers_inputs_and_return_value() { - let input_dir = TempDir::new().unwrap().into_path(); - - // We purposefully test a simple ABI here as we're focussing on `fs`. - // Tests for serializing complex types should exist in `noirc_abi`. - let abi = Abi { - parameters: vec![ - AbiParameter { - name: "foo".into(), - typ: AbiType::Field, - visibility: AbiVisibility::Public, - }, - AbiParameter { - name: "bar".into(), - typ: AbiType::String { length: 11 }, - visibility: AbiVisibility::Private, - }, - ], - return_type: Some(AbiReturnType { - abi_type: AbiType::Field, - visibility: AbiVisibility::Public, - }), - - // Input serialization is only dependent on types, not position in witness map. - // Neither of these should be relevant so we leave them empty. 
- param_witnesses: BTreeMap::new(), - return_witnesses: Vec::new(), - error_types: BTreeMap::new(), - }; - let input_map = BTreeMap::from([ - ("foo".to_owned(), InputValue::Field(42u128.into())), - ("bar".to_owned(), InputValue::String("hello world".to_owned())), - ]); - let return_value = Some(InputValue::Field(FieldElement::zero())); - - write_inputs_to_file( - &input_map, - &return_value, - &abi, - &input_dir, - VERIFIER_INPUT_FILE, - Format::Toml, - ) - .unwrap(); - - let (loaded_inputs, loaded_return_value) = - read_inputs_from_file(input_dir, VERIFIER_INPUT_FILE, Format::Toml, &abi).unwrap(); - - assert_eq!(loaded_inputs, input_map); - assert_eq!(loaded_return_value, return_value); - } -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs index 4ebce3b3325b..8658bd5b2482 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/mod.rs @@ -4,11 +4,8 @@ use std::{ path::{Path, PathBuf}, }; -use crate::errors::FilesystemError; - pub(super) mod inputs; pub(super) mod program; -pub(super) mod proof; pub(super) mod witness; pub(super) fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { @@ -31,12 +28,3 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { Ok(_) => display.to_string(), } } - -pub(super) fn load_hex_data>(path: P) -> Result, FilesystemError> { - let hex_data: Vec<_> = std::fs::read(&path) - .map_err(|_| FilesystemError::PathNotValid(path.as_ref().to_path_buf()))?; - - let raw_bytes = hex::decode(hex_data).map_err(FilesystemError::HexArtifactNotValid)?; - - Ok(raw_bytes) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs deleted file mode 100644 index d2b3050708bc..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/proof.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::path::{Path, PathBuf}; - -use nargo::constants::PROOF_EXT; 
- -use crate::errors::FilesystemError; - -use super::{create_named_dir, write_to_file}; - -pub(crate) fn save_proof_to_dir>( - proof: &[u8], - proof_name: &str, - proof_dir: P, -) -> Result { - create_named_dir(proof_dir.as_ref(), "proof"); - let proof_path = proof_dir.as_ref().join(proof_name).with_extension(PROOF_EXT); - - write_to_file(hex::encode(proof).as_bytes(), &proof_path); - - Ok(proof_path) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index d68aef497f67..daa80805acbf 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, path::PathBuf}; +use std::collections::HashMap; use acvm::acir::circuit::ExpressionWidth; use backend_interface::BackendError; @@ -16,7 +16,6 @@ use prettytable::{row, table, Row}; use rayon::prelude::*; use serde::Serialize; -use crate::backends::Backend; use crate::errors::CliError; use super::{ @@ -50,11 +49,7 @@ pub(crate) struct InfoCommand { compile_options: CompileOptions, } -pub(crate) fn run( - backend: &Backend, - args: InfoCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -93,8 +88,6 @@ pub(crate) fn run( .par_bridge() .map(|(package, program)| { count_opcodes_and_gates_in_program( - backend, - workspace.package_build_path(&package), program, &package, args.compile_options.expression_width, @@ -196,7 +189,6 @@ impl From for Vec { Fc->format!("{}", function.name), format!("{:?}", program_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } @@ -215,7 +207,6 @@ struct ContractInfo { struct FunctionInfo { name: 
String, acir_opcodes: usize, - circuit_size: u32, } impl From for Vec { @@ -226,20 +217,16 @@ impl From for Vec { Fc->format!("{}", function.name), format!("{:?}", contract_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } } fn count_opcodes_and_gates_in_program( - backend: &Backend, - program_artifact_path: PathBuf, compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, ) -> Result { - let program_circuit_sizes = backend.get_exact_circuit_sizes(program_artifact_path)?; let functions = compiled_program .bytecode .functions @@ -248,9 +235,7 @@ fn count_opcodes_and_gates_in_program( .map(|(i, function)| -> Result<_, BackendError> { Ok(FunctionInfo { name: compiled_program.names[i].clone(), - // Required while mock backend doesn't return correct circuit size. acir_opcodes: function.opcodes.len(), - circuit_size: program_circuit_sizes[i].circuit_size, }) }) .collect::>()?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs index b19bec2b83cb..485ccc7abafb 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs @@ -6,11 +6,8 @@ use std::path::PathBuf; use color_eyre::eyre; -use crate::backends::get_active_backend; - mod fs; -mod backend_cmd; mod check_cmd; mod compile_cmd; mod dap_cmd; @@ -22,9 +19,7 @@ mod info_cmd; mod init_cmd; mod lsp_cmd; mod new_cmd; -mod prove_cmd; mod test_cmd; -mod verify_cmd; const GIT_HASH: &str = env!("GIT_COMMIT"); const IS_DIRTY: &str = env!("GIT_DIRTY"); @@ -59,7 +54,6 @@ pub(crate) struct NargoConfig { #[non_exhaustive] #[derive(Subcommand, Clone, Debug)] enum NargoCommand { - Backend(backend_cmd::BackendCommand), Check(check_cmd::CheckCommand), Fmt(fmt_cmd::FormatCommand), #[command(alias = "build")] @@ -71,8 +65,6 @@ enum NargoCommand { Export(export_cmd::ExportCommand), #[command(hide = true)] // Hidden while the 
feature is being built out Debug(debug_cmd::DebugCommand), - Prove(prove_cmd::ProveCommand), - Verify(verify_cmd::VerifyCommand), Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), Lsp(lsp_cmd::LspCommand), @@ -92,18 +84,11 @@ pub(crate) fn start_cli() -> eyre::Result<()> { // Search through parent directories to find package root if necessary. if !matches!( command, - NargoCommand::New(_) - | NargoCommand::Init(_) - | NargoCommand::Lsp(_) - | NargoCommand::Backend(_) - | NargoCommand::Dap(_) + NargoCommand::New(_) | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } - let active_backend = get_active_backend(); - let backend = crate::backends::Backend::new(active_backend); - match command { NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), @@ -112,11 +97,8 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::Debug(args) => debug_cmd::run(args, config), NargoCommand::Execute(args) => execute_cmd::run(args, config), NargoCommand::Export(args) => export_cmd::run(args, config), - NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), - NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => test_cmd::run(args, config), - NargoCommand::Info(args) => info_cmd::run(&backend, args, config), - NargoCommand::Backend(args) => backend_cmd::run(args), + NargoCommand::Info(args) => info_cmd::run(args, config), NargoCommand::Lsp(args) => lsp_cmd::run(args, config), NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs deleted file mode 100644 index 127c5ac2ebbe..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ /dev/null @@ -1,141 +0,0 @@ -use 
std::path::PathBuf; - -use clap::Args; -use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::package::Package; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::graph::CrateName; - -use super::compile_cmd::compile_workspace_full; -use super::fs::program::read_program_from_file; -use super::fs::{ - inputs::{read_inputs_from_file, write_inputs_to_file}, - proof::save_proof_to_dir, -}; -use super::NargoConfig; -use crate::{backends::Backend, cli::execute_cmd::execute_program, errors::CliError}; - -/// Create proof for this program. The proof is returned as a hex encoded string. -#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "p")] -pub(crate) struct ProveCommand { - /// The name of the toml file which contains the inputs for the prover - #[clap(long, short, default_value = PROVER_INPUT_FILE)] - prover_name: String, - - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// Verify proof after proving - #[arg(long)] - verify: bool, - - /// The name of the package to prove - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Prove all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, - - /// JSON RPC url to solve oracle calls - #[clap(long)] - oracle_resolver: Option, -} - -pub(crate) fn run( - backend: &Backend, - args: ProveCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); 
- let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - // Compile the full workspace in order to generate any build artifacts. - compile_workspace_full(&workspace, &args.compile_options)?; - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let program_artifact_path = workspace.package_build_path(package); - let program: CompiledProgram = read_program_from_file(&program_artifact_path)?.into(); - - let proof = prove_package( - backend, - package, - program, - program_artifact_path, - &args.prover_name, - &args.verifier_name, - args.verify, - args.oracle_resolver.as_deref(), - )?; - - save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -fn prove_package( - backend: &Backend, - package: &Package, - compiled_program: CompiledProgram, - program_artifact_path: PathBuf, - prover_name: &str, - verifier_name: &str, - check_proof: bool, - foreign_call_resolver_url: Option<&str>, -) -> Result, CliError> { - // Parse the initial witness values from Prover.toml - let (inputs_map, _) = - read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - - let witness_stack = execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; - - // Write public inputs into Verifier.toml - let public_abi = compiled_program.abi.public_abi(); - // Get the entry point witness for the ABI - let main_witness = - &witness_stack.peek().expect("Should have at least one witness on the stack").witness; - let (public_inputs, return_value) = public_abi.decode(main_witness)?; - - write_inputs_to_file( - &public_inputs, - &return_value, - &public_abi, - &package.root_dir, - verifier_name, - Format::Toml, - )?; - - let proof = backend.prove( - program_artifact_path.clone(), - witness_stack, - 
compiled_program.program.functions[0].public_inputs().0.len() as u32, - )?; - - if check_proof { - let public_inputs = public_abi.encode(&public_inputs, return_value)?; - let valid_proof = backend.verify(&proof, public_inputs, program_artifact_path)?; - - if !valid_proof { - return Err(CliError::InvalidProof("".into())); - } - } - - Ok(proof) -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs deleted file mode 100644 index ad1978cabe00..000000000000 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs +++ /dev/null @@ -1,92 +0,0 @@ -use super::compile_cmd::compile_workspace_full; -use super::fs::program::read_program_from_file; -use super::fs::{inputs::read_inputs_from_file, load_hex_data}; -use super::NargoConfig; -use crate::{backends::Backend, errors::CliError}; - -use clap::Args; -use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::graph::CrateName; - -/// Given a proof and a program, verify whether the proof is valid -#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "v")] -pub(crate) struct VerifyCommand { - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// The name of the package verify - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Verify all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &Backend, - args: VerifyCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = 
get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - // Compile the full workspace in order to generate any build artifacts. - compile_workspace_full(&workspace, &args.compile_options)?; - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let program_artifact_path = workspace.package_build_path(package); - let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); - - verify_package(backend, &workspace, package, program, &args.verifier_name)?; - } - - Ok(()) -} - -fn verify_package( - backend: &Backend, - workspace: &Workspace, - package: &Package, - compiled_program: CompiledProgram, - verifier_name: &str, -) -> Result<(), CliError> { - // Load public inputs (if any) from `verifier_name`. 
- let public_abi = compiled_program.abi.public_abi(); - let (public_inputs_map, return_value) = - read_inputs_from_file(&package.root_dir, verifier_name, Format::Toml, &public_abi)?; - - let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - - let proof_path = - workspace.proofs_directory_path().join(package.name.to_string()).with_extension(PROOF_EXT); - - let proof = load_hex_data(&proof_path)?; - - let valid_proof = - backend.verify(&proof, public_inputs, workspace.package_build_path(package))?; - - if valid_proof { - Ok(()) - } else { - Err(CliError::InvalidProof(proof_path)) - } -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/errors.rs b/noir/noir-repo/tooling/nargo_cli/src/errors.rs index 40fb7886405a..4644e1be6fcb 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/errors.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/errors.rs @@ -1,5 +1,4 @@ use acvm::acir::native_types::WitnessStackError; -use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; @@ -11,8 +10,7 @@ use thiserror::Error; pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] PathNotValid(PathBuf), - #[error("Error: could not parse hex build artifact (proof, proving and/or verification keys, ACIR checksum) ({0})")] - HexArtifactNotValid(FromHexError), + #[error( " Error: cannot find {0}.toml file.\n Expected location: {1:?} \n Please generate this file at the expected location." )] @@ -37,9 +35,6 @@ pub(crate) enum CliError { #[error("Error: destination {} already exists", .0.display())] DestinationAlreadyExists(PathBuf), - #[error("Failed to verify proof {}", .0.display())] - InvalidProof(PathBuf), - #[error("Invalid package name {0}. 
Did you mean to use `--name`?")] InvalidPackageName(String), @@ -69,23 +64,7 @@ pub(crate) enum CliError { #[error(transparent)] CompileError(#[from] CompileError), - /// Error related to backend selection/installation. - #[error(transparent)] - BackendError(#[from] BackendError), - /// Error related to communication with backend. #[error(transparent)] BackendCommunicationError(#[from] backend_interface::BackendError), } - -#[derive(Debug, thiserror::Error)] -pub(crate) enum BackendError { - #[error("No backend is installed with the name {0}")] - UnknownBackend(String), - - #[error("The backend {0} is already installed")] - AlreadyInstalled(String), - - #[error("Backend installation failed: {0}")] - InstallationError(#[from] std::io::Error), -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/main.rs b/noir/noir-repo/tooling/nargo_cli/src/main.rs index 6e2b7069bc46..a407d467ced4 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/main.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/main.rs @@ -7,7 +7,6 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. -mod backends; mod cli; mod errors;