Skip to content

Commit

Permalink
Update the circuits of cells tree, rows tree and block tree for generic extraction.
Browse files Browse the repository at this point in the history
  • Loading branch information
silathdiir committed Oct 22, 2024
1 parent aea77c6 commit 7f9cb61
Show file tree
Hide file tree
Showing 23 changed files with 1,110 additions and 502 deletions.
19 changes: 10 additions & 9 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions mp2-common/src/poseidon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ pub type H = <C as GenericConfig<D>>::Hasher;
pub type P = <H as AlgebraicHasher<GoldilocksField>>::AlgebraicPermutation;
pub type HashPermutation = <H as Hasher<F>>::Permutation;

/// The length of a hash-to-integer result: 4 Uint32 limbs (128 bits in total).
pub const HASH_TO_INT_LEN: usize = 4;

/// The flattened length of a Poseidon hash, where each original field element is
/// split from a Uint64 into two Uint32 limbs.
pub const FLATTEN_POSEIDON_LEN: usize = NUM_HASH_OUT_ELTS * 2;
Expand Down
7 changes: 7 additions & 0 deletions mp2-common/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::VerifierCircuitData;
use plonky2::plonk::config::{GenericConfig, GenericHashOut, Hasher};
use plonky2_crypto::u32::arithmetic_u32::U32Target;
use plonky2_ecdsa::gadgets::biguint::BigUintTarget;

use plonky2_ecgfp5::gadgets::{base_field::QuinticExtensionTarget, curve::CurveTarget};
use sha3::Digest;
Expand Down Expand Up @@ -439,6 +440,12 @@ impl ToTargets for &[Target] {
}
}

impl ToTargets for BigUintTarget {
    /// Flatten the big-integer target into its Uint32 limb targets, in limb order.
    fn to_targets(&self) -> Vec<Target> {
        let mut targets = Vec::with_capacity(self.limbs.len());
        for limb in &self.limbs {
            // Each limb is a U32Target newtype; expose the inner Target.
            targets.push(limb.0);
        }
        targets
    }
}

pub trait TargetsConnector {
fn connect_targets<T: ToTargets>(&mut self, e1: T, e2: T);
fn is_equal_targets<T: ToTargets>(&mut self, e1: T, e2: T) -> BoolTarget;
Expand Down
4 changes: 3 additions & 1 deletion verifiable-db/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ edition = "2021"

[dependencies]
mp2_common = { path = "../mp2-common" }
num.workspace = true
plonky2_crypto.workspace = true
recursion_framework = { path = "../recursion-framework" }
ryhope = { path = "../ryhope" }
Expand All @@ -29,4 +30,5 @@ serial_test.workspace = true
tokio.workspace = true

[features]
original_poseidon = ["mp2_common/original_poseidon"]
original_poseidon = ["mp2_common/original_poseidon"]

2 changes: 1 addition & 1 deletion verifiable-db/src/block_tree/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ mod tests {
use std::iter;

const EXTRACTION_IO_LEN: usize = extraction::test::PublicInputs::<F>::TOTAL_LEN;
const ROWS_TREE_IO_LEN: usize = row_tree::PublicInputs::<F>::TOTAL_LEN;
const ROWS_TREE_IO_LEN: usize = row_tree::PublicInputs::<F>::total_len();

struct TestBuilder<E>
where
Expand Down
20 changes: 8 additions & 12 deletions verifiable-db/src/block_tree/leaf.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,14 @@
//! an existing node (or if there is no existing node, which happens for the
//! first block number).
use super::{compute_index_digest, public_inputs::PublicInputs};
use super::{compute_final_digest, compute_index_digest, public_inputs::PublicInputs};
use crate::{
extraction::{ExtractionPI, ExtractionPIWrap},
row_tree,
};
use anyhow::Result;
use mp2_common::{
default_config,
group_hashing::CircuitBuilderGroupHashing,
poseidon::{empty_poseidon_hash, H},
proof::ProofWithVK,
public_inputs::PublicInputCommon,
Expand Down Expand Up @@ -55,15 +54,12 @@ impl LeafCircuit {

let extraction_pi = E::PI::from_slice(extraction_pi);
let rows_tree_pi = row_tree::PublicInputs::<Target>::from_slice(rows_tree_pi);
let final_digest = compute_final_digest::<E>(b, &extraction_pi, &rows_tree_pi);

// in our case, the extraction proofs extracts from the blockchain and sets
// the block number as the primary index
let index_value = extraction_pi.primary_index_value();

// Enforce that the data extracted from the blockchain is the same as the data
// employed to build the rows tree for this node.
b.connect_curve_points(extraction_pi.value_set_digest(), rows_tree_pi.rows_digest());

// Compute the hash of table metadata, to be exposed as public input to prove to
// the verifier that we extracted the correct storage slots and we place the data
// in the expected columns of the constructed tree; we add also the identifier
Expand All @@ -82,7 +78,7 @@ impl LeafCircuit {
let inputs = iter::once(index_identifier)
.chain(index_value.iter().cloned())
.collect();
let node_digest = compute_index_digest(b, inputs, rows_tree_pi.rows_digest());
let node_digest = compute_index_digest(b, inputs, final_digest);

// Compute hash of the inserted node
// node_min = block_number
Expand All @@ -103,7 +99,7 @@ impl LeafCircuit {

// check that the rows tree built is for a merged table iff we extract data from MPT for a merged table
b.connect(
rows_tree_pi.is_merge_case().target,
rows_tree_pi.merge_flag_target().target,
extraction_pi.is_merge_case().target,
);

Expand Down Expand Up @@ -170,7 +166,7 @@ where
_verified_proofs: [&ProofWithPublicInputsTarget<D>; 0],
builder_parameters: Self::CircuitBuilderParams,
) -> Self {
const ROWS_TREE_IO: usize = row_tree::PublicInputs::<Target>::TOTAL_LEN;
const ROWS_TREE_IO: usize = row_tree::PublicInputs::<Target>::total_len();

let extraction_verifier =
RecursiveCircuitsVerifierGagdet::<F, C, D, { E::PI::TOTAL_LEN }>::new(
Expand Down Expand Up @@ -262,7 +258,7 @@ pub mod tests {
let hash = H::hash_no_pad(&inputs);
let int = hash_to_int_value(hash);
let scalar = Scalar::from_noncanonical_biguint(int);
let point = rows_tree_pi.rows_digest_field();
let point = rows_tree_pi.individual_digest_point();
let point = weierstrass_to_point(&point);
point * scalar
}
Expand All @@ -279,7 +275,7 @@ pub mod tests {

fn build(b: &mut CBuilder) -> Self::Wires {
let extraction_pi = b.add_virtual_targets(TestPITargets::TOTAL_LEN);
let rows_tree_pi = b.add_virtual_targets(row_tree::PublicInputs::<Target>::TOTAL_LEN);
let rows_tree_pi = b.add_virtual_targets(row_tree::PublicInputs::<Target>::total_len());

let leaf_wires = LeafCircuit::build::<TestPITargets>(b, &extraction_pi, &rows_tree_pi);

Expand All @@ -292,7 +288,7 @@ pub mod tests {
assert_eq!(wires.1.len(), TestPITargets::TOTAL_LEN);
pw.set_target_arr(&wires.1, self.extraction_pi);

assert_eq!(wires.2.len(), row_tree::PublicInputs::<Target>::TOTAL_LEN);
assert_eq!(wires.2.len(), row_tree::PublicInputs::<Target>::total_len());
pw.set_target_arr(&wires.2, self.rows_tree_pi);
}
}
Expand Down
81 changes: 77 additions & 4 deletions verifiable-db/src/block_tree/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,18 @@ mod membership;
mod parent;
mod public_inputs;

use crate::{
extraction::{ExtractionPI, ExtractionPIWrap},
row_tree,
};
pub use api::{CircuitInput, PublicParameters};
use mp2_common::{poseidon::hash_to_int_target, CHasher, D, F};
use plonky2::{iop::target::Target, plonk::circuit_builder::CircuitBuilder};
use mp2_common::{
group_hashing::{circuit_hashed_scalar_mul, CircuitBuilderGroupHashing},
poseidon::hash_to_int_target,
types::CBuilder,
CHasher, D, F,
};
use plonky2::{field::types::Field, iop::target::Target, plonk::circuit_builder::CircuitBuilder};
use plonky2_ecdsa::gadgets::nonnative::CircuitBuilderNonNative;

use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget};
Expand All @@ -25,10 +34,62 @@ pub(crate) fn compute_index_digest(
b.curve_scalar_mul(base, &scalar)
}

/// Compute the final row digest for the node being inserted in the index tree.
///
/// Combines the individual digest (`DR`) and the multiplier digest exposed by the
/// rows tree proof, selecting the merged form only in the merge case, and enforces
/// consistency of the result against the extraction proof's value digest (`DV`).
pub(crate) fn compute_final_digest<'a, E>(
    b: &mut CBuilder,
    extraction_pi: &E::PI<'a>,
    rows_tree_pi: &row_tree::PublicInputs<Target>,
) -> CurveTarget
where
    E: ExtractionPIWrap,
{
    // Compute the final row digest from rows_tree_proof for merge case:
    // multiplier_digest = rows_tree_proof.row_id_multiplier * rows_tree_proof.multiplier_vd
    let multiplier_vd = rows_tree_pi.multiplier_digest_target();
    // row_id_multiplier is exposed as a BigUint (Uint32 limbs); convert it to a
    // non-native scalar so it can drive the curve scalar multiplication.
    let row_id_multiplier = b.biguint_to_nonnative(&rows_tree_pi.row_id_multiplier_target());
    let multiplier_digest = b.curve_scalar_mul(multiplier_vd, &row_id_multiplier);
    // rows_digest_merge = multiplier_digest * rows_tree_proof.DR
    let individual_digest = rows_tree_pi.individual_digest_target();
    let rows_digest_merge = circuit_hashed_scalar_mul(b, multiplier_digest, individual_digest);

    // Choose the final row digest depending on whether we are in merge case or not:
    // final_digest = extraction_proof.is_merge ? rows_digest_merge : rows_tree_proof.DR
    let final_digest = b.curve_select(
        extraction_pi.is_merge_case(),
        rows_digest_merge,
        individual_digest,
    );

    // Enforce that the data extracted from the blockchain is the same as the data
    // employed to build the rows tree for this node:
    // assert final_digest == extraction_proof.DV
    b.connect_curve_points(final_digest, extraction_pi.value_set_digest());

    // Enforce that if we aren't in merge case, then no cells were accumulated in
    // multiplier digest:
    // assert extraction_proof.is_merge or rows_tree_proof.multiplier_vd != 0
    // => (1 - is_merge) * is_multiplier_vd_zero == false
    let ffalse = b._false();
    let curve_zero = b.curve_zero();
    let is_multiplier_vd_zero = b
        .curve_eq(rows_tree_pi.multiplier_digest_target(), curve_zero)
        .target;
    // arithmetic(c0, c1, m0, m1, a) evaluates c0*m0*m1 + c1*a, so this computes
    // -is_merge * is_multiplier_vd_zero + is_multiplier_vd_zero
    // = (1 - is_merge) * is_multiplier_vd_zero, which must be 0 (false).
    let should_be_false = b.arithmetic(
        F::NEG_ONE,
        F::ONE,
        extraction_pi.is_merge_case().target,
        is_multiplier_vd_zero,
        is_multiplier_vd_zero,
    );
    b.connect(should_be_false, ffalse.target);

    final_digest
}

#[cfg(test)]
pub(crate) mod tests {
use alloy::primitives::U256;
use mp2_common::{keccak::PACKED_HASH_LEN, utils::ToFields, F};
use mp2_common::{keccak::PACKED_HASH_LEN, poseidon::HASH_TO_INT_LEN, utils::ToFields, F};
use mp2_test::utils::random_vector;
use plonky2::{
field::types::{Field, Sample},
Expand Down Expand Up @@ -79,7 +140,19 @@ pub(crate) mod tests {
let h = random_vector::<u32>(NUM_HASH_OUT_ELTS).to_fields();
let [min, max] = [0; 2].map(|_| U256::from_limbs(rng.gen::<[u64; 4]>()).to_fields());
let is_merge = [F::from_canonical_usize(is_merge_case as usize)];
row_tree::PublicInputs::new(&h, row_digest, &min, &max, &is_merge).to_vec()
let multiplier_digest = Point::sample(rng).to_weierstrass().to_fields();
let row_id_multiplier = random_vector::<u32>(HASH_TO_INT_LEN).to_fields();

row_tree::PublicInputs::new(
&h,
row_digest,
&min,
&max,
&is_merge,
&multiplier_digest,
&row_id_multiplier,
)
.to_vec()
}

/// Generate a random extraction public inputs.
Expand Down
18 changes: 7 additions & 11 deletions verifiable-db/src/block_tree/parent.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
//! This circuit is employed when the new node is inserted as parent of an existing node,
//! referred to as old node.
use super::{compute_index_digest, public_inputs::PublicInputs};
use super::{compute_final_digest, compute_index_digest, public_inputs::PublicInputs};
use crate::{
extraction::{ExtractionPI, ExtractionPIWrap},
row_tree,
Expand All @@ -10,7 +10,6 @@ use alloy::primitives::U256;
use anyhow::Result;
use mp2_common::{
default_config,
group_hashing::CircuitBuilderGroupHashing,
poseidon::{empty_poseidon_hash, H},
proof::ProofWithVK,
public_inputs::PublicInputCommon,
Expand Down Expand Up @@ -84,13 +83,10 @@ impl ParentCircuit {

let extraction_pi = E::PI::from_slice(extraction_pi);
let rows_tree_pi = row_tree::PublicInputs::<Target>::from_slice(rows_tree_pi);
let final_digest = compute_final_digest::<E>(b, &extraction_pi, &rows_tree_pi);

let block_number = extraction_pi.primary_index_value();

// Enforce that the data extracted from the blockchain is the same as the data
// employed to build the rows tree for this node.
b.connect_curve_points(extraction_pi.value_set_digest(), rows_tree_pi.rows_digest());

// Compute the hash of table metadata, to be exposed as public input to prove to
// the verifier that we extracted the correct storage slots and we place the data
// in the expected columns of the constructed tree; we add also the identifier
Expand All @@ -110,7 +106,7 @@ impl ParentCircuit {
let inputs = iter::once(index_identifier)
.chain(block_number.iter().cloned())
.collect();
let node_digest = compute_index_digest(b, inputs, rows_tree_pi.rows_digest());
let node_digest = compute_index_digest(b, inputs, final_digest);

// We recompute the hash of the old node to bind the `old_min` and `old_max`
// values to the hash of the old tree.
Expand Down Expand Up @@ -154,7 +150,7 @@ impl ParentCircuit {

// check that the rows tree built is for a merged table iff we extract data from MPT for a merged table
b.connect(
rows_tree_pi.is_merge_case().target,
rows_tree_pi.merge_flag_target().target,
extraction_pi.is_merge_case().target,
);

Expand Down Expand Up @@ -236,7 +232,7 @@ where
_verified_proofs: [&ProofWithPublicInputsTarget<D>; 0],
builder_parameters: Self::CircuitBuilderParams,
) -> Self {
const ROWS_TREE_IO: usize = row_tree::PublicInputs::<Target>::TOTAL_LEN;
const ROWS_TREE_IO: usize = row_tree::PublicInputs::<Target>::total_len();

let extraction_verifier =
RecursiveCircuitsVerifierGagdet::<F, C, D, { E::PI::TOTAL_LEN }>::new(
Expand Down Expand Up @@ -315,7 +311,7 @@ mod tests {

fn build(b: &mut CBuilder) -> Self::Wires {
let extraction_pi = b.add_virtual_targets(TestPITargets::TOTAL_LEN);
let rows_tree_pi = b.add_virtual_targets(row_tree::PublicInputs::<Target>::TOTAL_LEN);
let rows_tree_pi = b.add_virtual_targets(row_tree::PublicInputs::<Target>::total_len());

let parent_wires =
ParentCircuit::build::<TestPITargets>(b, &extraction_pi, &rows_tree_pi);
Expand All @@ -329,7 +325,7 @@ mod tests {
assert_eq!(wires.1.len(), TestPITargets::TOTAL_LEN);
pw.set_target_arr(&wires.1, self.extraction_pi);

assert_eq!(wires.2.len(), row_tree::PublicInputs::<Target>::TOTAL_LEN);
assert_eq!(wires.2.len(), row_tree::PublicInputs::<Target>::total_len());
pw.set_target_arr(&wires.2, self.rows_tree_pi);
}
}
Expand Down
Loading

0 comments on commit 7f9cb61

Please sign in to comment.