Skip to content

Commit

Permalink
Integrate mp2 batch queries feature.
Browse files Browse the repository at this point in the history
  • Loading branch information
silathdiir committed Dec 11, 2024
1 parent d06b037 commit 3bbb6b5
Show file tree
Hide file tree
Showing 12 changed files with 596 additions and 754 deletions.
631 changes: 374 additions & 257 deletions Cargo.lock

Large diffs are not rendered by default.

20 changes: 13 additions & 7 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,17 @@ resolver = "2"
members = ["lgn-auth", "lgn-messages", "lgn-provers", "lgn-worker"]

[workspace.dependencies]
groth16_framework_v1 = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1", package = "groth16_framework" }
mp2_common = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
mp2_v1 = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
parsil = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
verifiable-db = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
# groth16_framework_v1 = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1", package = "groth16_framework" }
# mp2_common = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
# mp2_v1 = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
# parsil = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
# verifiable-db = { git = "https://github.com/Lagrange-Labs/mapreduce-plonky2.git", rev = "v1.1.1" }
mp2_common = { path = "../mapreduce-plonky2/mp2-common/" }
mp2_v1 = { path = "../mapreduce-plonky2/mp2-v1/" }
parsil = { path = "../mapreduce-plonky2/parsil/" }
verifiable-db = { path = "../mapreduce-plonky2/verifiable-db/" }
ryhope = { path = "../mapreduce-plonky2/ryhope/" }
groth16_framework_v1 = { package = "groth16_framework", path = "../mapreduce-plonky2/groth16-framework/" }

anyhow = { version = "1.0" }
bincode = { version = "1.0" }
Expand All @@ -23,8 +29,8 @@ thiserror = { version = "1.0" }
tokio = { version = "1.0" }
tracing = { version = "0.1" }
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
alloy = "0.2"
alloy-primitives = "0.7.7"
alloy = "0.6"
alloy-primitives = "0.8.14"
backtrace = "0.3"
base64 = "0.22"
bytes = "1.0"
Expand Down
1 change: 1 addition & 0 deletions lgn-messages/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ edition = "2021"
[dependencies]
ethers = { workspace = true }
mp2_common = { workspace = true }
mp2_v1 = { workspace = true }
object_store = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
Expand Down
41 changes: 41 additions & 0 deletions lgn-messages/src/types/v1/query/keys.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
use std::fmt::Display;

use mp2_v1::query::batching_planner::UTKey;
use object_store::path::Path;
use serde_derive::Deserialize;
use serde_derive::Serialize;

use super::NUM_CHUNKS;

pub(crate) const KEYS_QUERIES_PREFIX: &str = "V1_QUERIES";

type QueryId = String;
Expand Down Expand Up @@ -31,6 +34,15 @@ pub enum ProofKey
BlockNr,
),

RowsChunk(
QueryId,
UTKey<NUM_CHUNKS>,
),

ChunkAggregation(QueryId),

NonExistence(QueryId),

Revelation(QueryId),
}

Expand Down Expand Up @@ -59,6 +71,35 @@ impl Display for ProofKey
KEYS_QUERIES_PREFIX, query_id, block_nr
)
},
ProofKey::RowsChunk(query_id, key) =>
{
write!(
f,
"{}/{}/rows_chunk/{}/{}",
KEYS_QUERIES_PREFIX,
query_id,
key.0
.0,
key.0
.1,
)
},
ProofKey::ChunkAggregation(query_id) =>
{
write!(
f,
"{}/{}/chunk_aggregation",
KEYS_QUERIES_PREFIX, query_id
)
},
ProofKey::NonExistence(query_id) =>
{
write!(
f,
"{}/{}/non_existence",
KEYS_QUERIES_PREFIX, query_id
)
},
ProofKey::Revelation(query_id) =>
{
write!(
Expand Down
3 changes: 3 additions & 0 deletions lgn-messages/src/types/v1/query/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ pub mod tasks;

pub const ROUTING_DOMAIN: &str = "sc";

// Number of row chunks used when batching query proving.
// NOTE: this value must be kept in sync with the corresponding constant in the
// `lgn-provers` crate — the `UTKey<NUM_CHUNKS>` type parameter is part of the
// (de)serialized message shape, so a mismatch breaks proof-key compatibility.
pub(crate) const NUM_CHUNKS: usize = 5;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct WorkerTask
{
Expand Down
141 changes: 34 additions & 107 deletions lgn-messages/src/types/v1/query/tasks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,17 @@ use std::sync::Arc;

use alloy_primitives::U256;
use derive_debug_plus::Dbg;
use mp2_v1::query::batching_planner::UTKey;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use verifiable_db::query::aggregation::ChildPosition;
use verifiable_db::query::aggregation::NodeInfo;
use verifiable_db::query::api::RowInput;
use verifiable_db::query::api::TreePathInputs;
use verifiable_db::query::computational_hash_ids::ColumnIDs;
use verifiable_db::query::universal_circuit::universal_circuit_inputs::RowCells;
use verifiable_db::revelation::api::MatchingRow;
use verifiable_db::revelation::RowPath;

use super::NUM_CHUNKS;
use crate::types::v1::preprocessing::db_keys;
use crate::types::v1::query::keys::ProofKey;
use crate::types::v1::query::PlaceHolderLgn;
Expand Down Expand Up @@ -39,76 +41,40 @@ pub enum QueryStep
}

#[derive(Dbg, Clone, PartialEq, Deserialize, Serialize)]
pub struct QueryInputPart
pub enum QueryInputPart
{
pub proof_key: ProofKey,

pub embedded_proof_input: Option<EmbeddedProofInputType>,
#[serde(rename = "1")]
Aggregation(
ProofKey,
ProofInputKind,
),

pub aggregation_input_kind: Option<ProofInputKind>,
// We only need to handle rows tree proving for now.
#[serde(rename = "2")]
Embedded(
ProofKey,
EmbeddedProofInputType,
),
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub enum ProofInputKind
{
/// Match in the end of path or not matched branch
#[serde(rename = "1")]
SinglePathLeaf(SinglePathLeafInput),
RowsChunk(RowsChunkInput),

/// Match in the middle of path
#[serde(rename = "2")]
SinglePathBranch(SinglePathBranchInput),
ChunkAggregation(ChunkAggregationInput),

/// Node in tree with only one child
#[serde(rename = "3")]
PartialNode(PartialNodeInput),

/// Node in tree with both children
#[serde(rename = "4")]
FullNode(FullNodeInput),

NonExistence(NonExistenceInput),
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct FullNodeInput
{
pub is_rows_tree_node: bool,

pub left_child_proof_location: ProofKey,

#[dbg(placeholder = "...")]
pub left_child_proof: Vec<u8>,

pub right_child_proof_location: ProofKey,

#[dbg(placeholder = "...")]
pub right_child_proof: Vec<u8>,
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct PartialNodeInput
{
pub proven_child_position: ChildPosition,

pub proven_child_proof_location: ProofKey,

#[dbg(placeholder = "...")]
pub proven_child_proof: Vec<u8>,

pub unproven_child_info: Option<NodeInfo>,

pub is_rows_tree_node: bool,
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub enum EmbeddedProofInputType
{
#[serde(rename = "1")]
RowsTree(RowsEmbeddedProofInput),

#[serde(rename = "2")]
IndexTree(IndexEmbeddedProofInput),
}

#[derive(Dbg, Clone, PartialEq, Deserialize, Serialize)]
Expand All @@ -121,51 +87,6 @@ pub struct RowsEmbeddedProofInput
pub is_leaf: bool,
}

#[derive(Dbg, Clone, PartialEq, Deserialize, Serialize)]
pub struct IndexEmbeddedProofInput
{
pub rows_proof_key: ProofKey,

#[dbg(placeholder = "...")]
pub rows_proof: Vec<u8>,
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct SinglePathBranchInput
{
pub node_info: NodeInfo,

pub left_child_info: Option<NodeInfo>,

pub right_child_info: Option<NodeInfo>,

pub child_position: ChildPosition,

pub proven_child_location: ProofKey,

#[dbg(placeholder = "...")]
pub proven_child_proof: Vec<u8>,

pub is_rows_tree_node: bool,
}

#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct SinglePathLeafInput
{
pub node_info: NodeInfo,

pub left_child_info: Option<NodeInfo>,

pub right_child_info: Option<NodeInfo>,

pub is_rows_tree_node: bool,

pub embedded_proof_location: Option<ProofKey>,

#[dbg(placeholder = "...")]
pub embedded_proof: Vec<u8>,
}

#[derive(Clone, Dbg, Serialize, Deserialize)]
pub struct HydratableMatchingRow
{
Expand Down Expand Up @@ -315,19 +236,11 @@ pub enum RevelationInput
#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct NonExistenceInput
{
pub index_path: TreePathInputs,

pub column_ids: Vec<u64>,

pub placeholders: PlaceHolderLgn,

pub is_rows_tree_node: bool,

pub node_info: NodeInfo,

pub left_child_info: Option<NodeInfo>,

pub right_child_info: Option<NodeInfo>,

pub primary_index_value: U256,
}

impl From<&WorkerTask> for ProofKey
Expand All @@ -344,3 +257,17 @@ impl From<&WorkerTask> for ProofKey
}
}
}

/// Input for proving a single chunk of rows in a batched query.
///
/// Carried by `ProofInputKind::RowsChunk`; one value of this struct describes
/// everything the prover needs for one `UTKey<NUM_CHUNKS>`-addressed chunk.
#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct RowsChunkInput
{
    /// The rows belonging to this chunk, in prover input form.
    pub rows: Vec<RowInput>,

    /// Query placeholder values shared by every row in the chunk.
    pub placeholders: PlaceHolderLgn,
}

/// Input for aggregating previously proven row chunks into a single proof.
///
/// Carried by `ProofInputKind::ChunkAggregation`; the referenced child proofs
/// are looked up by key rather than embedded inline.
#[derive(Clone, PartialEq, Dbg, Deserialize, Serialize)]
pub struct ChunkAggregationInput
{
    /// Keys of the child chunk proofs to aggregate. Each key identifies a
    /// `ProofKey::RowsChunk` entry; `NUM_CHUNKS` here must match the value
    /// used when the chunks were proven.
    pub children_keys: Vec<UTKey<NUM_CHUNKS>>,
}
16 changes: 15 additions & 1 deletion lgn-provers/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,21 @@ serde_json = { workspace = true }
tracing = { workspace = true }
verifiable-db = { workspace = true }

alloy = { workspace = true, features = ["contract", "node-bindings", "providers", "network", "signer-local", "sol-types", "rpc", "rpc-types", "consensus", "rlp", "transports", "transport-http", "reqwest"] }
alloy = { workspace = true, features = [
"contract",
"node-bindings",
"providers",
"network",
"signer-local",
"sol-types",
"rpc",
"rpc-types",
"consensus",
"rlp",
"transports",
"transport-http",
"reqwest",
] }
bytes = { workspace = true }
metrics = { workspace = true }

Expand Down
Loading

0 comments on commit 3bbb6b5

Please sign in to comment.