Commit

chore: update README; move csv folder to root
sifnoc committed Dec 7, 2023
1 parent 726dbd7 commit c8a2d7f
Showing 19 changed files with 77 additions and 65 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -1,2 +1 @@
/target
/src/data
2 changes: 2 additions & 0 deletions bin/README.md
@@ -24,6 +24,8 @@ Second, send two entries to the Mini Tree Server, execute the following script:
bash ./scripts/test_sending_entry.sh
```

Note: Execute this command from the project's root folder to ensure proper functioning of scripts.

Upon successful execution, you will receive a response similar to the following:
<details>
<summary>Click View Response</summary>
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
6 changes: 3 additions & 3 deletions examples/aggregation_flow.rs
@@ -73,8 +73,8 @@ async fn main() -> Result<(), Box<dyn Error>> {
let orchestrator = Orchestrator::<N_CURRENCIES, N_BYTES>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16_1.csv".to_string(),
"./src/orchestrator/csv/entry_16_2.csv".to_string(),
"csv/entry_16_1.csv".to_string(),
"csv/entry_16_2.csv".to_string(),
],
);

@@ -99,7 +99,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
//
// Here, we demonstrate generating the proof of inclusion for User 0.
let inclusion_proof_of_user0 = round.get_proof_of_inclusion(0).unwrap();
assert!(inclusion_proof_of_user0.get_public_inputs().len() > 0); // Check public input counts
assert!(!inclusion_proof_of_user0.get_public_inputs().is_empty()); // Check public input counts

println!("Generated User 0 proof of inclusion");
Ok(())
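For readers following the path change in `examples/aggregation_flow.rs`, here is a minimal, self-contained sketch of the updated flow with the CSV fixtures at the repository root. The crate and module paths (`summa_aggregation::executor::MockSpawner`, `summa_aggregation::orchestrator::Orchestrator`) and the substitution of `MockSpawner` for the example's spawner are assumptions for illustration; the constructor shape, the `csv/` paths, and `create_aggregation_mst` mirror the code shown in this commit.

```rust
// Sketch only: module paths and the MockSpawner substitution are assumptions;
// the Orchestrator::new call and CSV paths mirror the diff above.
use summa_aggregation::executor::MockSpawner;
use summa_aggregation::orchestrator::Orchestrator;

const N_CURRENCIES: usize = 2;
const N_BYTES: usize = 14;

#[tokio::main]
async fn main() {
    // Spawner that runs the mini-tree-server locally for demonstration purposes.
    let spawner = MockSpawner::new(None);

    // CSV fixtures live under `csv/` at the repository root after this commit.
    let orchestrator = Orchestrator::<N_CURRENCIES, N_BYTES>::new(
        Box::new(spawner),
        vec![
            "csv/entry_16_1.csv".to_string(),
            "csv/entry_16_2.csv".to_string(),
        ],
    );

    // Two executors, one per CSV chunk, matching the tests in this commit.
    let _aggregation_mst = orchestrator
        .create_aggregation_mst(2)
        .await
        .expect("aggregation tree should build from the root-level CSVs");

    // Downstream steps (round setup, proof of inclusion) follow as in the example file.
    println!("Aggregation Merkle sum tree built");
}
```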
37 changes: 9 additions & 28 deletions src/aggregation_merkle_sum_tree.rs
@@ -1,7 +1,6 @@
use halo2_proofs::halo2curves::bn256::Fr as Fp;
use num_bigint::BigUint;
use std::error::Error;
use summa_backend::merkle_sum_tree::utils::{build_merkle_tree_from_leaves, fp_to_big_uint};
use summa_backend::merkle_sum_tree::utils::build_merkle_tree_from_leaves;
use summa_backend::merkle_sum_tree::{
Cryptocurrency, Entry, MerkleProof, MerkleSumTree, Node, Tree,
};
@@ -162,19 +161,6 @@ impl<const N_CURRENCIES: usize, const N_BYTES: usize>
}
}

// Iterate through the balance accumulator and throw error if any balance is not in range 0, 2 ^ (8 * N_BYTES):
for balance in &balances_acc {
// transform the balance to a BigUint
let balance_big_uint = fp_to_big_uint(*balance);

if balance_big_uint >= BigUint::from(2_usize).pow(8 * N_BYTES as u32) {
return Err(
"Accumulated balance is not in the expected range, proof generation will fail!"
.into(),
);
}
}

let mut nodes = vec![];
let root = build_merkle_tree_from_leaves(&roots, depth, &mut nodes)?;

@@ -218,12 +204,10 @@ mod test {
fn test_aggregation_mst() {
// create new mini merkle sum tree
let mini_tree_1 =
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("src/orchestrator/csv/entry_16_1.csv")
.unwrap();
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_16_1.csv").unwrap();

let mini_tree_2 =
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("src/orchestrator/csv/entry_16_2.csv")
.unwrap();
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_16_2.csv").unwrap();

let aggregation_mst = AggregationMerkleSumTree::<N_CURRENCIES, N_BYTES>::new(
vec![mini_tree_1.clone(), mini_tree_2.clone()],
@@ -271,7 +255,7 @@
let mut mini_trees = Vec::new();
for i in 1..=4 {
let mini_tree = MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv(&format!(
"src/orchestrator/csv/entry_16_{}.csv",
"csv/entry_16_{}.csv",
i
))
.unwrap();
@@ -286,8 +270,7 @@

// The entry_64.csv file is the aggregation of entry_16_1, entry_16_2, entry_16_3, entry_16_4
let single_merkle_sum_tree =
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("src/orchestrator/csv/entry_64.csv")
.unwrap();
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_64.csv").unwrap();

assert_eq!(
aggregation_mst_root.hash,
@@ -300,13 +283,11 @@
// create new mini merkle sum tree. The accumulated balance for each mini tree is in the expected range
// note that the accumulated balance of the tree generated from entry_16_4 is just in the expected range for 1 unit
let merkle_sum_tree_1 =
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("src/orchestrator/csv/entry_16.csv")
.unwrap();
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_16.csv").unwrap();

let merkle_sum_tree_2 = MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv(
"src/orchestrator/csv/entry_16_no_overflow.csv",
)
.unwrap();
let merkle_sum_tree_2 =
MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_16_no_overflow.csv")
.unwrap();

// When creating the aggregation merkle sum tree, the accumulated balance of the two mini trees is not in the expected range, an error is thrown
let result = AggregationMerkleSumTree::<N_CURRENCIES, N_BYTES>::new(
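For context on the deleted hunk above, the following is a standalone sketch of the range check removed from `AggregationMerkleSumTree` construction, rebuilt from the deleted lines; the free-function wrapper and its `String` error type are illustrative assumptions, while the imports and the comparison itself come from the removed code.

```rust
use halo2_proofs::halo2curves::bn256::Fr as Fp;
use num_bigint::BigUint;
use summa_backend::merkle_sum_tree::utils::fp_to_big_uint;

/// Returns an error if any accumulated balance is outside [0, 2^(8 * N_BYTES)),
/// mirroring the check that this commit removes.
fn check_accumulated_balances<const N_BYTES: usize>(balances_acc: &[Fp]) -> Result<(), String> {
    for balance in balances_acc {
        // Transform the field element into a BigUint for the comparison.
        let balance_big_uint = fp_to_big_uint(*balance);

        if balance_big_uint >= BigUint::from(2_usize).pow(8 * N_BYTES as u32) {
            return Err(
                "Accumulated balance is not in the expected range, proof generation will fail!"
                    .to_string(),
            );
        }
    }
    Ok(())
}
```

The overflow test shown partially above exercised exactly this path before the check was removed.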
14 changes: 7 additions & 7 deletions src/executor/cloud_spawner.rs
@@ -20,16 +20,16 @@ pub struct CloudSpawner {
}

/// CloudSpawner
///
/// Designed for cloud-based resources and Docker Swarm, CloudSpawner is optimized for scalability and high availability.
///
/// Designed for cloud-based resources and Docker Swarm, CloudSpawner is optimized for scalability and high availability.
/// While functioning similarly to LocalSpawner, it extends its capabilities by initializing workers on remote machines, making it particularly suitable for Swarm network setups.
///
///
/// CloudSpawner can be utilized in two ways:
///
/// - Without `service_info`, CloudSpawner does not directly manage Worker instances.
///
/// - Without `service_info`, CloudSpawner does not directly manage Worker instances.
/// In this mode, it does not control or interact with the Docker API for worker management.
///
/// - With `service_info`, CloudSpawner requires a `docker-compose` file. When provided with `service_info`,
///
/// - With `service_info`, CloudSpawner requires a `docker-compose` file. When provided with `service_info`,
/// it manages Docker services and networks, enabling dynamic scaling and orchestration of workers.
impl CloudSpawner {
pub fn new(
10 changes: 3 additions & 7 deletions src/executor/local_spawner.rs
@@ -17,9 +17,9 @@ use tokio::sync::oneshot;
use crate::executor::{Executor, ExecutorSpawner};

/// LocalSpawner
///
/// The LocalSpawner is tailored for use cases closer to actual deployment. It enables the initialization of Executors
/// and Workers within a local Docker environment. This spawner is ideal for development and testing phases,
///
/// The LocalSpawner is tailored for use cases closer to actual deployment. It enables the initialization of Executors
/// and Workers within a local Docker environment. This spawner is ideal for development and testing phases,
/// where simplicity and direct control over the containers are beneficial.
pub struct LocalSpawner {
docker: Docker,
@@ -66,8 +66,6 @@ impl LocalSpawner {
platform: None,
};

println!("docker-info: {:?}", docker.info().await?);

docker
.create_container(Some(create_container_options), config.clone())
.await?;
@@ -82,8 +80,6 @@
let container_info: ContainerInspectResponse =
docker.inspect_container(&container_name, None).await?;

println!("container_info: {:?}", container_info);

Ok(container_info)
}
}
2 changes: 1 addition & 1 deletion src/executor/mock_spawner.rs
@@ -13,7 +13,7 @@ use crate::executor::{Executor, ExecutorSpawner};
use crate::mini_tree_generator::create_mst;

/// MockSpawner
///
///
/// Primarily used for testing purposes, the MockSpawner initializes Executors suitable for various test scenarios,
/// including negative test cases. It runs the `mini-tree-server` locally, allowing for a controlled testing environment.
pub struct MockSpawner {
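As a quick orientation for the tests that follow, this is how MockSpawner is typically driven; the `crate::executor` import path and the meaning of the `None` argument are assumptions, while the two calls themselves appear verbatim in `src/executor/test.rs` below.

```rust
// Sketch: spin up a local mini-tree-server-backed executor for a test.
use crate::executor::{ExecutorSpawner, MockSpawner};

#[tokio::test]
async fn spawns_a_mock_executor() {
    // `None` leaves the local mini-tree-server configuration to MockSpawner (assumption).
    let spawner = MockSpawner::new(None);
    let _executor = spawner.spawn_executor().await;
    // Drive the executor with JSON entries as in test_executor() below.
}
```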
9 changes: 3 additions & 6 deletions src/executor/test.rs
@@ -35,8 +35,7 @@ async fn test_executor() -> Result<(), Box<dyn Error>> {
let spawner = MockSpawner::new(None);
let executor = spawner.spawn_executor().await;

let (_, entries) =
parse_csv_to_entries::<_, 2, 14>("./src/orchestrator/csv/entry_16.csv").unwrap();
let (_, entries) = parse_csv_to_entries::<_, 2, 14>("csv/entry_16.csv").unwrap();
let json_entries = entries
.iter()
.map(JsonEntry::from_entry)
@@ -55,10 +54,8 @@ async fn test_executor_block() -> Result<(), Box<dyn Error>> {
let executor = spawner.spawn_executor().await;

// Parse two csv files
let (_, entries_1) =
parse_csv_to_entries::<_, 2, 14>("./src/orchestrator/csv/entry_16.csv").unwrap();
let (_, entries_2) =
parse_csv_to_entries::<_, 2, 14>("./src/orchestrator/csv/entry_16.csv").unwrap();
let (_, entries_1) = parse_csv_to_entries::<_, 2, 14>("csv/entry_16.csv").unwrap();
let (_, entries_2) = parse_csv_to_entries::<_, 2, 14>("csv/entry_16.csv").unwrap();

// Convert entries to json_entries
let json_entries_1 = entries_1
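The pattern in these tests — parse a root-level CSV, then convert each `Entry` into a `JsonEntry` for the mini-tree-server — can be sketched on its own. The `parse_csv_to_entries` import path and the use of `serde_json` for the final serialization are assumptions; the calls and the `<_, 2, 14>` const generics mirror the tests above.

```rust
use crate::json_mst::JsonEntry;
// Import path is an assumption; the function is used with this turbofish in the tests above.
use summa_backend::merkle_sum_tree::utils::parse_csv_to_entries;

fn csv_to_json_entries() -> String {
    // 2 currencies, 14-byte balances, matching the tests in this file.
    let (_, entries) = parse_csv_to_entries::<_, 2, 14>("csv/entry_16.csv").unwrap();

    // Convert each Entry into its JSON counterpart for transport.
    let json_entries: Vec<JsonEntry> = entries.iter().map(JsonEntry::from_entry).collect();

    // serde_json is an assumed serializer choice; JsonEntry derives Serialize.
    serde_json::to_string(&json_entries).unwrap()
}
```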
27 changes: 27 additions & 0 deletions src/json_mst.rs
@@ -6,18 +6,27 @@ use halo2_proofs::halo2curves::{bn256::Fr as Fp, group::ff::PrimeField};

use summa_backend::merkle_sum_tree::{Cryptocurrency, Entry, MerkleSumTree, Node, Tree};

/// JsonEntry
/// Represents an entry in the Merkle Sum Tree in JSON format.
/// The balance in the Merkle Sum Tree is represented in BigUint format, but in the JSON format it is presented as a string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JsonEntry {
pub username: String,
pub balances: Vec<String>,
}

/// JsonNode
/// Represents a node in the Merkle Sum Tree in JSON format.
/// The balance in the Merkle Sum Tree is represented in BigUint format, but in the JSON format it is presented as a string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JsonNode {
pub hash: String,
pub balances: Vec<String>,
}

/// JsonMerkleSumTree
/// Represents the entire Merkle Sum Tree in JSON format.
/// It is used for transmitting tree data between the executor and mini-tree-server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JsonMerkleSumTree {
pub root: JsonNode,
@@ -44,6 +53,10 @@ impl JsonEntry {
JsonEntry { username, balances }
}

/// Converts an `Entry` to a `JsonEntry`.
///
/// This method translates an `Entry` into its JSON format.
/// It is used by the Executor to send Entry data to the mini-tree-server in JSON format.
pub fn from_entry<const N_CURRENCIES: usize>(entry: &Entry<N_CURRENCIES>) -> Self {
JsonEntry::new(
entry.username().to_string(),
@@ -55,6 +68,10 @@
)
}

/// Converts a `JsonEntry` back to an `Entry`.
///
/// This method is utilized by the mini-tree-server when processing data received from the executor in JSON format.
/// It converts `JsonEntry` objects back to the `Entry` struct, facilitating the construction of the Merkle Sum Tree.
pub fn to_entry<const N_CURRENCIES: usize>(&self) -> Entry<N_CURRENCIES> {
let mut balances: [BigUint; N_CURRENCIES] = std::array::from_fn(|_| BigUint::from(0u32));
self.balances.iter().enumerate().for_each(|(i, balance)| {
@@ -65,6 +82,7 @@
}
}

/// Converts a `JsonNode` back to a `Node` for reconstructing the Merkle Sum Tree from JSON data.
impl JsonNode {
pub fn to_node<const N_CURRENCIES: usize>(&self) -> Node<N_CURRENCIES> {
let hash = parse_fp_from_hex(&self.hash);
@@ -81,6 +99,10 @@
}

impl JsonMerkleSumTree {
/// Converts a MerkleSumTree to its JSON representation.
///
/// This function is essential for the mini-tree-server to send the Merkle Sum Tree results back to the executor in JSON format,
/// facilitating the translation of the tree structure into a universally readable JSON form.
pub fn from_tree<const N_CURRENCIES: usize, const N_BYTES: usize>(
tree: MerkleSumTree<N_CURRENCIES, N_BYTES>,
) -> Self {
@@ -110,6 +132,11 @@ impl JsonMerkleSumTree {
}
}

/// Converts a JsonMerkleSumTree back to a MerkleSumTree.
///
/// This function is crucial when handling data received in JSON format from the mini-tree-server.
/// It rebuilds the MerkleSumTree on the main machine using the `from_params` method.
/// This method is preferred over `from_entries` as the nodes are pre-computed by the mini-tree-server, thus the tree doesn't need to be recomputed from scratch.
pub fn to_mst<const N_CURRENCIES: usize, const N_BYTES: usize>(
&self,
) -> Result<MerkleSumTree<N_CURRENCIES, N_BYTES>, Box<dyn Error>>
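Putting the conversion helpers documented above together, the round trip looks roughly like this. The const-generic values and the `MerkleSumTree::from_csv` call mirror other code in this commit; the `crate::json_mst` import path is an assumption.

```rust
use crate::json_mst::JsonMerkleSumTree; // import path is an assumption
use summa_backend::merkle_sum_tree::MerkleSumTree;

const N_CURRENCIES: usize = 2;
const N_BYTES: usize = 14;

fn round_trip() {
    // Build a mini tree from a root-level CSV, as the mini-tree-server would.
    let tree = MerkleSumTree::<N_CURRENCIES, N_BYTES>::from_csv("csv/entry_16.csv").unwrap();

    // Serialize it into the JSON shape sent back to the executor...
    let json_tree = JsonMerkleSumTree::from_tree(tree);

    // ...and rebuild it on the receiving side from the pre-computed nodes,
    // avoiding a full recomputation from the raw entries.
    let _rebuilt = json_tree.to_mst::<N_CURRENCIES, N_BYTES>().unwrap();
}
```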
2 changes: 1 addition & 1 deletion src/mini_tree_generator.rs
@@ -4,7 +4,7 @@ use const_env::from_env;
use crate::json_mst::{JsonEntry, JsonMerkleSumTree};
use summa_backend::merkle_sum_tree::{Cryptocurrency, Entry, MerkleSumTree};

/// Mini Tree Generator is designed to create Merkle Sum Trees using the Axum web framework.
/// Mini Tree Generator is designed to create Merkle Sum Trees using the Axum web framework.
/// It primarily handles HTTP requests to generate trees based on provided JSON entries.
///
/// Constants:
10 changes: 10 additions & 0 deletions src/orchestrator/mod.rs
@@ -37,7 +37,17 @@ impl<const N_CURRENCIES: usize, const N_BYTES: usize> Orchestrator<N_CURRENCIES,
/// * `executor_index` - The index of the executor.
/// * `total_executors` - The total number of executors.
///
/// Returns:
/// (start, end)
/// A tuple representing the start and end indices of the tasks assigned to the executor
///
/// The first value in the tuple, the `start` index, indicates the beginning of the task range for the executor,
/// while the second value, the `end` index, specifies the end of the range (exclusive).
///
/// This calculation divides the total number of tasks by the number of executors to distribute tasks evenly.
/// For instance, if there are 5 tasks and 2 executors, the tasks will be split as follows:
/// Executor_1: [1, 2, 3] (start index 0, end index 3)
/// Executor_2: [4, 5] (start index 3, end index 5)
fn calculate_task_range(
&self,
executor_index: usize,
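A minimal sketch of the even-split rule described in the new doc comment, written as a free function purely for illustration; the real `calculate_task_range` is a private method on `Orchestrator` and its body is not shown in this diff, so the ceiling-division implementation below is an assumption that simply reproduces the documented behavior.

```rust
// Illustration only: the real method takes &self and its implementation is not visible here.
fn task_range(executor_index: usize, total_executors: usize, total_tasks: usize) -> (usize, usize) {
    // Ceiling division so earlier executors absorb any remainder.
    let chunk = (total_tasks + total_executors - 1) / total_executors;
    let start = executor_index * chunk;
    let end = ((executor_index + 1) * chunk).min(total_tasks);
    (start, end)
}

fn main() {
    // With 5 tasks and 2 executors, as in the doc comment:
    assert_eq!(task_range(0, 2, 5), (0, 3)); // Executor_1 gets tasks [1, 2, 3]
    assert_eq!(task_range(1, 2, 5), (3, 5)); // Executor_2 gets tasks [4, 5]
}
```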
22 changes: 11 additions & 11 deletions src/orchestrator/test.rs
@@ -10,8 +10,8 @@ async fn test_single_mock_worker() {
let orchestrator = Orchestrator::<2, 14>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16_1.csv".to_string(),
"./src/orchestrator/csv/entry_16_2.csv".to_string(),
"csv/entry_16_1.csv".to_string(),
"csv/entry_16_2.csv".to_string(),
],
);
let aggregation_merkle_sum_tree = orchestrator.create_aggregation_mst(1).await.unwrap();
@@ -26,8 +26,8 @@ async fn test_none_exist_csv() {
let orchestrator = Orchestrator::<2, 14>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16.csv".to_string(),
"./src/orchestrator/csv/no_exist.csv".to_string(),
"csv/entry_16.csv".to_string(),
"csv/no_exist.csv".to_string(),
],
);
match orchestrator.create_aggregation_mst(2).await {
@@ -48,8 +48,8 @@ async fn test_none_exist_worker() {
let orchestrator = Orchestrator::<2, 14>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16_1.csv".to_string(),
"./src/orchestrator/csv/entry_16_2.csv".to_string(),
"csv/entry_16_1.csv".to_string(),
"csv/entry_16_2.csv".to_string(),
],
);

@@ -63,7 +63,7 @@ async fn test_none_exist_worker() {
}
}

#[cfg(feature = "docker")]
// #[cfg(feature = "docker")]
#[tokio::test]
async fn test_with_containers() {
let spawner = LocalSpawner::new(
@@ -74,8 +74,8 @@ async fn test_with_containers() {
let orchestrator = Orchestrator::<2, 14>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16_1.csv".to_string(),
"./src/orchestrator/csv/entry_16_2.csv".to_string(),
"csv/entry_16_1.csv".to_string(),
"csv/entry_16_2.csv".to_string(),
],
);
let aggregation_merkle_sum_tree = orchestrator.create_aggregation_mst(2).await.unwrap();
@@ -96,8 +96,8 @@ async fn test_with_swarm_service() {
let orchestrator = Orchestrator::<2, 14>::new(
Box::new(spawner),
vec![
"./src/orchestrator/csv/entry_16_1.csv".to_string(),
"./src/orchestrator/csv/entry_16_2.csv".to_string(),
"csv/entry_16_1.csv".to_string(),
"csv/entry_16_2.csv".to_string(),
],
);
let aggregation_merkle_sum_tree = orchestrator.create_aggregation_mst(2).await.unwrap();