From d0a07418e8ea940ea2b35742246f74efa08e7e60 Mon Sep 17 00:00:00 2001 From: Dennis Garcia Date: Tue, 2 Jul 2024 16:28:49 -0500 Subject: [PATCH] Add WebStore to miden-client (#402) * Add WebStore to miden-client * Rebased off latest next * Rust docs command fix --------- Co-authored-by: Dennis Garcia --- crates/rust-client/Cargo.toml | 5 + crates/rust-client/src/notes.rs | 8 +- .../rpc/tonic_client/generated/requests.rs | 5 + .../rpc/tonic_client/generated/responses.rs | 7 +- crates/rust-client/src/store/mod.rs | 5 + .../rust-client/src/store/sqlite_store/mod.rs | 2 + .../store/web_store/accounts/js_bindings.rs | 72 +++ .../src/store/web_store/accounts/mod.rs | 196 ++++++++ .../src/store/web_store/accounts/models.rs | 62 +++ .../src/store/web_store/accounts/utils.rs | 113 +++++ .../store/web_store/chain_data/js_bindings.rs | 38 ++ .../src/store/web_store/chain_data/mod.rs | 175 +++++++ .../src/store/web_store/chain_data/models.rs | 20 + .../src/store/web_store/chain_data/utils.rs | 67 +++ .../src/store/web_store/js/accounts.js | 431 ++++++++++++++++++ .../src/store/web_store/js/chainData.js | 129 ++++++ .../src/store/web_store/js/notes.js | 405 ++++++++++++++++ .../src/store/web_store/js/schema.js | 88 ++++ .../src/store/web_store/js/sync.js | 300 ++++++++++++ .../src/store/web_store/js/transactions.js | 160 +++++++ crates/rust-client/src/store/web_store/mod.rs | 221 +++++++++ .../src/store/web_store/notes/js_bindings.rs | 80 ++++ .../src/store/web_store/notes/mod.rs | 200 ++++++++ .../src/store/web_store/notes/models.rs | 59 +++ .../src/store/web_store/notes/utils.rs | 422 +++++++++++++++++ .../src/store/web_store/sync/js_bindings.rs | 44 ++ .../src/store/web_store/sync/mod.rs | 209 +++++++++ .../src/store/web_store/sync/models.rs | 11 + .../web_store/transactions/js_bindings.rs | 33 ++ .../src/store/web_store/transactions/mod.rs | 161 +++++++ .../store/web_store/transactions/models.rs | 41 ++ .../src/store/web_store/transactions/utils.rs | 119 +++++ 
crates/rust-client/src/sync.rs | 6 +- 33 files changed, 3889 insertions(+), 5 deletions(-) create mode 100644 crates/rust-client/src/store/web_store/accounts/js_bindings.rs create mode 100644 crates/rust-client/src/store/web_store/accounts/mod.rs create mode 100644 crates/rust-client/src/store/web_store/accounts/models.rs create mode 100644 crates/rust-client/src/store/web_store/accounts/utils.rs create mode 100644 crates/rust-client/src/store/web_store/chain_data/js_bindings.rs create mode 100644 crates/rust-client/src/store/web_store/chain_data/mod.rs create mode 100644 crates/rust-client/src/store/web_store/chain_data/models.rs create mode 100644 crates/rust-client/src/store/web_store/chain_data/utils.rs create mode 100644 crates/rust-client/src/store/web_store/js/accounts.js create mode 100644 crates/rust-client/src/store/web_store/js/chainData.js create mode 100644 crates/rust-client/src/store/web_store/js/notes.js create mode 100644 crates/rust-client/src/store/web_store/js/schema.js create mode 100644 crates/rust-client/src/store/web_store/js/sync.js create mode 100644 crates/rust-client/src/store/web_store/js/transactions.js create mode 100644 crates/rust-client/src/store/web_store/mod.rs create mode 100644 crates/rust-client/src/store/web_store/notes/js_bindings.rs create mode 100644 crates/rust-client/src/store/web_store/notes/mod.rs create mode 100644 crates/rust-client/src/store/web_store/notes/models.rs create mode 100644 crates/rust-client/src/store/web_store/notes/utils.rs create mode 100644 crates/rust-client/src/store/web_store/sync/js_bindings.rs create mode 100644 crates/rust-client/src/store/web_store/sync/mod.rs create mode 100644 crates/rust-client/src/store/web_store/sync/models.rs create mode 100644 crates/rust-client/src/store/web_store/transactions/js_bindings.rs create mode 100644 crates/rust-client/src/store/web_store/transactions/mod.rs create mode 100644 crates/rust-client/src/store/web_store/transactions/models.rs create mode 100644 
crates/rust-client/src/store/web_store/transactions/utils.rs diff --git a/crates/rust-client/Cargo.toml b/crates/rust-client/Cargo.toml index ea1c2e00f..307f86a64 100644 --- a/crates/rust-client/Cargo.toml +++ b/crates/rust-client/Cargo.toml @@ -18,6 +18,7 @@ crate-type = ["lib"] async = ["miden-tx/async"] concurrent = ["miden-lib/concurrent", "miden-objects/concurrent", "miden-tx/concurrent", "std"] default = ["std"] +idxdb = ["async", "dep:base64", "dep:serde-wasm-bindgen", "dep:wasm-bindgen", "dep:wasm-bindgen-futures"] integration = ["concurrent", "testing", "std", "sqlite", "tonic"] sqlite = ["dep:rusqlite", "dep:rusqlite_migration", "dep:lazy_static"] std = ["miden-objects/std"] @@ -25,6 +26,7 @@ testing = ["miden-objects/testing", "miden-lib/testing"] tonic = ["dep:hex", "dep:prost","dep:tonic"] [dependencies] +base64 = { version = "0.13", optional = true } chrono = { version = "0.4", optional = false } hex = { version = "0.4" , optional = true} lazy_static = { version = "1.4", optional = true } @@ -37,10 +39,13 @@ rusqlite = { version = "0.30", features = ["vtab", "array", "bundled"], optional rusqlite_migration = { version = "1.0", optional = true } serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0", features = ["raw_value"] } +serde-wasm-bindgen = { version = "0.6", optional = true } thiserror = { version = "1.0", optional = true } tokio = { workspace = true , optional = true } tonic = { version = "0.11", optional = true } tracing = { workspace = true } +wasm-bindgen = { version = "0.2", features = ["serde-serialize"], optional = true } +wasm-bindgen-futures = { version = "0.4", optional = true } winter-maybe-async = "0.10.0" [target.'wasm32-unknown-unknown'.dependencies] diff --git a/crates/rust-client/src/notes.rs b/crates/rust-client/src/notes.rs index d3156ba0b..281877497 100644 --- a/crates/rust-client/src/notes.rs +++ b/crates/rust-client/src/notes.rs @@ -183,8 +183,12 @@ impl Client let tracked_note = tracked_note?; 
// TODO: Join these calls to one method that updates both fields with one query (issue #404) - self.store.update_note_inclusion_proof(tracked_note.id(), inclusion_proof)?; - self.store.update_note_metadata(tracked_note.id(), *note_details.metadata())?; + maybe_await!(self + .store + .update_note_inclusion_proof(tracked_note.id(), inclusion_proof))?; + maybe_await!(self + .store + .update_note_metadata(tracked_note.id(), *note_details.metadata()))?; return Ok(tracked_note.id()); } diff --git a/crates/rust-client/src/rpc/tonic_client/generated/requests.rs b/crates/rust-client/src/rpc/tonic_client/generated/requests.rs index 8d5c5d129..7ce1e69ac 100644 --- a/crates/rust-client/src/rpc/tonic_client/generated/requests.rs +++ b/crates/rust-client/src/rpc/tonic_client/generated/requests.rs @@ -70,6 +70,9 @@ pub struct GetBlockInputsRequest { /// Array of nullifiers for all notes consumed by a transaction. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, + /// Array of note IDs to be checked for existence in the database. 
+ #[prost(message, repeated, tag = "3")] + pub unauthenticated_notes: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -78,6 +81,8 @@ pub struct GetTransactionInputsRequest { pub account_id: ::core::option::Option, #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub unauthenticated_notes: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/rust-client/src/rpc/tonic_client/generated/responses.rs b/crates/rust-client/src/rpc/tonic_client/generated/responses.rs index 79d581890..fce40c797 100644 --- a/crates/rust-client/src/rpc/tonic_client/generated/responses.rs +++ b/crates/rust-client/src/rpc/tonic_client/generated/responses.rs @@ -82,7 +82,7 @@ pub struct GetBlockInputsResponse { /// The latest block header #[prost(message, optional, tag = "1")] pub block_header: ::core::option::Option, - /// Peaks of the above block's mmr, The `forest` value is equal to the block number. 
+ /// Peaks of the above block's mmr, The `forest` value is equal to the block number #[prost(message, repeated, tag = "2")] pub mmr_peaks: ::prost::alloc::vec::Vec, /// The hashes of the requested accounts and their authentication paths @@ -91,6 +91,9 @@ pub struct GetBlockInputsResponse { /// The requested nullifiers and their authentication paths #[prost(message, repeated, tag = "4")] pub nullifiers: ::prost::alloc::vec::Vec, + /// The list of requested notes which were found in the database + #[prost(message, repeated, tag = "5")] + pub found_unauthenticated_notes: ::prost::alloc::vec::Vec, } /// An account returned as a response to the GetTransactionInputs #[allow(clippy::derive_partial_eq_without_eq)] @@ -119,6 +122,8 @@ pub struct GetTransactionInputsResponse { pub account_state: ::core::option::Option, #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub missing_unauthenticated_notes: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/rust-client/src/store/mod.rs b/crates/rust-client/src/store/mod.rs index 8ac1af866..f62d307f4 100644 --- a/crates/rust-client/src/store/mod.rs +++ b/crates/rust-client/src/store/mod.rs @@ -20,6 +20,9 @@ pub mod data_store; #[cfg(feature = "sqlite")] pub mod sqlite_store; +#[cfg(feature = "idxdb")] +pub mod web_store; + mod note_record; pub use note_record::{InputNoteRecord, NoteRecordDetails, NoteStatus, OutputNoteRecord}; @@ -119,6 +122,7 @@ pub trait Store { #[maybe_async] fn insert_input_note(&self, note: InputNoteRecord) -> Result<(), StoreError>; + #[maybe_async] /// Updates the inclusion proof of the input note with the provided ID fn update_note_inclusion_proof( &self, @@ -126,6 +130,7 @@ pub trait Store { inclusion_proof: NoteInclusionProof, ) -> Result<(), StoreError>; + #[maybe_async] /// Updates the metadata of the input note with the provided ID fn 
update_note_metadata( &self, diff --git a/crates/rust-client/src/store/sqlite_store/mod.rs b/crates/rust-client/src/store/sqlite_store/mod.rs index cf420fbba..ea4b00b84 100644 --- a/crates/rust-client/src/store/sqlite_store/mod.rs +++ b/crates/rust-client/src/store/sqlite_store/mod.rs @@ -264,6 +264,7 @@ impl Store for SqliteStore { self.get_unspent_input_note_nullifiers() } + #[maybe_async] fn update_note_inclusion_proof( &self, note_id: miden_objects::notes::NoteId, @@ -272,6 +273,7 @@ impl Store for SqliteStore { self.update_note_inclusion_proof(note_id, inclusion_proof) } + #[maybe_async] fn update_note_metadata( &self, note_id: miden_objects::notes::NoteId, diff --git a/crates/rust-client/src/store/web_store/accounts/js_bindings.rs b/crates/rust-client/src/store/web_store/accounts/js_bindings.rs new file mode 100644 index 000000000..6c7c58ccf --- /dev/null +++ b/crates/rust-client/src/store/web_store/accounts/js_bindings.rs @@ -0,0 +1,72 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Account IndexedDB Operations +#[wasm_bindgen(module = "/src/store/web_store/js/accounts.js")] +extern "C" { + // GETS + // ================================================================================================ + #[wasm_bindgen(js_name = getAccountIds)] + pub fn idxdb_get_account_ids() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAllAccountStubs)] + pub fn idxdb_get_account_stubs() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountStub)] + pub fn idxdb_get_account_stub(account_id: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountCode)] + pub fn idxdb_get_account_code(code_root: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountStorage)] + pub fn idxdb_get_account_storage(storage_root: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAssetVault)] + pub fn idxdb_get_account_asset_vault(vault_root: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAuth)] + pub fn 
idxdb_get_account_auth(account_id: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAuthByPubKey)] + pub fn idxdb_get_account_auth_by_pub_key(pub_key: Vec) -> JsValue; + + #[wasm_bindgen(js_name = fetchAndCacheAccountAuthByPubKey)] + pub fn idxdb_fetch_and_cache_account_auth_by_pub_key(account_id: String) -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertAccountCode)] + pub fn idxdb_insert_account_code( + code_root: String, + code: String, + module: Vec, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountStorage)] + pub fn idxdb_insert_account_storage( + storage_root: String, + storage_slots: Vec, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountAssetVault)] + pub fn idxdb_insert_account_asset_vault(vault_root: String, assets: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountRecord)] + pub fn idxdb_insert_account_record( + id: String, + code_root: String, + storage_root: String, + vault_root: String, + nonce: String, + committed: bool, + account_seed: Option>, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountAuth)] + pub fn idxdb_insert_account_auth( + id: String, + auth_info: Vec, + pub_key: Vec, + ) -> js_sys::Promise; +} diff --git a/crates/rust-client/src/store/web_store/accounts/mod.rs b/crates/rust-client/src/store/web_store/accounts/mod.rs new file mode 100644 index 000000000..3940a090e --- /dev/null +++ b/crates/rust-client/src/store/web_store/accounts/mod.rs @@ -0,0 +1,196 @@ +use miden_lib::transaction::TransactionKernel; +use miden_objects::{ + accounts::{Account, AccountCode, AccountId, AccountStorage, AccountStub, AuthSecretKey}, + assembly::ModuleAst, + assets::{Asset, AssetVault}, + Digest, Word, +}; +use miden_tx::utils::{Deserializable, Serializable}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::*; + +use super::WebStore; 
+use crate::errors::StoreError; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub(crate) mod utils; +use utils::*; + +impl WebStore { + pub(super) async fn get_account_ids(&self) -> Result, StoreError> { + let promise = idxdb_get_account_ids(); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_ids_as_strings: Vec = from_value(js_value).unwrap(); + + let native_account_ids: Vec = account_ids_as_strings + .into_iter() + .map(|id| AccountId::from_hex(&id).unwrap()) + .collect(); + + Ok(native_account_ids) + } + + pub(super) async fn get_account_stubs( + &self, + ) -> Result)>, StoreError> { + let promise = idxdb_get_account_stubs(); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_stubs_idxdb: Vec = from_value(js_value).unwrap(); + + let account_stubs: Result)>, StoreError> = + account_stubs_idxdb.into_iter().map(parse_account_record_idxdb_object).collect(); // Collect results into a single Result + + account_stubs + } + + pub(crate) async fn get_account_stub( + &self, + account_id: AccountId, + ) -> Result<(AccountStub, Option), StoreError> { + let account_id_str = account_id.to_string(); + + let promise = idxdb_get_account_stub(account_id_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_stub_idxdb: AccountRecordIdxdbOjbect = from_value(js_value).unwrap(); + + parse_account_record_idxdb_object(account_stub_idxdb) + } + + pub(crate) async fn get_account( + &self, + account_id: AccountId, + ) -> Result<(Account, Option), StoreError> { + let (account_stub, seed) = self.get_account_stub(account_id).await.unwrap(); + let (_procedures, module_ast) = + self.get_account_code(account_stub.code_root()).await.unwrap(); + let account_code = AccountCode::new(module_ast, &TransactionKernel::assembler()).unwrap(); + let account_storage = self.get_account_storage(account_stub.storage_root()).await.unwrap(); + let account_vault = 
self.get_vault_assets(account_stub.vault_root()).await.unwrap(); + let account_vault = AssetVault::new(&account_vault).unwrap(); + + let account = Account::from_parts( + account_stub.id(), + account_vault, + account_storage, + account_code, + account_stub.nonce(), + ); + + Ok((account, seed)) + } + + pub(super) async fn get_account_code( + &self, + root: Digest, + ) -> Result<(Vec, ModuleAst), StoreError> { + let root_serialized = root.to_string(); + + let promise = idxdb_get_account_code(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_code_idxdb: AccountCodeIdxdbObject = from_value(js_value).unwrap(); + + let procedures = serde_json::from_str(&account_code_idxdb.procedures).unwrap(); + + let module = ModuleAst::from_bytes(&account_code_idxdb.module).unwrap(); + + Ok((procedures, module)) + } + + pub(super) async fn get_account_storage( + &self, + root: Digest, + ) -> Result { + let root_serialized = root.to_string(); + + let promise = idxdb_get_account_storage(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_storage_idxdb: AccountStorageIdxdbObject = from_value(js_value).unwrap(); + + let storage = AccountStorage::read_from_bytes(&account_storage_idxdb.storage).unwrap(); + Ok(storage) + } + + pub(super) async fn get_vault_assets(&self, root: Digest) -> Result, StoreError> { + let root_serialized = serde_json::to_string(&root.to_string()).unwrap(); + + let promise = idxdb_get_account_asset_vault(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let vault_assets_idxdb: AccountVaultIdxdbObject = from_value(js_value).unwrap(); + + let assets = serde_json::from_str(&vault_assets_idxdb.assets).unwrap(); + Ok(assets) + } + + pub(crate) async fn get_account_auth( + &self, + account_id: AccountId, + ) -> Result { + let account_id_str = account_id.to_string(); + + let promise = idxdb_get_account_auth(account_id_str); + let js_value = 
JsFuture::from(promise).await.unwrap(); + let auth_info_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + let auth_info = AuthSecretKey::read_from_bytes(&auth_info_idxdb.auth_info)?; + + Ok(auth_info) + } + + pub(crate) async fn insert_account( + &self, + account: &Account, + account_seed: Option, + auth_info: &AuthSecretKey, + ) -> Result<(), StoreError> { + insert_account_code(account.code()).await.unwrap(); + + insert_account_storage(account.storage()).await.unwrap(); + + insert_account_asset_vault(account.vault()).await.unwrap(); + + insert_account_record(account, account_seed).await.unwrap(); + + insert_account_auth(account.id(), auth_info).await.unwrap(); + + Ok(()) + } + + /// Returns an [AuthSecretKey] by a public key represented by a [Word] + pub(crate) fn get_account_auth_by_pub_key( + &self, + pub_key: Word, + ) -> Result { + let pub_key_bytes = pub_key.to_bytes(); + + let js_value = idxdb_get_account_auth_by_pub_key(pub_key_bytes); + let account_auth_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + let auth_info = AuthSecretKey::read_from_bytes(&account_auth_idxdb.auth_info)?; + + Ok(auth_info) + } + + /// Fetches an [AuthSecretKey] by a public key represented by a [Word] and caches it in the store. + /// This is used in the web_client so adding this to ignore the dead code warning. 
+ #[allow(dead_code)] + pub(crate) async fn fetch_and_cache_account_auth_by_pub_key( + &self, + account_id: String, + ) -> Result { + let promise = idxdb_fetch_and_cache_account_auth_by_pub_key(account_id); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_auth_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + let auth_info = AuthSecretKey::read_from_bytes(&account_auth_idxdb.auth_info)?; + + Ok(auth_info) + } +} diff --git a/crates/rust-client/src/store/web_store/accounts/models.rs b/crates/rust-client/src/store/web_store/accounts/models.rs new file mode 100644 index 000000000..2460407f8 --- /dev/null +++ b/crates/rust-client/src/store/web_store/accounts/models.rs @@ -0,0 +1,62 @@ +use base64::decode as base64_decode; +use serde::{de::Error, Deserialize, Deserializer, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct AccountCodeIdxdbObject { + pub root: String, + pub procedures: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub module: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountAuthIdxdbObject { + pub id: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub auth_info: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountStorageIdxdbObject { + pub root: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub storage: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountVaultIdxdbObject { + pub root: String, + pub assets: String, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountRecordIdxdbOjbect { + pub id: String, + pub nonce: String, + pub vault_root: String, + pub storage_root: String, + pub code_root: String, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub account_seed: Option>, +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: 
Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None => Ok(None), + } +} diff --git a/crates/rust-client/src/store/web_store/accounts/utils.rs b/crates/rust-client/src/store/web_store/accounts/utils.rs new file mode 100644 index 000000000..f00e183d4 --- /dev/null +++ b/crates/rust-client/src/store/web_store/accounts/utils.rs @@ -0,0 +1,113 @@ +use miden_objects::{ + accounts::{Account, AccountCode, AccountId, AccountStorage, AccountStub, AuthSecretKey}, + assembly::AstSerdeOptions, + assets::{Asset, AssetVault}, + utils::Deserializable, + Digest, Felt, Word, +}; +use miden_tx::utils::Serializable; +use wasm_bindgen_futures::*; + +use super::{js_bindings::*, models::*}; +use crate::errors::StoreError; + +pub async fn insert_account_code(account_code: &AccountCode) -> Result<(), ()> { + let root = account_code.root().to_string(); + let procedures = serde_json::to_string(account_code.procedures()).unwrap(); + let module = account_code.module().to_bytes(AstSerdeOptions { serialize_imports: true }); + + let promise = idxdb_insert_account_code(root, procedures, module); + let _ = JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_storage(account_storage: &AccountStorage) -> Result<(), ()> { + let root = account_storage.root().to_string(); + + let storage = account_storage.to_bytes(); + + let promise = idxdb_insert_account_storage(root, storage); + let _ = JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_asset_vault(asset_vault: &AssetVault) -> Result<(), ()> { + let root 
= serde_json::to_string(&asset_vault.commitment()).unwrap(); + let assets: Vec = asset_vault.assets().collect(); + let assets_as_str = serde_json::to_string(&assets).unwrap(); + + let promise = idxdb_insert_account_asset_vault(root, assets_as_str); + let _ = JsFuture::from(promise).await; + Ok(()) +} + +pub async fn insert_account_auth( + account_id: AccountId, + auth_info: &AuthSecretKey, +) -> Result<(), ()> { + let pub_key = match auth_info { + AuthSecretKey::RpoFalcon512(secret) => Word::from(secret.public_key()), + } + .to_bytes(); + + let account_id_str = account_id.to_string(); + let auth_info = auth_info.to_bytes(); + + let promise = idxdb_insert_account_auth(account_id_str, auth_info, pub_key); + let _ = JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_record( + account: &Account, + account_seed: Option, +) -> Result<(), ()> { + let account_id_str = account.id().to_string(); + let code_root = account.code().root().to_string(); + let storage_root = account.storage().root().to_string(); + let vault_root = serde_json::to_string(&account.vault().commitment()).unwrap(); + let committed = account.is_on_chain(); + let nonce = account.nonce().to_string(); + let account_seed = account_seed.map(|seed| seed.to_bytes()); + + let promise = idxdb_insert_account_record( + account_id_str, + code_root, + storage_root, + vault_root, + nonce, + committed, + account_seed, + ); + let _ = JsFuture::from(promise).await; + + Ok(()) +} + +pub fn parse_account_record_idxdb_object( + account_stub_idxdb: AccountRecordIdxdbOjbect, +) -> Result<(AccountStub, Option), StoreError> { + let native_account_id: AccountId = AccountId::from_hex(&account_stub_idxdb.id).unwrap(); + let native_nonce: u64 = account_stub_idxdb + .nonce + .parse::() + .map_err(|err| StoreError::ParsingError(err.to_string()))?; + let account_seed = account_stub_idxdb + .account_seed + .map(|seed| Word::read_from_bytes(&seed)) + .transpose()?; + + let account_stub = AccountStub::new( + 
native_account_id, + Felt::new(native_nonce), + serde_json::from_str(&account_stub_idxdb.vault_root) + .map_err(StoreError::InputSerializationError)?, + Digest::try_from(&account_stub_idxdb.storage_root)?, + Digest::try_from(&account_stub_idxdb.code_root)?, + ); + + Ok((account_stub, account_seed)) +} diff --git a/crates/rust-client/src/store/web_store/chain_data/js_bindings.rs b/crates/rust-client/src/store/web_store/chain_data/js_bindings.rs new file mode 100644 index 000000000..fb99c4646 --- /dev/null +++ b/crates/rust-client/src/store/web_store/chain_data/js_bindings.rs @@ -0,0 +1,38 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// ChainData IndexedDB Operations +#[wasm_bindgen(module = "/src/store/web_store/js/chainData.js")] +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getBlockHeaders)] + pub fn idxdb_get_block_headers(block_numbers: Vec) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getTrackedBlockHeaders)] + pub fn idxdb_get_tracked_block_headers() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrNodesAll)] + pub fn idxdb_get_chain_mmr_nodes_all() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrNodes)] + pub fn idxdb_get_chain_mmr_nodes(ids: Vec) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrPeaksByBlockNum)] + pub fn idxdb_get_chain_mmr_peaks_by_block_num(block_num: String) -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertBlockHeader)] + pub fn idxdb_insert_block_header( + block_num: String, + header: String, + chain_mmr_peaks: String, + has_client_notes: bool, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertChainMmrNodes)] + pub fn idxdb_insert_chain_mmr_nodes(ids: Vec, nodes: Vec) -> js_sys::Promise; +} diff --git 
a/crates/rust-client/src/store/web_store/chain_data/mod.rs b/crates/rust-client/src/store/web_store/chain_data/mod.rs new file mode 100644 index 000000000..dc8453866 --- /dev/null +++ b/crates/rust-client/src/store/web_store/chain_data/mod.rs @@ -0,0 +1,175 @@ +use std::collections::BTreeMap; + +use miden_objects::{ + crypto::merkle::{InOrderIndex, MmrPeaks}, + BlockHeader, Digest, +}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::JsFuture; + +use super::WebStore; +use crate::{errors::StoreError, store::ChainMmrNodeFilter}; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub mod utils; +use utils::*; + +impl WebStore { + pub(crate) async fn insert_block_header( + &self, + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool, + ) -> Result<(), StoreError> { + let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); + let serialized_data = + serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; + + let promise = idxdb_insert_block_header( + serialized_data.block_num, + serialized_data.header, + serialized_data.chain_mmr_peaks, + serialized_data.has_client_notes, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } + + pub(crate) async fn get_block_headers( + &self, + block_numbers: &[u32], + ) -> Result, StoreError> { + let formatted_block_numbers_list: Vec = block_numbers + .iter() + .map(|block_number| (*block_number as i64).to_string()) + .collect(); + + let promise = idxdb_get_block_headers(formatted_block_numbers_list); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_headers_idxdb: Vec> = + from_value(js_value).unwrap(); + + // Transform the list of Option to a list of results + let results: Result, StoreError> = block_headers_idxdb + .into_iter() + .filter_map(|record_option| record_option.map(Ok)) + .map(|record_result: Result| { + let record = record_result?; + let block_header = serde_json::from_str(&record.header) + 
.map_err(StoreError::JsonDataDeserializationError)?; + let has_client_notes = record.has_client_notes; + + Ok((block_header, has_client_notes)) + }) + .collect(); // Collects into Result, StoreError> + + results + } + + pub(crate) async fn get_tracked_block_headers(&self) -> Result, StoreError> { + let promise = idxdb_get_tracked_block_headers(); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_headers_idxdb: Vec = from_value(js_value).unwrap(); + + let results: Result, StoreError> = block_headers_idxdb + .into_iter() + .map(|record| { + let block_header = serde_json::from_str(&record.header).unwrap(); + + Ok(block_header) + }) + .collect(); + + results + } + + pub(crate) async fn get_chain_mmr_nodes<'a>( + &'a self, + filter: ChainMmrNodeFilter<'a>, + ) -> Result, StoreError> { + match filter { + ChainMmrNodeFilter::All => { + let promise = idxdb_get_chain_mmr_nodes_all(); + let js_value = JsFuture::from(promise).await.unwrap(); + process_chain_mmr_nodes_from_js_value(js_value) + }, + ChainMmrNodeFilter::List(ids) => { + let formatted_list: Vec = + ids.iter().map(|id| (Into::::into(*id)).to_string()).collect(); + + let promise = idxdb_get_chain_mmr_nodes(formatted_list); + let js_value = JsFuture::from(promise).await.unwrap(); + process_chain_mmr_nodes_from_js_value(js_value) + }, + } + } + + pub(crate) async fn get_chain_mmr_peaks_by_block_num( + &self, + block_num: u32, + ) -> Result { + let block_num_as_str = block_num.to_string(); + + let promise = idxdb_get_chain_mmr_peaks_by_block_num(block_num_as_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let mmr_peaks_idxdb: MmrPeaksIdxdbObject = from_value(js_value).unwrap(); + + if let Some(peaks) = mmr_peaks_idxdb.peaks { + let mmr_peaks_nodes: Vec = + serde_json::from_str(&peaks).map_err(StoreError::JsonDataDeserializationError)?; + + return MmrPeaks::new(block_num as usize, mmr_peaks_nodes) + .map_err(StoreError::MmrError); + } + + Ok(MmrPeaks::new(0, vec![])?) 
+ } + + pub(crate) async fn insert_chain_mmr_nodes( + &self, + nodes: &[(InOrderIndex, Digest)], + ) -> Result<(), StoreError> { + let mut serialized_node_ids = Vec::new(); + let mut serialized_nodes = Vec::new(); + for (id, node) in nodes.iter() { + let serialized_data = serialize_chain_mmr_node(*id, *node)?; + serialized_node_ids.push(serialized_data.id); + serialized_nodes.push(serialized_data.node); + } + + let promise = idxdb_insert_chain_mmr_nodes(serialized_node_ids, serialized_nodes); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } + + /// This function is not used in this crate, rather it is used in the 'miden-client' crate. + /// https://github.com/0xPolygonMiden/miden-client/blob/c273847726ed325d2e627e4db18bf9f3ab8c28ba/src/store/sqlite_store/sync.rs#L105 + /// It is duplicated here due to its reliance on the store. + #[allow(dead_code)] + pub(crate) async fn insert_block_header_tx( + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool, + ) -> Result<(), StoreError> { + let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); + let serialized_data = + serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; + + let promise = idxdb_insert_block_header( + serialized_data.block_num, + serialized_data.header, + serialized_data.chain_mmr_peaks, + serialized_data.has_client_notes, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } +} diff --git a/crates/rust-client/src/store/web_store/chain_data/models.rs b/crates/rust-client/src/store/web_store/chain_data/models.rs new file mode 100644 index 000000000..512756e65 --- /dev/null +++ b/crates/rust-client/src/store/web_store/chain_data/models.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct BlockHeaderIdxdbObject { + pub block_num: String, + pub header: String, + pub chain_mmr: String, + pub has_client_notes: bool, +} + +#[derive(Serialize, Deserialize)] +pub struct ChainMmrNodeIdxdbObject { + 
pub id: String, + pub node: String, +} + +#[derive(Serialize, Deserialize)] +pub struct MmrPeaksIdxdbObject { + pub peaks: Option, +} diff --git a/crates/rust-client/src/store/web_store/chain_data/utils.rs b/crates/rust-client/src/store/web_store/chain_data/utils.rs new file mode 100644 index 000000000..834baa942 --- /dev/null +++ b/crates/rust-client/src/store/web_store/chain_data/utils.rs @@ -0,0 +1,67 @@ +use std::{collections::BTreeMap, num::NonZeroUsize}; + +use miden_objects::{crypto::merkle::InOrderIndex, BlockHeader, Digest}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen::JsValue; + +use crate::{errors::StoreError, store::web_store::chain_data::ChainMmrNodeIdxdbObject}; + +pub struct SerializedBlockHeaderData { + pub block_num: String, + pub header: String, + pub chain_mmr_peaks: String, + pub has_client_notes: bool, +} + +pub struct SerializedChainMmrNodeData { + pub id: String, + pub node: String, +} + +pub fn serialize_block_header( + block_header: BlockHeader, + chain_mmr_peaks: Vec, + has_client_notes: bool, +) -> Result { + let block_num = block_header.block_num().to_string(); + let header = + serde_json::to_string(&block_header).map_err(StoreError::InputSerializationError)?; + let chain_mmr_peaks = + serde_json::to_string(&chain_mmr_peaks).map_err(StoreError::InputSerializationError)?; + + Ok(SerializedBlockHeaderData { + block_num, + header, + chain_mmr_peaks, + has_client_notes, + }) +} + +pub fn serialize_chain_mmr_node( + id: InOrderIndex, + node: Digest, +) -> Result { + let id: u64 = id.into(); + let id_as_str = id.to_string(); + let node = serde_json::to_string(&node).map_err(StoreError::InputSerializationError)?; + Ok(SerializedChainMmrNodeData { id: id_as_str, node }) +} + +pub fn process_chain_mmr_nodes_from_js_value( + js_value: JsValue, +) -> Result, StoreError> { + let chain_mmr_nodes_idxdb: Vec = from_value(js_value).unwrap(); + + let results: Result, StoreError> = chain_mmr_nodes_idxdb + .into_iter() + .map(|record| { + let 
id_as_u64: u64 = record.id.parse::().unwrap(); + let id = InOrderIndex::new(NonZeroUsize::new(id_as_u64 as usize).unwrap()); + let node: Digest = serde_json::from_str(&record.node) + .map_err(StoreError::JsonDataDeserializationError)?; + Ok((id, node)) + }) + .collect(); + + results +} diff --git a/crates/rust-client/src/store/web_store/js/accounts.js b/crates/rust-client/src/store/web_store/js/accounts.js new file mode 100644 index 000000000..06151d808 --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/accounts.js @@ -0,0 +1,431 @@ +import { + accountCodes, + accountStorages, + accountVaults, + accountAuths, + accounts +} from './schema.js'; + +// GET FUNCTIONS +export async function getAccountIds() { + try { + let allIds = new Set(); // Use a Set to ensure uniqueness + + // Iterate over each account entry + await accounts.each(account => { + allIds.add(account.id); // Assuming 'account' has an 'id' property + }); + + return Array.from(allIds); // Convert back to array to return a list of unique IDs + } catch (error) { + console.error("Failed to retrieve account IDs: ", error); + throw error; // Or handle the error as fits your application's error handling strategy + } +} + +export async function getAllAccountStubs() { + try { + // Use a Map to track the latest record for each id based on nonce + const latestRecordsMap = new Map(); + + await accounts.each(record => { + const existingRecord = latestRecordsMap.get(record.id); + if (!existingRecord || BigInt(record.nonce) > BigInt(existingRecord.nonce)) { + latestRecordsMap.set(record.id, record); + } + }); + + // Extract the latest records from the Map + const latestRecords = Array.from(latestRecordsMap.values()); + + const resultObject = await Promise.all(latestRecords.map(async record => { + let accountSeedBase64 = null; + if (record.accountSeed) { + // Ensure accountSeed is processed as a Uint8Array and converted to Base64 + let accountSeedArrayBuffer = await record.accountSeed.arrayBuffer(); + let 
accountSeedArray = new Uint8Array(accountSeedArrayBuffer); + accountSeedBase64 = uint8ArrayToBase64(accountSeedArray); + } + + return { + id: record.id, + nonce: record.nonce, + vault_root: record.vaultRoot, + storage_root: record.storageRoot, + code_root: record.codeRoot, + account_seed: accountSeedBase64 // Now correctly formatted as Base64 + }; + })); + + return resultObject; + } catch (error) { + console.error('Error fetching all latest account stubs:', error); + throw error; + } +} + +export async function getAccountStub( + accountId +) { + try { + // Fetch all records matching the given id + const allMatchingRecords = await accounts + .where('id') + .equals(accountId) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given ID.'); + return null; // No records found + } + + // Convert nonce to BigInt and sort + // Note: This assumes all nonces are valid BigInt strings. + const sortedRecords = allMatchingRecords.sort((a, b) => { + const bigIntA = BigInt(a.nonce); + const bigIntB = BigInt(b.nonce); + return bigIntA > bigIntB ? -1 : bigIntA < bigIntB ? 
1 : 0; + }); + + // The first record is the most recent one due to the sorting + const mostRecentRecord = sortedRecords[0]; + + let accountSeedBase64 = null; + if (mostRecentRecord.accountSeed) { + // Ensure accountSeed is processed as a Uint8Array and converted to Base64 + let accountSeedArrayBuffer = await mostRecentRecord.accountSeed.arrayBuffer(); + let accountSeedArray = new Uint8Array(accountSeedArrayBuffer); + accountSeedBase64 = uint8ArrayToBase64(accountSeedArray); + } + const accountStub = { + id: mostRecentRecord.id, + nonce: mostRecentRecord.nonce, + vault_root: mostRecentRecord.vaultRoot, + storage_root: mostRecentRecord.storageRoot, + code_root: mostRecentRecord.codeRoot, + account_seed: accountSeedBase64 + } + return accountStub; + } catch (error) { + console.error('Error fetching most recent account record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountCode( + codeRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await accountCodes + .where('root') + .equals(codeRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given code root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const codeRecord = allMatchingRecords[0]; + + // Convert the module Blob to an ArrayBuffer + const moduleArrayBuffer = await codeRecord.module.arrayBuffer(); + const moduleArray = new Uint8Array(moduleArrayBuffer); + const moduleBase64 = uint8ArrayToBase64(moduleArray); + return { + root: codeRecord.root, + procedures: codeRecord.procedures, + module: moduleBase64, + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountStorage( + storageRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await 
accountStorages + .where('root') + .equals(storageRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given storage root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const storageRecord = allMatchingRecords[0]; + + // Convert the module Blob to an ArrayBuffer + const storageArrayBuffer = await storageRecord.slots.arrayBuffer(); + const storageArray = new Uint8Array(storageArrayBuffer); + const storageBase64 = uint8ArrayToBase64(storageArray); + return { + root: storageRecord.root, + storage: storageBase64 + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountAssetVault( + vaultRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await accountVaults + .where('root') + .equals(vaultRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given vault root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const vaultRecord = allMatchingRecords[0]; + + return { + root: vaultRecord.root, + assets: vaultRecord.assets + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountAuth( + accountId +) { + try { + // Fetch all records matching the given id + const allMatchingRecords = await accountAuths + .where('accountId') + .equals(accountId) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given account ID.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const authRecord = allMatchingRecords[0]; + + // Convert the authInfo Blob to an ArrayBuffer + const 
authInfoArrayBuffer = await authRecord.authInfo.arrayBuffer(); + const authInfoArray = new Uint8Array(authInfoArrayBuffer); + const authInfoBase64 = uint8ArrayToBase64(authInfoArray); + + return { + id: authRecord.accountId, + auth_info: authInfoBase64 + }; + } catch (err) { + console.error('Error fetching account auth:', err); + throw err; // Re-throw the error for further handling + } +} + +export function getAccountAuthByPubKey( + pubKey +) { + // Try to get the account auth from the cache + let pubKeyArray = new Uint8Array(pubKey); + let pubKeyBase64 = uint8ArrayToBase64(pubKeyArray); + let cachedAccountAuth = ACCOUNT_AUTH_MAP.get(pubKeyBase64); + + // If it's not in the cache, throw an error + if (!cachedAccountAuth) { + throw new Error('Account auth not found in cache.'); + } + + let data = { + id: cachedAccountAuth.id, + auth_info: cachedAccountAuth.auth_info + } + + return data; +} + +var ACCOUNT_AUTH_MAP = new Map(); +export async function fetchAndCacheAccountAuthByPubKey( + accountId +) { + try { + // Fetch all records matching the given id + const allMatchingRecords = await accountAuths + .where('accountId') + .equals(accountId) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given account ID.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const authRecord = allMatchingRecords[0]; + + // Convert the authInfo Blob to an ArrayBuffer + const authInfoArrayBuffer = await authRecord.authInfo.arrayBuffer(); + const authInfoArray = new Uint8Array(authInfoArrayBuffer); + const authInfoBase64 = uint8ArrayToBase64(authInfoArray); + + // Store the auth info in the map + ACCOUNT_AUTH_MAP.set(authRecord.pubKey, { + id: authRecord.accountId, + auth_info: authInfoBase64 + }); + + return { + id: authRecord.accountId, + auth_info: authInfoBase64 + }; + } catch (err) { + console.error('Error fetching account auth by public key:', err); + throw err; // Re-throw 
the error for further handling + } +} + +// INSERT FUNCTIONS + +export async function insertAccountCode( + codeRoot, + code, + module +) { + try { + // Create a Blob from the ArrayBuffer + const moduleBlob = new Blob([new Uint8Array(module)]); + + // Prepare the data object to insert + const data = { + root: codeRoot, // Using codeRoot as the key + procedures: code, + module: moduleBlob, // Blob created from ArrayBuffer + }; + + // Perform the insert using Dexie + await accountCodes.add(data); + } catch (error) { + console.error(`Error inserting code with root: ${codeRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountStorage( + storageRoot, + storageSlots +) { + try { + const storageSlotsBlob = new Blob([new Uint8Array(storageSlots)]); + + // Prepare the data object to insert + const data = { + root: storageRoot, // Using storageRoot as the key + slots: storageSlotsBlob, // Blob created from ArrayBuffer + }; + + // Perform the insert using Dexie + await accountStorages.add(data); + } catch (error) { + console.error(`Error inserting storage with root: ${storageRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountAssetVault( + vaultRoot, + assets +) { + try { + // Prepare the data object to insert + const data = { + root: vaultRoot, // Using vaultRoot as the key + assets: assets, + }; + + // Perform the insert using Dexie + await accountVaults.add(data); + } catch (error) { + console.error(`Error inserting vault with root: ${vaultRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountRecord( + accountId, + code_root, + storage_root, + vault_root, + nonce, + committed, + account_seed +) { + try { + let accountSeedBlob = null; + if (account_seed) { + accountSeedBlob = new Blob([new 
Uint8Array(account_seed)]); + } + + + // Prepare the data object to insert + const data = { + id: accountId, // Using accountId as the key + codeRoot: code_root, + storageRoot: storage_root, + vaultRoot: vault_root, + nonce: nonce, + committed: committed, + accountSeed: accountSeedBlob, + }; + + // Perform the insert using Dexie + await accounts.add(data); + } catch (error) { + console.error(`Error inserting account: ${accountId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountAuth( + accountId, + authInfo, + pubKey +) { + try { + let authInfoBlob = new Blob([new Uint8Array(authInfo)]); + let pubKeyArray = new Uint8Array(pubKey); + let pubKeyBase64 = uint8ArrayToBase64(pubKeyArray); + + // Prepare the data object to insert + const data = { + accountId: accountId, // Using accountId as the key + authInfo: authInfoBlob, + pubKey: pubKeyBase64 + }; + + // Perform the insert using Dexie + await accountAuths.add(data); + } catch (error) { + console.error(`Error inserting auth for account: ${accountId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} diff --git a/crates/rust-client/src/store/web_store/js/chainData.js b/crates/rust-client/src/store/web_store/js/chainData.js new file mode 100644 index 000000000..a567c6e1e --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/chainData.js @@ -0,0 +1,129 @@ +import { + blockHeaders, + chainMmrNodes +} from './schema.js' + +// INSERT FUNCTIONS +export async function insertBlockHeader( + blockNum, + header, + chainMmrPeaks, + hasClientNotes +) { + try { + const data = { + blockNum: blockNum, + header: header, + chainMmrPeaks: chainMmrPeaks, + hasClientNotes: hasClientNotes + }; + + const existingBlockHeader = await 
blockHeaders.get(blockNum); + + if (!existingBlockHeader) { + await blockHeaders.add(data); + } else { + console.log("Block header already exists, ignoring."); + } + } catch (err) { + console.error("Failed to insert block header: ", err); + throw err; + } +} + +export async function insertChainMmrNodes( + ids, + nodes +) { + try { + const data = nodes.map((node, index) => { + return { + id: ids[index], + node: node + } + }); + + await chainMmrNodes.bulkAdd(data); + } catch (err) { + console.error("Failed to insert chain mmr nodes: ", err); + throw err; + } +} + +// GET FUNCTIONS +export async function getBlockHeaders( + blockNumbers +) { + try { + const results = await blockHeaders.bulkGet(blockNumbers); + + results.forEach((result, index) => { + if (result === undefined) { + results[index] = null; + } else { + results[index] = { + block_num: results[index].blockNum, + header: results[index].header, + chain_mmr: results[index].chainMmrPeaks, + has_client_notes: results[index].hasClientNotes + } + } + }); + + return results + } catch (err) { + console.error("Failed to get block headers: ", err); + throw err; + } +} + +export async function getTrackedBlockHeaders() { + try { + // Fetch all records matching the given root + const allMatchingRecords = await blockHeaders + .where('hasClientNotes') + .equals(true) + .toArray(); + return allMatchingRecords; + } catch (err) { + console.error("Failed to get tracked block headers: ", err); + throw err; + } +} + +export async function getChainMmrPeaksByBlockNum( + blockNum +) { + try { + const blockHeader = await blockHeaders.get(blockNum); + return { + peaks: blockHeader.chainMmrPeaks + }; + } catch (err) { + console.error("Failed to get chain mmr peaks: ", err); + throw err; + } +} + +export async function getChainMmrNodesAll() { + try { + const chainMmrNodesAll = await chainMmrNodes.toArray(); + return chainMmrNodesAll; + } catch (err) { + console.error("Failed to get chain mmr nodes: ", err); + throw err; + } +} + +export 
async function getChainMmrNodes( + ids +) { + try { + const results = await chainMmrNodes.bulkGet(ids); + + return results; + } catch (err) { + console.error("Failed to get chain mmr nodes: ", err); + throw err; + } +} diff --git a/crates/rust-client/src/store/web_store/js/notes.js b/crates/rust-client/src/store/web_store/js/notes.js new file mode 100644 index 000000000..bd7647f0f --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/notes.js @@ -0,0 +1,405 @@ +import { + db, + inputNotes, + outputNotes, + notesScripts, + transactions +} from './schema.js'; + +export async function getOutputNotes( + status +) { + try { + let notes; + + // Fetch the records based on the filter + if (status === 'All') { + notes = await outputNotes.toArray(); + } else { + notes = await outputNotes.where('status').equals(status).toArray(); + } + + return await processOutputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getInputNotes( + status +) { + try { + let notes; + + // Fetch the records based on the filter + if (status === 'All') { + notes = await inputNotes.toArray(); + } else { + notes = await inputNotes + .where('status') + .equals(status) + .and(note => note.ignored === false) + .toArray(); + } + + return await processInputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getIgnoredInputNotes() { + try { + const notes = await inputNotes + .where('ignored') + .equals(true) + .toArray(); + + return await processInputNotes(notes); + } catch (err) { + console.error("Failed to get ignored input notes: ", err); + throw err; + } +} + +export async function getIgnoredOutputNotes() { + try { + const notes = await outputNotes + .where('ignored') + .equals(true) + .toArray(); + + return await processOutputNotes(notes); + } catch (err) { + console.error("Failed to get ignored output notes: ", err); + throw err; + } + +} 
+ +export async function getInputNotesFromIds( + noteIds +) { + try { + let notes; + + // Fetch the records based on a list of IDs + notes = await inputNotes.where('noteId').anyOf(noteIds).toArray(); + + return await processInputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getOutputNotesFromIds( + noteIds +) { + try { + let notes; + + // Fetch the records based on a list of IDs + notes = await outputNotes.where('noteId').anyOf(noteIds).toArray(); + + return await processOutputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getUnspentInputNoteNullifiers() { + try { + const notes = await inputNotes + .where('status') + .anyOf(['Committed', 'Processing']) + .toArray(); + const nullifiers = notes.map(note => JSON.parse(note.details).nullifier); + + return nullifiers; + } catch (err) { + console.error("Failed to get unspent input note nullifiers: ", err); + throw err; + } +} + +export async function insertInputNote( + noteId, + assets, + recipient, + status, + metadata, + details, + noteScriptHash, + serializedNoteScript, + inclusionProof, + serializedCreatedAt, + ignored, + importedTag +) { + return db.transaction('rw', inputNotes, notesScripts, async (tx) => { + try { + let assetsBlob = new Blob([new Uint8Array(assets)]); + + // Prepare the data object to insert + const data = { + noteId: noteId, + assets: assetsBlob, + recipient: recipient, + status: status, + metadata: metadata ? metadata : null, + details: details, + inclusionProof: inclusionProof ? JSON.stringify(inclusionProof) : null, + consumerTransactionId: null, + createdAt: serializedCreatedAt, + ignored: ignored, + importedTag: importedTag ? 
importedTag : null + }; + + // Perform the insert using Dexie + await tx.inputNotes.add(data); + + let serializedNoteScriptBlob = new Blob([new Uint8Array(serializedNoteScript)]); + + const noteScriptData = { + scriptHash: noteScriptHash, + serializedNoteScript: serializedNoteScriptBlob, + }; + + await tx.notesScripts.put(noteScriptData); + } catch { + console.error(`Error inserting note: ${noteId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } + }); +} + +export async function insertOutputNote( + noteId, + assets, + recipient, + status, + metadata, + details, + noteScriptHash, + serializedNoteScript, + inclusionProof, + serializedCreatedAt, +) { + return db.transaction('rw', outputNotes, notesScripts, async (tx) => { + try { + let assetsBlob = new Blob([new Uint8Array(assets)]); + + // Prepare the data object to insert + const data = { + noteId: noteId, + assets: assetsBlob, + recipient: recipient, + status: status, + metadata: metadata, + details: details ? details : null, + inclusionProof: inclusionProof ? 
JSON.stringify(inclusionProof) : null, + consumerTransactionId: null, + createdAt: serializedCreatedAt, + ignored: false, + imported_tag: null + }; + + // Perform the insert using Dexie + await tx.outputNotes.add(data); + + if (noteScriptHash) { + const exists = await tx.notesScripts.get(noteScriptHash); + if (!exists) { + let serializedNoteScriptBlob = null; + if (serializedNoteScript) { + serializedNoteScriptBlob = new Blob([new Uint8Array(serializedNoteScript)]); + } + + const data = { + scriptHash: noteScriptHash, + serializedNoteScript: serializedNoteScriptBlob, + }; + await tx.notesScripts.add(data); + } + } + } catch { + console.error(`Error inserting note: ${noteId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } + }); +} + +export async function updateNoteConsumerTxId(noteId, consumerTxId, submittedAt) { + try { + // Start a transaction that covers both tables + await db.transaction('rw', inputNotes, outputNotes, async (tx) => { + // Update input_notes where note_id matches + const updatedInputNotes = await tx.inputNotes + .where('noteId') + .equals(noteId) + .modify({ consumerTransactionId: consumerTxId, submittedAt: submittedAt, status: "Processing" }); + + // Update output_notes where note_id matches + const updatedOutputNotes = await tx.outputNotes + .where('noteId') + .equals(noteId) + .modify({ consumerTransactionId: consumerTxId, submittedAt: submittedAt, status: "Processing" }); + + // Log the count of updated entries in both tables (optional) + console.log(`Updated ${updatedInputNotes} input notes and ${updatedOutputNotes} output notes`); + }); + } catch (err) { + console.error("Failed to update note consumer transaction ID: ", err); + throw err; + } +} + +export async function updateNoteInclusionProof( + noteId, + inclusionProof +) { + try { + await inputNotes + .where('noteId') + .equals(noteId) + .modify({ inclusionProof: inclusionProof, status: "Committed" }); + + } catch (err) { + 
console.error("Failed to update inclusion proof: ", err); + throw err; + } +} + +export async function updateNoteMetadata( + noteId, + metadata +) { + try { + await inputNotes + .where('noteId') + .equals(noteId) + .modify({ metadata: metadata }); + + } catch (err) { + console.error("Failed to update inclusion proof: ", err); + throw err; + } +} + +async function processInputNotes( + notes +) { + // Fetch all scripts from the scripts table for joining + const scripts = await notesScripts.toArray(); + const scriptMap = new Map(scripts.map(script => [script.scriptHash, script.serializedNoteScript])); + + const transactionRecords = await transactions.toArray(); + const transactionMap = new Map(transactionRecords.map(transaction => [transaction.id, transaction.accountId])); + + const processedNotes = await Promise.all(notes.map(async note => { + // Convert the assets blob to base64 + const assetsArrayBuffer = await note.assets.arrayBuffer(); + const assetsArray = new Uint8Array(assetsArrayBuffer); + const assetsBase64 = uint8ArrayToBase64(assetsArray); + note.assets = assetsBase64; + + // Convert the serialized note script blob to base64 + let serializedNoteScriptBase64 = null; + // Parse details JSON and perform a "join" + if (note.details) { + const details = JSON.parse(note.details); + if (details.script_hash) { + let serializedNoteScript = scriptMap.get(details.script_hash); + let serializedNoteScriptArrayBuffer = await serializedNoteScript.arrayBuffer(); + const serializedNoteScriptArray = new Uint8Array(serializedNoteScriptArrayBuffer); + serializedNoteScriptBase64 = uint8ArrayToBase64(serializedNoteScriptArray); + } + } + + // Perform a "join" with the transactions table + let consumerAccountId = null; + if (transactionMap.has(note.consumerTransactionId)) { + consumerAccountId = transactionMap.get(note.consumerTransactionId); + } + + return { + assets: note.assets, + details: note.details, + recipient: note.recipient, + status: note.status, + metadata: 
note.metadata ? note.metadata : null, + inclusion_proof: note.inclusionProof ? note.inclusionProof : null, + serialized_note_script: serializedNoteScriptBase64, + consumer_account_id: consumerAccountId, + created_at: note.createdAt, + submitted_at: note.submittedAt ? note.submittedAt : null, + nullifier_height: note.nullifierHeight ? note.nullifierHeight : null, + ignored: note.ignored, + imported_tag: note.importedTag ? note.importedTag : null + }; + })); + return processedNotes; +} + +async function processOutputNotes( + notes +) { + // Fetch all scripts from the scripts table for joining + const scripts = await notesScripts.toArray(); + const scriptMap = new Map(scripts.map(script => [script.scriptHash, script.serializedNoteScript])); + + const transactionRecords = await transactions.toArray(); + const transactionMap = new Map(transactionRecords.map(transaction => [transaction.id, transaction.accountId])); + + // Process each note to convert 'blobField' from Blob to Uint8Array + const processedNotes = await Promise.all(notes.map(async note => { + const assetsArrayBuffer = await note.assets.arrayBuffer(); + const assetsArray = new Uint8Array(assetsArrayBuffer); + const assetsBase64 = uint8ArrayToBase64(assetsArray); + note.assets = assetsBase64; + + let serializedNoteScriptBase64 = null; + // Parse details JSON and perform a "join" + if (note.details) { + const details = JSON.parse(note.details); + if (details.script_hash) { + let serializedNoteScript = scriptMap.get(details.script_hash); + let serializedNoteScriptArrayBuffer = await serializedNoteScript.arrayBuffer(); + const serializedNoteScriptArray = new Uint8Array(serializedNoteScriptArrayBuffer); + serializedNoteScriptBase64 = uint8ArrayToBase64(serializedNoteScriptArray); + } + } + + // Perform a "join" with the transactions table + let consumerAccountId = null; + if (transactionMap.has(note.consumerTransactionId)) { + consumerAccountId = transactionMap.get(note.consumerTransactionId); + } + + return { + 
assets: note.assets, + details: note.details ? note.details : null, + recipient: note.recipient, + status: note.status, + metadata: note.metadata, + inclusion_proof: note.inclusionProof ? note.inclusionProof : null, + serialized_note_script: serializedNoteScriptBase64, + consumer_account_id: consumerAccountId, + created_at: note.createdAt, + submitted_at: note.submittedAt ? note.submittedAt : null, + nullifier_height: note.nullifierHeight ? note.nullifierHeight : null, + ignored: note.ignored, + imported_tag: note.importedTag ? note.importedTag : null + }; + })); + return processedNotes; +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} diff --git a/crates/rust-client/src/store/web_store/js/schema.js b/crates/rust-client/src/store/web_store/js/schema.js new file mode 100644 index 000000000..86d59ea80 --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/schema.js @@ -0,0 +1,88 @@ +import Dexie from "dexie"; + +const DATABASE_NAME = 'MidenClientDB'; + +export async function openDatabase() { + console.log('Opening database...') + try { + await db.open(); + console.log("Database opened successfully"); + return true; + } catch (err) { + console.error("Failed to open database: ", err); + return false; + } +} + +const Table = { + AccountCode: 'accountCode', + AccountStorage: 'accountStorage', + AccountVaults: 'accountVaults', + AccountAuth: 'accountAuth', + Accounts: 'accounts', + Transactions: 'transactions', + TransactionScripts: 'transactionScripts', + InputNotes: 'inputNotes', + OutputNotes: 'outputNotes', + NotesScripts: 'notesScripts', + StateSync: 'stateSync', + BlockHeaders: 'blockHeaders', + ChainMmrNodes: 'chainMmrNodes', +}; + +const db = new Dexie(DATABASE_NAME); +db.version(1).stores({ + [Table.AccountCode]: indexes('root'), + [Table.AccountStorage]: indexes('root'), + [Table.AccountVaults]: indexes('root'), + [Table.AccountAuth]: 
indexes('accountId', 'pubKey'), + [Table.Accounts]: indexes('[id+nonce]', 'codeRoot', 'storageRoot', 'vaultRoot'), + [Table.Transactions]: indexes('id'), + [Table.TransactionScripts]: indexes('scriptHash'), + [Table.InputNotes]: indexes('noteId', 'recipient', 'status'), + [Table.OutputNotes]: indexes('noteId', 'recipient', 'status'), + [Table.NotesScripts]: indexes('scriptHash'), + [Table.StateSync]: indexes('id'), + [Table.BlockHeaders]: indexes('blockNum'), + [Table.ChainMmrNodes]: indexes('id'), +}); + +function indexes(...items) { + return items.join(','); +} + +db.on('populate', () => { + // Populate the stateSync table with default values + db.stateSync.put({ id: 1, blockNum: "0", tags: [] }); +}); + +const accountCodes = db.table(Table.AccountCode); +const accountStorages = db.table(Table.AccountStorage); +const accountVaults = db.table(Table.AccountVaults); +const accountAuths = db.table(Table.AccountAuth); +const accounts = db.table(Table.Accounts); +const transactions = db.table(Table.Transactions); +const transactionScripts = db.table(Table.TransactionScripts); +const inputNotes = db.table(Table.InputNotes); +const outputNotes = db.table(Table.OutputNotes); +const notesScripts = db.table(Table.NotesScripts); +const stateSync = db.table(Table.StateSync); +const blockHeaders = db.table(Table.BlockHeaders); +const chainMmrNodes = db.table(Table.ChainMmrNodes); + +export { + db, + accountCodes, + accountStorages, + accountVaults, + accountAuths, + accounts, + transactions, + transactionScripts, + inputNotes, + outputNotes, + notesScripts, + stateSync, + blockHeaders, + chainMmrNodes, +}; diff --git a/crates/rust-client/src/store/web_store/js/sync.js b/crates/rust-client/src/store/web_store/js/sync.js new file mode 100644 index 000000000..cba06c475 --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/sync.js @@ -0,0 +1,300 @@ +import { + db, + stateSync, + inputNotes, + outputNotes, + transactions, + blockHeaders, + chainMmrNodes, +} from 
'./schema.js'; + +export async function getNoteTags() { + try { + const record = await stateSync.get(1); // Since id is the primary key and always 1 + if (record) { + let data = null; + if (record.tags.length === 0) { + data = { + tags: JSON.stringify(record.tags) + } + } else { + data = { + tags: record.tags + } + }; + return data; + } else { + return null; + } + } catch (error) { + console.error('Error fetching record:', error); + return null; + } +} + +export async function getSyncHeight() { + try { + const record = await stateSync.get(1); // Since id is the primary key and always 1 + if (record) { + let data = { + block_num: record.blockNum + }; + return data; + } else { + return null; + } + } catch (error) { + console.error('Error fetching record:', error); + return null; + } +} + +export async function addNoteTag( + tags +) { + try { + await stateSync.update(1, { tags: tags }); + } catch { + console.error("Failed to add note tag: ", err); + throw err; + } +} + +export async function applyStateSync( + blockNum, + nullifiers, + nullifierBlockNums, + blockHeader, + chainMmrPeaks, + hasClientNotes, + nodeIndexes, + nodes, + outputNoteIds, + outputNoteInclusionProofs, + inputNoteIds, + inputNoteInluclusionProofs, + inputeNoteMetadatas, + transactionIds, + transactionBlockNums +) { + return db.transaction('rw', stateSync, inputNotes, outputNotes, transactions, blockHeaders, chainMmrNodes, async (tx) => { + await updateSyncHeight(tx, blockNum); + await updateSpentNotes(tx, nullifierBlockNums, nullifiers); + await updateBlockHeader(tx, blockNum, blockHeader, chainMmrPeaks, hasClientNotes); + await updateChainMmrNodes(tx, nodeIndexes, nodes); + await updateCommittedNotes(tx, outputNoteIds, outputNoteInclusionProofs, inputNoteIds, inputNoteInluclusionProofs, inputeNoteMetadatas); + await updateCommittedTransactions(tx, transactionBlockNums, transactionIds); + }); +} + +async function updateSyncHeight( + tx, + blockNum +) { + try { + await tx.stateSync.update(1, { 
blockNum: blockNum }); + } catch (error) { + console.error("Failed to update sync height: ", error); + throw error; + } +} + +// NOTE: nullifierBlockNums are the same length and ordered consistently with nullifiers +async function updateSpentNotes( + tx, + nullifierBlockNums, + nullifiers +) { + try { + // Fetch all notes + const inputNotes = await tx.inputNotes.toArray(); + const outputNotes = await tx.outputNotes.toArray(); + + // Pre-parse all details and store them with their respective note ids for quick access + const parsedInputNotes = inputNotes.map(note => ({ + noteId: note.noteId, + details: JSON.parse(note.details) // Parse the JSON string into an object + })); + + // Iterate through each parsed note and check against the list of nullifiers + for (const note of parsedInputNotes) { + if (note.details && note.details.nullifier) { + const nullifierIndex = nullifiers.indexOf(note.details.nullifier); + if (nullifierIndex !== -1) { + // If the nullifier is in the list, update the note's status and set nullifierHeight to the index + await tx.inputNotes.update(note.noteId, { status: 'Consumed', nullifierHeight: nullifierBlockNums[nullifierIndex] }); + } + } + } + + // Pre-parse all details and store them with their respective note ids for quick access + const parsedOutputNotes = outputNotes.map(note => ({ + noteId: note.noteId, + details: JSON.parse(note.details) // Parse the JSON string into an object + })); + + // Iterate through each parsed note and check against the list of nullifiers + for (const note of parsedOutputNotes) { + if (note.details && note.details.nullifier) { + const nullifierIndex = nullifiers.indexOf(note.details.nullifier); + if (nullifierIndex !== -1) { + // If the nullifier is in the list, update the note's status and set nullifierHeight to the index + await tx.outputNotes.update(note.noteId, { status: 'Consumed', nullifierHeight: nullifierBlockNums[nullifierIndex] }); + } + } + } + } catch (error) { + console.error("Error updating input 
notes:", error); + throw error; + } +} + +async function updateBlockHeader( + tx, + blockNum, + blockHeader, + chainMmrPeaks, + hasClientNotes +) { + try { + const data = { + blockNum: blockNum, + header: blockHeader, + chainMmrPeaks: chainMmrPeaks, + hasClientNotes: hasClientNotes + }; + + await tx.blockHeaders.add(data); + } catch (err) { + console.error("Failed to insert block header: ", err); + throw err; + } +} + +async function updateChainMmrNodes( + tx, + nodeIndexes, + nodes +) { + try { + // Check if the arrays are not of the same length + if (nodeIndexes.length !== nodes.length) { + throw new Error("nodeIndexes and nodes arrays must be of the same length"); + } + + if (nodeIndexes.length === 0) { + return; + } + + // Create the updates array with objects matching the structure expected by your IndexedDB schema + const updates = nodeIndexes.map((index, i) => ({ + id: index, // Assuming 'index' is the primary key or part of it + node: nodes[i] // Other attributes of the object + })); + + // Perform bulk update or insertion; assumes tx.chainMmrNodes is a valid table reference in a transaction + await tx.chainMmrNodes.bulkAdd(updates); + } catch (err) { + console.error("Failed to update chain mmr nodes: ", err); + throw err; + } +} + +async function updateCommittedNotes( + tx, + outputNoteIds, + outputNoteInclusionProofs, + inputNoteIds, + inputNoteInclusionProofs, + inputNoteMetadatas +) { + try { + if (outputNoteIds.length !== outputNoteInclusionProofs.length) { + throw new Error("Arrays outputNoteIds and outputNoteInclusionProofs must be of the same length"); + } + + if ( + inputNoteIds.length !== inputNoteInclusionProofs.length || + inputNoteIds.length !== inputNoteMetadatas.length || + inputNoteInclusionProofs.length !== inputNoteMetadatas.length + ) { + throw new Error("Arrays inputNoteIds and inputNoteInclusionProofs and inputNoteMetadatas must be of the same length"); + } + + for (let i = 0; i < outputNoteIds.length; i++) { + const noteId =
outputNoteIds[i]; + const inclusionProof = outputNoteInclusionProofs[i]; + + // Update output notes + await tx.outputNotes.where({ noteId: noteId }).modify({ + status: 'Committed', + inclusionProof: inclusionProof + }); + } + + for (let i = 0; i < inputNoteIds.length; i++) { + const noteId = inputNoteIds[i]; + const inclusionProof = inputNoteInclusionProofs[i]; + const metadata = inputNoteMetadatas[i]; + + // Update input notes + await tx.inputNotes.where({ noteId: noteId }).modify({ + status: 'Committed', + inclusionProof: inclusionProof, + metadata: metadata + }); + } + } catch (error) { + console.error("Error updating committed notes:", error); + throw error; + } +} + +async function updateCommittedTransactions( + tx, + blockNums, + transactionIds +) { + try { + if (transactionIds.length === 0) { + return; + } + + // Fetch existing records + const existingRecords = await tx.transactions.where('id').anyOf(transactionIds).toArray(); + + // Create a mapping of transaction IDs to block numbers + const transactionBlockMap = transactionIds.reduce((map, id, index) => { + map[id] = blockNums[index]; + return map; + }, {}); + + // Create updates by merging existing records with the new values + const updates = existingRecords.map(record => ({ + ...record, // Spread existing fields + commitHeight: transactionBlockMap[record.id] // Update specific field + })); + + // Perform the update + await tx.transactions.bulkPut(updates); + } catch (err) { + console.error("Failed to mark transactions as committed: ", err); + throw err; + } +} + +async function updateIgnoredNotesForTag( + tag +) { + try { + await inputNotes + .where('importedTag') + .equals(tag) + .modify(note => { + note.ignored = false; + }); + } catch (err) { + console.error("Failed to update ignored field for notes: ", err); + throw err; + } +} diff --git a/crates/rust-client/src/store/web_store/js/transactions.js b/crates/rust-client/src/store/web_store/js/transactions.js new file mode 100644 index 
000000000..75fce9fe6 --- /dev/null +++ b/crates/rust-client/src/store/web_store/js/transactions.js @@ -0,0 +1,160 @@ +import { + transactions, + transactionScripts, +} from './schema.js' + +export async function getTransactions( + filter +) { + let transactionRecords; + + try { + if (filter === 'Uncomitted') { + transactionRecords = await transactions.filter(tx => tx.commitHeight === undefined || tx.commitHeight === null).toArray(); + } else { + transactionRecords = await transactions.toArray(); + } + + if (transactionRecords.length === 0) { + return []; + } + + const scriptHashes = transactionRecords.map(transactionRecord => { + return transactionRecord.scriptHash + }); + + const scripts = await transactionScripts.where("scriptHash").anyOf(scriptHashes).toArray(); + + // Create a map of scriptHash to script for quick lookup + const scriptMap = new Map(); + scripts.forEach(script => { + scriptMap.set(script.scriptHash, script.program); + }); + + const processedTransactions = await Promise.all(transactionRecords.map(async transactionRecord => { + let scriptProgramBase64 = null; + + if (transactionRecord.scriptHash) { + const scriptProgram = scriptMap.get(transactionRecord.scriptHash); + + if (scriptProgram) { + let scriptProgramArrayBuffer = await scriptProgram.arrayBuffer(); + let scriptProgramArray = new Uint8Array(scriptProgramArrayBuffer); + scriptProgramBase64 = uint8ArrayToBase64(scriptProgramArray); + } + } + + let outputNotesArrayBuffer = await transactionRecord.outputNotes.arrayBuffer(); + let outputNotesArray = new Uint8Array(outputNotesArrayBuffer); + let outputNotesBase64 = uint8ArrayToBase64(outputNotesArray); + + transactionRecord.outputNotes = outputNotesBase64; + + let data = { + id: transactionRecord.id, + account_id: transactionRecord.accountId, + init_account_state: transactionRecord.initAccountState, + final_account_state: transactionRecord.finalAccountState, + input_notes: transactionRecord.inputNotes, + output_notes: 
transactionRecord.outputNotes, + script_hash: transactionRecord.scriptHash ? transactionRecord.scriptHash : null, + script_program: scriptProgramBase64, + script_inputs: transactionRecord.scriptInputs ? transactionRecord.scriptInputs : null, + block_num: transactionRecord.blockNum, + commit_height: transactionRecord.commitHeight ? transactionRecord.commitHeight : null + } + + return data; + })); + + return processedTransactions + } catch (err) { + console.error("Failed to get transactions: ", err); + throw err; + } +} + +export async function insertTransactionScript( + scriptHash, + scriptProgram +) { + try { + // check if script hash already exists + let record = await transactionScripts.where("scriptHash").equals(scriptHash).first(); + + if (record) { + return; + } + + if (!scriptHash) { + throw new Error("Script hash must be provided"); + } + + let scriptHashArray = new Uint8Array(scriptHash); + let scriptHashBase64 = uint8ArrayToBase64(scriptHashArray); + let scriptProgramBlob = null; + + if (scriptProgram) { + scriptProgramBlob = new Blob([new Uint8Array(scriptProgram)]); + } + + const data = { + scriptHash: scriptHashBase64, + program: scriptProgramBlob + } + + await transactionScripts.add(data); + } catch (error) { + // Check if the error is because the record already exists + if (error.name === 'ConstraintError') { + } else { + // Re-throw the error if it's not a constraint error + throw error; + } + } +} + +export async function insertProvenTransactionData( + transactionId, + accountId, + initAccountState, + finalAccountState, + inputNotes, + outputNotes, + scriptHash, + scriptInputs, + blockNum, + committed +) { + try { + let scriptHashBase64 = null; + let outputNotesBlob = new Blob([new Uint8Array(outputNotes)]); + if (scriptHash !== null) { + let scriptHashArray = new Uint8Array(scriptHash); + scriptHashBase64 = uint8ArrayToBase64(scriptHashArray); + } + + const data = { + id: transactionId, + accountId: accountId, + initAccountState: initAccountState, +
finalAccountState: finalAccountState, + inputNotes: inputNotes, + outputNotes: outputNotesBlob, + scriptHash: scriptHashBase64, + scriptInputs: scriptInputs ? scriptInputs : null, + blockNum: blockNum, + commitHeight: committed ? committed : null + } + + await transactions.add(data); + } catch (err) { + console.error("Failed to insert proven transaction data: ", err); + throw err; + } +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} diff --git a/crates/rust-client/src/store/web_store/mod.rs b/crates/rust-client/src/store/web_store/mod.rs new file mode 100644 index 000000000..63fa27abc --- /dev/null +++ b/crates/rust-client/src/store/web_store/mod.rs @@ -0,0 +1,221 @@ +use std::collections::BTreeMap; + +use miden_objects::{ + accounts::{Account, AccountId, AccountStub, AuthSecretKey}, + crypto::merkle::{InOrderIndex, MmrPeaks}, + notes::{NoteTag, Nullifier}, + BlockHeader, Digest, Word, +}; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; +use winter_maybe_async::{maybe_async, maybe_await}; + +use crate::{ + errors::StoreError, + store::{ + ChainMmrNodeFilter, InputNoteRecord, NoteFilter, OutputNoteRecord, Store, TransactionFilter, + }, + sync::StateSyncUpdate, + transactions::{TransactionRecord, TransactionResult}, +}; + +pub mod accounts; +pub mod chain_data; +pub mod notes; +pub mod sync; +pub mod transactions; + +// Initialize IndexedDB +#[wasm_bindgen(module = "/src/store/web_store/js/schema.js")] +extern "C" { + #[wasm_bindgen(js_name = openDatabase)] + fn setup_indexed_db() -> js_sys::Promise; +} + +pub struct WebStore {} + +impl WebStore { + pub async fn new() -> Result { + let _ = JsFuture::from(setup_indexed_db()).await; + Ok(WebStore {}) + } +} + +impl Store for WebStore { + // SYNC + // -------------------------------------------------------------------------------------------- + + #[maybe_async] + fn get_note_tags(&self) -> Result, 
StoreError> { + maybe_await!(self.get_note_tags()) + } + + #[maybe_async] + fn add_note_tag(&self, tag: NoteTag) -> Result { + maybe_await!(self.add_note_tag(tag)) + } + + #[maybe_async] + fn remove_note_tag(&self, tag: NoteTag) -> Result { + maybe_await!(self.remove_note_tag(tag)) + } + + #[maybe_async] + fn get_sync_height(&self) -> Result { + maybe_await!(self.get_sync_height()) + } + + #[maybe_async] + fn apply_state_sync(&self, state_sync_update: StateSyncUpdate) -> Result<(), StoreError> { + maybe_await!(self.apply_state_sync(state_sync_update)) + } + + // TRANSACTIONS + // -------------------------------------------------------------------------------------------- + + #[maybe_async] + fn get_transactions( + &self, + transaction_filter: TransactionFilter, + ) -> Result, StoreError> { + maybe_await!(self.get_transactions(transaction_filter)) + } + + #[maybe_async] + fn apply_transaction(&self, tx_result: TransactionResult) -> Result<(), StoreError> { + maybe_await!(self.apply_transaction(tx_result)) + } + + // NOTES + // -------------------------------------------------------------------------------------------- + + #[maybe_async] + fn get_input_notes(&self, filter: NoteFilter<'_>) -> Result, StoreError> { + maybe_await!(self.get_input_notes(filter)) + } + + #[maybe_async] + fn get_output_notes( + &self, + note_filter: NoteFilter<'_>, + ) -> Result, StoreError> { + maybe_await!(self.get_output_notes(note_filter)) + } + + #[maybe_async] + fn insert_input_note(&self, note: InputNoteRecord) -> Result<(), StoreError> { + maybe_await!(self.insert_input_note(note)) + } + + #[maybe_async] + fn update_note_inclusion_proof( + &self, + note_id: miden_objects::notes::NoteId, + inclusion_proof: miden_objects::notes::NoteInclusionProof, + ) -> Result<(), StoreError> { + maybe_await!(self.update_note_inclusion_proof(note_id, inclusion_proof)) + } + + #[maybe_async] + fn update_note_metadata( + &self, + note_id: miden_objects::notes::NoteId, + metadata: 
miden_objects::notes::NoteMetadata, + ) -> Result<(), StoreError> { + maybe_await!(self.update_note_metadata(note_id, metadata)) + } + + // CHAIN DATA + // -------------------------------------------------------------------------------------------- + + #[maybe_async] + fn insert_block_header( + &self, + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool, + ) -> Result<(), StoreError> { + maybe_await!(self.insert_block_header(block_header, chain_mmr_peaks, has_client_notes)) + } + + #[maybe_async] + fn get_block_headers( + &self, + block_numbers: &[u32], + ) -> Result, StoreError> { + maybe_await!(self.get_block_headers(block_numbers)) + } + + #[maybe_async] + fn get_tracked_block_headers(&self) -> Result, StoreError> { + maybe_await!(self.get_tracked_block_headers()) + } + + #[maybe_async] + fn get_chain_mmr_nodes<'a>( + &self, + filter: ChainMmrNodeFilter<'a>, + ) -> Result, StoreError> { + maybe_await!(self.get_chain_mmr_nodes(filter)) + } + + #[maybe_async] + fn insert_chain_mmr_nodes(&self, nodes: &[(InOrderIndex, Digest)]) -> Result<(), StoreError> { + maybe_await!(self.insert_chain_mmr_nodes(nodes)) + } + + #[maybe_async] + fn get_chain_mmr_peaks_by_block_num(&self, block_num: u32) -> Result { + maybe_await!(self.get_chain_mmr_peaks_by_block_num(block_num)) + } + + // ACCOUNTS + // -------------------------------------------------------------------------------------------- + + #[maybe_async] + fn insert_account( + &self, + account: &Account, + account_seed: Option, + auth_info: &AuthSecretKey, + ) -> Result<(), StoreError> { + maybe_await!(self.insert_account(account, account_seed, auth_info)) + } + + #[maybe_async] + fn get_account_ids(&self) -> Result, StoreError> { + maybe_await!(self.get_account_ids()) + } + + #[maybe_async] + fn get_account_stubs(&self) -> Result)>, StoreError> { + maybe_await!(self.get_account_stubs()) + } + + #[maybe_async] + fn get_account_stub( + &self, + account_id: AccountId, + ) -> 
Result<(AccountStub, Option), StoreError> { + maybe_await!(self.get_account_stub(account_id)) + } + + #[maybe_async] + fn get_account(&self, account_id: AccountId) -> Result<(Account, Option), StoreError> { + maybe_await!(self.get_account(account_id)) + } + + #[maybe_async] + fn get_account_auth(&self, account_id: AccountId) -> Result { + maybe_await!(self.get_account_auth(account_id)) + } + + fn get_account_auth_by_pub_key(&self, pub_key: Word) -> Result { + self.get_account_auth_by_pub_key(pub_key) + } + + #[maybe_async] + fn get_unspent_input_note_nullifiers(&self) -> Result, StoreError> { + maybe_await!(self.get_unspent_input_note_nullifiers()) + } +} diff --git a/crates/rust-client/src/store/web_store/notes/js_bindings.rs b/crates/rust-client/src/store/web_store/notes/js_bindings.rs new file mode 100644 index 000000000..099dc9a43 --- /dev/null +++ b/crates/rust-client/src/store/web_store/notes/js_bindings.rs @@ -0,0 +1,80 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Notes IndexedDB Operations +#[wasm_bindgen(module = "/src/store/web_store/js/notes.js")] + +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getInputNotes)] + pub fn idxdb_get_input_notes(status: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getIgnoredInputNotes)] + pub fn idxdb_get_ignored_input_notes() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getIgnoredOutputNotes)] + pub fn idxdb_get_ignored_output_notes() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getInputNotesFromIds)] + pub fn idxdb_get_input_notes_from_ids(note_ids: Vec) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getOutputNotes)] + pub fn idxdb_get_output_notes(status: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getOutputNotesFromIds)] + pub fn idxdb_get_output_notes_from_ids(note_ids: Vec) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getUnspentInputNoteNullifiers)] 
+ pub fn idxdb_get_unspent_input_note_nullifiers() -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertInputNote)] + pub fn idxdb_insert_input_note( + note_id: String, + assets: Vec, + recipient: String, + status: String, + metadata: Option, + details: String, + note_script_hash: String, + serialized_note_script: Vec, + inclusion_proof: Option, + serialized_created_at: String, + ignored: bool, + imported_tag: Option, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertOutputNote)] + pub fn idxdb_insert_output_note( + note_id: String, + assets: Vec, + recipient: String, + status: String, + metadata: String, + details: Option, + note_script_hash: Option, + serialized_note_script: Option>, + inclusion_proof: Option, + serialized_created_at: String, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = updateNoteConsumerTxId)] + pub fn idxdb_update_note_consumer_tx_id( + note_id: String, + consumer_tx_id: String, + submitted_at: String, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = updateNoteInclusionProof)] + pub fn idxdb_update_note_inclusion_proof( + note_id: String, + inclusion_proof: String, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = updateNoteMetadata)] + pub fn idxdb_update_note_metadata(note_id: String, metadata: String) -> js_sys::Promise; +} diff --git a/crates/rust-client/src/store/web_store/notes/mod.rs b/crates/rust-client/src/store/web_store/notes/mod.rs new file mode 100644 index 000000000..8834f8946 --- /dev/null +++ b/crates/rust-client/src/store/web_store/notes/mod.rs @@ -0,0 +1,200 @@ +use miden_objects::{ + notes::{NoteId, NoteInclusionProof, NoteMetadata, Nullifier}, + Digest, +}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::*; + +use super::WebStore; +use crate::{ + errors::StoreError, + store::{InputNoteRecord, NoteFilter, OutputNoteRecord}, +}; + +mod js_bindings; +use js_bindings::*; + 
+mod models; +use models::*; + +pub(crate) mod utils; +use utils::*; + +impl WebStore { + pub(crate) async fn get_input_notes( + &self, + filter: NoteFilter<'_>, + ) -> Result, StoreError> { + let promise = match filter { + NoteFilter::All + | NoteFilter::Consumed + | NoteFilter::Committed + | NoteFilter::Expected + | NoteFilter::Processing => { + let filter_as_str = match filter { + NoteFilter::All => "All", + NoteFilter::Consumed => "Consumed", + NoteFilter::Committed => "Committed", + NoteFilter::Expected => "Expected", + NoteFilter::Processing => "Processing", + _ => unreachable!(), // Safety net, should never be reached + }; + + // Assuming `js_fetch_notes` is your JavaScript function that handles simple string filters + idxdb_get_input_notes(filter_as_str.to_string()) + }, + NoteFilter::Ignored => idxdb_get_ignored_input_notes(), + NoteFilter::List(ids) => { + let note_ids_as_str: Vec = + ids.iter().map(|id| id.inner().to_string()).collect(); + idxdb_get_input_notes_from_ids(note_ids_as_str) + }, + NoteFilter::Unique(id) => { + let note_id_as_str = id.inner().to_string(); + let note_ids = vec![note_id_as_str]; + idxdb_get_input_notes_from_ids(note_ids) + }, + }; + + let js_value = JsFuture::from(promise).await.unwrap(); + let input_notes_idxdb: Vec = from_value(js_value).unwrap(); + + let native_input_notes: Result, StoreError> = input_notes_idxdb + .into_iter() + .map(parse_input_note_idxdb_object) // Simplified closure + .collect::, _>>(); // Collect results into a single Result + + match native_input_notes { + Ok(ref notes) => match filter { + NoteFilter::Unique(note_id) if notes.is_empty() => { + return Err(StoreError::NoteNotFound(note_id)); + }, + NoteFilter::List(note_ids) if note_ids.len() != notes.len() => { + let missing_note_id = note_ids + .iter() + .find(|&¬e_id| { + !notes.iter().any(|note_record| note_record.id() == note_id) + }) + .expect("should find one note id that wasn't retrieved by the db"); + return 
Err(StoreError::NoteNotFound(*missing_note_id)); + }, + _ => {}, + }, + Err(e) => return Err(e), + } + + native_input_notes + } + + pub(crate) async fn get_output_notes( + &self, + filter: NoteFilter<'_>, + ) -> Result, StoreError> { + let promise = match filter { + NoteFilter::All + | NoteFilter::Consumed + | NoteFilter::Committed + | NoteFilter::Expected + | NoteFilter::Processing => { + let filter_as_str = match filter { + NoteFilter::All => "All", + NoteFilter::Consumed => "Consumed", + NoteFilter::Committed => "Committed", + NoteFilter::Expected => "Expected", + NoteFilter::Processing => "Processing", + _ => unreachable!(), // Safety net, should never be reached + }; + + // Assuming `js_fetch_notes` is your JavaScript function that handles simple string filters + + idxdb_get_output_notes(filter_as_str.to_string()) + }, + NoteFilter::Ignored => idxdb_get_ignored_output_notes(), + NoteFilter::List(ids) => { + let note_ids_as_str: Vec = + ids.iter().map(|id| id.inner().to_string()).collect(); + idxdb_get_output_notes_from_ids(note_ids_as_str) + }, + NoteFilter::Unique(id) => { + let note_id_as_str = id.inner().to_string(); + let note_ids = vec![note_id_as_str]; + idxdb_get_output_notes_from_ids(note_ids) + }, + }; + + let js_value = JsFuture::from(promise).await.unwrap(); + + let output_notes_idxdb: Vec = from_value(js_value).unwrap(); + + let native_output_notes: Result, StoreError> = output_notes_idxdb + .into_iter() + .map(parse_output_note_idxdb_object) // Simplified closure + .collect::, _>>(); // Collect results into a single Result + + match native_output_notes { + Ok(ref notes) => match filter { + NoteFilter::Unique(note_id) if notes.is_empty() => { + return Err(StoreError::NoteNotFound(note_id)); + }, + NoteFilter::List(note_ids) if note_ids.len() != notes.len() => { + let missing_note_id = note_ids + .iter() + .find(|&¬e_id| { + !notes.iter().any(|note_record| note_record.id() == note_id) + }) + .expect("should find one note id that wasn't retrieved by 
the db"); + return Err(StoreError::NoteNotFound(*missing_note_id)); + }, + _ => {}, + }, + Err(e) => return Err(e), + } + + native_output_notes + } + + pub(crate) async fn get_unspent_input_note_nullifiers( + &self, + ) -> Result, StoreError> { + let promise = idxdb_get_unspent_input_note_nullifiers(); + let js_value = JsFuture::from(promise).await.unwrap(); + let nullifiers_as_str: Vec = from_value(js_value).unwrap(); + + nullifiers_as_str + .into_iter() + .map(|s| Digest::try_from(s).map(Nullifier::from).map_err(StoreError::HexParseError)) + .collect::, _>>() + } + + pub(crate) async fn insert_input_note(&self, note: InputNoteRecord) -> Result<(), StoreError> { + insert_input_note_tx(note).await + } + + pub async fn update_note_inclusion_proof( + &self, + note_id: NoteId, + inclusion_proof: NoteInclusionProof, + ) -> Result<(), StoreError> { + let note_id_as_str = note_id.inner().to_string(); + let inclusion_proof_as_str = serde_json::to_string(&inclusion_proof).unwrap(); + + let promise = idxdb_update_note_inclusion_proof(note_id_as_str, inclusion_proof_as_str); + let _ = JsFuture::from(promise).await.unwrap(); + + Ok(()) + } + + pub async fn update_note_metadata( + &self, + note_id: NoteId, + metadata: NoteMetadata, + ) -> Result<(), StoreError> { + let note_id_as_str = note_id.inner().to_string(); + let metadata_as_str = serde_json::to_string(&metadata).unwrap(); + + let promise = idxdb_update_note_metadata(note_id_as_str, metadata_as_str); + let _ = JsFuture::from(promise).await.unwrap(); + + Ok(()) + } +} diff --git a/crates/rust-client/src/store/web_store/notes/models.rs b/crates/rust-client/src/store/web_store/notes/models.rs new file mode 100644 index 000000000..ef4faff2a --- /dev/null +++ b/crates/rust-client/src/store/web_store/notes/models.rs @@ -0,0 +1,59 @@ +use base64::decode as base64_decode; +use serde::{de::Error, Deserialize, Deserializer, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct InputNoteIdxdbObject { + 
#[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub assets: Vec, + pub details: String, + pub recipient: String, + pub status: String, + pub metadata: Option, + pub inclusion_proof: Option, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub serialized_note_script: Vec, + pub consumer_account_id: Option, + pub created_at: String, + pub submitted_at: Option, + pub nullifier_height: Option, + pub ignored: bool, + pub imported_tag: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct OutputNoteIdxdbObject { + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub assets: Vec, + pub details: Option, + pub recipient: String, + pub status: String, + pub metadata: String, + pub inclusion_proof: Option, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub serialized_note_script: Option>, + pub consumer_account_id: Option, + pub created_at: String, + pub submitted_at: Option, + pub nullifier_height: Option, +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None => Ok(None), + } +} diff --git a/crates/rust-client/src/store/web_store/notes/utils.rs b/crates/rust-client/src/store/web_store/notes/utils.rs new file mode 100644 index 000000000..6f423e1b1 --- /dev/null +++ b/crates/rust-client/src/store/web_store/notes/utils.rs @@ -0,0 +1,422 @@ +use chrono::Utc; +use miden_objects::{ + accounts::AccountId, + notes::{NoteAssets, NoteId, NoteInclusionProof, 
NoteMetadata, NoteScript, NoteTag}, + transaction::TransactionId, + utils::Deserializable, + Digest, +}; +use miden_tx::utils::{DeserializationError, Serializable}; +use wasm_bindgen_futures::*; + +use super::{js_bindings::*, InputNoteIdxdbObject, OutputNoteIdxdbObject}; +use crate::{ + errors::StoreError, + store::{ + note_record::{ + NOTE_STATUS_COMMITTED, NOTE_STATUS_CONSUMED, NOTE_STATUS_EXPECTED, + NOTE_STATUS_PROCESSING, + }, + InputNoteRecord, NoteRecordDetails, NoteStatus, OutputNoteRecord, + }, +}; + +// TYPES +// ================================================================================================ + +pub struct SerializedInputNoteData { + pub note_id: String, + pub note_assets: Vec, + pub recipient: String, + pub status: String, + pub metadata: Option, + pub details: String, + pub note_script_hash: String, + pub note_script: Vec, + pub inclusion_proof: Option, + pub created_at: String, + pub ignored: bool, + pub imported_tag: Option, +} + +pub struct SerializedOutputNoteData { + pub note_id: String, + pub note_assets: Vec, + pub recipient: String, + pub status: String, + pub metadata: String, + pub details: Option, + pub note_script_hash: Option, + pub note_script: Option>, + pub inclusion_proof: Option, + pub created_at: String, +} + +// ================================================================================================ + +pub(crate) async fn update_note_consumer_tx_id( + note_id: NoteId, + consumer_tx_id: TransactionId, +) -> Result<(), StoreError> { + let serialized_note_id = note_id.inner().to_string(); + let serialized_consumer_tx_id = consumer_tx_id.to_string(); + let serialized_submitted_at = Utc::now().timestamp().to_string(); + + let promise = idxdb_update_note_consumer_tx_id( + serialized_note_id, + serialized_consumer_tx_id, + serialized_submitted_at, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(crate) fn serialize_input_note( + note: InputNoteRecord, +) -> Result { + let note_id = 
note.id().inner().to_string(); + let note_assets = note.assets().to_bytes(); + + let (inclusion_proof, status) = match note.inclusion_proof() { + Some(proof) => { + let block_num = proof.origin().block_num; + let node_index = proof.origin().node_index.value(); + let sub_hash = proof.sub_hash(); + let note_root = proof.note_root(); + + let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + node_index, + proof.note_path().clone(), + )?) + .map_err(StoreError::InputSerializationError)?; + + let status = NOTE_STATUS_COMMITTED.to_string(); + (Some(inclusion_proof), status) + }, + None => { + let status = NOTE_STATUS_EXPECTED.to_string(); + + (None, status) + }, + }; + let recipient = note.recipient().to_hex(); + + let metadata = if let Some(metadata) = note.metadata() { + Some(serde_json::to_string(metadata).map_err(StoreError::InputSerializationError)?) + } else { + None + }; + + let details = + serde_json::to_string(¬e.details()).map_err(StoreError::InputSerializationError)?; + let note_script_hash = note.details().script_hash().to_hex(); + let note_script = note.details().script().to_bytes(); + let created_at = Utc::now().timestamp().to_string(); + let ignored = note.ignored(); + let imported_tag: Option = note.imported_tag().map(|tag| tag.into()); + let imported_tag_str: Option = imported_tag.map(|tag| tag.to_string()); + + Ok(SerializedInputNoteData { + note_id, + note_assets, + recipient, + status, + metadata, + details, + note_script_hash, + note_script, + inclusion_proof, + created_at, + ignored, + imported_tag: imported_tag_str, + }) +} + +pub async fn insert_input_note_tx(note: InputNoteRecord) -> Result<(), StoreError> { + let serialized_data = serialize_input_note(note)?; + + let promise = idxdb_insert_input_note( + serialized_data.note_id, + serialized_data.note_assets, + serialized_data.recipient, + serialized_data.status, + serialized_data.metadata, + serialized_data.details, + 
serialized_data.note_script_hash, + serialized_data.note_script, + serialized_data.inclusion_proof, + serialized_data.created_at, + serialized_data.ignored, + serialized_data.imported_tag, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(crate) fn serialize_output_note( + note: &OutputNoteRecord, +) -> Result { + let note_id = note.id().inner().to_string(); + let note_assets = note.assets().to_bytes(); + let (inclusion_proof, status) = match note.inclusion_proof() { + Some(proof) => { + let block_num = proof.origin().block_num; + let node_index = proof.origin().node_index.value(); + let sub_hash = proof.sub_hash(); + let note_root = proof.note_root(); + + let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + node_index, + proof.note_path().clone(), + )?) + .map_err(StoreError::InputSerializationError)?; + + let status = NOTE_STATUS_COMMITTED.to_string(); + + (Some(inclusion_proof), status) + }, + None => { + let status = NOTE_STATUS_EXPECTED.to_string(); + + (None, status) + }, + }; + let recipient = note.recipient().to_hex(); + + let metadata = + serde_json::to_string(note.metadata()).map_err(StoreError::InputSerializationError)?; + + let details = if let Some(details) = note.details() { + Some(serde_json::to_string(&details).map_err(StoreError::InputSerializationError)?) 
+ } else { + None + }; + let note_script_hash = note.details().map(|details| details.script_hash().to_hex()); + let note_script = note.details().map(|details| details.script().to_bytes()); + let created_at = Utc::now().timestamp().to_string(); + + Ok(SerializedOutputNoteData { + note_id, + note_assets, + recipient, + status, + metadata, + details, + note_script_hash, + note_script, + inclusion_proof, + created_at, + }) +} + +pub async fn insert_output_note_tx(note: &OutputNoteRecord) -> Result<(), StoreError> { + let serialized_data = serialize_output_note(note)?; + + let result = JsFuture::from(idxdb_insert_output_note( + serialized_data.note_id, + serialized_data.note_assets, + serialized_data.recipient, + serialized_data.status, + serialized_data.metadata, + serialized_data.details, + serialized_data.note_script_hash, + serialized_data.note_script, + serialized_data.inclusion_proof, + serialized_data.created_at, + )) + .await; + match result { + Ok(_) => Ok(()), + Err(_) => Err(StoreError::QueryError("Failed to insert output note".to_string())), + } +} + +pub fn parse_input_note_idxdb_object( + note_idxdb: InputNoteIdxdbObject, +) -> Result { + // Merge the info that comes from the input notes table and the notes script table + let note_script = NoteScript::read_from_bytes(¬e_idxdb.serialized_note_script)?; + let note_details: NoteRecordDetails = serde_json::from_str(¬e_idxdb.details) + .map_err(StoreError::JsonDataDeserializationError)?; + let note_details = NoteRecordDetails::new( + note_details.nullifier().to_string(), + note_script, + note_details.inputs().clone(), + note_details.serial_num(), + ); + + let note_metadata: Option = + if let Some(metadata_as_json_str) = note_idxdb.metadata { + Some( + serde_json::from_str(&metadata_as_json_str) + .map_err(StoreError::JsonDataDeserializationError)?, + ) + } else { + None + }; + + let note_assets = NoteAssets::read_from_bytes(¬e_idxdb.assets)?; + + let inclusion_proof = match note_idxdb.inclusion_proof { + 
Some(note_inclusion_proof) => { + let note_inclusion_proof: NoteInclusionProof = + serde_json::from_str(¬e_inclusion_proof) + .map_err(StoreError::JsonDataDeserializationError)?; + + Some(note_inclusion_proof) + }, + _ => None, + }; + + let recipient = Digest::try_from(note_idxdb.recipient)?; + let id = NoteId::new(recipient, note_assets.commitment()); + let consumer_account_id: Option = match note_idxdb.consumer_account_id { + Some(account_id) => Some(AccountId::from_hex(&account_id)?), + None => None, + }; + let created_at = note_idxdb.created_at.parse::().expect("Failed to parse created_at"); + let submitted_at: Option = note_idxdb + .submitted_at + .map(|submitted_at| submitted_at.parse::().expect("Failed to parse submitted_at")); + let nullifier_height: Option = note_idxdb.nullifier_height.map(|nullifier_height| { + nullifier_height.parse::().expect("Failed to parse nullifier_height") + }); + + // If the note is committed and has a consumer account id, then it was consumed locally but the client is not synced with the chain + let status = match note_idxdb.status.as_str() { + NOTE_STATUS_EXPECTED => NoteStatus::Expected { created_at }, + NOTE_STATUS_COMMITTED => NoteStatus::Committed { + block_height: inclusion_proof + .clone() + .map(|proof| proof.origin().block_num as u64) + .expect("Committed note should have inclusion proof"), + }, + NOTE_STATUS_PROCESSING => NoteStatus::Processing { + consumer_account_id: consumer_account_id + .expect("Processing note should have consumer account id"), + submitted_at: submitted_at.expect("REASON"), + }, + NOTE_STATUS_CONSUMED => NoteStatus::Consumed { + consumer_account_id, + block_height: nullifier_height.expect("REASON"), + }, + _ => { + return Err(StoreError::DataDeserializationError(DeserializationError::InvalidValue( + format!("NoteStatus: {}", note_idxdb.status), + ))) + }, + }; + + let imported_tag_as_u32: Option = + note_idxdb.imported_tag.as_ref().map(|tag| tag.parse::().ok()).flatten(); + + 
Ok(InputNoteRecord::new( + id, + recipient, + note_assets, + status, + note_metadata, + inclusion_proof, + note_details, + note_idxdb.ignored, + imported_tag_as_u32.map(NoteTag::from), + )) +} + +pub fn parse_output_note_idxdb_object( + note_idxdb: OutputNoteIdxdbObject, +) -> Result { + let note_details: Option = + if let Some(details_as_json_str) = note_idxdb.details { + // Merge the info that comes from the input notes table and the notes script table + let serialized_note_script = note_idxdb + .serialized_note_script + .expect("Has note details so it should have the serialized script"); + let note_script = NoteScript::read_from_bytes(&serialized_note_script)?; + let note_details: NoteRecordDetails = serde_json::from_str(&details_as_json_str) + .map_err(StoreError::JsonDataDeserializationError)?; + let note_details = NoteRecordDetails::new( + note_details.nullifier().to_string(), + note_script, + note_details.inputs().clone(), + note_details.serial_num(), + ); + + Some(note_details) + } else { + None + }; + let note_metadata: NoteMetadata = serde_json::from_str(¬e_idxdb.metadata) + .map_err(StoreError::JsonDataDeserializationError)?; + + let note_assets = NoteAssets::read_from_bytes(¬e_idxdb.assets)?; + + let inclusion_proof = match note_idxdb.inclusion_proof { + Some(note_inclusion_proof) => { + let note_inclusion_proof: NoteInclusionProof = + serde_json::from_str(¬e_inclusion_proof) + .map_err(StoreError::JsonDataDeserializationError)?; + + Some(note_inclusion_proof) + }, + _ => None, + }; + + let recipient = Digest::try_from(note_idxdb.recipient)?; + let id = NoteId::new(recipient, note_assets.commitment()); + + let consumer_account_id: Option = match note_idxdb.consumer_account_id { + Some(account_id) => Some(AccountId::from_hex(&account_id)?), + None => None, + }; + let created_at = note_idxdb.created_at.parse::().expect("Failed to parse created_at"); + let submitted_at: Option = note_idxdb + .submitted_at + .map(|submitted_at| 
submitted_at.parse::().expect("Failed to parse submitted_at")); + let nullifier_height: Option = note_idxdb.nullifier_height.map(|nullifier_height| { + nullifier_height.parse::().expect("Failed to parse nullifier_height") + }); + + // If the note is committed and has a consumer account id, then it was consumed locally but the client is not synced with the chain + let status = match note_idxdb.status.as_str() { + NOTE_STATUS_EXPECTED => NoteStatus::Expected { created_at }, + NOTE_STATUS_COMMITTED => NoteStatus::Committed { + block_height: inclusion_proof + .clone() + .map(|proof| proof.origin().block_num as u64) + .expect("Committed note should have inclusion proof"), + }, + NOTE_STATUS_PROCESSING => NoteStatus::Processing { + consumer_account_id: consumer_account_id + .expect("Processing note should have consumer account id"), + submitted_at: submitted_at.expect("Processing note should have submition timestamp"), + }, + NOTE_STATUS_CONSUMED => NoteStatus::Consumed { + consumer_account_id, + block_height: nullifier_height.expect("Consumed note should have nullifier height"), + }, + _ => { + return Err(StoreError::DataDeserializationError(DeserializationError::InvalidValue( + format!("NoteStatus: {}", note_idxdb.status), + ))) + }, + }; + + Ok(OutputNoteRecord::new( + id, + recipient, + note_assets, + status, + note_metadata, + inclusion_proof, + note_details, + )) +} diff --git a/crates/rust-client/src/store/web_store/sync/js_bindings.rs b/crates/rust-client/src/store/web_store/sync/js_bindings.rs new file mode 100644 index 000000000..63fea829d --- /dev/null +++ b/crates/rust-client/src/store/web_store/sync/js_bindings.rs @@ -0,0 +1,44 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Sync IndexedDB Operations +#[wasm_bindgen(module = "/src/store/web_store/js/sync.js")] + +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getSyncHeight)] + pub 
fn idxdb_get_sync_height() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getNoteTags)] + pub fn idxdb_get_note_tags() -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = addNoteTag)] + pub fn idxdb_add_note_tag(tags: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = applyStateSync)] + pub fn idxdb_apply_state_sync( + block_num: String, + nullifiers: Vec, + nullifier_block_nums: Vec, + block_header: String, + chain_mmr_peaks: String, + has_client_notes: bool, + serialized_node_ids: Vec, + serialized_nodes: Vec, + output_note_ids: Vec, + output_note_inclusion_proofs: Vec, + input_note_ids: Vec, + input_note_inclusion_proofs: Vec, + input_note_metadatas: Vec, + transactions_to_commit: Vec, + transactions_to_commit_block_nums: Vec, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = updateIgnoredNotesForTag)] + pub fn idxdb_update_ignored_notes_for_tag(tag: String) -> js_sys::Promise; +} diff --git a/crates/rust-client/src/store/web_store/sync/mod.rs b/crates/rust-client/src/store/web_store/sync/mod.rs new file mode 100644 index 000000000..bd0b948e6 --- /dev/null +++ b/crates/rust-client/src/store/web_store/sync/mod.rs @@ -0,0 +1,209 @@ +use miden_objects::notes::{NoteInclusionProof, NoteTag}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::*; + +use super::{ + chain_data::utils::serialize_chain_mmr_node, notes::utils::insert_input_note_tx, + transactions::utils::update_account, WebStore, +}; +use crate::{errors::StoreError, sync::StateSyncUpdate}; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +impl WebStore { + pub(crate) async fn get_note_tags(&self) -> Result, StoreError> { + let promise = idxdb_get_note_tags(); + let js_value = JsFuture::from(promise).await.unwrap(); + let tags_idxdb: NoteTagsIdxdbObject = from_value(js_value).unwrap(); + + let tags: Vec = 
serde_json::from_str(&tags_idxdb.tags).unwrap(); + + Ok(tags) + } + + pub(super) async fn get_sync_height(&self) -> Result { + let promise = idxdb_get_sync_height(); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_num_idxdb: SyncHeightIdxdbObject = from_value(js_value).unwrap(); + + let block_num_as_u32: u32 = block_num_idxdb.block_num.parse::().unwrap(); + Ok(block_num_as_u32) + } + + pub(super) async fn add_note_tag(&self, tag: NoteTag) -> Result { + let mut tags = self.get_note_tags().await.unwrap(); + if tags.contains(&tag) { + return Ok(false); + } + tags.push(tag); + let tags = serde_json::to_string(&tags).map_err(StoreError::InputSerializationError)?; + + let promise = idxdb_add_note_tag(tags); + JsFuture::from(promise).await.unwrap(); + + let tag_as_u32 = u32::from(tag); + let tag_as_str = tag_as_u32.to_string(); + let promise = idxdb_update_ignored_notes_for_tag(tag_as_str); + JsFuture::from(promise).await.unwrap(); + + Ok(true) + } + + pub(super) async fn remove_note_tag(&self, tag: NoteTag) -> Result { + let mut tags = self.get_note_tags().await?; + if let Some(index_of_tag) = tags.iter().position(|&tag_candidate| tag_candidate == tag) { + tags.remove(index_of_tag); + + let tags = serde_json::to_string(&tags).map_err(StoreError::InputSerializationError)?; + + let promise = idxdb_add_note_tag(tags); + JsFuture::from(promise).await.unwrap(); + return Ok(true); + } + + Ok(false) + } + + pub(super) async fn apply_state_sync( + &self, + state_sync_update: StateSyncUpdate, + ) -> Result<(), StoreError> { + let StateSyncUpdate { + block_header, + nullifiers, + synced_new_notes: committed_notes, + transactions_to_commit: committed_transactions, + new_mmr_peaks, + new_authentication_nodes, + updated_onchain_accounts, + block_has_relevant_notes, + } = state_sync_update; + + // Serialize data for updating state sync and block header + let block_num_as_str = block_header.block_num().to_string(); + + // Serialize data for updating spent notes + 
let nullifiers_as_str = nullifiers + .iter() + .map(|nullifier_update| nullifier_update.nullifier.to_hex()) + .collect(); + let nullifier_block_nums_as_str = nullifiers + .iter() + .map(|nullifier_update| nullifier_update.block_num.to_string()) + .collect(); + + // Serialize data for updating block header + let block_header_as_str = + serde_json::to_string(&block_header).map_err(StoreError::InputSerializationError)?; + let new_mmr_peaks_as_str = serde_json::to_string(&new_mmr_peaks.peaks().to_vec()) + .map_err(StoreError::InputSerializationError)?; + + // Serialize data for updating chain MMR nodes + let mut serialized_node_ids = Vec::new(); + let mut serialized_nodes = Vec::new(); + for (id, node) in new_authentication_nodes.iter() { + let serialized_data = serialize_chain_mmr_node(*id, *node)?; + serialized_node_ids.push(serialized_data.id); + serialized_nodes.push(serialized_data.node); + } + + // Serialize data for updating committed notes + let output_note_ids_as_str: Vec = committed_notes + .updated_output_notes() + .iter() + .map(|(note_id, _)| note_id.inner().to_hex()) + .collect(); + let output_note_inclusion_proofs_as_str: Vec = committed_notes + .updated_output_notes() + .iter() + .map(|(_, inclusion_proof)| { + let block_num = inclusion_proof.origin().block_num; + let sub_hash = inclusion_proof.sub_hash(); + let note_root = inclusion_proof.note_root(); + let note_index = inclusion_proof.origin().node_index.value(); + + // Create a NoteInclusionProof and serialize it to JSON, handle errors with `?` + let proof = NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + note_index, + inclusion_proof.note_path().clone(), + ) + .unwrap(); + + serde_json::to_string(&proof).unwrap() + }) + .collect(); + + let input_note_ids_as_str: Vec = committed_notes + .updated_input_notes() + .iter() + .map(|input_note| input_note.id().inner().to_hex()) + .collect(); + let input_note_inclusion_proofs_as_str: Vec = committed_notes + .updated_input_notes() + .iter() 
+ .map(|input_note| { + let inclusion_proof = + input_note.proof().expect("Expected a valid NoteInclusionProof"); + serde_json::to_string(inclusion_proof).unwrap() + }) + .collect(); + let input_note_metadatas_as_str: Vec = committed_notes + .updated_input_notes() + .iter() + .map(|input_note| { + let metadata = input_note.note().metadata(); + serde_json::to_string(metadata).unwrap() + }) + .collect(); + + // TODO: LOP INTO idxdb_apply_state_sync call + // Commit new public notes + for note in committed_notes.new_public_notes() { + insert_input_note_tx(note.clone().into()).await.unwrap(); + } + + // Serialize data for updating committed transactions + let transactions_to_commit_block_nums_as_str = committed_transactions + .iter() + .map(|tx_update| tx_update.block_num.to_string()) + .collect(); + let transactions_to_commit_as_str: Vec = committed_transactions + .iter() + .map(|tx_update| tx_update.transaction_id.to_string()) + .collect(); + + // TODO: LOP INTO idxdb_apply_state_sync call + // Update onchain accounts on the db that have been updated onchain + for account in updated_onchain_accounts { + update_account(&account.clone()).await.unwrap(); + } + + let promise = idxdb_apply_state_sync( + block_num_as_str, + nullifiers_as_str, + nullifier_block_nums_as_str, + block_header_as_str, + new_mmr_peaks_as_str, + block_has_relevant_notes, + serialized_node_ids, + serialized_nodes, + output_note_ids_as_str, + output_note_inclusion_proofs_as_str, + input_note_ids_as_str, + input_note_inclusion_proofs_as_str, + input_note_metadatas_as_str, + transactions_to_commit_as_str, + transactions_to_commit_block_nums_as_str, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } +} diff --git a/crates/rust-client/src/store/web_store/sync/models.rs b/crates/rust-client/src/store/web_store/sync/models.rs new file mode 100644 index 000000000..169f0c10c --- /dev/null +++ b/crates/rust-client/src/store/web_store/sync/models.rs @@ -0,0 +1,11 @@ +use serde::{Deserialize, 
Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct SyncHeightIdxdbObject { + pub block_num: String, +} + +#[derive(Serialize, Deserialize)] +pub struct NoteTagsIdxdbObject { + pub tags: String, +} diff --git a/crates/rust-client/src/store/web_store/transactions/js_bindings.rs b/crates/rust-client/src/store/web_store/transactions/js_bindings.rs new file mode 100644 index 000000000..808cb75d7 --- /dev/null +++ b/crates/rust-client/src/store/web_store/transactions/js_bindings.rs @@ -0,0 +1,33 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Transactions IndexedDB Operations +#[wasm_bindgen(module = "/src/store/web_store/js/transactions.js")] + +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getTransactions)] + pub fn idxdb_get_transactions(filter: String) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertTransactionScript)] + pub fn idxdb_insert_transaction_script( + script_hash: Vec, + script_program: Option>, + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertProvenTransactionData)] + pub fn idxdb_insert_proven_transaction_data( + transaction_id: String, + account_id: String, + init_account_state: String, + final_account_state: String, + input_notes: String, + output_notes: Vec, + script_hash: Option>, + script_inputs: Option, + block_num: String, + committed: Option, + ) -> js_sys::Promise; +} diff --git a/crates/rust-client/src/store/web_store/transactions/mod.rs b/crates/rust-client/src/store/web_store/transactions/mod.rs new file mode 100644 index 000000000..3cce8e02e --- /dev/null +++ b/crates/rust-client/src/store/web_store/transactions/mod.rs @@ -0,0 +1,161 @@ +use std::collections::BTreeMap; + +use miden_objects::{ + accounts::AccountId, + assembly::ProgramAst, + transaction::{OutputNotes, TransactionScript}, + Digest, Felt, +}; +use miden_tx::utils::Deserializable; +use serde_wasm_bindgen::from_value; +use 
wasm_bindgen_futures::*; + +use super::{ + notes::utils::{insert_input_note_tx, insert_output_note_tx, update_note_consumer_tx_id}, + WebStore, +}; +use crate::{ + errors::StoreError, + store::TransactionFilter, + transactions::{TransactionRecord, TransactionResult, TransactionStatus}, +}; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub mod utils; +use utils::*; + +impl WebStore { + pub async fn get_transactions( + &self, + filter: TransactionFilter, + ) -> Result, StoreError> { + let filter_as_str = match filter { + TransactionFilter::All => "All", + TransactionFilter::Uncomitted => "Uncomitted", + }; + + let promise = idxdb_get_transactions(filter_as_str.to_string()); + let js_value = JsFuture::from(promise).await.unwrap(); + let transactions_idxdb: Vec = from_value(js_value).unwrap(); + + let transaction_records: Result, StoreError> = transactions_idxdb + .into_iter() + .map(|tx_idxdb| { + let native_account_id = AccountId::from_hex(&tx_idxdb.account_id).unwrap(); + let block_num_as_u32: u32 = tx_idxdb.block_num.parse::().unwrap(); + let commit_height_as_u32: Option = + tx_idxdb.commit_height.map(|height| height.parse::().unwrap()); + + let id: Digest = tx_idxdb.id.try_into()?; + let init_account_state: Digest = tx_idxdb.init_account_state.try_into()?; + + let final_account_state: Digest = tx_idxdb.final_account_state.try_into()?; + + let input_note_nullifiers: Vec = + serde_json::from_str(&tx_idxdb.input_notes) + .map_err(StoreError::JsonDataDeserializationError)?; + + let output_notes = OutputNotes::read_from_bytes(&tx_idxdb.output_notes)?; + + let transaction_script: Option = + if tx_idxdb.script_hash.is_some() { + let script_hash = tx_idxdb + .script_hash + .map(|hash| Digest::read_from_bytes(&hash)) + .transpose()? + .expect("Script hash should be included in the row"); + + let script_program = tx_idxdb + .script_program + .map(|program| ProgramAst::from_bytes(&program)) + .transpose()? 
+ .expect("Script program should be included in the row"); + + let script_inputs = tx_idxdb + .script_inputs + .map(|hash| serde_json::from_str::>>(&hash)) + .transpose() + .map_err(StoreError::JsonDataDeserializationError)? + .expect("Script inputs should be included in the row"); + + let tx_script = TransactionScript::from_parts( + script_program, + script_hash, + script_inputs.into_iter().map(|(k, v)| (k.into(), v)), + )?; + + Some(tx_script) + } else { + None + }; + + let transaction_status = commit_height_as_u32 + .map_or(TransactionStatus::Pending, TransactionStatus::Committed); + + Ok(TransactionRecord { + id: id.into(), + account_id: native_account_id, + init_account_state, + final_account_state, + input_note_nullifiers, + output_notes, + transaction_script, + block_num: block_num_as_u32, + transaction_status, + }) + }) + .collect(); + + transaction_records + } + + pub async fn apply_transaction(&self, tx_result: TransactionResult) -> Result<(), StoreError> { + let transaction_id = tx_result.executed_transaction().id(); + let account_id = tx_result.executed_transaction().account_id(); + let account_delta = tx_result.account_delta(); + + let (mut account, _seed) = self.get_account(account_id).await.unwrap(); + + account.apply_delta(account_delta).map_err(StoreError::AccountError)?; + + // Save only input notes that we care for (based on the note screener assessment) + let created_input_notes = tx_result.relevant_notes().to_vec(); + + // Save all output notes + let created_output_notes = tx_result + .created_notes() + .iter() + .cloned() + .filter_map(|output_note| output_note.try_into().ok()) + .collect::>(); + + let consumed_note_ids = + tx_result.consumed_notes().iter().map(|note| note.id()).collect::>(); + + // Transaction Data + insert_proven_transaction_data(tx_result).await.unwrap(); + + // Account Data + update_account(&account).await.unwrap(); + + // Updates for notes + for note in created_input_notes { + insert_input_note_tx(note).await?; + } + + 
for note in &created_output_notes { + insert_output_note_tx(note).await?; + } + + for note_id in consumed_note_ids { + update_note_consumer_tx_id(note_id, transaction_id).await?; + } + + Ok(()) + } +} diff --git a/crates/rust-client/src/store/web_store/transactions/models.rs b/crates/rust-client/src/store/web_store/transactions/models.rs new file mode 100644 index 000000000..fd613fb5a --- /dev/null +++ b/crates/rust-client/src/store/web_store/transactions/models.rs @@ -0,0 +1,41 @@ +use base64::decode as base64_decode; +use serde::{de::Error, Deserialize, Deserializer, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct TransactionIdxdbObject { + pub id: String, + pub account_id: String, // usually i64 + pub init_account_state: String, + pub final_account_state: String, + pub input_notes: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub output_notes: Vec, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub script_hash: Option>, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub script_program: Option>, + pub script_inputs: Option, + pub block_num: String, // usually u32 + pub commit_height: Option, // usually Option +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None => Ok(None), + } +} diff --git a/crates/rust-client/src/store/web_store/transactions/utils.rs b/crates/rust-client/src/store/web_store/transactions/utils.rs new file mode 
100644 index 000000000..c13a700c1 --- /dev/null +++ b/crates/rust-client/src/store/web_store/transactions/utils.rs @@ -0,0 +1,119 @@ +use miden_objects::{ + accounts::Account, assembly::AstSerdeOptions, transaction::ToInputNoteCommitments, Digest, +}; +use miden_tx::utils::Serializable; +use wasm_bindgen_futures::*; + +use super::js_bindings::*; +use crate::{ + errors::StoreError, + store::web_store::accounts::utils::{ + insert_account_asset_vault, insert_account_record, insert_account_storage, + }, + transactions::TransactionResult, +}; + +// TYPES +// ================================================================================================ + +pub struct SerializedTransactionData { + pub transaction_id: String, + pub account_id: String, + pub init_account_state: String, + pub final_account_state: String, + pub input_notes: String, + pub output_notes: Vec, + pub script_program: Option>, + pub script_hash: Option>, + pub script_inputs: Option, + pub block_num: String, + pub commit_height: Option, +} + +// ================================================================================================ + +pub async fn insert_proven_transaction_data( + transaction_result: TransactionResult, +) -> Result<(), StoreError> { + let serialized_data = serialize_transaction_data(transaction_result)?; + + if let Some(hash) = serialized_data.script_hash.clone() { + let promise = idxdb_insert_transaction_script(hash, serialized_data.script_program.clone()); + JsFuture::from(promise).await.unwrap(); + } + + let promise = idxdb_insert_proven_transaction_data( + serialized_data.transaction_id, + serialized_data.account_id, + serialized_data.init_account_state, + serialized_data.final_account_state, + serialized_data.input_notes, + serialized_data.output_notes, + serialized_data.script_hash.clone(), + serialized_data.script_inputs.clone(), + serialized_data.block_num, + serialized_data.commit_height, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(super) fn 
serialize_transaction_data( + transaction_result: TransactionResult, +) -> Result { + let executed_transaction = transaction_result.executed_transaction(); + let transaction_id: String = executed_transaction.id().inner().into(); + + let account_id_as_str: String = executed_transaction.account_id().to_string(); + let init_account_state = &executed_transaction.initial_account().hash().to_string(); + let final_account_state = &executed_transaction.final_account().hash().to_string(); + + // TODO: Double check if saving nullifiers as input notes is enough + let nullifiers: Vec = executed_transaction + .input_notes() + .iter() + .map(|x| x.nullifier().inner()) + .collect(); + + let input_notes = + serde_json::to_string(&nullifiers).map_err(StoreError::InputSerializationError)?; + + let output_notes = executed_transaction.output_notes(); + + // TODO: Scripts should be in their own tables and only identifiers should be stored here + let transaction_args = transaction_result.transaction_arguments(); + let mut script_program = None; + let mut script_hash = None; + let mut script_inputs = None; + + if let Some(tx_script) = transaction_args.tx_script() { + script_program = + Some(tx_script.code().to_bytes(AstSerdeOptions { serialize_imports: true })); + script_hash = Some(tx_script.hash().to_bytes()); + script_inputs = Some( + serde_json::to_string(&tx_script.inputs()) + .map_err(StoreError::InputSerializationError)?, + ); + } + + Ok(SerializedTransactionData { + transaction_id, + account_id: account_id_as_str, + init_account_state: init_account_state.to_owned(), + final_account_state: final_account_state.to_owned(), + input_notes, + output_notes: output_notes.to_bytes(), + script_program, + script_hash, + script_inputs, + block_num: transaction_result.block_num().to_string(), + commit_height: None, + }) +} + +pub async fn update_account(new_account_state: &Account) -> Result<(), ()> { + let _ = insert_account_storage(new_account_state.storage()).await; + let _ = 
insert_account_asset_vault(new_account_state.vault()).await; + insert_account_record(new_account_state, None).await +} diff --git a/crates/rust-client/src/sync.rs b/crates/rust-client/src/sync.rs index 72703653f..09892b2a8 100644 --- a/crates/rust-client/src/sync.rs +++ b/crates/rust-client/src/sync.rs @@ -301,8 +301,10 @@ impl Client ) .map_err(ClientError::NoteError)?; - self.store.update_note_inclusion_proof(details.id(), note_inclusion_proof)?; - self.store.update_note_metadata(details.id(), *details.metadata())?; + maybe_await!(self + .store + .update_note_inclusion_proof(details.id(), note_inclusion_proof))?; + maybe_await!(self.store.update_note_metadata(details.id(), *details.metadata()))?; } let mut sync_summary = SyncSummary::new_empty(0);