From 60f2a1173e26068c80d1e0631d996b217dd99bea Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Zemanovi=C4=8D?= <tomas@heliax.dev>
Date: Fri, 4 Nov 2022 12:24:55 +0100
Subject: [PATCH 1/9] add new crate "namada_core" for core types, storage_api,
 tx_env, vp_env

---
 Cargo.lock                                    |  35 ++++++
 Cargo.toml                                    |   1 +
 core/Cargo.toml                               |  62 +++++++++++
 core/build.rs                                 |  55 ++++++++++
 {shared => core}/proto                        |   0
 {shared => core}/src/bytes.rs                 |   0
 {shared => core}/src/ledger/gas.rs            |   0
 core/src/ledger/governance/mod.rs             |   6 ++
 .../src/ledger/governance/parameters.rs       |   0
 .../src/ledger/governance/storage.rs          |   0
 .../src/ledger/ibc/actions.rs                 |   0
 .../src/types => core/src/ledger}/ibc/data.rs |   0
 core/src/ledger/ibc/mod.rs                    |   2 +
 {shared => core}/src/ledger/ibc/storage.rs    |   0
 core/src/ledger/mod.rs                        |  11 ++
 {shared => core}/src/ledger/parameters/mod.rs |   0
 .../src/ledger/parameters/storage.rs          |   0
 .../src/ledger/storage/ics23_specs.rs         |   0
 .../src/ledger/storage/merkle_tree.rs         |  19 +++-
 {shared => core}/src/ledger/storage/mockdb.rs |   0
 {shared => core}/src/ledger/storage/mod.rs    |   6 --
 {shared => core}/src/ledger/storage/traits.rs |  77 +------------
 {shared => core}/src/ledger/storage/types.rs  |   0
 .../storage_api/collections/lazy_map.rs       |   0
 .../storage_api/collections/lazy_vec.rs       |   0
 .../src/ledger/storage_api/collections/mod.rs |   0
 .../src/ledger/storage_api/error.rs           |   0
 .../src/ledger/storage_api/key.rs             |   0
 .../src/ledger/storage_api/mod.rs             |   0
 .../src/ledger/storage_api/validation/mod.rs  |   0
 {shared => core}/src/ledger/tx_env.rs         |   0
 {shared => core}/src/ledger/vp_env.rs         |   0
 core/src/lib.rs                               |  11 ++
 {shared => core}/src/proto/generated.rs       |   0
 .../src/proto/generated/.gitignore            |   0
 {shared => core}/src/proto/mod.rs             |   0
 {shared => core}/src/proto/types.rs           |   0
 {shared => core}/src/types/address.rs         |   0
 {shared => core}/src/types/chain.rs           |   0
 {shared => core}/src/types/governance.rs      |  14 +--
 {shared => core}/src/types/hash.rs            |  22 ++--
 .../ibc/event.rs => core/src/types/ibc.rs     |   0
 {shared => core}/src/types/internal.rs        |   0
 {shared => core}/src/types/key/common.rs      |  13 +--
 .../src/types/key/dkg_session_keys.rs         |   0
 {shared => core}/src/types/key/ed25519.rs     |   0
 {shared => core}/src/types/key/mod.rs         |   0
 {shared => core}/src/types/key/secp256k1.rs   |   0
 {shared => core}/src/types/masp.rs            |   0
 core/src/types/mod.rs                         |  12 +++
 {shared => core}/src/types/named_address.rs   |   0
 {shared => core}/src/types/storage.rs         |  23 ++--
 {shared => core}/src/types/time.rs            |  81 +++++++-------
 {shared => core}/src/types/token.rs           |   0
 .../src/types/transaction/decrypted.rs        |   0
 .../src/types/transaction/encrypted.rs        |   0
 .../src/types/transaction/governance.rs       |  26 +++++
 {shared => core}/src/types/transaction/mod.rs |   0
 {shared => core}/src/types/transaction/pos.rs |   0
 .../src/types/transaction/protocol.rs         |   0
 .../src/types/transaction/wrapper.rs          |   0
 .../src/types/validity_predicate.rs           |   0
 .../pos => proof_of_stake/src}/storage.rs     |   0
 shared/Cargo.toml                             |   1 +
 shared/build.rs                               |  49 +--------
 shared/src/ledger/ibc/mod.rs                  |   5 +-
 shared/src/ledger/ibc/vp/channel.rs           |  88 ++++++++-------
 shared/src/ledger/ibc/vp/client.rs            |   6 +-
 shared/src/ledger/ibc/vp/connection.rs        |   6 +-
 shared/src/ledger/ibc/vp/mod.rs               |  20 ++--
 shared/src/ledger/ibc/vp/packet.rs            |  17 +--
 shared/src/ledger/ibc/vp/sequence.rs          |   2 +-
 shared/src/ledger/mod.rs                      |  11 +-
 .../ledger/{ => native_vp}/governance/mod.rs  |  13 +--
 .../{ => native_vp}/governance/utils.rs       |   0
 .../ledger/{native_vp.rs => native_vp/mod.rs} |   4 +
 shared/src/ledger/native_vp/parameters.rs     | 101 ++++++++++++++++++
 shared/src/ledger/pos/mod.rs                  |  90 ++++++++--------
 shared/src/ledger/pos/vp.rs                   |   2 +-
 shared/src/ledger/protocol/mod.rs             |   2 +-
 shared/src/ledger/queries/shell.rs            |  40 ++++++-
 shared/src/lib.rs                             |   3 +-
 shared/src/types/dylib.rs                     |  16 ---
 shared/src/types/ibc/mod.rs                   |   6 +-
 shared/src/types/mod.rs                       |  17 +--
 85 files changed, 602 insertions(+), 373 deletions(-)
 create mode 100644 core/Cargo.toml
 create mode 100644 core/build.rs
 rename {shared => core}/proto (100%)
 rename {shared => core}/src/bytes.rs (100%)
 rename {shared => core}/src/ledger/gas.rs (100%)
 create mode 100644 core/src/ledger/governance/mod.rs
 rename {shared => core}/src/ledger/governance/parameters.rs (100%)
 rename {shared => core}/src/ledger/governance/storage.rs (100%)
 rename shared/src/ledger/ibc/handler.rs => core/src/ledger/ibc/actions.rs (100%)
 rename {shared/src/types => core/src/ledger}/ibc/data.rs (100%)
 create mode 100644 core/src/ledger/ibc/mod.rs
 rename {shared => core}/src/ledger/ibc/storage.rs (100%)
 create mode 100644 core/src/ledger/mod.rs
 rename {shared => core}/src/ledger/parameters/mod.rs (100%)
 rename {shared => core}/src/ledger/parameters/storage.rs (100%)
 rename {shared => core}/src/ledger/storage/ics23_specs.rs (100%)
 rename {shared => core}/src/ledger/storage/merkle_tree.rs (98%)
 rename {shared => core}/src/ledger/storage/mockdb.rs (100%)
 rename {shared => core}/src/ledger/storage/mod.rs (99%)
 rename {shared => core}/src/ledger/storage/traits.rs (80%)
 rename {shared => core}/src/ledger/storage/types.rs (100%)
 rename {shared => core}/src/ledger/storage_api/collections/lazy_map.rs (100%)
 rename {shared => core}/src/ledger/storage_api/collections/lazy_vec.rs (100%)
 rename {shared => core}/src/ledger/storage_api/collections/mod.rs (100%)
 rename {shared => core}/src/ledger/storage_api/error.rs (100%)
 rename {shared => core}/src/ledger/storage_api/key.rs (100%)
 rename {shared => core}/src/ledger/storage_api/mod.rs (100%)
 rename {shared => core}/src/ledger/storage_api/validation/mod.rs (100%)
 rename {shared => core}/src/ledger/tx_env.rs (100%)
 rename {shared => core}/src/ledger/vp_env.rs (100%)
 create mode 100644 core/src/lib.rs
 rename {shared => core}/src/proto/generated.rs (100%)
 rename {shared => core}/src/proto/generated/.gitignore (100%)
 rename {shared => core}/src/proto/mod.rs (100%)
 rename {shared => core}/src/proto/types.rs (100%)
 rename {shared => core}/src/types/address.rs (100%)
 rename {shared => core}/src/types/chain.rs (100%)
 rename {shared => core}/src/types/governance.rs (97%)
 rename {shared => core}/src/types/hash.rs (91%)
 rename shared/src/types/ibc/event.rs => core/src/types/ibc.rs (100%)
 rename {shared => core}/src/types/internal.rs (100%)
 rename {shared => core}/src/types/key/common.rs (97%)
 rename {shared => core}/src/types/key/dkg_session_keys.rs (100%)
 rename {shared => core}/src/types/key/ed25519.rs (100%)
 rename {shared => core}/src/types/key/mod.rs (100%)
 rename {shared => core}/src/types/key/secp256k1.rs (100%)
 rename {shared => core}/src/types/masp.rs (100%)
 create mode 100644 core/src/types/mod.rs
 rename {shared => core}/src/types/named_address.rs (100%)
 rename {shared => core}/src/types/storage.rs (99%)
 rename {shared => core}/src/types/time.rs (77%)
 rename {shared => core}/src/types/token.rs (100%)
 rename {shared => core}/src/types/transaction/decrypted.rs (100%)
 rename {shared => core}/src/types/transaction/encrypted.rs (100%)
 rename {shared => core}/src/types/transaction/governance.rs (60%)
 rename {shared => core}/src/types/transaction/mod.rs (100%)
 rename {shared => core}/src/types/transaction/pos.rs (100%)
 rename {shared => core}/src/types/transaction/protocol.rs (100%)
 rename {shared => core}/src/types/transaction/wrapper.rs (100%)
 rename {shared => core}/src/types/validity_predicate.rs (100%)
 rename {shared/src/ledger/pos => proof_of_stake/src}/storage.rs (100%)
 rename shared/src/ledger/{ => native_vp}/governance/mod.rs (99%)
 rename shared/src/ledger/{ => native_vp}/governance/utils.rs (100%)
 rename shared/src/ledger/{native_vp.rs => native_vp/mod.rs} (99%)
 create mode 100644 shared/src/ledger/native_vp/parameters.rs
 delete mode 100644 shared/src/types/dylib.rs

diff --git a/Cargo.lock b/Cargo.lock
index 75c3c2f770..6b7ca3cb7c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3659,6 +3659,7 @@ dependencies = [
  "loupe",
  "masp_primitives",
  "masp_proofs",
+ "namada_core",
  "namada_proof_of_stake",
  "parity-wasm",
  "paste",
@@ -3785,6 +3786,40 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "namada_core"
+version = "0.9.0"
+dependencies = [
+ "ark-ec",
+ "assert_matches",
+ "bech32",
+ "borsh",
+ "chrono",
+ "data-encoding",
+ "derivative",
+ "ed25519-consensus",
+ "ferveo",
+ "group-threshold-cryptography",
+ "ics23",
+ "itertools",
+ "libsecp256k1",
+ "pretty_assertions",
+ "proptest",
+ "rand 0.8.5",
+ "rand_core 0.6.4",
+ "rust_decimal",
+ "serde 1.0.147",
+ "serde_json",
+ "sha2 0.9.9",
+ "sparse-merkle-tree",
+ "test-log",
+ "thiserror",
+ "tonic-build",
+ "tracing 0.1.37",
+ "tracing-subscriber 0.3.16",
+ "zeroize",
+]
+
 [[package]]
 name = "namada_encoding_spec"
 version = "0.10.1"
diff --git a/Cargo.toml b/Cargo.toml
index fab5ebd8d9..42a99343fc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,6 +3,7 @@ resolver = "2"
 
 members = [
   "apps",
+  "core",
   "proof_of_stake",
   "shared",
   "tests",
diff --git a/core/Cargo.toml b/core/Cargo.toml
new file mode 100644
index 0000000000..5dbf217d34
--- /dev/null
+++ b/core/Cargo.toml
@@ -0,0 +1,62 @@
+[package]
+authors = ["Heliax AG <hello@heliax.dev>"]
+edition = "2021"
+license = "GPL-3.0"
+name = "namada_core"
+resolver = "2"
+version = "0.9.0"
+
+[features]
+default = []
+ferveo-tpke = [
+  "ferveo",
+  "tpke",
+  "ark-ec",
+  "rand_core",
+  "rand",
+]
+# for integration tests and test utilities
+testing = [
+  "rand",
+  "rand_core",
+]
+
+[dependencies]
+ark-ec = {version = "0.3", optional = true}
+# We switch off "blake2b" because it cannot be compiled to wasm
+# branch = "bat/arse-merkle-tree"
+arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", rev = "04ad1eeb28901b57a7599bbe433b3822965dabe8", default-features = false, features = ["std", "borsh"]}
+bech32 = "0.8.0"
+borsh = "0.9.0"
+chrono = {version = "0.4.22", default-features = false, features = ["clock", "std"]}
+data-encoding = "2.3.2"
+derivative = "2.2.0"
+ed25519-consensus = "1.2.0"
+ferveo = {optional = true, git = "https://github.com/anoma/ferveo"}
+tpke = {package = "group-threshold-cryptography", optional = true, git = "https://github.com/anoma/ferveo"}
+ics23 = "0.7.0"
+itertools = "0.10.0"
+libsecp256k1 = {git = "https://github.com/heliaxdev/libsecp256k1", rev = "bbb3bd44a49db361f21d9db80f9a087c194c0ae9", default-features = false, features = ["std", "static-context"]}
+rand = {version = "0.8", optional = true}
+rand_core = {version = "0.6", optional = true}
+rust_decimal = "1.26.1"
+serde = {version = "1.0.125", features = ["derive"]}
+serde_json = "1.0.62"
+sha2 = "0.9.3"
+thiserror = "1.0.30"
+tracing = "0.1.30"
+zeroize = {version = "1.5.5", features = ["zeroize_derive"]}
+
+[dev-dependencies]
+assert_matches = "1.5.0"
+libsecp256k1 = {git = "https://github.com/heliaxdev/libsecp256k1", rev = "bbb3bd44a49db361f21d9db80f9a087c194c0ae9"}
+pretty_assertions = "0.7.2"
+# A fork with state machine testing
+proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"}
+rand = {version = "0.8"}
+rand_core = {version = "0.6"}
+test-log = {version = "0.2.7", default-features = false, features = ["trace"]}
+tracing-subscriber = {version = "0.3.7", default-features = false, features = ["env-filter", "fmt"]}
+
+[build-dependencies]
+tonic-build = "0.6.0"
\ No newline at end of file
diff --git a/core/build.rs b/core/build.rs
new file mode 100644
index 0000000000..3c6f017a22
--- /dev/null
+++ b/core/build.rs
@@ -0,0 +1,55 @@
+use std::fs::read_to_string;
+use std::process::Command;
+use std::{env, str};
+
+/// Path to the .proto source files, relative to `core` directory
+const PROTO_SRC: &str = "./proto";
+
+/// The version should match the one we use in the `Makefile`
+const RUSTFMT_TOOLCHAIN_SRC: &str = "../rust-nightly-version";
+
+fn main() {
+    if let Ok(val) = env::var("COMPILE_PROTO") {
+        if val.to_ascii_lowercase() == "false" {
+            // Skip compiling proto files
+            return;
+        }
+    }
+
+    // Tell Cargo that if the given file changes, to rerun this build script.
+    println!("cargo:rerun-if-changed={}", PROTO_SRC);
+
+    let mut use_rustfmt = false;
+
+    // The version should match the one we use in the `Makefile`
+    if let Ok(rustfmt_toolchain) = read_to_string(RUSTFMT_TOOLCHAIN_SRC) {
+        // Try to find the path to rustfmt.
+        if let Ok(output) = Command::new("rustup")
+            .args(&[
+                "which",
+                "rustfmt",
+                "--toolchain",
+                rustfmt_toolchain.trim(),
+            ])
+            .output()
+        {
+            if let Ok(rustfmt) = str::from_utf8(&output.stdout) {
+                // Set the command to be used by tonic_build below to format the
+                // generated files
+                let rustfmt = rustfmt.trim();
+                if !rustfmt.is_empty() {
+                    println!("using rustfmt from path \"{}\"", rustfmt);
+                    env::set_var("RUSTFMT", rustfmt);
+                    use_rustfmt = true
+                }
+            }
+        }
+    }
+
+    tonic_build::configure()
+        .out_dir("src/proto/generated")
+        .format(use_rustfmt)
+        .protoc_arg("--experimental_allow_proto3_optional")
+        .compile(&[format!("{}/types.proto", PROTO_SRC)], &[PROTO_SRC])
+        .unwrap();
+}
diff --git a/shared/proto b/core/proto
similarity index 100%
rename from shared/proto
rename to core/proto
diff --git a/shared/src/bytes.rs b/core/src/bytes.rs
similarity index 100%
rename from shared/src/bytes.rs
rename to core/src/bytes.rs
diff --git a/shared/src/ledger/gas.rs b/core/src/ledger/gas.rs
similarity index 100%
rename from shared/src/ledger/gas.rs
rename to core/src/ledger/gas.rs
diff --git a/core/src/ledger/governance/mod.rs b/core/src/ledger/governance/mod.rs
new file mode 100644
index 0000000000..44d20f50e0
--- /dev/null
+++ b/core/src/ledger/governance/mod.rs
@@ -0,0 +1,6 @@
+//! Governance library code
+
+/// governance parameters
+pub mod parameters;
+/// governance storage
+pub mod storage;
diff --git a/shared/src/ledger/governance/parameters.rs b/core/src/ledger/governance/parameters.rs
similarity index 100%
rename from shared/src/ledger/governance/parameters.rs
rename to core/src/ledger/governance/parameters.rs
diff --git a/shared/src/ledger/governance/storage.rs b/core/src/ledger/governance/storage.rs
similarity index 100%
rename from shared/src/ledger/governance/storage.rs
rename to core/src/ledger/governance/storage.rs
diff --git a/shared/src/ledger/ibc/handler.rs b/core/src/ledger/ibc/actions.rs
similarity index 100%
rename from shared/src/ledger/ibc/handler.rs
rename to core/src/ledger/ibc/actions.rs
diff --git a/shared/src/types/ibc/data.rs b/core/src/ledger/ibc/data.rs
similarity index 100%
rename from shared/src/types/ibc/data.rs
rename to core/src/ledger/ibc/data.rs
diff --git a/core/src/ledger/ibc/mod.rs b/core/src/ledger/ibc/mod.rs
new file mode 100644
index 0000000000..efb7f7f0d5
--- /dev/null
+++ b/core/src/ledger/ibc/mod.rs
@@ -0,0 +1,2 @@
+pub mod actions;
+pub mod storage;
diff --git a/shared/src/ledger/ibc/storage.rs b/core/src/ledger/ibc/storage.rs
similarity index 100%
rename from shared/src/ledger/ibc/storage.rs
rename to core/src/ledger/ibc/storage.rs
diff --git a/core/src/ledger/mod.rs b/core/src/ledger/mod.rs
new file mode 100644
index 0000000000..bbd12bc60d
--- /dev/null
+++ b/core/src/ledger/mod.rs
@@ -0,0 +1,11 @@
+//! The ledger modules
+
+pub mod gas;
+pub mod governance;
+#[cfg(feature = "ibc-rs")]
+pub mod ibc;
+pub mod parameters;
+pub mod storage;
+pub mod storage_api;
+pub mod tx_env;
+pub mod vp_env;
diff --git a/shared/src/ledger/parameters/mod.rs b/core/src/ledger/parameters/mod.rs
similarity index 100%
rename from shared/src/ledger/parameters/mod.rs
rename to core/src/ledger/parameters/mod.rs
diff --git a/shared/src/ledger/parameters/storage.rs b/core/src/ledger/parameters/storage.rs
similarity index 100%
rename from shared/src/ledger/parameters/storage.rs
rename to core/src/ledger/parameters/storage.rs
diff --git a/shared/src/ledger/storage/ics23_specs.rs b/core/src/ledger/storage/ics23_specs.rs
similarity index 100%
rename from shared/src/ledger/storage/ics23_specs.rs
rename to core/src/ledger/storage/ics23_specs.rs
diff --git a/shared/src/ledger/storage/merkle_tree.rs b/core/src/ledger/storage/merkle_tree.rs
similarity index 98%
rename from shared/src/ledger/storage/merkle_tree.rs
rename to core/src/ledger/storage/merkle_tree.rs
index 49afb3dcfc..3ce35ab837 100644
--- a/shared/src/ledger/storage/merkle_tree.rs
+++ b/core/src/ledger/storage/merkle_tree.rs
@@ -10,11 +10,11 @@ use arse_merkle_tree::{
 use borsh::{BorshDeserialize, BorshSerialize};
 use ics23::commitment_proof::Proof as Ics23Proof;
 use ics23::{CommitmentProof, ExistenceProof, NonExistenceProof};
+use crate::types::storage::{TreeKeyError, IBC_KEY_LIMIT};
 use prost::Message;
 use thiserror::Error;
 
 use super::traits::{StorageHasher, SubTreeRead, SubTreeWrite};
-use super::IBC_KEY_LIMIT;
 use crate::bytes::ByteBuf;
 use crate::ledger::storage::ics23_specs::{self, ibc_leaf_spec};
 use crate::ledger::storage::types;
@@ -22,8 +22,7 @@ use crate::tendermint::merkle::proof::{Proof, ProofOp};
 use crate::types::address::{Address, InternalAddress};
 use crate::types::hash::Hash;
 use crate::types::storage::{
-    DbKeySeg, Error as StorageError, Key, MembershipProof, MerkleValue,
-    StringKey, TreeBytes,
+    DbKeySeg, Error as StorageError, Key, MerkleValue, StringKey, TreeBytes,
 };
 
 #[allow(missing_docs)]
@@ -33,6 +32,8 @@ pub enum Error {
     InvalidKey(StorageError),
     #[error("Invalid key for merkle tree: {0}")]
     InvalidMerkleKey(String),
+    #[error("Storage tree key error: {0}")]
+    StorageTreeKey(#[from] TreeKeyError),
     #[error("Empty Key: {0}")]
     EmptyKey(String),
     #[error("Merkle Tree error: {0}")]
@@ -829,3 +830,15 @@ mod test {
         assert!(basetree_verification_res);
     }
 }
+
+/// Type of membership proof from a merkle tree
+pub enum MembershipProof {
+    /// ICS23 compliant membership proof
+    ICS23(CommitmentProof),
+}
+
+impl From<CommitmentProof> for MembershipProof {
+    fn from(proof: CommitmentProof) -> Self {
+        Self::ICS23(proof)
+    }
+}
diff --git a/shared/src/ledger/storage/mockdb.rs b/core/src/ledger/storage/mockdb.rs
similarity index 100%
rename from shared/src/ledger/storage/mockdb.rs
rename to core/src/ledger/storage/mockdb.rs
diff --git a/shared/src/ledger/storage/mod.rs b/core/src/ledger/storage/mod.rs
similarity index 99%
rename from shared/src/ledger/storage/mod.rs
rename to core/src/ledger/storage/mod.rs
index 571d33b4ab..c2a246ce15 100644
--- a/shared/src/ledger/storage/mod.rs
+++ b/core/src/ledger/storage/mod.rs
@@ -4,9 +4,7 @@ pub mod ics23_specs;
 mod merkle_tree;
 #[cfg(any(test, feature = "testing"))]
 pub mod mockdb;
-pub mod traits;
 pub mod types;
-pub mod write_log;
 
 use core::fmt::Debug;
 use std::array;
@@ -336,7 +334,6 @@ where
     pub fn open(
         db_path: impl AsRef<std::path::Path>,
         chain_id: ChainId,
-        native_token: Address,
         cache: Option<&D::Cache>,
     ) -> Self {
         let block = BlockStorage {
@@ -363,7 +360,6 @@ where
             conversion_state: ConversionState::default(),
             #[cfg(feature = "ferveo-tpke")]
             tx_queue: TxQueue::default(),
-            native_token,
         }
     }
 
@@ -1158,7 +1154,6 @@ pub mod testing {
     use super::mockdb::MockDB;
     use super::*;
     use crate::ledger::storage::traits::Sha256Hasher;
-    use crate::types::address;
     /// Storage with a mock DB for testing
     pub type TestStorage = Storage<MockDB, Sha256Hasher>;
 
@@ -1190,7 +1185,6 @@ pub mod testing {
                 conversion_state: ConversionState::default(),
                 #[cfg(feature = "ferveo-tpke")]
                 tx_queue: TxQueue::default(),
-                native_token: address::nam(),
             }
         }
     }
diff --git a/shared/src/ledger/storage/traits.rs b/core/src/ledger/storage/traits.rs
similarity index 80%
rename from shared/src/ledger/storage/traits.rs
rename to core/src/ledger/storage/traits.rs
index e382f34d73..1f84c6e421 100644
--- a/shared/src/ledger/storage/traits.rs
+++ b/core/src/ledger/storage/traits.rs
@@ -7,14 +7,13 @@ use arse_merkle_tree::traits::{Hasher, Value};
 use arse_merkle_tree::{Key as TreeKey, H256};
 use ics23::commitment_proof::Proof as Ics23Proof;
 use ics23::{CommitmentProof, ExistenceProof};
+use crate::types::storage::IBC_KEY_LIMIT;
 use sha2::{Digest, Sha256};
 
-use super::merkle_tree::{Amt, Error, Smt};
-use super::{ics23_specs, IBC_KEY_LIMIT};
+use super::ics23_specs;
+use super::merkle_tree::{Amt, Error, MembershipProof, Smt};
 use crate::types::hash::Hash;
-use crate::types::storage::{
-    Key, MembershipProof, MerkleValue, StringKey, TreeBytes,
-};
+use crate::types::storage::{Key, MerkleValue, StringKey, TreeBytes};
 
 /// Trait for reading from a merkle tree that is a sub-tree
 /// of the global merkle tree.
@@ -159,74 +158,6 @@ impl<'a, H: StorageHasher + Default> SubTreeWrite for &'a mut Amt<H> {
     }
 }
 
-impl TreeKey<IBC_KEY_LIMIT> for StringKey {
-    type Error = Error;
-
-    fn as_slice(&self) -> &[u8] {
-        &self.original.as_slice()[..self.length]
-    }
-
-    fn try_from_bytes(bytes: &[u8]) -> Result<Self, Error> {
-        let mut tree_key = [0u8; IBC_KEY_LIMIT];
-        let mut original = [0u8; IBC_KEY_LIMIT];
-        let mut length = 0;
-        for (i, byte) in bytes.iter().enumerate() {
-            if i >= IBC_KEY_LIMIT {
-                return Err(Error::InvalidMerkleKey(
-                    "Input IBC key is too large".into(),
-                ));
-            }
-            original[i] = *byte;
-            tree_key[i] = byte.wrapping_add(1);
-            length += 1;
-        }
-        Ok(Self {
-            original,
-            tree_key: tree_key.into(),
-            length,
-        })
-    }
-}
-
-impl Value for Hash {
-    fn as_slice(&self) -> &[u8] {
-        self.0.as_slice()
-    }
-
-    fn zero() -> Self {
-        Hash([0u8; 32])
-    }
-}
-
-impl From<Hash> for H256 {
-    fn from(hash: Hash) -> Self {
-        hash.0.into()
-    }
-}
-
-impl From<H256> for Hash {
-    fn from(hash: H256) -> Self {
-        Self(hash.into())
-    }
-}
-
-impl From<&H256> for Hash {
-    fn from(hash: &H256) -> Self {
-        let hash = hash.to_owned();
-        Self(hash.into())
-    }
-}
-
-impl Value for TreeBytes {
-    fn as_slice(&self) -> &[u8] {
-        self.0.as_slice()
-    }
-
-    fn zero() -> Self {
-        TreeBytes::zero()
-    }
-}
-
 /// The storage hasher used for the merkle tree.
 pub trait StorageHasher: Hasher + Default {
     /// Hash the value to store
diff --git a/shared/src/ledger/storage/types.rs b/core/src/ledger/storage/types.rs
similarity index 100%
rename from shared/src/ledger/storage/types.rs
rename to core/src/ledger/storage/types.rs
diff --git a/shared/src/ledger/storage_api/collections/lazy_map.rs b/core/src/ledger/storage_api/collections/lazy_map.rs
similarity index 100%
rename from shared/src/ledger/storage_api/collections/lazy_map.rs
rename to core/src/ledger/storage_api/collections/lazy_map.rs
diff --git a/shared/src/ledger/storage_api/collections/lazy_vec.rs b/core/src/ledger/storage_api/collections/lazy_vec.rs
similarity index 100%
rename from shared/src/ledger/storage_api/collections/lazy_vec.rs
rename to core/src/ledger/storage_api/collections/lazy_vec.rs
diff --git a/shared/src/ledger/storage_api/collections/mod.rs b/core/src/ledger/storage_api/collections/mod.rs
similarity index 100%
rename from shared/src/ledger/storage_api/collections/mod.rs
rename to core/src/ledger/storage_api/collections/mod.rs
diff --git a/shared/src/ledger/storage_api/error.rs b/core/src/ledger/storage_api/error.rs
similarity index 100%
rename from shared/src/ledger/storage_api/error.rs
rename to core/src/ledger/storage_api/error.rs
diff --git a/shared/src/ledger/storage_api/key.rs b/core/src/ledger/storage_api/key.rs
similarity index 100%
rename from shared/src/ledger/storage_api/key.rs
rename to core/src/ledger/storage_api/key.rs
diff --git a/shared/src/ledger/storage_api/mod.rs b/core/src/ledger/storage_api/mod.rs
similarity index 100%
rename from shared/src/ledger/storage_api/mod.rs
rename to core/src/ledger/storage_api/mod.rs
diff --git a/shared/src/ledger/storage_api/validation/mod.rs b/core/src/ledger/storage_api/validation/mod.rs
similarity index 100%
rename from shared/src/ledger/storage_api/validation/mod.rs
rename to core/src/ledger/storage_api/validation/mod.rs
diff --git a/shared/src/ledger/tx_env.rs b/core/src/ledger/tx_env.rs
similarity index 100%
rename from shared/src/ledger/tx_env.rs
rename to core/src/ledger/tx_env.rs
diff --git a/shared/src/ledger/vp_env.rs b/core/src/ledger/vp_env.rs
similarity index 100%
rename from shared/src/ledger/vp_env.rs
rename to core/src/ledger/vp_env.rs
diff --git a/core/src/lib.rs b/core/src/lib.rs
new file mode 100644
index 0000000000..5ee0ebba0e
--- /dev/null
+++ b/core/src/lib.rs
@@ -0,0 +1,11 @@
+//! The core public types, storage_api, VpEnv and TxEnv.
+
+#![doc(html_favicon_url = "https://dev.anoma.net/master/favicon.png")]
+#![doc(html_logo_url = "https://dev.anoma.net/master/rustdoc-logo.png")]
+#![warn(missing_docs)]
+#![deny(rustdoc::broken_intra_doc_links)]
+#![deny(rustdoc::private_intra_doc_links)]
+
+pub mod bytes;
+pub mod ledger;
+pub mod types;
diff --git a/shared/src/proto/generated.rs b/core/src/proto/generated.rs
similarity index 100%
rename from shared/src/proto/generated.rs
rename to core/src/proto/generated.rs
diff --git a/shared/src/proto/generated/.gitignore b/core/src/proto/generated/.gitignore
similarity index 100%
rename from shared/src/proto/generated/.gitignore
rename to core/src/proto/generated/.gitignore
diff --git a/shared/src/proto/mod.rs b/core/src/proto/mod.rs
similarity index 100%
rename from shared/src/proto/mod.rs
rename to core/src/proto/mod.rs
diff --git a/shared/src/proto/types.rs b/core/src/proto/types.rs
similarity index 100%
rename from shared/src/proto/types.rs
rename to core/src/proto/types.rs
diff --git a/shared/src/types/address.rs b/core/src/types/address.rs
similarity index 100%
rename from shared/src/types/address.rs
rename to core/src/types/address.rs
diff --git a/shared/src/types/chain.rs b/core/src/types/chain.rs
similarity index 100%
rename from shared/src/types/chain.rs
rename to core/src/types/chain.rs
diff --git a/shared/src/types/governance.rs b/core/src/types/governance.rs
similarity index 97%
rename from shared/src/types/governance.rs
rename to core/src/types/governance.rs
index a7de68c8ff..fcfa6dcb54 100644
--- a/shared/src/types/governance.rs
+++ b/core/src/types/governance.rs
@@ -9,13 +9,13 @@ use rust_decimal::Decimal;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
-use super::address::Address;
-use super::hash::Hash;
-use super::key::common::{self, Signature};
-use super::key::SigScheme;
-use super::storage::Epoch;
-use super::token::SCALE;
-use super::transaction::governance::InitProposalData;
+use crate::types::address::Address;
+use crate::types::hash::Hash;
+use crate::types::key::common::{self, Signature};
+use crate::types::key::SigScheme;
+use crate::types::storage::Epoch;
+use crate::types::token::SCALE;
+use crate::types::transaction::governance::InitProposalData;
 
 /// Type alias for vote power
 pub type VotePower = u128;
diff --git a/shared/src/types/hash.rs b/core/src/types/hash.rs
similarity index 91%
rename from shared/src/types/hash.rs
rename to core/src/types/hash.rs
index 4198cac4d5..5f8b1be95f 100644
--- a/shared/src/types/hash.rs
+++ b/core/src/types/hash.rs
@@ -4,16 +4,16 @@ use std::fmt::{self, Display};
 use std::ops::Deref;
 use std::str::FromStr;
 
-use arse_merkle_tree::traits::Value;
-use arse_merkle_tree::Hash as TreeHash;
+// use arse_merkle_tree::traits::Value;
+// use arse_merkle_tree::Hash as TreeHash;
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use hex::FromHex;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 use thiserror::Error;
 
-use crate::tendermint::abci::transaction;
-use crate::tendermint::Hash as TmHash;
+// use crate::tendermint::abci::transaction;
+// use crate::tendermint::Hash as TmHash;
 
 /// The length of the raw transaction hash.
 pub const HASH_LENGTH: usize = 32;
@@ -130,17 +130,21 @@ impl Hash {
         Self(*digest.as_ref())
     }
 
+    fn zero() -> Self {
+        Self([0u8; 32])
+    }
+
     /// Check if the hash is all zeros
     pub fn is_zero(&self) -> bool {
         self == &Self::zero()
     }
 }
 
-impl From<Hash> for TmHash {
-    fn from(hash: Hash) -> Self {
-        TmHash::Sha256(hash.0)
-    }
-}
+// impl From<Hash> for TmHash {
+//     fn from(hash: Hash) -> Self {
+//         TmHash::Sha256(hash.0)
+//     }
+// }
 
 impl From<Hash> for TreeHash {
     fn from(hash: Hash) -> Self {
diff --git a/shared/src/types/ibc/event.rs b/core/src/types/ibc.rs
similarity index 100%
rename from shared/src/types/ibc/event.rs
rename to core/src/types/ibc.rs
diff --git a/shared/src/types/internal.rs b/core/src/types/internal.rs
similarity index 100%
rename from shared/src/types/internal.rs
rename to core/src/types/internal.rs
diff --git a/shared/src/types/key/common.rs b/core/src/types/key/common.rs
similarity index 97%
rename from shared/src/types/key/common.rs
rename to core/src/types/key/common.rs
index fc4a4732dd..7ec041d638 100644
--- a/shared/src/types/key/common.rs
+++ b/core/src/types/key/common.rs
@@ -5,7 +5,7 @@ use std::str::FromStr;
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use data_encoding::HEXLOWER;
-use namada_proof_of_stake::types::PublicKeyTmRawHash;
+// use namada_proof_of_stake::types::PublicKeyTmRawHash;
 #[cfg(feature = "rand")]
 use rand::{CryptoRng, RngCore};
 use serde::{Deserialize, Serialize};
@@ -325,8 +325,9 @@ impl super::SigScheme for SigScheme {
     }
 }
 
-impl PublicKeyTmRawHash for PublicKey {
-    fn tm_raw_hash(&self) -> String {
-        tm_consensus_key_raw_hash(self)
-    }
-}
+// TODO
+// impl PublicKeyTmRawHash for PublicKey {
+//     fn tm_raw_hash(&self) -> String {
+//         tm_consensus_key_raw_hash(self)
+//     }
+// }
diff --git a/shared/src/types/key/dkg_session_keys.rs b/core/src/types/key/dkg_session_keys.rs
similarity index 100%
rename from shared/src/types/key/dkg_session_keys.rs
rename to core/src/types/key/dkg_session_keys.rs
diff --git a/shared/src/types/key/ed25519.rs b/core/src/types/key/ed25519.rs
similarity index 100%
rename from shared/src/types/key/ed25519.rs
rename to core/src/types/key/ed25519.rs
diff --git a/shared/src/types/key/mod.rs b/core/src/types/key/mod.rs
similarity index 100%
rename from shared/src/types/key/mod.rs
rename to core/src/types/key/mod.rs
diff --git a/shared/src/types/key/secp256k1.rs b/core/src/types/key/secp256k1.rs
similarity index 100%
rename from shared/src/types/key/secp256k1.rs
rename to core/src/types/key/secp256k1.rs
diff --git a/shared/src/types/masp.rs b/core/src/types/masp.rs
similarity index 100%
rename from shared/src/types/masp.rs
rename to core/src/types/masp.rs
diff --git a/core/src/types/mod.rs b/core/src/types/mod.rs
new file mode 100644
index 0000000000..6e7b71dcda
--- /dev/null
+++ b/core/src/types/mod.rs
@@ -0,0 +1,12 @@
+//! Types definitions.
+
+pub mod address;
+pub mod chain;
+pub mod governance;
+pub mod hash;
+pub mod key;
+pub mod masp;
+pub mod storage;
+pub mod time;
+pub mod token;
+pub mod validity_predicate;
diff --git a/shared/src/types/named_address.rs b/core/src/types/named_address.rs
similarity index 100%
rename from shared/src/types/named_address.rs
rename to core/src/types/named_address.rs
diff --git a/shared/src/types/storage.rs b/core/src/types/storage.rs
similarity index 99%
rename from shared/src/types/storage.rs
rename to core/src/types/storage.rs
index bec847e8aa..5dc05ae716 100644
--- a/shared/src/types/storage.rs
+++ b/core/src/types/storage.rs
@@ -456,17 +456,18 @@ impl From<TreeBytes> for Vec<u8> {
     }
 }
 
-/// Type of membership proof from a merkle tree
-pub enum MembershipProof {
-    /// ICS23 compliant membership proof
-    ICS23(CommitmentProof),
-}
-
-impl From<CommitmentProof> for MembershipProof {
-    fn from(proof: CommitmentProof) -> Self {
-        Self::ICS23(proof)
-    }
-}
+// TODO: not sure where `MembershipProof` should live; commented out pending a decision
+// /// Type of membership proof from a merkle tree
+// pub enum MembershipProof {
+//     /// ICS23 compliant membership proof
+//     ICS23(CommitmentProof),
+// }
+
+// impl From<CommitmentProof> for MembershipProof {
+//     fn from(proof: CommitmentProof) -> Self {
+//         Self::ICS23(proof)
+//     }
+// }
 
 impl Key {
     /// Parses string and returns a key
diff --git a/shared/src/types/time.rs b/core/src/types/time.rs
similarity index 77%
rename from shared/src/types/time.rs
rename to core/src/types/time.rs
index dfca614c82..13de2685ad 100644
--- a/shared/src/types/time.rs
+++ b/core/src/types/time.rs
@@ -7,10 +7,6 @@ use std::ops::{Add, Sub};
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 pub use chrono::{DateTime, Duration, TimeZone, Utc};
 
-use crate::tendermint::time::Time;
-use crate::tendermint::Error as TendermintError;
-use crate::tendermint_proto::google::protobuf;
-
 /// Check if the given `duration` has passed since the given `start.
 pub fn duration_passed(
     current: DateTimeUtc,
@@ -179,35 +175,37 @@ impl From<DateTime<Utc>> for DateTimeUtc {
     }
 }
 
-impl TryFrom<prost_types::Timestamp> for DateTimeUtc {
-    type Error = prost_types::TimestampOutOfSystemRangeError;
+// TODO move
+// impl TryFrom<prost_types::Timestamp> for DateTimeUtc {
+//     type Error = prost_types::TimestampOutOfSystemRangeError;
 
-    fn try_from(
-        timestamp: prost_types::Timestamp,
-    ) -> Result<Self, Self::Error> {
-        let system_time: std::time::SystemTime = timestamp.try_into()?;
-        Ok(Self(system_time.into()))
-    }
-}
+//     fn try_from(
+//         timestamp: prost_types::Timestamp,
+//     ) -> Result<Self, Self::Error> {
+//         let system_time: std::time::SystemTime = timestamp.try_into()?;
+//         Ok(Self(system_time.into()))
+//     }
+// }
 
-impl From<DateTimeUtc> for prost_types::Timestamp {
-    fn from(dt: DateTimeUtc) -> Self {
-        let seconds = dt.0.timestamp();
-        let nanos = dt.0.timestamp_subsec_nanos() as i32;
-        prost_types::Timestamp { seconds, nanos }
-    }
-}
+// impl From<DateTimeUtc> for prost_types::Timestamp {
+//     fn from(dt: DateTimeUtc) -> Self {
+//         let seconds = dt.0.timestamp();
+//         let nanos = dt.0.timestamp_subsec_nanos() as i32;
+//         prost_types::Timestamp { seconds, nanos }
+//     }
+// }
 
-impl TryFrom<protobuf::Timestamp> for DateTimeUtc {
-    type Error = prost_types::TimestampOutOfSystemRangeError;
+// TODO move
+// impl TryFrom<protobuf::Timestamp> for DateTimeUtc {
+//     type Error = prost_types::TimestampOutOfSystemRangeError;
 
-    fn try_from(timestamp: protobuf::Timestamp) -> Result<Self, Self::Error> {
-        Self::try_from(prost_types::Timestamp {
-            seconds: timestamp.seconds,
-            nanos: timestamp.nanos,
-        })
-    }
-}
+//     fn try_from(timestamp: protobuf::Timestamp) -> Result<Self, Self::Error>
+// {         Self::try_from(prost_types::Timestamp {
+//             seconds: timestamp.seconds,
+//             nanos: timestamp.nanos,
+//         })
+//     }
+// }
 
 impl From<DateTimeUtc> for std::time::SystemTime {
     fn from(dt: DateTimeUtc) -> Self {
@@ -230,18 +228,19 @@ impl From<DateTimeUtc> for Rfc3339String {
     }
 }
 
-impl TryFrom<DateTimeUtc> for Time {
-    type Error = TendermintError;
+// TODO move
+// impl TryFrom<DateTimeUtc> for Time {
+//     type Error = TendermintError;
 
-    fn try_from(dt: DateTimeUtc) -> Result<Self, Self::Error> {
-        Self::parse_from_rfc3339(&DateTime::to_rfc3339(&dt.0))
-    }
-}
+//     fn try_from(dt: DateTimeUtc) -> Result<Self, Self::Error> {
+//         Self::parse_from_rfc3339(&DateTime::to_rfc3339(&dt.0))
+//     }
+// }
 
-impl TryFrom<Time> for DateTimeUtc {
-    type Error = chrono::ParseError;
+// impl TryFrom<Time> for DateTimeUtc {
+//     type Error = chrono::ParseError;
 
-    fn try_from(t: Time) -> Result<Self, Self::Error> {
-        Rfc3339String(t.to_rfc3339()).try_into()
-    }
-}
+//     fn try_from(t: Time) -> Result<Self, Self::Error> {
+//         Rfc3339String(t.to_rfc3339()).try_into()
+//     }
+// }
diff --git a/shared/src/types/token.rs b/core/src/types/token.rs
similarity index 100%
rename from shared/src/types/token.rs
rename to core/src/types/token.rs
diff --git a/shared/src/types/transaction/decrypted.rs b/core/src/types/transaction/decrypted.rs
similarity index 100%
rename from shared/src/types/transaction/decrypted.rs
rename to core/src/types/transaction/decrypted.rs
diff --git a/shared/src/types/transaction/encrypted.rs b/core/src/types/transaction/encrypted.rs
similarity index 100%
rename from shared/src/types/transaction/encrypted.rs
rename to core/src/types/transaction/encrypted.rs
diff --git a/shared/src/types/transaction/governance.rs b/core/src/types/transaction/governance.rs
similarity index 60%
rename from shared/src/types/transaction/governance.rs
rename to core/src/types/transaction/governance.rs
index 21f96485c2..31a11572fb 100644
--- a/shared/src/types/transaction/governance.rs
+++ b/core/src/types/transaction/governance.rs
@@ -1,4 +1,5 @@
 use borsh::{BorshDeserialize, BorshSerialize};
+use namada_core::types::governance::{Proposal, ProposalError};
 use serde::{Deserialize, Serialize};
 
 use crate::types::address::Address;
@@ -52,3 +53,28 @@ pub struct VoteProposalData {
     /// Delegator addreses
     pub delegations: Vec<Address>,
 }
+
+impl TryFrom<Proposal> for InitProposalData {
+    type Error = ProposalError;
+
+    fn try_from(proposal: Proposal) -> Result<Self, Self::Error> {
+        let proposal_code = if let Some(path) = proposal.proposal_code_path {
+            match std::fs::read(path) {
+                Ok(bytes) => Some(bytes),
+                Err(_) => return Err(Self::Error::InvalidProposalData),
+            }
+        } else {
+            None
+        };
+
+        Ok(InitProposalData {
+            id: proposal.id,
+            content: proposal.content.try_to_vec().unwrap(),
+            author: proposal.author,
+            voting_start_epoch: proposal.voting_start_epoch,
+            voting_end_epoch: proposal.voting_end_epoch,
+            grace_epoch: proposal.grace_epoch,
+            proposal_code,
+        })
+    }
+}
diff --git a/shared/src/types/transaction/mod.rs b/core/src/types/transaction/mod.rs
similarity index 100%
rename from shared/src/types/transaction/mod.rs
rename to core/src/types/transaction/mod.rs
diff --git a/shared/src/types/transaction/pos.rs b/core/src/types/transaction/pos.rs
similarity index 100%
rename from shared/src/types/transaction/pos.rs
rename to core/src/types/transaction/pos.rs
diff --git a/shared/src/types/transaction/protocol.rs b/core/src/types/transaction/protocol.rs
similarity index 100%
rename from shared/src/types/transaction/protocol.rs
rename to core/src/types/transaction/protocol.rs
diff --git a/shared/src/types/transaction/wrapper.rs b/core/src/types/transaction/wrapper.rs
similarity index 100%
rename from shared/src/types/transaction/wrapper.rs
rename to core/src/types/transaction/wrapper.rs
diff --git a/shared/src/types/validity_predicate.rs b/core/src/types/validity_predicate.rs
similarity index 100%
rename from shared/src/types/validity_predicate.rs
rename to core/src/types/validity_predicate.rs
diff --git a/shared/src/ledger/pos/storage.rs b/proof_of_stake/src/storage.rs
similarity index 100%
rename from shared/src/ledger/pos/storage.rs
rename to proof_of_stake/src/storage.rs
diff --git a/shared/Cargo.toml b/shared/Cargo.toml
index 6212d8bdd4..35b3865b5d 100644
--- a/shared/Cargo.toml
+++ b/shared/Cargo.toml
@@ -82,6 +82,7 @@ abciplus = [
 ]
 
 [dependencies]
+namada_core = {path = "../core"}
 namada_proof_of_stake = {path = "../proof_of_stake"}
 ark-bls12-381 = {version = "0.3"}
 ark-ec = {version = "0.3", optional = true}
diff --git a/shared/build.rs b/shared/build.rs
index c872467fcf..22f00a0f75 100644
--- a/shared/build.rs
+++ b/shared/build.rs
@@ -1,24 +1,6 @@
-use std::fs::read_to_string;
-use std::process::Command;
-use std::{env, str};
-
-/// Path to the .proto source files, relative to `shared` directory
-const PROTO_SRC: &str = "./proto";
-
-/// The version should match the one we use in the `Makefile`
-const RUSTFMT_TOOLCHAIN_SRC: &str = "../rust-nightly-version";
+use std::env;
 
 fn main() {
-    if let Ok(val) = env::var("COMPILE_PROTO") {
-        if val.to_ascii_lowercase() == "false" {
-            // Skip compiling proto files
-            return;
-        }
-    }
-
-    // Tell Cargo that if the given file changes, to rerun this build script.
-    println!("cargo:rerun-if-changed={}", PROTO_SRC);
-
     // Tell Cargo to build when the `ANOMA_DEV` env var changes
     println!("cargo:rerun-if-env-changed=ANOMA_DEV");
     // Enable "dev" feature if `ANOMA_DEV` is trueish
@@ -27,33 +9,4 @@ fn main() {
             println!("cargo:rustc-cfg=feature=\"dev\"");
         }
     }
-
-    let mut use_rustfmt = false;
-
-    // The version should match the one we use in the `Makefile`
-    if let Ok(rustfmt_toolchain) = read_to_string(RUSTFMT_TOOLCHAIN_SRC) {
-        // Try to find the path to rustfmt.
-        if let Ok(output) = Command::new("rustup")
-            .args(["which", "rustfmt", "--toolchain", rustfmt_toolchain.trim()])
-            .output()
-        {
-            if let Ok(rustfmt) = str::from_utf8(&output.stdout) {
-                // Set the command to be used by tonic_build below to format the
-                // generated files
-                let rustfmt = rustfmt.trim();
-                if !rustfmt.is_empty() {
-                    println!("using rustfmt from path \"{}\"", rustfmt);
-                    env::set_var("RUSTFMT", rustfmt);
-                    use_rustfmt = true
-                }
-            }
-        }
-    }
-
-    tonic_build::configure()
-        .out_dir("src/proto/generated")
-        .format(use_rustfmt)
-        .protoc_arg("--experimental_allow_proto3_optional")
-        .compile(&[format!("{}/types.proto", PROTO_SRC)], &[PROTO_SRC])
-        .unwrap();
 }
diff --git a/shared/src/ledger/ibc/mod.rs b/shared/src/ledger/ibc/mod.rs
index 5fb599d979..831b240a9a 100644
--- a/shared/src/ledger/ibc/mod.rs
+++ b/shared/src/ledger/ibc/mod.rs
@@ -1,10 +1,9 @@
 //! IBC integration
 
-pub mod handler;
-pub mod storage;
+pub use namada_core::ledger::ibc::storage;
 pub mod vp;
 
-use storage::{
+use namada_core::ledger::ibc::storage::{
     capability_index_key, channel_counter_key, client_counter_key,
     connection_counter_key,
 };
diff --git a/shared/src/ledger/ibc/vp/channel.rs b/shared/src/ledger/ibc/vp/channel.rs
index 80adb7969f..18994ae5f5 100644
--- a/shared/src/ledger/ibc/vp/channel.rs
+++ b/shared/src/ledger/ibc/vp/channel.rs
@@ -2,64 +2,60 @@
 
 use core::time::Duration;
 
-use sha2::Digest;
-use thiserror::Error;
-
-use super::super::handler::{
+use ibc::core::ics02_client::client_consensus::AnyConsensusState;
+use ibc::core::ics02_client::client_state::AnyClientState;
+use ibc::core::ics02_client::context::ClientReader;
+use ibc::core::ics02_client::height::Height;
+use ibc::core::ics03_connection::connection::ConnectionEnd;
+use ibc::core::ics03_connection::context::ConnectionReader;
+use ibc::core::ics03_connection::error::Error as Ics03Error;
+use ibc::core::ics04_channel::channel::{ChannelEnd, Counterparty, State};
+use ibc::core::ics04_channel::commitment::{
+    AcknowledgementCommitment, PacketCommitment,
+};
+use ibc::core::ics04_channel::context::ChannelReader;
+use ibc::core::ics04_channel::error::Error as Ics04Error;
+use ibc::core::ics04_channel::handler::verify::verify_channel_proofs;
+use ibc::core::ics04_channel::msgs::chan_close_confirm::MsgChannelCloseConfirm;
+use ibc::core::ics04_channel::msgs::chan_open_ack::MsgChannelOpenAck;
+use ibc::core::ics04_channel::msgs::chan_open_confirm::MsgChannelOpenConfirm;
+use ibc::core::ics04_channel::msgs::chan_open_try::MsgChannelOpenTry;
+use ibc::core::ics04_channel::msgs::{ChannelMsg, PacketMsg};
+use ibc::core::ics04_channel::packet::{Receipt, Sequence};
+use ibc::core::ics05_port::capabilities::{Capability, ChannelCapability};
+use ibc::core::ics05_port::context::PortReader;
+use ibc::core::ics24_host::identifier::{
+    ChannelId, ClientId, ConnectionId, PortChannelId, PortId,
+};
+use ibc::core::ics26_routing::context::ModuleId;
+use ibc::core::ics26_routing::msgs::Ics26Envelope;
+use ibc::proofs::Proofs;
+use ibc::timestamp::Timestamp;
+use namada_core::ledger::ibc::actions::{
     make_close_confirm_channel_event, make_close_init_channel_event,
     make_open_ack_channel_event, make_open_confirm_channel_event,
     make_open_init_channel_event, make_open_try_channel_event,
     make_timeout_event,
 };
-use super::super::storage::{
+use namada_core::ledger::ibc::data::{
+    Error as IbcDataError, IbcMessage, PacketReceipt,
+};
+use namada_core::ledger::ibc::storage::{
     ack_key, channel_counter_key, channel_key, client_update_height_key,
     client_update_timestamp_key, commitment_key, is_channel_counter_key,
     next_sequence_ack_key, next_sequence_recv_key, next_sequence_send_key,
     port_channel_id, receipt_key, Error as IbcStorageError,
 };
+use namada_core::ledger::parameters;
+use namada_core::ledger::storage::{self as ledger_storage, StorageHasher};
+use namada_core::types::storage::Key;
+use sha2::Digest;
+use tendermint::Time;
+use tendermint_proto::Protobuf;
+use thiserror::Error;
+
 use super::{Ibc, StateChange};
-use crate::ibc::core::ics02_client::client_consensus::AnyConsensusState;
-use crate::ibc::core::ics02_client::client_state::AnyClientState;
-use crate::ibc::core::ics02_client::context::ClientReader;
-use crate::ibc::core::ics02_client::height::Height;
-use crate::ibc::core::ics03_connection::connection::ConnectionEnd;
-use crate::ibc::core::ics03_connection::context::ConnectionReader;
-use crate::ibc::core::ics03_connection::error::Error as Ics03Error;
-use crate::ibc::core::ics04_channel::channel::{
-    ChannelEnd, Counterparty, State,
-};
-use crate::ibc::core::ics04_channel::commitment::{
-    AcknowledgementCommitment, PacketCommitment,
-};
-use crate::ibc::core::ics04_channel::context::ChannelReader;
-use crate::ibc::core::ics04_channel::error::Error as Ics04Error;
-use crate::ibc::core::ics04_channel::handler::verify::verify_channel_proofs;
-use crate::ibc::core::ics04_channel::msgs::chan_close_confirm::MsgChannelCloseConfirm;
-use crate::ibc::core::ics04_channel::msgs::chan_open_ack::MsgChannelOpenAck;
-use crate::ibc::core::ics04_channel::msgs::chan_open_confirm::MsgChannelOpenConfirm;
-use crate::ibc::core::ics04_channel::msgs::chan_open_try::MsgChannelOpenTry;
-use crate::ibc::core::ics04_channel::msgs::{ChannelMsg, PacketMsg};
-use crate::ibc::core::ics04_channel::packet::{Receipt, Sequence};
-use crate::ibc::core::ics05_port::capabilities::{
-    Capability, ChannelCapability,
-};
-use crate::ibc::core::ics05_port::context::PortReader;
-use crate::ibc::core::ics24_host::identifier::{
-    ChannelId, ClientId, ConnectionId, PortChannelId, PortId,
-};
-use crate::ibc::core::ics26_routing::context::ModuleId;
-use crate::ibc::core::ics26_routing::msgs::Ics26Envelope;
-use crate::ibc::proofs::Proofs;
-use crate::ibc::timestamp::Timestamp;
 use crate::ledger::native_vp::{Error as NativeVpError, VpEnv};
-use crate::ledger::parameters;
-use crate::ledger::storage::{self as ledger_storage, StorageHasher};
-use crate::tendermint::Time;
-use crate::tendermint_proto::Protobuf;
-use crate::types::ibc::data::{
-    Error as IbcDataError, IbcMessage, PacketReceipt,
-};
-use crate::types::storage::Key;
 use crate::vm::WasmCacheAccess;
 
 #[allow(missing_docs)]
diff --git a/shared/src/ledger/ibc/vp/client.rs b/shared/src/ledger/ibc/vp/client.rs
index 05ac3c3528..40807673f1 100644
--- a/shared/src/ledger/ibc/vp/client.rs
+++ b/shared/src/ledger/ibc/vp/client.rs
@@ -2,12 +2,12 @@
 use std::convert::TryInto;
 use std::str::FromStr;
 
-use thiserror::Error;
-
-use super::super::handler::{
+use namada_core::ledger::ibc::actions::{
     make_create_client_event, make_update_client_event,
     make_upgrade_client_event,
 };
+use thiserror::Error;
+
 use super::super::storage::{
     client_counter_key, client_state_key, client_type_key,
     client_update_height_key, client_update_timestamp_key, consensus_height,
diff --git a/shared/src/ledger/ibc/vp/connection.rs b/shared/src/ledger/ibc/vp/connection.rs
index 0130dd3b84..5f8df9a8a8 100644
--- a/shared/src/ledger/ibc/vp/connection.rs
+++ b/shared/src/ledger/ibc/vp/connection.rs
@@ -1,12 +1,12 @@
 //! IBC validity predicate for connection module
 
-use thiserror::Error;
-
-use super::super::handler::{
+use namada_core::ledger::ibc::actions::{
     commitment_prefix, make_open_ack_connection_event,
     make_open_confirm_connection_event, make_open_init_connection_event,
     make_open_try_connection_event,
 };
+use thiserror::Error;
+
 use super::super::storage::{
     connection_counter_key, connection_id, connection_key,
     is_connection_counter_key, Error as IbcStorageError,
diff --git a/shared/src/ledger/ibc/vp/mod.rs b/shared/src/ledger/ibc/vp/mod.rs
index 874a697b12..3704f6131b 100644
--- a/shared/src/ledger/ibc/vp/mod.rs
+++ b/shared/src/ledger/ibc/vp/mod.rs
@@ -12,18 +12,20 @@ mod token;
 use std::collections::{BTreeSet, HashSet};
 
 use borsh::BorshDeserialize;
+use ibc::core::ics02_client::context::ClientReader;
+use ibc::events::IbcEvent;
+use namada_core::ledger::ibc::storage::{
+    client_id, ibc_prefix, is_client_counter_key, IbcPrefix,
+};
+use namada_core::ledger::storage::{self as ledger_storage, StorageHasher};
+use namada_core::proto::SignedTxData;
+use namada_core::types::address::{Address, InternalAddress};
+use namada_core::types::ibc::IbcEvent as WrappedIbcEvent;
+use namada_core::types::storage::Key;
 use thiserror::Error;
 pub use token::{Error as IbcTokenError, IbcToken};
 
-use super::storage::{client_id, ibc_prefix, is_client_counter_key, IbcPrefix};
-use crate::ibc::core::ics02_client::context::ClientReader;
-use crate::ibc::events::IbcEvent;
 use crate::ledger::native_vp::{self, Ctx, NativeVp, VpEnv};
-use crate::ledger::storage::{self as ledger_storage, StorageHasher};
-use crate::proto::SignedTxData;
-use crate::types::address::{Address, InternalAddress};
-use crate::types::ibc::IbcEvent as WrappedIbcEvent;
-use crate::types::storage::Key;
 use crate::vm::WasmCacheAccess;
 
 #[allow(missing_docs)]
@@ -358,7 +360,7 @@ mod tests {
     use crate::tendermint_proto::Protobuf;
 
     use super::get_dummy_header;
-    use super::super::handler::{
+    use namada_core::ledger::ibc::actions::{
         self, commitment_prefix, init_connection, make_create_client_event,
         make_open_ack_channel_event, make_open_ack_connection_event,
         make_open_confirm_channel_event, make_open_confirm_connection_event,
diff --git a/shared/src/ledger/ibc/vp/packet.rs b/shared/src/ledger/ibc/vp/packet.rs
index 6a815dcdc3..7eecf92472 100644
--- a/shared/src/ledger/ibc/vp/packet.rs
+++ b/shared/src/ledger/ibc/vp/packet.rs
@@ -1,14 +1,18 @@
 //! IBC validity predicate for packets
 
-use thiserror::Error;
-
-use super::super::handler::{
+use namada_core::ledger::ibc::actions::{
     self, make_send_packet_event, make_timeout_event, packet_from_message,
 };
-use super::super::storage::{
+use namada_core::ledger::ibc::data::{Error as IbcDataError, IbcMessage};
+use namada_core::ledger::ibc::storage::{
     ibc_denom_key, port_channel_sequence_id, token_hash_from_denom,
     Error as IbcStorageError,
 };
+use namada_core::types::ibc::data::{
+    Error as IbcDataError, FungibleTokenPacketData, IbcMessage,
+};
+use thiserror::Error;
+
 use super::{Ibc, StateChange};
 use crate::ibc::core::ics02_client::height::Height;
 use crate::ibc::core::ics04_channel::channel::{
@@ -35,9 +39,6 @@ use crate::ibc::core::ics26_routing::msgs::Ics26Envelope;
 use crate::ibc::proofs::Proofs;
 use crate::ledger::native_vp::VpEnv;
 use crate::ledger::storage::{self, StorageHasher};
-use crate::types::ibc::data::{
-    Error as IbcDataError, FungibleTokenPacketData, IbcMessage,
-};
 use crate::types::storage::Key;
 use crate::vm::WasmCacheAccess;
 
@@ -446,7 +447,7 @@ where
         packet: &Packet,
         commitment: PacketCommitment,
     ) -> Result<()> {
-        if commitment == handler::commitment(packet) {
+        if commitment == namada_core::ledger::ibc::actions::commitment(packet) {
             Ok(())
         } else {
             Err(Error::InvalidPacket(
diff --git a/shared/src/ledger/ibc/vp/sequence.rs b/shared/src/ledger/ibc/vp/sequence.rs
index 0e751ea0de..dfefaab5b9 100644
--- a/shared/src/ledger/ibc/vp/sequence.rs
+++ b/shared/src/ledger/ibc/vp/sequence.rs
@@ -1,5 +1,6 @@
 //! IBC validity predicate for sequences
 
+use namada_core::ledger::ibc::actions::packet_from_message;
 use thiserror::Error;
 
 use super::super::storage::{port_channel_id, Error as IbcStorageError};
@@ -7,7 +8,6 @@ use super::Ibc;
 use crate::ibc::core::ics04_channel::channel::Order;
 use crate::ibc::core::ics04_channel::context::ChannelReader;
 use crate::ibc::core::ics24_host::identifier::PortChannelId;
-use crate::ledger::ibc::handler::packet_from_message;
 use crate::ledger::storage::{self as ledger_storage, StorageHasher};
 use crate::types::ibc::data::{Error as IbcDataError, IbcMessage};
 use crate::types::storage::Key;
diff --git a/shared/src/ledger/mod.rs b/shared/src/ledger/mod.rs
index 8ae3f7cf0f..1eaee2b714 100644
--- a/shared/src/ledger/mod.rs
+++ b/shared/src/ledger/mod.rs
@@ -2,18 +2,15 @@
 
 pub mod eth_bridge;
 pub mod events;
-pub mod gas;
-pub mod governance;
 pub mod ibc;
 pub mod masp;
 pub mod native_vp;
-pub mod parameters;
 pub mod pos;
 #[cfg(all(feature = "wasm-runtime", feature = "ferveo-tpke"))]
 pub mod protocol;
 pub mod queries;
 pub mod slash_fund;
-pub mod storage;
-pub mod storage_api;
-pub mod tx_env;
-pub mod vp_env;
+
+pub use namada_core::ledger::{
+    gas, governance, parameters, storage, storage_api, tx_env, vp_env,
+};
diff --git a/shared/src/ledger/governance/mod.rs b/shared/src/ledger/native_vp/governance/mod.rs
similarity index 99%
rename from shared/src/ledger/governance/mod.rs
rename to shared/src/ledger/native_vp/governance/mod.rs
index 88a806bc39..7ba1478ff3 100644
--- a/shared/src/ledger/governance/mod.rs
+++ b/shared/src/ledger/native_vp/governance/mod.rs
@@ -1,21 +1,18 @@
 //! Governance VP
 
-/// governance parameters
-pub mod parameters;
-/// governance storage
-pub mod storage;
-/// utility function
+/// utility functions
 pub mod utils;
 
 use std::collections::BTreeSet;
 
+use namada_core::ledger::native_vp;
+use namada_core::ledger::vp_env::VpEnv;
+pub use namada_core::ledger::{parameters, storage};
 use thiserror::Error;
+use utils::is_valid_validator_voting_period;
 
 use self::storage as gov_storage;
-use self::utils::is_valid_validator_voting_period;
-use super::native_vp;
 use super::storage_api::StorageRead;
-use super::vp_env::VpEnv;
 use crate::ledger::native_vp::{Ctx, NativeVp};
 use crate::ledger::pos::{self as pos_storage, BondId, Bonds};
 use crate::ledger::storage::{self as ledger_storage, StorageHasher};
diff --git a/shared/src/ledger/governance/utils.rs b/shared/src/ledger/native_vp/governance/utils.rs
similarity index 100%
rename from shared/src/ledger/governance/utils.rs
rename to shared/src/ledger/native_vp/governance/utils.rs
diff --git a/shared/src/ledger/native_vp.rs b/shared/src/ledger/native_vp/mod.rs
similarity index 99%
rename from shared/src/ledger/native_vp.rs
rename to shared/src/ledger/native_vp/mod.rs
index 149ae0d3e5..1755c52241 100644
--- a/shared/src/ledger/native_vp.rs
+++ b/shared/src/ledger/native_vp/mod.rs
@@ -1,5 +1,9 @@
 //! Native validity predicate interface associated with internal accounts such
 //! as the PoS and IBC modules.
+
+pub mod parameters;
+pub mod governance;
+
 use std::cell::RefCell;
 use std::collections::BTreeSet;
 
diff --git a/shared/src/ledger/native_vp/parameters.rs b/shared/src/ledger/native_vp/parameters.rs
new file mode 100644
index 0000000000..1f05baa7eb
--- /dev/null
+++ b/shared/src/ledger/native_vp/parameters.rs
@@ -0,0 +1,101 @@
+//! Native VP for protocol parameters
+
+use std::collections::BTreeSet;
+
+use borsh::BorshDeserialize;
+use namada_core::ledger::storage;
+use namada_core::types::address::{Address, InternalAddress};
+use namada_core::types::storage::Key;
+use thiserror::Error;
+
+use crate::ledger::governance::vp::is_proposal_accepted;
+use crate::ledger::native_vp::{self, Ctx, NativeVp};
+use crate::vm::WasmCacheAccess;
+
+#[allow(missing_docs)]
+#[derive(Error, Debug)]
+pub enum Error {
+    #[error("Native VP error: {0}")]
+    NativeVpError(native_vp::Error),
+}
+
+/// Parameters functions result
+pub type Result<T> = std::result::Result<T, Error>;
+
+/// Parameters VP
+pub struct ParametersVp<'a, DB, H, CA>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: storage::StorageHasher,
+    CA: WasmCacheAccess,
+{
+    /// Context to interact with the host structures.
+    pub ctx: Ctx<'a, DB, H, CA>,
+}
+
+impl<'a, DB, H, CA> NativeVp for ParametersVp<'a, DB, H, CA>
+where
+    DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: 'static + storage::StorageHasher,
+    CA: 'static + WasmCacheAccess,
+{
+    type Error = Error;
+
+    const ADDR: InternalAddress = InternalAddress::Parameters;
+
+    fn validate_tx(
+        &self,
+        tx_data: &[u8],
+        keys_changed: &BTreeSet<Key>,
+        _verifiers: &BTreeSet<Address>,
+    ) -> Result<bool> {
+        let result = keys_changed.iter().all(|key| {
+            let key_type: KeyType = key.into();
+            match key_type {
+                KeyType::PARAMETER => {
+                    let proposal_id = u64::try_from_slice(tx_data).ok();
+                    match proposal_id {
+                        Some(id) => is_proposal_accepted(&self.ctx, id),
+                        _ => false,
+                    }
+                }
+                KeyType::UNKNOWN_PARAMETER => false,
+                KeyType::UNKNOWN => true,
+            }
+        });
+        Ok(result)
+    }
+}
+
+impl From<native_vp::Error> for Error {
+    fn from(err: native_vp::Error) -> Self {
+        Self::NativeVpError(err)
+    }
+}
+
+#[allow(clippy::upper_case_acronyms)]
+enum KeyType {
+    #[allow(clippy::upper_case_acronyms)]
+    PARAMETER,
+    #[allow(clippy::upper_case_acronyms)]
+    #[allow(non_camel_case_types)]
+    UNKNOWN_PARAMETER,
+    #[allow(clippy::upper_case_acronyms)]
+    UNKNOWN,
+}
+
+impl From<&Key> for KeyType {
+    fn from(value: &Key) -> Self {
+        if namada_core::ledger::parameters::storage::is_protocol_parameter_key(
+            value,
+        ) {
+            KeyType::PARAMETER
+        } else if namada_core::ledger::parameters::storage::is_parameter_key(
+            value,
+        ) {
+            KeyType::UNKNOWN_PARAMETER
+        } else {
+            KeyType::UNKNOWN
+        }
+    }
+}
diff --git a/shared/src/ledger/pos/mod.rs b/shared/src/ledger/pos/mod.rs
index 7d062cfafd..a6900dd172 100644
--- a/shared/src/ledger/pos/mod.rs
+++ b/shared/src/ledger/pos/mod.rs
@@ -82,56 +82,58 @@ pub type GenesisValidator = namada_proof_of_stake::types::GenesisValidator<
 
 /// Alias for a PoS type with the same name with concrete type parameters
 pub type CommissionRates = namada_proof_of_stake::types::CommissionRates;
+
 /// Alias for a PoS type with the same name with concrete type parameters
 pub type TotalDeltas = namada_proof_of_stake::types::TotalDeltas<token::Change>;
 
-impl From<Epoch> for namada_proof_of_stake::types::Epoch {
-    fn from(epoch: Epoch) -> Self {
-        let epoch: u64 = epoch.into();
-        namada_proof_of_stake::types::Epoch::from(epoch)
-    }
-}
-
-impl From<namada_proof_of_stake::types::Epoch> for Epoch {
-    fn from(epoch: namada_proof_of_stake::types::Epoch) -> Self {
-        let epoch: u64 = epoch.into();
-        Epoch(epoch)
-    }
-}
+// TODO pos will depend on core, so it will be able to use the `Epoch` type directly
+// impl From<Epoch> for namada_proof_of_stake::types::Epoch {
+//     fn from(epoch: Epoch) -> Self {
+//         let epoch: u64 = epoch.into();
+//         namada_proof_of_stake::types::Epoch::from(epoch)
+//     }
+// }
+
+// impl From<namada_proof_of_stake::types::Epoch> for Epoch {
+//     fn from(epoch: namada_proof_of_stake::types::Epoch) -> Self {
+//         let epoch: u64 = epoch.into();
+//         Epoch(epoch)
+//     }
+// }
 
 // The error conversions are needed to implement `PosActions` in
 // `tx_prelude/src/proof_of_stake.rs`
-impl From<namada_proof_of_stake::BecomeValidatorError<Address>>
-    for storage_api::Error
-{
-    fn from(err: namada_proof_of_stake::BecomeValidatorError<Address>) -> Self {
-        Self::new(err)
-    }
-}
-
-impl From<namada_proof_of_stake::BondError<Address>> for storage_api::Error {
-    fn from(err: namada_proof_of_stake::BondError<Address>) -> Self {
-        Self::new(err)
-    }
-}
-
-impl From<namada_proof_of_stake::UnbondError<Address, token::Amount>>
-    for storage_api::Error
-{
-    fn from(
-        err: namada_proof_of_stake::UnbondError<Address, token::Amount>,
-    ) -> Self {
-        Self::new(err)
-    }
-}
-
-impl From<namada_proof_of_stake::WithdrawError<Address>>
-    for storage_api::Error
-{
-    fn from(err: namada_proof_of_stake::WithdrawError<Address>) -> Self {
-        Self::new(err)
-    }
-}
+// impl From<namada_proof_of_stake::BecomeValidatorError<Address>>
+//     for storage_api::Error
+// {
+//     fn from(err: namada_proof_of_stake::BecomeValidatorError<Address>) ->
+// Self {         Self::new(err)
+//     }
+// }
+
+// impl From<namada_proof_of_stake::BondError<Address>> for storage_api::Error {
+//     fn from(err: namada_proof_of_stake::BondError<Address>) -> Self {
+//         Self::new(err)
+//     }
+// }
+
+// impl From<namada_proof_of_stake::UnbondError<Address, token::Amount>>
+//     for storage_api::Error
+// {
+//     fn from(
+//         err: namada_proof_of_stake::UnbondError<Address, token::Amount>,
+//     ) -> Self {
+//         Self::new(err)
+//     }
+// }
+
+// impl From<namada_proof_of_stake::WithdrawError<Address>>
+//     for storage_api::Error
+// {
+//     fn from(err: namada_proof_of_stake::WithdrawError<Address>) -> Self {
+//         Self::new(err)
+//     }
+// }
 
 impl From<namada_proof_of_stake::CommissionRateChangeError<Address>>
     for storage_api::Error
diff --git a/shared/src/ledger/pos/vp.rs b/shared/src/ledger/pos/vp.rs
index 47addfa858..1f3faaf9c5 100644
--- a/shared/src/ledger/pos/vp.rs
+++ b/shared/src/ledger/pos/vp.rs
@@ -9,7 +9,7 @@ pub use namada_proof_of_stake;
 pub use namada_proof_of_stake::parameters::PosParams;
 pub use namada_proof_of_stake::types::{self, Slash, Slashes, ValidatorStates};
 use namada_proof_of_stake::validation::validate;
-use namada_proof_of_stake::{validation, PosReadOnly};
+use namada_proof_of_stake::{impl_pos_read_only, validation, PosReadOnly};
 use rust_decimal::Decimal;
 use thiserror::Error;
 
diff --git a/shared/src/ledger/protocol/mod.rs b/shared/src/ledger/protocol/mod.rs
index fb596e4b92..9041725738 100644
--- a/shared/src/ledger/protocol/mod.rs
+++ b/shared/src/ledger/protocol/mod.rs
@@ -9,8 +9,8 @@ use crate::ledger::eth_bridge::vp::EthBridge;
 use crate::ledger::gas::{self, BlockGasMeter, VpGasMeter};
 use crate::ledger::governance::GovernanceVp;
 use crate::ledger::ibc::vp::{Ibc, IbcToken};
+use crate::ledger::native_vp::parameters::{self, ParametersVp};
 use crate::ledger::native_vp::{self, NativeVp};
-use crate::ledger::parameters::{self, ParametersVp};
 use crate::ledger::pos::{self, PosVP};
 use crate::ledger::slash_fund::SlashFundVp;
 use crate::ledger::storage::write_log::WriteLog;
diff --git a/shared/src/ledger/queries/shell.rs b/shared/src/ledger/queries/shell.rs
index 4662381499..766f88c5a1 100644
--- a/shared/src/ledger/queries/shell.rs
+++ b/shared/src/ledger/queries/shell.rs
@@ -1,7 +1,10 @@
-use borsh::{BorshDeserialize, BorshSerialize};
+use borsh::BorshSerialize;
 use masp_primitives::asset_type::AssetType;
 use masp_primitives::merkle_tree::MerklePath;
 use masp_primitives::sapling::Node;
+use namada_core::ledger::storage::merkle_tree;
+use prost::Message;
+use tendermint_proto::crypto::{ProofOp, ProofOps};
 
 use crate::ledger::events::log::dumb_queries;
 use crate::ledger::events::Event;
@@ -446,3 +449,38 @@ mod test {
         Ok(())
     }
 }
+
+/// Convert merkle tree proof to tendermint proof
+fn proof_to_tm_proof(
+    merkle_tree::Proof {
+        key,
+        sub_proof,
+        base_proof,
+    }: merkle_tree::Proof,
+) -> tendermint::merkle::proof::Proof {
+    use crate::tendermint::merkle::proof::{Proof, ProofOp};
+    let mut data = vec![];
+    sub_proof
+        .encode(&mut data)
+        .expect("Encoding proof shouldn't fail");
+    let sub_proof_op = ProofOp {
+        field_type: "ics23_CommitmentProof".to_string(),
+        key: key.to_string().as_bytes().to_vec(),
+        data,
+    };
+
+    let mut data = vec![];
+    base_proof
+        .encode(&mut data)
+        .expect("Encoding proof shouldn't fail");
+    let base_proof_op = ProofOp {
+        field_type: "ics23_CommitmentProof".to_string(),
+        key: key.to_string().as_bytes().to_vec(),
+        data,
+    };
+
+    // Set ProofOps from leaf to root
+    Proof {
+        ops: vec![sub_proof_op, base_proof_op],
+    }
+}
diff --git a/shared/src/lib.rs b/shared/src/lib.rs
index 1d34093f68..f37cb9e867 100644
--- a/shared/src/lib.rs
+++ b/shared/src/lib.rs
@@ -19,9 +19,8 @@ pub use {
     tendermint_proto_abcipp as tendermint_proto,
 };
 
-pub mod bytes;
 pub mod ledger;
-pub mod proto;
+pub use namada_core::proto;
 pub mod types;
 pub mod vm;
 
diff --git a/shared/src/types/dylib.rs b/shared/src/types/dylib.rs
deleted file mode 100644
index d8dcc7eaae..0000000000
--- a/shared/src/types/dylib.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-//! Dynamic library helpers
-
-/// The file extension for dynamic library for this target
-#[allow(dead_code)]
-#[cfg(windows)]
-pub const FILE_EXT: &str = "dll";
-
-/// The file extension for dynamic library for this target
-#[allow(dead_code)]
-#[cfg(target_os = "macos")]
-pub const FILE_EXT: &str = "dylib";
-
-/// The file extension for dynamic library for this target
-#[allow(dead_code)]
-#[cfg(all(unix, not(target_os = "macos")))]
-pub const FILE_EXT: &str = "so";
diff --git a/shared/src/types/ibc/mod.rs b/shared/src/types/ibc/mod.rs
index af1172ab02..b83b2b5f07 100644
--- a/shared/src/types/ibc/mod.rs
+++ b/shared/src/types/ibc/mod.rs
@@ -1,6 +1,4 @@
 //! Types that are used in IBC.
 
-pub mod data;
-pub mod event;
-
-pub use event::*;
+pub use namada_core::ledger::ibc::data;
+pub use namada_core::types::ibc::*;
diff --git a/shared/src/types/mod.rs b/shared/src/types/mod.rs
index d06c8edfe7..1b73329efe 100644
--- a/shared/src/types/mod.rs
+++ b/shared/src/types/mod.rs
@@ -1,16 +1,9 @@
 //! Types definitions.
 
-pub mod address;
-pub mod chain;
-pub mod dylib;
-pub mod governance;
-pub mod hash;
 pub mod ibc;
-pub mod internal;
 pub mod key;
-pub mod masp;
-pub mod storage;
-pub mod time;
-pub mod token;
-pub mod transaction;
-pub mod validity_predicate;
+
+pub use namada_core::types::{
+    address, chain, governance, hash, internal, masp, storage, time, token,
+    transaction, validity_predicate,
+};

From e71718cd89d8a7eaa5e01b497ae55002ff8e9a4e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Zemanovi=C4=8D?= <tomas@heliax.dev>
Date: Fri, 4 Nov 2022 12:47:47 +0100
Subject: [PATCH 2/9] update paths for core split and apply many small fixes

---
 Cargo.lock                                    |  47 ++-
 Makefile                                      |   3 +-
 apps/Cargo.toml                               |  10 +-
 apps/src/lib/client/rpc.rs                    |  11 +-
 apps/src/lib/client/tx.rs                     |   4 +-
 apps/src/lib/node/ledger/rpc.rs               | 123 ------
 apps/src/lib/node/ledger/shell/governance.rs  |   9 +-
 apps/src/lib/node/ledger/storage/rocksdb.rs   |   4 +-
 core/Cargo.toml                               |  52 ++-
 core/build.rs                                 |   2 +-
 core/src/ledger/governance/mod.rs             |   5 +
 core/src/ledger/ibc/actions.rs                |   8 +-
 core/src/ledger/ibc/mod.rs                    |   3 +
 core/src/ledger/mod.rs                        |   3 +-
 core/src/ledger/parameters/mod.rs             | 131 +-----
 core/src/ledger/slash_fund/mod.rs             |   8 +
 .../src/ledger/slash_fund/storage.rs          |   5 +-
 core/src/ledger/storage/merkle_tree.rs        | 155 ++++---
 core/src/ledger/storage/mockdb.rs             |   2 +-
 core/src/ledger/storage/mod.rs                |  64 +--
 core/src/ledger/storage/traits.rs             |   1 -
 core/src/ledger/vp_env.rs                     | 373 -----------------
 core/src/lib.rs                               |  15 +
 core/src/proto/types.rs                       |   8 +-
 core/src/types/governance.rs                  |  26 --
 core/src/types/hash.rs                        |  93 +++--
 core/src/types/ibc.rs                         |  63 +--
 core/src/types/internal.rs                    |  49 +++
 core/src/types/key/common.rs                  |  14 +-
 core/src/types/key/mod.rs                     |  20 +-
 core/src/types/mod.rs                         |   3 +
 core/src/types/storage.rs                     | 180 ++++++---
 core/src/types/time.rs                        |  92 +++--
 core/src/types/token.rs                       |   9 +-
 core/src/types/transaction/decrypted.rs       |   1 +
 core/src/types/transaction/governance.rs      |   3 +-
 core/src/types/transaction/mod.rs             |   8 +-
 proof_of_stake/Cargo.toml                     |   6 +-
 proof_of_stake/src/lib.rs                     |  71 +++-
 proof_of_stake/src/storage.rs                 | 200 ++++++++-
 proof_of_stake/src/types.rs                   | 114 +-----
 proof_of_stake/src/validation.rs              |   7 +-
 shared/Cargo.toml                             |  74 +---
 shared/src/ledger/events.rs                   |   6 +-
 shared/src/ledger/ibc/mod.rs                  |   2 +-
 shared/src/ledger/ibc/vp/channel.rs           |  66 +--
 shared/src/ledger/ibc/vp/mod.rs               |  10 +-
 shared/src/ledger/ibc/vp/packet.rs            |   9 +-
 shared/src/ledger/mod.rs                      |   5 +-
 shared/src/ledger/native_vp/governance/mod.rs |  61 ++-
 .../src/ledger/native_vp/governance/utils.rs  |   2 +
 shared/src/ledger/native_vp/mod.rs            | 106 +++--
 shared/src/ledger/native_vp/parameters.rs     |  15 +-
 .../mod.rs => native_vp/slash_fund.rs}        |  17 +-
 shared/src/ledger/pos/mod.rs                  | 206 +---------
 shared/src/ledger/pos/vp.rs                   |   7 +-
 shared/src/ledger/protocol/mod.rs             |   8 +-
 shared/src/ledger/queries/mod.rs              |   9 +-
 shared/src/ledger/queries/shell.rs            |  49 +--
 shared/src/ledger/storage/mod.rs              |   7 +
 shared/src/ledger/storage/write_log.rs        |   4 +-
 shared/src/ledger/storage_api.rs              |   3 +
 shared/src/ledger/vp_host_fns.rs              | 381 ++++++++++++++++++
 shared/src/lib.rs                             |   8 +-
 shared/src/types/key/mod.rs                   |   3 +
 shared/src/vm/host_env.rs                     | 194 ++++-----
 shared/src/vm/types.rs                        |  15 +-
 tests/src/e2e/ibc_tests.rs                    |   2 +-
 tests/src/native_vp/pos.rs                    |  38 +-
 tests/src/vm_host_env/ibc.rs                  |   2 +-
 tests/src/vm_host_env/mod.rs                  |   2 +-
 tx_prelude/Cargo.toml                         |   5 +-
 tx_prelude/src/governance.rs                  |   6 +-
 tx_prelude/src/ibc.rs                         |  14 +-
 tx_prelude/src/key.rs                         |   2 +-
 tx_prelude/src/lib.rs                         |  64 ++-
 tx_prelude/src/proof_of_stake.rs              |  17 +-
 tx_prelude/src/token.rs                       |  16 +-
 vm_env/Cargo.toml                             |  12 +-
 vm_env/src/lib.rs                             |   3 +-
 vp_prelude/Cargo.toml                         |   5 +-
 vp_prelude/src/key.rs                         |   4 +-
 vp_prelude/src/lib.rs                         |  25 +-
 vp_prelude/src/token.rs                       |  12 +-
 wasm/Cargo.lock                               |  82 +++-
 wasm/wasm_source/src/tx_bond.rs               |   5 +-
 .../src/tx_change_validator_commission.rs     |   4 +-
 wasm/wasm_source/src/tx_unbond.rs             |   5 +-
 wasm/wasm_source/src/tx_withdraw.rs           |   3 +-
 wasm_for_tests/wasm_source/Cargo.lock         |  82 +++-
 90 files changed, 1831 insertions(+), 1851 deletions(-)
 delete mode 100644 apps/src/lib/node/ledger/rpc.rs
 create mode 100644 core/src/ledger/slash_fund/mod.rs
 rename {shared => core}/src/ledger/slash_fund/storage.rs (80%)
 rename shared/src/ledger/{slash_fund/mod.rs => native_vp/slash_fund.rs} (87%)
 create mode 100644 shared/src/ledger/storage/mod.rs
 create mode 100644 shared/src/ledger/storage_api.rs
 create mode 100644 shared/src/ledger/vp_host_fns.rs
 create mode 100644 shared/src/types/key/mod.rs

diff --git a/Cargo.lock b/Cargo.lock
index 6b7ca3cb7c..79a4079e7b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3628,32 +3628,20 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 name = "namada"
 version = "0.10.1"
 dependencies = [
- "ark-bls12-381",
- "ark-ec",
- "ark-serialize",
  "assert_matches",
  "async-trait",
- "bech32",
  "bellman",
- "bit-vec",
  "bls12_381",
  "borsh",
  "byte-unit",
- "chrono",
  "circular-queue",
  "clru",
  "data-encoding",
  "derivative",
- "ed25519-consensus",
- "ferveo",
- "ferveo-common",
- "group-threshold-cryptography",
- "hex",
  "ibc 0.14.0 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)",
  "ibc 0.14.0 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)",
  "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)",
  "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)",
- "ics23",
  "itertools",
  "libsecp256k1",
  "loupe",
@@ -3666,17 +3654,11 @@ dependencies = [
  "pretty_assertions",
  "proptest",
  "prost",
- "prost-types",
  "pwasm-utils",
- "rand 0.8.5",
- "rand_core 0.6.4",
  "rayon",
  "rust_decimal",
- "rust_decimal_macros",
- "serde 1.0.147",
  "serde_json",
  "sha2 0.9.9",
- "sparse-merkle-tree",
  "tempfile",
  "tendermint 0.23.5",
  "tendermint 0.23.6",
@@ -3687,7 +3669,6 @@ dependencies = [
  "test-log",
  "thiserror",
  "tokio",
- "tonic-build",
  "tracing 0.1.37",
  "tracing-subscriber 0.3.16",
  "wasmer",
@@ -3790,28 +3771,46 @@ dependencies = [
 name = "namada_core"
 version = "0.9.0"
 dependencies = [
+ "ark-bls12-381",
  "ark-ec",
+ "ark-serialize",
  "assert_matches",
  "bech32",
+ "bellman",
+ "bit-vec",
  "borsh",
  "chrono",
  "data-encoding",
  "derivative",
  "ed25519-consensus",
  "ferveo",
+ "ferveo-common",
  "group-threshold-cryptography",
+ "ibc 0.14.0 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)",
+ "ibc 0.14.0 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)",
+ "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)",
+ "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)",
  "ics23",
  "itertools",
  "libsecp256k1",
+ "masp_primitives",
  "pretty_assertions",
  "proptest",
+ "prost",
+ "prost-types",
  "rand 0.8.5",
  "rand_core 0.6.4",
+ "rayon",
  "rust_decimal",
+ "rust_decimal_macros",
  "serde 1.0.147",
  "serde_json",
  "sha2 0.9.9",
  "sparse-merkle-tree",
+ "tendermint 0.23.5",
+ "tendermint 0.23.6",
+ "tendermint-proto 0.23.5",
+ "tendermint-proto 0.23.6",
  "test-log",
  "thiserror",
  "tonic-build",
@@ -3845,10 +3844,12 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "derivative",
+ "namada_core",
  "proptest",
  "rust_decimal",
  "rust_decimal_macros",
  "thiserror",
+ "tracing 0.1.37",
 ]
 
 [[package]]
@@ -3901,8 +3902,9 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "masp_primitives",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "rust_decimal",
  "sha2 0.10.6",
@@ -3917,7 +3919,7 @@ dependencies = [
  "hex",
  "masp_primitives",
  "masp_proofs",
- "namada",
+ "namada_core",
 ]
 
 [[package]]
@@ -3925,8 +3927,9 @@ name = "namada_vp_prelude"
 version = "0.10.1"
 dependencies = [
  "borsh",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "sha2 0.10.6",
  "thiserror",
diff --git a/Makefile b/Makefile
index fabcdb61d3..fdcf707a94 100644
--- a/Makefile
+++ b/Makefile
@@ -72,8 +72,7 @@ clippy-abcipp:
 	$(cargo) +$(nightly) clippy \
 		--all-targets \
 		--manifest-path ./vm_env/Cargo.toml \
-		--no-default-features \
-		--features "abcipp" && \
+		--no-default-features && \
 	make -C $(wasms) clippy && \
 	$(foreach wasm,$(wasm_templates),$(clippy-wasm) && ) true
 
diff --git a/apps/Cargo.toml b/apps/Cargo.toml
index 6068bf347f..896168c63f 100644
--- a/apps/Cargo.toml
+++ b/apps/Cargo.toml
@@ -47,25 +47,27 @@ std = ["ed25519-consensus/std", "rand/std", "rand_core/std"]
 testing = ["dev"]
 
 abcipp = [
+    "namada/abcipp",
+    "namada/tendermint-rpc-abcipp",
     "tendermint-abcipp",
     "tendermint-config-abcipp",
     "tendermint-proto-abcipp",
     "tendermint-rpc-abcipp",
     "tower-abci-abcipp",
-    "namada/abcipp"
 ]
 
 abciplus = [
+    "namada/abciplus",
+    "namada/tendermint-rpc",
     "tendermint",
     "tendermint-config",
     "tendermint-rpc",
     "tendermint-proto",
     "tower-abci",
-    "namada/abciplus"
 ]
 
 [dependencies]
-namada = {path = "../shared", features = ["wasm-runtime", "ferveo-tpke", "rand", "tendermint-rpc", "secp256k1-sign-verify"]}
+namada = {path = "../shared", default-features = false, features = ["wasm-runtime", "ferveo-tpke"]}
 ark-serialize = "0.3.0"
 ark-std = "0.3.0"
 # branch = "bat/arse-merkle-tree"
@@ -148,7 +150,7 @@ rust_decimal = "1.26.1"
 rust_decimal_macros = "1.26.1"
 
 [dev-dependencies]
-namada = {path = "../shared", features = ["testing", "wasm-runtime"]}
+namada = {path = "../shared", default-features = false, features = ["testing", "wasm-runtime"]}
 bit-set = "0.5.2"
 # A fork with state machime testing
 proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"}
diff --git a/apps/src/lib/client/rpc.rs b/apps/src/lib/client/rpc.rs
index 9fa4fc9ee0..b496c0fe4d 100644
--- a/apps/src/lib/client/rpc.rs
+++ b/apps/src/lib/client/rpc.rs
@@ -25,7 +25,7 @@ use masp_primitives::zip32::ExtendedFullViewingKey;
 use namada::ledger::events::Event;
 use namada::ledger::governance::parameters::GovParams;
 use namada::ledger::governance::storage as gov_storage;
-use namada::ledger::governance::utils::Votes;
+use namada::ledger::native_vp::governance::utils::Votes;
 use namada::ledger::parameters::{storage as param_storage, EpochDuration};
 use namada::ledger::pos::types::{
     decimal_mult_u64, Epoch as PosEpoch, WeightedValidator,
@@ -2065,8 +2065,7 @@ fn process_bonds_query(
                 .unwrap();
             delta = apply_slashes(slashes, delta, *epoch_start, None, Some(w));
             current_total += delta;
-            let epoch_start: Epoch = (*epoch_start).into();
-            if epoch >= &epoch_start {
+            if epoch >= epoch_start {
                 total_active += delta;
             }
         }
@@ -2121,8 +2120,7 @@ fn process_unbonds_query(
                 Some(w),
             );
             current_total += delta;
-            let epoch_end: Epoch = (*epoch_end).into();
-            if epoch > &epoch_end {
+            if epoch > epoch_end {
                 withdrawable += delta;
             }
         }
@@ -2800,8 +2798,7 @@ pub async fn get_bond_amount_at(
                         None,
                         None,
                     );
-                    let epoch_start: Epoch = (*epoch_start).into();
-                    if epoch >= epoch_start {
+                    if epoch >= *epoch_start {
                         delegated_amount += delta;
                     }
                 }
diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs
index 65a0fac3f5..bb667cabec 100644
--- a/apps/src/lib/client/tx.rs
+++ b/apps/src/lib/client/tx.rs
@@ -2179,7 +2179,7 @@ async fn is_safe_voting_window(
 
     match proposal_end_epoch {
         Some(proposal_end_epoch) => {
-            !namada::ledger::governance::utils::is_valid_validator_voting_period(
+            !namada::ledger::native_vp::governance::utils::is_valid_validator_voting_period(
                 current_epoch,
                 proposal_start_epoch,
                 proposal_end_epoch,
@@ -2486,7 +2486,7 @@ pub async fn submit_validator_commission_change(
         match (commission_rates, max_change) {
             (Some(rates), Some(max_change)) => {
                 // Assuming that pipeline length = 2
-                let rate_next_epoch = rates.get(epoch + 1).unwrap();
+                let rate_next_epoch = rates.get(epoch.next()).unwrap();
                 if (args.rate - rate_next_epoch).abs() > max_change {
                     eprintln!(
                         "New rate is too large of a change with respect to \
diff --git a/apps/src/lib/node/ledger/rpc.rs b/apps/src/lib/node/ledger/rpc.rs
deleted file mode 100644
index b7a1ebcfad..0000000000
--- a/apps/src/lib/node/ledger/rpc.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-//! RPC endpoint is used for ledger state queries
-
-use std::fmt::Display;
-use std::str::FromStr;
-
-use masp_primitives::asset_type::AssetType;
-use namada::types::address::Address;
-use namada::types::storage;
-use namada::types::token::CONVERSION_KEY_PREFIX;
-use thiserror::Error;
-
-use crate::facade::tendermint::abci::Path as AbciPath;
-
-/// RPC query path
-#[derive(Debug, Clone)]
-pub enum Path {
-    /// Dry run a transaction
-    DryRunTx,
-    /// Epoch of the last committed block
-    Epoch,
-    /// Results of all committed blocks
-    Results,
-    /// Read a storage value with exact storage key
-    Value(storage::Key),
-    /// Read a range of storage values with a matching key prefix
-    Prefix(storage::Key),
-    /// Check if the given storage key exists
-    HasKey(storage::Key),
-    /// Conversion associated with given asset type
-    Conversion(AssetType),
-}
-
-#[derive(Debug, Clone)]
-pub struct BalanceQuery {
-    #[allow(dead_code)]
-    owner: Option<Address>,
-    #[allow(dead_code)]
-    token: Option<Address>,
-}
-
-const DRY_RUN_TX_PATH: &str = "dry_run_tx";
-const EPOCH_PATH: &str = "epoch";
-const RESULTS_PATH: &str = "results";
-const VALUE_PREFIX: &str = "value";
-const PREFIX_PREFIX: &str = "prefix";
-const HAS_KEY_PREFIX: &str = "has_key";
-
-impl Display for Path {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Path::DryRunTx => write!(f, "{}", DRY_RUN_TX_PATH),
-            Path::Epoch => write!(f, "{}", EPOCH_PATH),
-            Path::Results => write!(f, "{}", RESULTS_PATH),
-            Path::Value(storage_key) => {
-                write!(f, "{}/{}", VALUE_PREFIX, storage_key)
-            }
-            Path::Prefix(storage_key) => {
-                write!(f, "{}/{}", PREFIX_PREFIX, storage_key)
-            }
-            Path::HasKey(storage_key) => {
-                write!(f, "{}/{}", HAS_KEY_PREFIX, storage_key)
-            }
-            Path::Conversion(asset_type) => {
-                write!(f, "{}/{}", CONVERSION_KEY_PREFIX, asset_type)
-            }
-        }
-    }
-}
-
-impl FromStr for Path {
-    type Err = PathParseError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            DRY_RUN_TX_PATH => Ok(Self::DryRunTx),
-            EPOCH_PATH => Ok(Self::Epoch),
-            RESULTS_PATH => Ok(Self::Results),
-            _ => match s.split_once('/') {
-                Some((VALUE_PREFIX, storage_key)) => {
-                    let key = storage::Key::parse(storage_key)
-                        .map_err(PathParseError::InvalidStorageKey)?;
-                    Ok(Self::Value(key))
-                }
-                Some((PREFIX_PREFIX, storage_key)) => {
-                    let key = storage::Key::parse(storage_key)
-                        .map_err(PathParseError::InvalidStorageKey)?;
-                    Ok(Self::Prefix(key))
-                }
-                Some((HAS_KEY_PREFIX, storage_key)) => {
-                    let key = storage::Key::parse(storage_key)
-                        .map_err(PathParseError::InvalidStorageKey)?;
-                    Ok(Self::HasKey(key))
-                }
-                Some((CONVERSION_KEY_PREFIX, asset_type)) => {
-                    let key = AssetType::from_str(asset_type)
-                        .map_err(PathParseError::InvalidAssetType)?;
-                    Ok(Self::Conversion(key))
-                }
-                _ => Err(PathParseError::InvalidPath(s.to_string())),
-            },
-        }
-    }
-}
-
-impl From<Path> for AbciPath {
-    fn from(path: Path) -> Self {
-        let path = path.to_string();
-        // TODO: update in tendermint-rs to allow to construct this from owned
-        // string. It's what `from_str` does anyway
-        AbciPath::from_str(&path).unwrap()
-    }
-}
-
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum PathParseError {
-    #[error("Unrecognized query path: {0}")]
-    InvalidPath(String),
-    #[error("Invalid storage key: {0}")]
-    InvalidStorageKey(storage::Error),
-    #[error("Unrecognized asset type: {0}")]
-    InvalidAssetType(std::io::Error),
-}
diff --git a/apps/src/lib/node/ledger/shell/governance.rs b/apps/src/lib/node/ledger/shell/governance.rs
index 71c6049afd..02a22b1caa 100644
--- a/apps/src/lib/node/ledger/shell/governance.rs
+++ b/apps/src/lib/node/ledger/shell/governance.rs
@@ -1,11 +1,12 @@
+use namada::core::ledger::slash_fund::ADDRESS as slash_fund_address;
 use namada::ledger::events::EventType;
-use namada::ledger::governance::utils::{
-    compute_tally, get_proposal_votes, ProposalEvent,
-};
 use namada::ledger::governance::{
     storage as gov_storage, ADDRESS as gov_address,
 };
-use namada::ledger::slash_fund::ADDRESS as slash_fund_address;
+use namada::ledger::native_vp::governance::utils::{
+    compute_tally, get_proposal_votes, ProposalEvent,
+};
+use namada::ledger::protocol;
 use namada::ledger::storage::types::encode;
 use namada::ledger::storage::{DBIter, StorageHasher, DB};
 use namada::types::address::Address;
diff --git a/apps/src/lib/node/ledger/storage/rocksdb.rs b/apps/src/lib/node/ledger/storage/rocksdb.rs
index 05075def5a..32d8aaeed2 100644
--- a/apps/src/lib/node/ledger/storage/rocksdb.rs
+++ b/apps/src/lib/node/ledger/storage/rocksdb.rs
@@ -37,9 +37,9 @@ use namada::ledger::storage::{
     types, BlockStateRead, BlockStateWrite, DBIter, DBWriteBatch, Error,
     MerkleTreeStoresRead, Result, StoreType, DB,
 };
+use namada::types::internal::TxQueue;
 use namada::types::storage::{
-    BlockHeight, BlockResults, Header, Key, KeySeg, TxQueue,
-    KEY_SEGMENT_SEPARATOR,
+    BlockHeight, BlockResults, Header, Key, KeySeg, KEY_SEGMENT_SEPARATOR,
 };
 use namada::types::time::DateTimeUtc;
 use rocksdb::{
diff --git a/core/Cargo.toml b/core/Cargo.toml
index 5dbf217d34..50015c4f50 100644
--- a/core/Cargo.toml
+++ b/core/Cargo.toml
@@ -15,34 +15,84 @@ ferveo-tpke = [
   "rand_core",
   "rand",
 ]
+wasm-runtime = [
+  "rayon",
+]
+# secp256k1 key signing and verification, disabled in WASM build by default as 
+# it bloats the build a lot
+secp256k1-sign-verify = [
+  "libsecp256k1/hmac",
+]
+
+abcipp = [
+  "ibc-proto-abcipp",
+  "ibc-abcipp",
+  "tendermint-abcipp",
+  "tendermint-proto-abcipp"
+]
+abciplus = [
+  "ibc",
+  "ibc-proto",
+  "tendermint",
+  "tendermint-proto",
+]
+
+ibc-mocks = [
+  "ibc/mocks",
+]
+ibc-mocks-abcipp = [
+  "ibc-abcipp/mocks",
+]
+
 # for integration tests and test utilies
 testing = [
   "rand",
   "rand_core",
+  "proptest",
 ]
 
 [dependencies]
+ark-bls12-381 = {version = "0.3"}
 ark-ec = {version = "0.3", optional = true}
+ark-serialize = {version = "0.3"}
 # We switch off "blake2b" because it cannot be compiled to wasm
 # branch = "bat/arse-merkle-tree"
 arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", rev = "04ad1eeb28901b57a7599bbe433b3822965dabe8", default-features = false, features = ["std", "borsh"]}
 bech32 = "0.8.0"
+bellman = "0.11.2"
+bit-vec = "0.6.3"
 borsh = "0.9.0"
 chrono = {version = "0.4.22", default-features = false, features = ["clock", "std"]}
 data-encoding = "2.3.2"
 derivative = "2.2.0"
 ed25519-consensus = "1.2.0"
 ferveo = {optional = true, git = "https://github.com/anoma/ferveo"}
+ferveo-common = {git = "https://github.com/anoma/ferveo"}
 tpke = {package = "group-threshold-cryptography", optional = true, git = "https://github.com/anoma/ferveo"}
+# TODO using the same version of tendermint-rs as we do here.
+ibc = {version = "0.14.0", default-features = false, optional = true}
+ibc-proto = {version = "0.17.1", default-features = false, optional = true}
+ibc-abcipp = {package = "ibc", git = "https://github.com/heliaxdev/ibc-rs", rev = "9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d", default-features = false, optional = true}
+ibc-proto-abcipp = {package = "ibc-proto", git = "https://github.com/heliaxdev/ibc-rs", rev = "9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d", default-features = false, optional = true}
 ics23 = "0.7.0"
 itertools = "0.10.0"
 libsecp256k1 = {git = "https://github.com/heliaxdev/libsecp256k1", rev = "bbb3bd44a49db361f21d9db80f9a087c194c0ae9", default-features = false, features = ["std", "static-context"]}
+masp_primitives = { git = "https://github.com/anoma/masp", rev = "bee40fc465f6afbd10558d12fe96eb1742eee45c" }
+proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm", optional = true}
+prost = "0.9.0"
+prost-types = "0.9.0"
 rand = {version = "0.8", optional = true}
 rand_core = {version = "0.6", optional = true}
-rust_decimal = "1.26.1"
+rayon = {version = "=1.5.3", optional = true}
+rust_decimal = { version = "1.26.1", features = ["borsh"] }
+rust_decimal_macros = "1.26.1"
 serde = {version = "1.0.125", features = ["derive"]}
 serde_json = "1.0.62"
 sha2 = "0.9.3"
+tendermint = {version = "0.23.6", optional = true}
+tendermint-proto = {version = "0.23.6", optional = true}
+tendermint-abcipp = {package = "tendermint", git = "https://github.com/heliaxdev/tendermint-rs", rev = "95c52476bc37927218374f94ac8e2a19bd35bec9", optional = true}
+tendermint-proto-abcipp = {package = "tendermint-proto", git = "https://github.com/heliaxdev/tendermint-rs", rev = "95c52476bc37927218374f94ac8e2a19bd35bec9", optional = true}
 thiserror = "1.0.30"
 tracing = "0.1.30"
 zeroize = {version = "1.5.5", features = ["zeroize_derive"]}
diff --git a/core/build.rs b/core/build.rs
index 3c6f017a22..5c58f08c73 100644
--- a/core/build.rs
+++ b/core/build.rs
@@ -25,7 +25,7 @@ fn main() {
     if let Ok(rustfmt_toolchain) = read_to_string(RUSTFMT_TOOLCHAIN_SRC) {
         // Try to find the path to rustfmt.
         if let Ok(output) = Command::new("rustup")
-            .args(&[
+            .args([
                 "which",
                 "rustfmt",
                 "--toolchain",
diff --git a/core/src/ledger/governance/mod.rs b/core/src/ledger/governance/mod.rs
index 44d20f50e0..8e3fb977f3 100644
--- a/core/src/ledger/governance/mod.rs
+++ b/core/src/ledger/governance/mod.rs
@@ -1,6 +1,11 @@
 //! Governance library code
 
+use crate::types::address::{Address, InternalAddress};
+
 /// governance parameters
 pub mod parameters;
 /// governance storage
 pub mod storage;
+
+/// The governance internal address
+pub const ADDRESS: Address = Address::Internal(InternalAddress::Governance);
diff --git a/core/src/ledger/ibc/actions.rs b/core/src/ledger/ibc/actions.rs
index c79f8ac6fb..cbba1d748d 100644
--- a/core/src/ledger/ibc/actions.rs
+++ b/core/src/ledger/ibc/actions.rs
@@ -68,15 +68,15 @@ use crate::ibc::events::IbcEvent;
 #[cfg(any(feature = "ibc-mocks-abcipp", feature = "ibc-mocks"))]
 use crate::ibc::mock::client_state::{MockClientState, MockConsensusState};
 use crate::ibc::timestamp::Timestamp;
+use crate::ledger::ibc::data::{
+    Error as IbcDataError, FungibleTokenPacketData, IbcMessage, PacketAck,
+    PacketReceipt,
+};
 use crate::ledger::ibc::storage;
 use crate::ledger::storage_api;
 use crate::tendermint::Time;
 use crate::tendermint_proto::{Error as ProtoError, Protobuf};
 use crate::types::address::{Address, InternalAddress};
-use crate::types::ibc::data::{
-    Error as IbcDataError, FungibleTokenPacketData, IbcMessage, PacketAck,
-    PacketReceipt,
-};
 use crate::types::ibc::IbcEvent as AnomaIbcEvent;
 use crate::types::storage::{BlockHeight, Key};
 use crate::types::time::Rfc3339String;
diff --git a/core/src/ledger/ibc/mod.rs b/core/src/ledger/ibc/mod.rs
index efb7f7f0d5..f98fb2e432 100644
--- a/core/src/ledger/ibc/mod.rs
+++ b/core/src/ledger/ibc/mod.rs
@@ -1,2 +1,5 @@
+//! IBC library code
+
 pub mod actions;
+pub mod data;
 pub mod storage;
diff --git a/core/src/ledger/mod.rs b/core/src/ledger/mod.rs
index bbd12bc60d..83568c0da7 100644
--- a/core/src/ledger/mod.rs
+++ b/core/src/ledger/mod.rs
@@ -2,9 +2,10 @@
 
 pub mod gas;
 pub mod governance;
-#[cfg(feature = "ibc-rs")]
+#[cfg(any(feature = "abciplus", feature = "abcipp"))]
 pub mod ibc;
 pub mod parameters;
+pub mod slash_fund;
 pub mod storage;
 pub mod storage_api;
 pub mod tx_env;
diff --git a/core/src/ledger/parameters/mod.rs b/core/src/ledger/parameters/mod.rs
index 75862404db..cb84bd56e7 100644
--- a/core/src/ledger/parameters/mod.rs
+++ b/core/src/ledger/parameters/mod.rs
@@ -1,104 +1,19 @@
 //! Protocol parameters
 pub mod storage;
 
-use std::collections::BTreeSet;
-
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use rust_decimal::Decimal;
 use thiserror::Error;
 
-use self::storage as parameter_storage;
-use super::governance::{self};
 use super::storage::types::{decode, encode};
 use super::storage::{types, Storage};
-use crate::ledger::native_vp::{self, Ctx, NativeVp};
-use crate::ledger::storage::{self as ledger_storage, StorageHasher};
+use crate::ledger::storage::{self as ledger_storage};
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::Key;
 use crate::types::time::DurationSecs;
-use crate::vm::WasmCacheAccess;
 
 const ADDRESS: Address = Address::Internal(InternalAddress::Parameters);
 
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum Error {
-    #[error("Native VP error: {0}")]
-    NativeVpError(native_vp::Error),
-}
-
-/// Parameters functions result
-pub type Result<T> = std::result::Result<T, Error>;
-
-/// Parameters VP
-pub struct ParametersVp<'a, DB, H, CA>
-where
-    DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>,
-    H: StorageHasher,
-    CA: WasmCacheAccess,
-{
-    /// Context to interact with the host structures.
-    pub ctx: Ctx<'a, DB, H, CA>,
-}
-
-impl<'a, DB, H, CA> NativeVp for ParametersVp<'a, DB, H, CA>
-where
-    DB: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>,
-    H: 'static + StorageHasher,
-    CA: 'static + WasmCacheAccess,
-{
-    type Error = Error;
-
-    const ADDR: InternalAddress = InternalAddress::Parameters;
-
-    fn validate_tx(
-        &self,
-        tx_data: &[u8],
-        keys_changed: &BTreeSet<Key>,
-        _verifiers: &BTreeSet<Address>,
-    ) -> Result<bool> {
-        let result = keys_changed.iter().all(|key| {
-            let key_type: KeyType = key.into();
-            match key_type {
-                KeyType::PARAMETER => governance::utils::is_proposal_accepted(
-                    self.ctx.storage,
-                    tx_data,
-                )
-                .unwrap_or(false),
-                KeyType::UNKNOWN_PARAMETER => false,
-                KeyType::UNKNOWN => true,
-            }
-        });
-        Ok(result)
-    }
-}
-
-impl From<native_vp::Error> for Error {
-    fn from(err: native_vp::Error) -> Self {
-        Self::NativeVpError(err)
-    }
-}
-
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum ReadError {
-    #[error("Storage error: {0}")]
-    StorageError(ledger_storage::Error),
-    #[error("Storage type error: {0}")]
-    StorageTypeError(types::Error),
-    #[error("Protocol parameters are missing, they must be always set")]
-    ParametersMissing,
-}
-
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum WriteError {
-    #[error("Storage error: {0}")]
-    StorageError(ledger_storage::Error),
-    #[error("Serialize error: {0}")]
-    SerializeError(String),
-}
-
 /// Protocol parameters
 #[derive(
     Clone,
@@ -156,6 +71,26 @@ pub struct EpochDuration {
     pub min_duration: DurationSecs,
 }
 
+#[allow(missing_docs)]
+#[derive(Error, Debug)]
+pub enum ReadError {
+    #[error("Storage error: {0}")]
+    StorageError(ledger_storage::Error),
+    #[error("Storage type error: {0}")]
+    StorageTypeError(types::Error),
+    #[error("Protocol parameters are missing, they must be always set")]
+    ParametersMissing,
+}
+
+#[allow(missing_docs)]
+#[derive(Error, Debug)]
+pub enum WriteError {
+    #[error("Storage error: {0}")]
+    StorageError(ledger_storage::Error),
+    #[error("Serialize error: {0}")]
+    SerializeError(String),
+}
+
 impl Parameters {
     /// Initialize parameters in storage in the genesis block.
     pub fn init_storage<DB, H>(&self, storage: &mut Storage<DB, H>)
@@ -254,7 +189,6 @@ impl Parameters {
         );
     }
 }
-
 /// Update the max_expected_time_per_block parameter in storage. Returns the
 /// parameters and gas cost.
 pub fn update_max_expected_time_per_block_parameter<DB, H>(
@@ -555,26 +489,3 @@ where
             + gas_reward,
     ))
 }
-
-#[allow(clippy::upper_case_acronyms)]
-enum KeyType {
-    #[allow(clippy::upper_case_acronyms)]
-    PARAMETER,
-    #[allow(clippy::upper_case_acronyms)]
-    #[allow(non_camel_case_types)]
-    UNKNOWN_PARAMETER,
-    #[allow(clippy::upper_case_acronyms)]
-    UNKNOWN,
-}
-
-impl From<&Key> for KeyType {
-    fn from(value: &Key) -> Self {
-        if parameter_storage::is_protocol_parameter_key(value) {
-            KeyType::PARAMETER
-        } else if parameter_storage::is_parameter_key(value) {
-            KeyType::UNKNOWN_PARAMETER
-        } else {
-            KeyType::UNKNOWN
-        }
-    }
-}
diff --git a/core/src/ledger/slash_fund/mod.rs b/core/src/ledger/slash_fund/mod.rs
new file mode 100644
index 0000000000..7a7d53963b
--- /dev/null
+++ b/core/src/ledger/slash_fund/mod.rs
@@ -0,0 +1,8 @@
+//! SlashFund library code
+
+use crate::types::address::{Address, InternalAddress};
+
+/// Internal SlashFund address
+pub const ADDRESS: Address = Address::Internal(InternalAddress::SlashFund);
+
+pub mod storage;
diff --git a/shared/src/ledger/slash_fund/storage.rs b/core/src/ledger/slash_fund/storage.rs
similarity index 80%
rename from shared/src/ledger/slash_fund/storage.rs
rename to core/src/ledger/slash_fund/storage.rs
index 60d29f0f48..9c437da591 100644
--- a/shared/src/ledger/slash_fund/storage.rs
+++ b/core/src/ledger/slash_fund/storage.rs
@@ -1,7 +1,8 @@
-use super::ADDRESS;
+//! Slash fund storage
+
 use crate::types::storage::{DbKeySeg, Key};
 
 /// Check if a key is a slash fund key
 pub fn is_slash_fund_key(key: &Key) -> bool {
-    matches!(&key.segments[0], DbKeySeg::AddressSeg(addr) if addr == &ADDRESS)
+    matches!(&key.segments[0], DbKeySeg::AddressSeg(addr) if addr == &super::ADDRESS)
 }
diff --git a/core/src/ledger/storage/merkle_tree.rs b/core/src/ledger/storage/merkle_tree.rs
index 3ce35ab837..eb690356e5 100644
--- a/core/src/ledger/storage/merkle_tree.rs
+++ b/core/src/ledger/storage/merkle_tree.rs
@@ -10,19 +10,17 @@ use arse_merkle_tree::{
 use borsh::{BorshDeserialize, BorshSerialize};
 use ics23::commitment_proof::Proof as Ics23Proof;
 use ics23::{CommitmentProof, ExistenceProof, NonExistenceProof};
-use namada_core::types::storage::{TreeKeyError, IBC_KEY_LIMIT};
-use prost::Message;
 use thiserror::Error;
 
 use super::traits::{StorageHasher, SubTreeRead, SubTreeWrite};
 use crate::bytes::ByteBuf;
 use crate::ledger::storage::ics23_specs::{self, ibc_leaf_spec};
 use crate::ledger::storage::types;
-use crate::tendermint::merkle::proof::{Proof, ProofOp};
 use crate::types::address::{Address, InternalAddress};
 use crate::types::hash::Hash;
 use crate::types::storage::{
-    DbKeySeg, Error as StorageError, Key, MerkleValue, StringKey, TreeBytes,
+    self, DbKeySeg, Error as StorageError, Key, MerkleValue, StringKey,
+    TreeBytes, TreeKeyError, IBC_KEY_LIMIT,
 };
 
 #[allow(missing_docs)]
@@ -53,10 +51,14 @@ pub enum Error {
 /// Result for functions that may fail
 type Result<T> = std::result::Result<T, Error>;
 
-/// Type aliases for the different merkle trees and backing stores
+// Type aliases for the different merkle trees and backing stores
+/// Sparse-merkle-tree store
 pub type SmtStore = DefaultStore<SmtHash, Hash, 32>;
+/// Arse-merkle-tree store
 pub type AmtStore = DefaultStore<StringKey, TreeBytes, IBC_KEY_LIMIT>;
+/// Sparse-merkle-tree
 pub type Smt<H> = ArseMerkleTree<H, SmtHash, Hash, SmtStore, 32>;
+/// Arse-merkle-tree
 pub type Amt<H> =
     ArseMerkleTree<H, StringKey, TreeBytes, AmtStore, IBC_KEY_LIMIT>;
 
@@ -96,6 +98,7 @@ pub enum Store {
 }
 
 impl Store {
+    /// Convert to a `StoreRef` with borrowed store
     pub fn as_ref(&self) -> StoreRef {
         match self {
             Self::Base(store) => StoreRef::Base(store),
@@ -119,6 +122,7 @@ pub enum StoreRef<'a> {
 }
 
 impl<'a> StoreRef<'a> {
+    /// Convert to an owned `Store`.
     pub fn to_owned(&self) -> Store {
         match *self {
             Self::Base(store) => Store::Base(store.to_owned()),
@@ -128,6 +132,7 @@ impl<'a> StoreRef<'a> {
         }
     }
 
+    /// Encode a `StoreRef`.
     pub fn encode(&self) -> Vec<u8> {
         match self {
             Self::Base(store) => store.try_to_vec(),
@@ -392,25 +397,15 @@ impl<H: StorageHasher + Default> MerkleTree<H> {
         }
 
         // Get a proof of the sub tree
-        self.get_tendermint_proof(key, nep)
+        self.get_sub_tree_proof(key, nep)
     }
 
     /// Get the Tendermint proof with the base proof
-    pub fn get_tendermint_proof(
+    pub fn get_sub_tree_proof(
         &self,
         key: &Key,
         sub_proof: CommitmentProof,
     ) -> Result<Proof> {
-        let mut data = vec![];
-        sub_proof
-            .encode(&mut data)
-            .expect("Encoding proof shouldn't fail");
-        let sub_proof_op = ProofOp {
-            field_type: "ics23_CommitmentProof".to_string(),
-            key: key.to_string().as_bytes().to_vec(),
-            data,
-        };
-
         // Get a membership proof of the base tree because the sub root should
         // exist
         let (store_type, _) = StoreType::sub_key(key)?;
@@ -429,19 +424,10 @@ impl<H: StorageHasher + Default> MerkleTree<H> {
             _ => unreachable!(),
         };
 
-        let mut data = vec![];
-        base_proof
-            .encode(&mut data)
-            .expect("Encoding proof shouldn't fail");
-        let base_proof_op = ProofOp {
-            field_type: "ics23_CommitmentProof".to_string(),
-            key: key.to_string().as_bytes().to_vec(),
-            data,
-        };
-
-        // Set ProofOps from leaf to root
         Ok(Proof {
-            ops: vec![sub_proof_op, base_proof_op],
+            key: key.clone(),
+            sub_proof,
+            base_proof,
         })
     }
 }
@@ -541,6 +527,69 @@ impl From<MtError> for Error {
     }
 }
 
+/// Type of membership proof from a merkle tree
+pub enum MembershipProof {
+    /// ICS23 compliant membership proof
+    ICS23(CommitmentProof),
+}
+
+impl From<CommitmentProof> for MembershipProof {
+    fn from(proof: CommitmentProof) -> Self {
+        Self::ICS23(proof)
+    }
+}
+
+/// A storage key existence or non-existence proof
+#[derive(Debug)]
+pub struct Proof {
+    /// Storage key
+    pub key: storage::Key,
+    /// Sub proof
+    pub sub_proof: CommitmentProof,
+    /// Base proof
+    pub base_proof: CommitmentProof,
+}
+
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl From<Proof> for crate::tendermint::merkle::proof::Proof {
+    fn from(
+        Proof {
+            key,
+            sub_proof,
+            base_proof,
+        }: Proof,
+    ) -> Self {
+        use prost::Message;
+
+        use crate::tendermint::merkle::proof::{Proof, ProofOp};
+
+        let mut data = vec![];
+        sub_proof
+            .encode(&mut data)
+            .expect("Encoding proof shouldn't fail");
+        let sub_proof_op = ProofOp {
+            field_type: "ics23_CommitmentProof".to_string(),
+            key: key.to_string().as_bytes().to_vec(),
+            data,
+        };
+
+        let mut data = vec![];
+        base_proof
+            .encode(&mut data)
+            .expect("Encoding proof shouldn't fail");
+        let base_proof_op = ProofOp {
+            field_type: "ics23_CommitmentProof".to_string(),
+            key: key.to_string().as_bytes().to_vec(),
+            data,
+        };
+
+        // Set ProofOps from leaf to root
+        Proof {
+            ops: vec![sub_proof_op, base_proof_op],
+        }
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
@@ -585,9 +634,7 @@ mod test {
         let nep = tree
             .get_non_existence_proof(&ibc_non_key)
             .expect("Test failed");
-        let subtree_nep = nep.ops.get(0).expect("Test failed");
-        let nep_commitment_proof =
-            CommitmentProof::decode(&*subtree_nep.data).expect("Test failed");
+        let nep_commitment_proof = nep.sub_proof;
         let non_existence_proof =
             match nep_commitment_proof.clone().proof.expect("Test failed") {
                 Ics23Proof::Nonexist(nep) => nep,
@@ -611,9 +658,7 @@ mod test {
             sub_key.to_string().as_bytes(),
         );
         assert!(nep_verification_res);
-        let basetree_ep = nep.ops.get(1).unwrap();
-        let basetree_ep_commitment_proof =
-            CommitmentProof::decode(&*basetree_ep.data).unwrap();
+        let basetree_ep_commitment_proof = nep.base_proof;
         let basetree_ics23_ep =
             match basetree_ep_commitment_proof.clone().proof.unwrap() {
                 Ics23Proof::Exist(ep) => ep,
@@ -679,17 +724,19 @@ mod test {
                 vec![ibc_val.clone().into()],
             )
             .unwrap();
-        let proof = tree.get_tendermint_proof(&ibc_key, proof).unwrap();
+        let proof = tree.get_sub_tree_proof(&ibc_key, proof).unwrap();
         let (store_type, sub_key) = StoreType::sub_key(&ibc_key).unwrap();
         let paths = vec![sub_key.to_string(), store_type.to_string()];
         let mut sub_root = ibc_val.clone();
         let mut value = ibc_val;
         // First, the sub proof is verified. Next the base proof is verified
         // with the sub root
-        for ((p, spec), key) in
-            proof.ops.iter().zip(specs.iter()).zip(paths.iter())
+        for ((commitment_proof, spec), key) in
+            [proof.sub_proof, proof.base_proof]
+                .into_iter()
+                .zip(specs.iter())
+                .zip(paths.iter())
         {
-            let commitment_proof = CommitmentProof::decode(&*p.data).unwrap();
             let existence_proof = match commitment_proof.clone().proof.unwrap()
             {
                 Ics23Proof::Exist(ep) => ep,
@@ -734,17 +781,19 @@ mod test {
                 vec![pos_val.clone().into()],
             )
             .unwrap();
-        let proof = tree.get_tendermint_proof(&pos_key, proof).unwrap();
+        let proof = tree.get_sub_tree_proof(&pos_key, proof).unwrap();
         let (store_type, sub_key) = StoreType::sub_key(&pos_key).unwrap();
         let paths = vec![sub_key.to_string(), store_type.to_string()];
         let mut sub_root = pos_val.clone();
         let mut value = pos_val;
         // First, the sub proof is verified. Next the base proof is verified
         // with the sub root
-        for ((p, spec), key) in
-            proof.ops.iter().zip(specs.iter()).zip(paths.iter())
+        for ((commitment_proof, spec), key) in
+            [proof.sub_proof, proof.base_proof]
+                .into_iter()
+                .zip(specs.iter())
+                .zip(paths.iter())
         {
-            let commitment_proof = CommitmentProof::decode(&*p.data).unwrap();
             let existence_proof = match commitment_proof.clone().proof.unwrap()
             {
                 Ics23Proof::Exist(ep) => ep,
@@ -784,9 +833,7 @@ mod test {
         let nep = tree
             .get_non_existence_proof(&ibc_non_key)
             .expect("Test failed");
-        let subtree_nep = nep.ops.get(0).expect("Test failed");
-        let nep_commitment_proof =
-            CommitmentProof::decode(&*subtree_nep.data).expect("Test failed");
+        let nep_commitment_proof = nep.sub_proof;
         let non_existence_proof =
             match nep_commitment_proof.clone().proof.expect("Test failed") {
                 Ics23Proof::Nonexist(nep) => nep,
@@ -810,9 +857,7 @@ mod test {
             sub_key.to_string().as_bytes(),
         );
         assert!(nep_verification_res);
-        let basetree_ep = nep.ops.get(1).unwrap();
-        let basetree_ep_commitment_proof =
-            CommitmentProof::decode(&*basetree_ep.data).unwrap();
+        let basetree_ep_commitment_proof = nep.base_proof;
         let basetree_ics23_ep =
             match basetree_ep_commitment_proof.clone().proof.unwrap() {
                 Ics23Proof::Exist(ep) => ep,
@@ -830,15 +875,3 @@ mod test {
         assert!(basetree_verification_res);
     }
 }
-
-/// Type of membership proof from a merkle tree
-pub enum MembershipProof {
-    /// ICS23 compliant membership proof
-    ICS23(CommitmentProof),
-}
-
-impl From<CommitmentProof> for MembershipProof {
-    fn from(proof: CommitmentProof) -> Self {
-        Self::ICS23(proof)
-    }
-}
diff --git a/core/src/ledger/storage/mockdb.rs b/core/src/ledger/storage/mockdb.rs
index 0daa61d935..011d8faac8 100644
--- a/core/src/ledger/storage/mockdb.rs
+++ b/core/src/ledger/storage/mockdb.rs
@@ -14,7 +14,7 @@ use super::{
 };
 use crate::ledger::storage::types::{self, KVBytes, PrefixIterator};
 #[cfg(feature = "ferveo-tpke")]
-use crate::types::storage::TxQueue;
+use crate::types::internal::TxQueue;
 use crate::types::storage::{
     BlockHeight, BlockResults, Header, Key, KeySeg, KEY_SEGMENT_SEPARATOR,
 };
diff --git a/core/src/ledger/storage/mod.rs b/core/src/ledger/storage/mod.rs
index c2a246ce15..9944dc3804 100644
--- a/core/src/ledger/storage/mod.rs
+++ b/core/src/ledger/storage/mod.rs
@@ -1,13 +1,13 @@
 //! Ledger's state storage with key-value backed store and a merkle tree
 
 pub mod ics23_specs;
-mod merkle_tree;
+pub mod merkle_tree;
 #[cfg(any(test, feature = "testing"))]
 pub mod mockdb;
+pub mod traits;
 pub mod types;
 
 use core::fmt::Debug;
-use std::array;
 use std::collections::BTreeMap;
 
 use borsh::{BorshDeserialize, BorshSerialize};
@@ -15,6 +15,10 @@ use masp_primitives::asset_type::AssetType;
 use masp_primitives::convert::AllowedConversion;
 use masp_primitives::merkle_tree::FrozenCommitmentTree;
 use masp_primitives::sapling::Node;
+pub use merkle_tree::{
+    MembershipProof, MerkleTree, MerkleTreeStoresRead, MerkleTreeStoresWrite,
+    StoreType,
+};
 #[cfg(feature = "wasm-runtime")]
 use rayon::iter::{
     IndexedParallelIterator, IntoParallelIterator, ParallelIterator,
@@ -22,29 +26,27 @@ use rayon::iter::{
 #[cfg(feature = "wasm-runtime")]
 use rayon::prelude::ParallelSlice;
 use thiserror::Error;
+pub use traits::{Sha256Hasher, StorageHasher};
 
-use super::parameters::{self, Parameters};
-use super::storage_api;
-use super::storage_api::{ResultExt, StorageRead, StorageWrite};
 use crate::ledger::gas::MIN_STORAGE_GAS;
-use crate::ledger::parameters::EpochDuration;
+use crate::ledger::parameters::{self, EpochDuration, Parameters};
 use crate::ledger::storage::merkle_tree::{
     Error as MerkleTreeError, MerkleRoot,
 };
-pub use crate::ledger::storage::merkle_tree::{
-    MerkleTree, MerkleTreeStoresRead, MerkleTreeStoresWrite, StoreType,
-};
-pub use crate::ledger::storage::traits::{Sha256Hasher, StorageHasher};
+use crate::ledger::storage_api;
+use crate::ledger::storage_api::{ResultExt, StorageRead, StorageWrite};
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
 use crate::tendermint::merkle::proof::Proof;
 use crate::types::address::{
     masp, Address, EstablishedAddressGen, InternalAddress,
 };
 use crate::types::chain::{ChainId, CHAIN_ID_LENGTH};
+// TODO
 #[cfg(feature = "ferveo-tpke")]
-use crate::types::storage::TxQueue;
+use crate::types::internal::TxQueue;
 use crate::types::storage::{
     BlockHash, BlockHeight, BlockResults, Epoch, Epochs, Header, Key, KeySeg,
-    MembershipProof, MerkleValue, TxIndex, BLOCK_HASH_LENGTH,
+    TxIndex, BLOCK_HASH_LENGTH,
 };
 use crate::types::time::DateTimeUtc;
 use crate::types::token;
@@ -61,8 +63,6 @@ pub struct ConversionState {
     /// Map assets to their latest conversion and position in Merkle tree
     pub assets: BTreeMap<AssetType, (Address, Epoch, AllowedConversion, usize)>,
 }
-/// The maximum size of an IBC key (in bytes) allowed in merkle-ized storage
-pub const IBC_KEY_LIMIT: usize = 120;
 
 /// The storage data
 #[derive(Debug)]
@@ -334,6 +334,7 @@ where
     pub fn open(
         db_path: impl AsRef<std::path::Path>,
         chain_id: ChainId,
+        native_token: Address,
         cache: Option<&D::Cache>,
     ) -> Self {
         let block = BlockStorage {
@@ -360,6 +361,7 @@ where
             conversion_state: ConversionState::default(),
             #[cfg(feature = "ferveo-tpke")]
             tx_queue: TxQueue::default(),
+            native_token,
         }
     }
 
@@ -618,12 +620,15 @@ where
     }
 
     /// Get the existence proof
+    #[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
     pub fn get_existence_proof(
         &self,
         key: &Key,
-        value: MerkleValue,
+        value: crate::types::storage::MerkleValue,
         height: BlockHeight,
     ) -> Result<Proof> {
+        use std::array;
+
         if height >= self.get_block_height().0 {
             let MembershipProof::ICS23(proof) = self
                 .block
@@ -632,7 +637,8 @@ where
                 .map_err(Error::MerkleTreeError)?;
             self.block
                 .tree
-                .get_tendermint_proof(key, proof)
+                .get_sub_tree_proof(key, proof)
+                .map(Into::into)
                 .map_err(Error::MerkleTreeError)
         } else {
             match self.db.read_merkle_tree_stores(height)? {
@@ -644,7 +650,8 @@ where
                             vec![value],
                         )
                         .map_err(Error::MerkleTreeError)?;
-                    tree.get_tendermint_proof(key, proof)
+                    tree.get_sub_tree_proof(key, proof)
+                        .map(Into::into)
                         .map_err(Error::MerkleTreeError)
                 }
                 None => Err(Error::NoMerkleTree { height }),
@@ -653,17 +660,24 @@ where
     }
 
     /// Get the non-existence proof
+    #[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
     pub fn get_non_existence_proof(
         &self,
         key: &Key,
         height: BlockHeight,
     ) -> Result<Proof> {
         if height >= self.last_height {
-            Ok(self.block.tree.get_non_existence_proof(key)?)
+            self.block
+                .tree
+                .get_non_existence_proof(key)
+                .map(Into::into)
+                .map_err(Error::MerkleTreeError)
         } else {
             match self.db.read_merkle_tree_stores(height)? {
-                Some(stores) => Ok(MerkleTree::<H>::new(stores)
-                    .get_non_existence_proof(key)?),
+                Some(stores) => MerkleTree::<H>::new(stores)
+                    .get_non_existence_proof(key)
+                    .map(Into::into)
+                    .map_err(Error::MerkleTreeError),
                 None => Err(Error::NoMerkleTree { height }),
             }
         }
@@ -952,19 +966,19 @@ where
     }
 
     /// Start write batch.
-    fn batch() -> D::WriteBatch {
+    pub fn batch() -> D::WriteBatch {
         D::batch()
     }
 
     /// Execute write batch.
-    fn exec_batch(&mut self, batch: D::WriteBatch) -> Result<()> {
+    pub fn exec_batch(&mut self, batch: D::WriteBatch) -> Result<()> {
         self.db.exec_batch(batch)
     }
 
     /// Batch write the value with the given height and account subspace key to
     /// the DB. Returns the size difference from previous value, if any, or
     /// the size of the value otherwise.
-    fn batch_write_subspace_val(
+    pub fn batch_write_subspace_val(
         &mut self,
         batch: &mut D::WriteBatch,
         key: &Key,
@@ -979,7 +993,7 @@ where
     /// Batch delete the value with the given height and account subspace key
     /// from the DB. Returns the size of the removed value, if any, 0 if no
     /// previous value was found.
-    fn batch_delete_subspace_val(
+    pub fn batch_delete_subspace_val(
         &mut self,
         batch: &mut D::WriteBatch,
         key: &Key,
@@ -1154,6 +1168,7 @@ pub mod testing {
     use super::mockdb::MockDB;
     use super::*;
     use crate::ledger::storage::traits::Sha256Hasher;
+    use crate::types::address;
     /// Storage with a mock DB for testing
     pub type TestStorage = Storage<MockDB, Sha256Hasher>;
 
@@ -1185,6 +1200,7 @@ pub mod testing {
                 conversion_state: ConversionState::default(),
                 #[cfg(feature = "ferveo-tpke")]
                 tx_queue: TxQueue::default(),
+                native_token: address::nam(),
             }
         }
     }
diff --git a/core/src/ledger/storage/traits.rs b/core/src/ledger/storage/traits.rs
index 1f84c6e421..79427c06fb 100644
--- a/core/src/ledger/storage/traits.rs
+++ b/core/src/ledger/storage/traits.rs
@@ -7,7 +7,6 @@ use arse_merkle_tree::traits::{Hasher, Value};
 use arse_merkle_tree::{Key as TreeKey, H256};
 use ics23::commitment_proof::Proof as Ics23Proof;
 use ics23::{CommitmentProof, ExistenceProof};
-use namada_core::types::storage::IBC_KEY_LIMIT;
 use sha2::{Digest, Sha256};
 
 use super::ics23_specs;
diff --git a/core/src/ledger/vp_env.rs b/core/src/ledger/vp_env.rs
index 3ae81bc777..49bd5d515c 100644
--- a/core/src/ledger/vp_env.rs
+++ b/core/src/ledger/vp_env.rs
@@ -1,18 +1,9 @@
 //! Validity predicate environment contains functions that can be called from
 //! inside validity predicates.
 
-use std::num::TryFromIntError;
-
 use borsh::BorshDeserialize;
-use thiserror::Error;
 
-use super::gas::MIN_STORAGE_GAS;
 use super::storage_api::{self, StorageRead};
-use crate::ledger::gas;
-use crate::ledger::gas::VpGasMeter;
-use crate::ledger::storage::write_log::WriteLog;
-use crate::ledger::storage::{self, write_log, Storage, StorageHasher};
-use crate::proto::Tx;
 use crate::types::address::Address;
 use crate::types::hash::Hash;
 use crate::types::key::common;
@@ -187,367 +178,3 @@ pub trait VpEnv<'view> {
         self.post().iter_next(iter)
     }
 }
-
-/// These runtime errors will abort VP execution immediately
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum RuntimeError {
-    #[error("Out of gas: {0}")]
-    OutOfGas(gas::Error),
-    #[error("Storage error: {0}")]
-    StorageError(storage::Error),
-    #[error("Storage data error: {0}")]
-    StorageDataError(crate::types::storage::Error),
-    #[error("Encoding error: {0}")]
-    EncodingError(std::io::Error),
-    #[error("Numeric conversion error: {0}")]
-    NumConversionError(TryFromIntError),
-    #[error("Memory error: {0}")]
-    MemoryError(Box<dyn std::error::Error + Sync + Send + 'static>),
-    #[error("Trying to read a temporary value with read_post")]
-    ReadTemporaryValueError,
-    #[error("Trying to read a permament value with read_temp")]
-    ReadPermanentValueError,
-}
-
-/// VP environment function result
-pub type EnvResult<T> = std::result::Result<T, RuntimeError>;
-
-/// Add a gas cost incured in a validity predicate
-pub fn add_gas(gas_meter: &mut VpGasMeter, used_gas: u64) -> EnvResult<()> {
-    let result = gas_meter.add(used_gas).map_err(RuntimeError::OutOfGas);
-    if let Err(err) = &result {
-        tracing::info!("Stopping VP execution because of gas error: {}", err);
-    }
-    result
-}
-
-/// Storage read prior state (before tx execution). It will try to read from the
-/// storage.
-pub fn read_pre<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-    write_log: &WriteLog,
-    key: &Key,
-) -> EnvResult<Option<Vec<u8>>>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (log_val, gas) = write_log.read_pre(key);
-    add_gas(gas_meter, gas)?;
-    match log_val {
-        Some(&write_log::StorageModification::Write { ref value }) => {
-            Ok(Some(value.clone()))
-        }
-        Some(&write_log::StorageModification::Delete) => {
-            // Given key has been deleted
-            Ok(None)
-        }
-        Some(&write_log::StorageModification::InitAccount {
-            ref vp, ..
-        }) => {
-            // Read the VP of a new account
-            Ok(Some(vp.clone()))
-        }
-        Some(&write_log::StorageModification::Temp { .. }) => {
-            Err(RuntimeError::ReadTemporaryValueError)
-        }
-        None => {
-            // When not found in write log, try to read from the storage
-            let (value, gas) =
-                storage.read(key).map_err(RuntimeError::StorageError)?;
-            add_gas(gas_meter, gas)?;
-            Ok(value)
-        }
-    }
-}
-
-/// Storage read posterior state (after tx execution). It will try to read from
-/// the write log first and if no entry found then from the storage.
-pub fn read_post<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-    write_log: &WriteLog,
-    key: &Key,
-) -> EnvResult<Option<Vec<u8>>>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    // Try to read from the write log first
-    let (log_val, gas) = write_log.read(key);
-    add_gas(gas_meter, gas)?;
-    match log_val {
-        Some(&write_log::StorageModification::Write { ref value }) => {
-            Ok(Some(value.clone()))
-        }
-        Some(&write_log::StorageModification::Delete) => {
-            // Given key has been deleted
-            Ok(None)
-        }
-        Some(&write_log::StorageModification::InitAccount {
-            ref vp, ..
-        }) => {
-            // Read the VP of a new account
-            Ok(Some(vp.clone()))
-        }
-        Some(&write_log::StorageModification::Temp { .. }) => {
-            Err(RuntimeError::ReadTemporaryValueError)
-        }
-        None => {
-            // When not found in write log, try to read from the storage
-            let (value, gas) =
-                storage.read(key).map_err(RuntimeError::StorageError)?;
-            add_gas(gas_meter, gas)?;
-            Ok(value)
-        }
-    }
-}
-
-/// Storage read temporary state (after tx execution). It will try to read from
-/// only the write log.
-pub fn read_temp(
-    gas_meter: &mut VpGasMeter,
-    write_log: &WriteLog,
-    key: &Key,
-) -> EnvResult<Option<Vec<u8>>> {
-    // Try to read from the write log first
-    let (log_val, gas) = write_log.read(key);
-    add_gas(gas_meter, gas)?;
-    match log_val {
-        Some(&write_log::StorageModification::Temp { ref value }) => {
-            Ok(Some(value.clone()))
-        }
-        None => Ok(None),
-        _ => Err(RuntimeError::ReadPermanentValueError),
-    }
-}
-
-/// Storage `has_key` in prior state (before tx execution). It will try to read
-/// from the storage.
-pub fn has_key_pre<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-    key: &Key,
-) -> EnvResult<bool>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (present, gas) =
-        storage.has_key(key).map_err(RuntimeError::StorageError)?;
-    add_gas(gas_meter, gas)?;
-    Ok(present)
-}
-
-/// Storage `has_key` in posterior state (after tx execution). It will try to
-/// check the write log first and if no entry found then the storage.
-pub fn has_key_post<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-    write_log: &WriteLog,
-    key: &Key,
-) -> EnvResult<bool>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    // Try to read from the write log first
-    let (log_val, gas) = write_log.read(key);
-    add_gas(gas_meter, gas)?;
-    match log_val {
-        Some(&write_log::StorageModification::Write { .. }) => Ok(true),
-        Some(&write_log::StorageModification::Delete) => {
-            // The given key has been deleted
-            Ok(false)
-        }
-        Some(&write_log::StorageModification::InitAccount { .. }) => Ok(true),
-        Some(&write_log::StorageModification::Temp { .. }) => Ok(true),
-        None => {
-            // When not found in write log, try to check the storage
-            let (present, gas) =
-                storage.has_key(key).map_err(RuntimeError::StorageError)?;
-            add_gas(gas_meter, gas)?;
-            Ok(present)
-        }
-    }
-}
-
-/// Getting the chain ID.
-pub fn get_chain_id<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-) -> EnvResult<String>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (chain_id, gas) = storage.get_chain_id();
-    add_gas(gas_meter, gas)?;
-    Ok(chain_id)
-}
-
-/// Getting the block height. The height is that of the block to which the
-/// current transaction is being applied.
-pub fn get_block_height<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-) -> EnvResult<BlockHeight>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (height, gas) = storage.get_block_height();
-    add_gas(gas_meter, gas)?;
-    Ok(height)
-}
-
-/// Getting the block hash. The height is that of the block to which the
-/// current transaction is being applied.
-pub fn get_block_hash<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-) -> EnvResult<BlockHash>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (hash, gas) = storage.get_block_hash();
-    add_gas(gas_meter, gas)?;
-    Ok(hash)
-}
-
-/// Getting the block hash. The height is that of the block to which the
-/// current transaction is being applied.
-pub fn get_tx_code_hash(
-    gas_meter: &mut VpGasMeter,
-    tx: &Tx,
-) -> EnvResult<Hash> {
-    let hash = Hash(tx.code_hash());
-    add_gas(gas_meter, MIN_STORAGE_GAS)?;
-    Ok(hash)
-}
-
-/// Getting the block epoch. The epoch is that of the block to which the
-/// current transaction is being applied.
-pub fn get_block_epoch<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-) -> EnvResult<Epoch>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (epoch, gas) = storage.get_current_epoch();
-    add_gas(gas_meter, gas)?;
-    Ok(epoch)
-}
-
-/// Getting the block epoch. The epoch is that of the block to which the
-/// current transaction is being applied.
-pub fn get_tx_index(
-    gas_meter: &mut VpGasMeter,
-    tx_index: &TxIndex,
-) -> EnvResult<TxIndex> {
-    add_gas(gas_meter, MIN_STORAGE_GAS)?;
-    Ok(*tx_index)
-}
-
-/// Getting the chain ID.
-pub fn get_native_token<DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &Storage<DB, H>,
-) -> EnvResult<Address>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    add_gas(gas_meter, MIN_STORAGE_GAS)?;
-    Ok(storage.native_token.clone())
-}
-
-/// Storage prefix iterator, ordered by storage keys. It will try to get an
-/// iterator from the storage.
-pub fn iter_prefix<'a, DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &'a Storage<DB, H>,
-    prefix: &Key,
-) -> EnvResult<<DB as storage::DBIter<'a>>::PrefixIter>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (iter, gas) = storage.iter_prefix(prefix);
-    add_gas(gas_meter, gas)?;
-    Ok(iter)
-}
-
-/// Storage prefix iterator, reverse ordered by storage keys. It will try to get
-/// an iterator from the storage.
-pub fn rev_iter_prefix<'a, DB, H>(
-    gas_meter: &mut VpGasMeter,
-    storage: &'a Storage<DB, H>,
-    prefix: &Key,
-) -> EnvResult<<DB as storage::DBIter<'a>>::PrefixIter>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-    H: StorageHasher,
-{
-    let (iter, gas) = storage.rev_iter_prefix(prefix);
-    add_gas(gas_meter, gas)?;
-    Ok(iter)
-}
-
-/// Storage prefix iterator for prior state (before tx execution). It will try
-/// to read from the storage.
-pub fn iter_pre_next<DB>(
-    gas_meter: &mut VpGasMeter,
-    iter: &mut <DB as storage::DBIter<'_>>::PrefixIter,
-) -> EnvResult<Option<(String, Vec<u8>)>>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-{
-    if let Some((key, val, gas)) = iter.next() {
-        add_gas(gas_meter, gas)?;
-        return Ok(Some((key, val)));
-    }
-    Ok(None)
-}
-
-/// Storage prefix iterator next for posterior state (after tx execution). It
-/// will try to read from the write log first and if no entry found then from
-/// the storage.
-pub fn iter_post_next<DB>(
-    gas_meter: &mut VpGasMeter,
-    write_log: &WriteLog,
-    iter: &mut <DB as storage::DBIter<'_>>::PrefixIter,
-) -> EnvResult<Option<(String, Vec<u8>)>>
-where
-    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
-{
-    for (key, val, iter_gas) in iter {
-        let (log_val, log_gas) = write_log.read(
-            &Key::parse(key.clone()).map_err(RuntimeError::StorageDataError)?,
-        );
-        add_gas(gas_meter, iter_gas + log_gas)?;
-        match log_val {
-            Some(&write_log::StorageModification::Write { ref value }) => {
-                return Ok(Some((key, value.clone())));
-            }
-            Some(&write_log::StorageModification::Delete) => {
-                // check the next because the key has already deleted
-                continue;
-            }
-            Some(&write_log::StorageModification::InitAccount { .. }) => {
-                // a VP of a new account doesn't need to be iterated
-                continue;
-            }
-            Some(&write_log::StorageModification::Temp { .. }) => {
-                return Err(RuntimeError::ReadTemporaryValueError);
-            }
-            None => return Ok(Some((key, val))),
-        }
-    }
-    Ok(None)
-}
diff --git a/core/src/lib.rs b/core/src/lib.rs
index 5ee0ebba0e..b20e4aa150 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -8,4 +8,19 @@
 
 pub mod bytes;
 pub mod ledger;
+pub mod proto;
 pub mod types;
+
+#[cfg(feature = "abciplus")]
+pub use {ibc, ibc_proto, tendermint, tendermint_proto};
+#[cfg(feature = "abcipp")]
+pub use {
+    ibc_abcipp as ibc, ibc_proto_abcipp as ibc_proto,
+    tendermint_abcipp as tendermint,
+    tendermint_proto_abcipp as tendermint_proto,
+};
+
+// A handy macro for tests
+#[cfg(test)]
+#[macro_use]
+extern crate assert_matches;
diff --git a/core/src/proto/types.rs b/core/src/proto/types.rs
index f2df360051..5901c02dca 100644
--- a/core/src/proto/types.rs
+++ b/core/src/proto/types.rs
@@ -7,10 +7,7 @@ use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
 use super::generated::types;
-#[cfg(feature = "ferveo-tpke")]
-use crate::tendermint_proto::abci::Event;
-#[cfg(feature = "ferveo-tpke")]
-use crate::tendermint_proto::abci::EventAttribute;
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
 use crate::tendermint_proto::abci::ResponseDeliverTx;
 use crate::types::key::*;
 use crate::types::time::DateTimeUtc;
@@ -164,6 +161,7 @@ impl From<Tx> for types::Tx {
     }
 }
 
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
 impl From<Tx> for ResponseDeliverTx {
     #[cfg(not(feature = "ferveo-tpke"))]
     fn from(_tx: Tx) -> ResponseDeliverTx {
@@ -173,6 +171,8 @@ impl From<Tx> for ResponseDeliverTx {
     /// Annotate the Tx with meta-data based on its contents
     #[cfg(feature = "ferveo-tpke")]
     fn from(tx: Tx) -> ResponseDeliverTx {
+        use crate::tendermint_proto::abci::{Event, EventAttribute};
+
         #[cfg(feature = "ABCI")]
         fn encode_str(x: &str) -> Vec<u8> {
             x.as_bytes().to_vec()
diff --git a/core/src/types/governance.rs b/core/src/types/governance.rs
index fcfa6dcb54..438017a370 100644
--- a/core/src/types/governance.rs
+++ b/core/src/types/governance.rs
@@ -15,7 +15,6 @@ use crate::types::key::common::{self, Signature};
 use crate::types::key::SigScheme;
 use crate::types::storage::Epoch;
 use crate::types::token::SCALE;
-use crate::types::transaction::governance::InitProposalData;
 
 /// Type alias for vote power
 pub type VotePower = u128;
@@ -163,31 +162,6 @@ pub enum ProposalError {
     InvalidProposalData,
 }
 
-impl TryFrom<Proposal> for InitProposalData {
-    type Error = ProposalError;
-
-    fn try_from(proposal: Proposal) -> Result<Self, Self::Error> {
-        let proposal_code = if let Some(path) = proposal.proposal_code_path {
-            match std::fs::read(path) {
-                Ok(bytes) => Some(bytes),
-                Err(_) => return Err(Self::Error::InvalidProposalData),
-            }
-        } else {
-            None
-        };
-
-        Ok(InitProposalData {
-            id: proposal.id,
-            content: proposal.content.try_to_vec().unwrap(),
-            author: proposal.author,
-            voting_start_epoch: proposal.voting_start_epoch,
-            voting_end_epoch: proposal.voting_end_epoch,
-            grace_epoch: proposal.grace_epoch,
-            proposal_code,
-        })
-    }
-}
-
 #[derive(
     Debug, Clone, BorshSerialize, BorshDeserialize, Serialize, Deserialize,
 )]
diff --git a/core/src/types/hash.rs b/core/src/types/hash.rs
index 5f8b1be95f..74bfe3dd45 100644
--- a/core/src/types/hash.rs
+++ b/core/src/types/hash.rs
@@ -4,18 +4,15 @@ use std::fmt::{self, Display};
 use std::ops::Deref;
 use std::str::FromStr;
 
-// use arse_merkle_tree::traits::Value;
-// use arse_merkle_tree::Hash as TreeHash;
+use arse_merkle_tree::traits::Value;
+use arse_merkle_tree::{Hash as TreeHash, H256};
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
-use hex::FromHex;
+use data_encoding::HEXUPPER;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 use thiserror::Error;
 
-// use crate::tendermint::abci::transaction;
-// use crate::tendermint::Hash as TmHash;
-
-/// The length of the raw transaction hash.
+/// The length of a raw transaction hash, in bytes (its hex string form is twice this)
 pub const HASH_LENGTH: usize = 32;
 
 #[allow(missing_docs)]
@@ -26,7 +23,7 @@ pub enum Error {
     #[error("Failed trying to convert slice to a hash: {0}")]
     ConversionFailed(std::array::TryFromSliceError),
     #[error("Failed to convert string into a hash: {0}")]
-    FromStringError(hex::FromHexError),
+    FromStringError(data_encoding::DecodeError),
 }
 
 /// Result for functions that may fail
@@ -46,14 +43,11 @@ pub type HashResult<T> = std::result::Result<T, Error>;
     Deserialize,
 )]
 /// A hash, typically a sha-2 hash of a tx
-pub struct Hash(pub [u8; 32]);
+pub struct Hash(pub [u8; HASH_LENGTH]);
 
 impl Display for Hash {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        for byte in &self.0 {
-            write!(f, "{:02X}", byte)?;
-        }
-        Ok(())
+        write!(f, "{}", HEXUPPER.encode(&self.0))
     }
 }
 
@@ -64,7 +58,7 @@ impl AsRef<[u8]> for Hash {
 }
 
 impl Deref for Hash {
-    type Target = [u8; 32];
+    type Target = [u8; HASH_LENGTH];
 
     fn deref(&self) -> &Self::Target {
         &self.0
@@ -84,7 +78,7 @@ impl TryFrom<&[u8]> for Hash {
                 ),
             });
         }
-        let hash: [u8; 32] =
+        let hash: [u8; HASH_LENGTH] =
             TryFrom::try_from(value).map_err(Error::ConversionFailed)?;
         Ok(Hash(hash))
     }
@@ -102,16 +96,10 @@ impl TryFrom<&str> for Hash {
     type Error = self::Error;
 
     fn try_from(string: &str) -> HashResult<Self> {
-        Ok(Self(
-            <[u8; HASH_LENGTH]>::from_hex(string)
-                .map_err(Error::FromStringError)?,
-        ))
-    }
-}
-
-impl From<Hash> for transaction::Hash {
-    fn from(hash: Hash) -> Self {
-        Self::new(hash.0)
+        let vec = HEXUPPER
+            .decode(string.as_ref())
+            .map_err(Error::FromStringError)?;
+        Self::try_from(&vec[..])
     }
 }
 
@@ -131,7 +119,7 @@ impl Hash {
     }
 
     fn zero() -> Self {
-        Self([0u8; 32])
+        Self([0u8; HASH_LENGTH])
     }
 
     /// Check if the hash is all zeros
@@ -140,11 +128,19 @@ impl Hash {
     }
 }
 
-// impl From<Hash> for TmHash {
-//     fn from(hash: Hash) -> Self {
-//         TmHash::Sha256(hash.0)
-//     }
-// }
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl From<Hash> for crate::tendermint::abci::transaction::Hash {
+    fn from(hash: Hash) -> Self {
+        Self::new(hash.0)
+    }
+}
+
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl From<Hash> for crate::tendermint::Hash {
+    fn from(hash: Hash) -> Self {
+        Self::Sha256(hash.0)
+    }
+}
 
 impl From<Hash> for TreeHash {
     fn from(hash: Hash) -> Self {
@@ -152,22 +148,31 @@ impl From<Hash> for TreeHash {
     }
 }
 
-#[cfg(test)]
-mod tests {
-    use proptest::prelude::*;
-    use proptest::string::{string_regex, RegexGeneratorStrategy};
+impl Value for Hash {
+    fn as_slice(&self) -> &[u8] {
+        self.0.as_slice()
+    }
+
+    fn zero() -> Self {
+        Hash([0u8; HASH_LENGTH])
+    }
+}
 
-    use super::*;
+impl From<Hash> for H256 {
+    fn from(hash: Hash) -> Self {
+        hash.0.into()
+    }
+}
 
-    /// Returns a proptest strategy that yields hex encoded hashes.
-    fn hex_encoded_hash_strat() -> RegexGeneratorStrategy<String> {
-        string_regex(r"[a-fA-F0-9]{64}").unwrap()
+impl From<H256> for Hash {
+    fn from(hash: H256) -> Self {
+        Self(hash.into())
     }
+}
 
-    proptest! {
-        #[test]
-        fn test_hash_string(hex_hash in hex_encoded_hash_strat()) {
-            let _: Hash = hex_hash.try_into().unwrap();
-        }
+impl From<&H256> for Hash {
+    fn from(hash: &H256) -> Self {
+        let hash = hash.to_owned();
+        Self(hash.into())
     }
 }
diff --git a/core/src/types/ibc.rs b/core/src/types/ibc.rs
index ad31a5f3d4..3d537cb025 100644
--- a/core/src/types/ibc.rs
+++ b/core/src/types/ibc.rs
@@ -3,20 +3,6 @@
 use std::collections::HashMap;
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
-use thiserror::Error;
-
-use crate::ibc::events::{Error as IbcEventError, IbcEvent as RawIbcEvent};
-use crate::tendermint::abci::Event as AbciEvent;
-
-#[allow(missing_docs)]
-#[derive(Error, Debug)]
-pub enum Error {
-    #[error("IBC event error: {0}")]
-    IbcEvent(IbcEventError),
-}
-
-/// Conversion functions result
-pub type Result<T> = std::result::Result<T, Error>;
 
 /// Wrapped IbcEvent
 #[derive(
@@ -45,19 +31,44 @@ impl std::fmt::Display for IbcEvent {
     }
 }
 
-impl TryFrom<RawIbcEvent> for IbcEvent {
-    type Error = Error;
+#[cfg(any(feature = "abciplus", feature = "abcipp"))]
+mod ibc_rs_conversion {
+    use std::collections::HashMap;
+
+    use thiserror::Error;
+
+    use super::IbcEvent;
+    use crate::ibc::events::{Error as IbcEventError, IbcEvent as RawIbcEvent};
+    use crate::tendermint::abci::Event as AbciEvent;
 
-    fn try_from(e: RawIbcEvent) -> Result<Self> {
-        let event_type = e.event_type().as_str().to_string();
-        let mut attributes = HashMap::new();
-        let abci_event = AbciEvent::try_from(e).map_err(Error::IbcEvent)?;
-        for tag in abci_event.attributes.iter() {
-            attributes.insert(tag.key.to_string(), tag.value.to_string());
+    #[allow(missing_docs)]
+    #[derive(Error, Debug)]
+    pub enum Error {
+        #[error("IBC event error: {0}")]
+        IbcEvent(IbcEventError),
+    }
+
+    /// Conversion functions result
+    pub type Result<T> = std::result::Result<T, Error>;
+
+    impl TryFrom<RawIbcEvent> for IbcEvent {
+        type Error = Error;
+
+        fn try_from(e: RawIbcEvent) -> Result<Self> {
+            let event_type = e.event_type().as_str().to_string();
+            let abci_event = AbciEvent::try_from(e).map_err(Error::IbcEvent)?;
+            let attributes: HashMap<_, _> = abci_event
+                .attributes
+                .iter()
+                .map(|tag| (tag.key.to_string(), tag.value.to_string()))
+                .collect();
+            Ok(Self {
+                event_type,
+                attributes,
+            })
         }
-        Ok(Self {
-            event_type,
-            attributes,
-        })
     }
 }
+
+#[cfg(any(feature = "abciplus", feature = "abcipp"))]
+pub use ibc_rs_conversion::*;
diff --git a/core/src/types/internal.rs b/core/src/types/internal.rs
index 383de2b7b1..848c09bec1 100644
--- a/core/src/types/internal.rs
+++ b/core/src/types/internal.rs
@@ -1,5 +1,7 @@
 //! Shared internal types between the host env and guest (wasm).
 
+use borsh::{BorshDeserialize, BorshSerialize};
+
 /// A result of a wasm call to host functions that may fail.
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum HostEnvResult {
@@ -9,6 +11,16 @@ pub enum HostEnvResult {
     Fail = -1,
 }
 
+/// A key-value pair representing data from an account's subspace.
+/// It is used by the prefix iterator's WASM host_env functions.
+#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
+pub struct KeyVal {
+    /// The storage key
+    pub key: String,
+    /// The value as arbitrary bytes
+    pub val: Vec<u8>,
+}
+
 impl HostEnvResult {
     /// Convert result to `i64`, which can be passed to wasm
     pub fn to_i64(self) -> i64 {
@@ -31,3 +43,40 @@ impl From<bool> for HostEnvResult {
         if success { Self::Success } else { Self::Fail }
     }
 }
+
+#[cfg(feature = "ferveo-tpke")]
+mod tx_queue {
+    use borsh::{BorshDeserialize, BorshSerialize};
+
+    use crate::types::transaction::WrapperTx;
+
+    #[derive(Default, Debug, Clone, BorshDeserialize, BorshSerialize)]
+    /// Wrapper txs to be decrypted in the next block proposal
+    pub struct TxQueue(std::collections::VecDeque<WrapperTx>);
+
+    impl TxQueue {
+        /// Add a new wrapper at the back of the queue
+        pub fn push(&mut self, wrapper: WrapperTx) {
+            self.0.push_back(wrapper);
+        }
+
+        /// Remove the wrapper at the head of the queue
+        pub fn pop(&mut self) -> Option<WrapperTx> {
+            self.0.pop_front()
+        }
+
+        /// Get an iterator over the queue
+        pub fn iter(&self) -> impl std::iter::Iterator<Item = &WrapperTx> {
+            self.0.iter()
+        }
+
+        /// Check if there are any txs in the queue
+        #[allow(dead_code)]
+        pub fn is_empty(&self) -> bool {
+            self.0.is_empty()
+        }
+    }
+}
+
+#[cfg(feature = "ferveo-tpke")]
+pub use tx_queue::TxQueue;
diff --git a/core/src/types/key/common.rs b/core/src/types/key/common.rs
index 7ec041d638..e928579367 100644
--- a/core/src/types/key/common.rs
+++ b/core/src/types/key/common.rs
@@ -5,15 +5,14 @@ use std::str::FromStr;
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use data_encoding::HEXLOWER;
-// use namada_proof_of_stake::types::PublicKeyTmRawHash;
 #[cfg(feature = "rand")]
 use rand::{CryptoRng, RngCore};
 use serde::{Deserialize, Serialize};
 
 use super::{
-    ed25519, secp256k1, tm_consensus_key_raw_hash, ParsePublicKeyError,
-    ParseSecretKeyError, ParseSignatureError, RefTo, SchemeType,
-    SigScheme as SigSchemeTrait, VerifySigError,
+    ed25519, secp256k1, ParsePublicKeyError, ParseSecretKeyError,
+    ParseSignatureError, RefTo, SchemeType, SigScheme as SigSchemeTrait,
+    VerifySigError,
 };
 
 /// Public key
@@ -324,10 +323,3 @@ impl super::SigScheme for SigScheme {
         }
     }
 }
-
-// TODO
-// impl PublicKeyTmRawHash for PublicKey {
-//     fn tm_raw_hash(&self) -> String {
-//         tm_consensus_key_raw_hash(self)
-//     }
-// }
diff --git a/core/src/types/key/mod.rs b/core/src/types/key/mod.rs
index 78efcc5375..8836b56981 100644
--- a/core/src/types/key/mod.rs
+++ b/core/src/types/key/mod.rs
@@ -1,6 +1,10 @@
 //! Cryptographic keys
+
+pub mod common;
 /// Elliptic curve keys for the DKG
 pub mod dkg_session_keys;
+pub mod ed25519;
+pub mod secp256k1;
 
 use std::fmt::{Debug, Display};
 use std::hash::Hash;
@@ -18,10 +22,6 @@ use super::address::Address;
 use super::storage::{self, DbKeySeg, Key, KeySeg};
 use crate::types::address;
 
-pub mod common;
-pub mod ed25519;
-pub mod secp256k1;
-
 const PK_STORAGE_KEY: &str = "public_key";
 const PROTOCOL_PK_STORAGE_KEY: &str = "protocol_public_key";
 
@@ -352,6 +352,18 @@ impl<PK: PublicKey> From<&PK> for PublicKeyHash {
     }
 }
 
+/// Derive Tendermint raw hash from the public key
+pub trait PublicKeyTmRawHash {
+    /// Derive Tendermint raw hash from the public key
+    fn tm_raw_hash(&self) -> String;
+}
+
+impl PublicKeyTmRawHash for common::PublicKey {
+    fn tm_raw_hash(&self) -> String {
+        tm_consensus_key_raw_hash(self)
+    }
+}
+
 /// Convert validator's consensus key into address raw hash that is compatible
 /// with Tendermint
 pub fn tm_consensus_key_raw_hash(pk: &common::PublicKey) -> String {
diff --git a/core/src/types/mod.rs b/core/src/types/mod.rs
index 6e7b71dcda..0550060498 100644
--- a/core/src/types/mod.rs
+++ b/core/src/types/mod.rs
@@ -4,9 +4,12 @@ pub mod address;
 pub mod chain;
 pub mod governance;
 pub mod hash;
+pub mod ibc;
+pub mod internal;
 pub mod key;
 pub mod masp;
 pub mod storage;
 pub mod time;
 pub mod token;
+pub mod transaction;
 pub mod validity_predicate;
diff --git a/core/src/types/storage.rs b/core/src/types/storage.rs
index 5dc05ae716..bdd68e070e 100644
--- a/core/src/types/storage.rs
+++ b/core/src/types/storage.rs
@@ -1,27 +1,27 @@
 //! Storage types
 use std::convert::{TryFrom, TryInto};
 use std::fmt::Display;
+use std::io::Write;
 use std::num::ParseIntError;
 use std::ops::{Add, Deref, Div, Mul, Rem, Sub};
 use std::str::FromStr;
 
-use arse_merkle_tree::InternalKey;
+use arse_merkle_tree::traits::Value;
+use arse_merkle_tree::{InternalKey, Key as TreeKey};
 use bit_vec::BitVec;
-use borsh::maybestd::io::Write;
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use data_encoding::BASE32HEX_NOPAD;
-use ics23::CommitmentProof;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use thiserror::Error;
 
-#[cfg(feature = "ferveo-tpke")]
-use super::transaction::WrapperTx;
 use crate::bytes::ByteBuf;
-use crate::ledger::storage::IBC_KEY_LIMIT;
 use crate::types::address::{self, Address};
 use crate::types::hash::Hash;
 use crate::types::time::DateTimeUtc;
 
+/// The maximum size of an IBC key (in bytes) allowed in merkle-ized storage
+pub const IBC_KEY_LIMIT: usize = 120;
+
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
 pub enum Error {
@@ -389,6 +389,42 @@ pub struct StringKey {
     pub length: usize,
 }
 
+#[allow(missing_docs)]
+#[derive(Error, Debug)]
+pub enum TreeKeyError {
+    #[error("Invalid key for merkle tree: {0}")]
+    InvalidMerkleKey(String),
+}
+
+impl TreeKey<IBC_KEY_LIMIT> for StringKey {
+    type Error = TreeKeyError;
+
+    fn as_slice(&self) -> &[u8] {
+        &self.original.as_slice()[..self.length]
+    }
+
+    fn try_from_bytes(bytes: &[u8]) -> std::result::Result<Self, Self::Error> {
+        let mut tree_key = [0u8; IBC_KEY_LIMIT];
+        let mut original = [0u8; IBC_KEY_LIMIT];
+        let mut length = 0;
+        for (i, byte) in bytes.iter().enumerate() {
+            if i >= IBC_KEY_LIMIT {
+                return Err(TreeKeyError::InvalidMerkleKey(
+                    "Input IBC key is too large".into(),
+                ));
+            }
+            original[i] = *byte;
+            tree_key[i] = byte.wrapping_add(1);
+            length += 1;
+        }
+        Ok(Self {
+            original,
+            tree_key: tree_key.into(),
+            length,
+        })
+    }
+}
+
 impl Deref for StringKey {
     type Target = InternalKey<IBC_KEY_LIMIT>;
 
@@ -456,18 +492,15 @@ impl From<TreeBytes> for Vec<u8> {
     }
 }
 
-// TODO not sure
-// /// Type of membership proof from a merkle tree
-// pub enum MembershipProof {
-//     /// ICS23 compliant membership proof
-//     ICS23(CommitmentProof),
-// }
+impl Value for TreeBytes {
+    fn as_slice(&self) -> &[u8] {
+        self.0.as_slice()
+    }
 
-// impl From<CommitmentProof> for MembershipProof {
-//     fn from(proof: CommitmentProof) -> Self {
-//         Self::ICS23(proof)
-//     }
-// }
+    fn zero() -> Self {
+        TreeBytes::zero()
+    }
+}
 
 impl Key {
     /// Parses string and returns a key
@@ -893,6 +926,53 @@ impl Epoch {
     pub fn prev(&self) -> Self {
         Self(self.0 - 1)
     }
+
+    /// Iterate a range of consecutive epochs starting from `self` of a given
+    /// length. Work-around for `Step` implementation pending on stabilization of <https://github.com/rust-lang/rust/issues/42168>.
+    pub fn iter_range(self, len: u64) -> impl Iterator<Item = Epoch> + Clone {
+        let start_ix: u64 = self.into();
+        let end_ix: u64 = start_ix + len;
+        (start_ix..end_ix).map(Epoch::from)
+    }
+
+    /// Checked epoch subtraction. Computes self - rhs, returning None if
+    /// overflow occurred.
+    #[must_use = "this returns the result of the operation, without modifying \
+                  the original"]
+    pub fn checked_sub(self, rhs: Epoch) -> Option<Self> {
+        if rhs.0 > self.0 {
+            None
+        } else {
+            Some(Self(self.0 - rhs.0))
+        }
+    }
+
+    /// Saturating epoch subtraction. Computes self - rhs, returning the
+    /// default `Epoch(0)` if overflow occurred.
+    #[must_use = "this returns the result of the operation, without modifying \
+                  the original"]
+    pub fn sub_or_default(self, rhs: Epoch) -> Self {
+        self.checked_sub(rhs).unwrap_or_default()
+    }
+}
+
+impl From<u64> for Epoch {
+    fn from(epoch: u64) -> Self {
+        Epoch(epoch)
+    }
+}
+
+impl From<Epoch> for u64 {
+    fn from(epoch: Epoch) -> Self {
+        epoch.0
+    }
+}
+
+// TODO remove this once it's not being used
+impl From<Epoch> for usize {
+    fn from(epoch: Epoch) -> Self {
+        epoch.0 as usize
+    }
 }
 
 impl Add<u64> for Epoch {
@@ -903,6 +983,15 @@ impl Add<u64> for Epoch {
     }
 }
 
+// TODO remove this once it's not being used
+impl Add<usize> for Epoch {
+    type Output = Self;
+
+    fn add(self, rhs: usize) -> Self::Output {
+        Epoch(self.0 + rhs as u64)
+    }
+}
+
 impl Sub<u64> for Epoch {
     type Output = Epoch;
 
@@ -911,6 +1000,14 @@ impl Sub<u64> for Epoch {
     }
 }
 
+impl Sub<Epoch> for Epoch {
+    type Output = Self;
+
+    fn sub(self, rhs: Epoch) -> Self::Output {
+        Epoch(self.0 - rhs.0)
+    }
+}
+
 impl Mul<u64> for Epoch {
     type Output = Epoch;
 
@@ -935,14 +1032,6 @@ impl Rem<u64> for Epoch {
     }
 }
 
-impl Sub for Epoch {
-    type Output = Epoch;
-
-    fn sub(self, rhs: Self) -> Self::Output {
-        Self(self.0 - rhs.0)
-    }
-}
-
 impl Add for Epoch {
     type Output = Epoch;
 
@@ -959,18 +1048,6 @@ impl Mul for Epoch {
     }
 }
 
-impl From<Epoch> for u64 {
-    fn from(epoch: Epoch) -> Self {
-        epoch.0
-    }
-}
-
-impl From<u64> for Epoch {
-    fn from(value: u64) -> Self {
-        Self(value)
-    }
-}
-
 /// Predecessor block epochs
 #[derive(
     Clone,
@@ -1051,35 +1128,6 @@ impl Epochs {
     }
 }
 
-#[cfg(feature = "ferveo-tpke")]
-#[derive(Default, Debug, Clone, BorshDeserialize, BorshSerialize)]
-/// Wrapper txs to be decrypted in the next block proposal
-pub struct TxQueue(std::collections::VecDeque<WrapperTx>);
-
-#[cfg(feature = "ferveo-tpke")]
-impl TxQueue {
-    /// Add a new wrapper at the back of the queue
-    pub fn push(&mut self, wrapper: WrapperTx) {
-        self.0.push_back(wrapper);
-    }
-
-    /// Remove the wrapper at the head of the queue
-    pub fn pop(&mut self) -> Option<WrapperTx> {
-        self.0.pop_front()
-    }
-
-    /// Get an iterator over the queue
-    pub fn iter(&self) -> impl std::iter::Iterator<Item = &WrapperTx> {
-        self.0.iter()
-    }
-
-    /// Check if there are any txs in the queue
-    #[allow(dead_code)]
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-}
-
 /// A value of a storage prefix iterator.
 #[derive(Debug, Clone, BorshSerialize, BorshDeserialize, BorshSchema)]
 pub struct PrefixValue {
diff --git a/core/src/types/time.rs b/core/src/types/time.rs
index 13de2685ad..a508501d94 100644
--- a/core/src/types/time.rs
+++ b/core/src/types/time.rs
@@ -175,37 +175,40 @@ impl From<DateTime<Utc>> for DateTimeUtc {
     }
 }
 
-// TODO move
-// impl TryFrom<prost_types::Timestamp> for DateTimeUtc {
-//     type Error = prost_types::TimestampOutOfSystemRangeError;
-
-//     fn try_from(
-//         timestamp: prost_types::Timestamp,
-//     ) -> Result<Self, Self::Error> {
-//         let system_time: std::time::SystemTime = timestamp.try_into()?;
-//         Ok(Self(system_time.into()))
-//     }
-// }
-
-// impl From<DateTimeUtc> for prost_types::Timestamp {
-//     fn from(dt: DateTimeUtc) -> Self {
-//         let seconds = dt.0.timestamp();
-//         let nanos = dt.0.timestamp_subsec_nanos() as i32;
-//         prost_types::Timestamp { seconds, nanos }
-//     }
-// }
-
-// TODO move
-// impl TryFrom<protobuf::Timestamp> for DateTimeUtc {
-//     type Error = prost_types::TimestampOutOfSystemRangeError;
-
-//     fn try_from(timestamp: protobuf::Timestamp) -> Result<Self, Self::Error>
-// {         Self::try_from(prost_types::Timestamp {
-//             seconds: timestamp.seconds,
-//             nanos: timestamp.nanos,
-//         })
-//     }
-// }
+impl TryFrom<prost_types::Timestamp> for DateTimeUtc {
+    type Error = prost_types::TimestampOutOfSystemRangeError;
+
+    fn try_from(
+        timestamp: prost_types::Timestamp,
+    ) -> Result<Self, Self::Error> {
+        let system_time: std::time::SystemTime = timestamp.try_into()?;
+        Ok(Self(system_time.into()))
+    }
+}
+
+impl From<DateTimeUtc> for prost_types::Timestamp {
+    fn from(dt: DateTimeUtc) -> Self {
+        let seconds = dt.0.timestamp();
+        let nanos = dt.0.timestamp_subsec_nanos() as i32;
+        prost_types::Timestamp { seconds, nanos }
+    }
+}
+
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl TryFrom<crate::tendermint_proto::google::protobuf::Timestamp>
+    for DateTimeUtc
+{
+    type Error = prost_types::TimestampOutOfSystemRangeError;
+
+    fn try_from(
+        timestamp: crate::tendermint_proto::google::protobuf::Timestamp,
+    ) -> Result<Self, Self::Error> {
+        Self::try_from(prost_types::Timestamp {
+            seconds: timestamp.seconds,
+            nanos: timestamp.nanos,
+        })
+    }
+}
 
 impl From<DateTimeUtc> for std::time::SystemTime {
     fn from(dt: DateTimeUtc) -> Self {
@@ -228,19 +231,20 @@ impl From<DateTimeUtc> for Rfc3339String {
     }
 }
 
-// TODO move
-// impl TryFrom<DateTimeUtc> for Time {
-//     type Error = TendermintError;
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl TryFrom<DateTimeUtc> for crate::tendermint::time::Time {
+    type Error = crate::tendermint::Error;
 
-//     fn try_from(dt: DateTimeUtc) -> Result<Self, Self::Error> {
-//         Self::parse_from_rfc3339(&DateTime::to_rfc3339(&dt.0))
-//     }
-// }
+    fn try_from(dt: DateTimeUtc) -> Result<Self, Self::Error> {
+        Self::parse_from_rfc3339(&DateTime::to_rfc3339(&dt.0))
+    }
+}
 
-// impl TryFrom<Time> for DateTimeUtc {
-//     type Error = chrono::ParseError;
+#[cfg(any(feature = "tendermint", feature = "tendermint-abcipp"))]
+impl TryFrom<crate::tendermint::time::Time> for DateTimeUtc {
+    type Error = chrono::ParseError;
 
-//     fn try_from(t: Time) -> Result<Self, Self::Error> {
-//         Rfc3339String(t.to_rfc3339()).try_into()
-//     }
-// }
+    fn try_from(t: crate::tendermint::time::Time) -> Result<Self, Self::Error> {
+        Rfc3339String(t.to_rfc3339()).try_into()
+    }
+}
diff --git a/core/src/types/token.rs b/core/src/types/token.rs
index 1cbc7d9674..10c690bb06 100644
--- a/core/src/types/token.rs
+++ b/core/src/types/token.rs
@@ -1,6 +1,5 @@
 //! A basic fungible token
 
-use std::convert::TryFrom;
 use std::fmt::Display;
 use std::ops::{Add, AddAssign, Mul, Sub, SubAssign};
 use std::str::FromStr;
@@ -12,7 +11,6 @@ use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
 use crate::types::address::{masp, Address, DecodeError as AddressError};
-use crate::types::ibc::data::FungibleTokenPacketData;
 use crate::types::storage::{DbKeySeg, Key, KeySeg};
 
 /// Amount in micro units. For different granularity another representation
@@ -424,10 +422,13 @@ pub enum TransferError {
     NoToken,
 }
 
-impl TryFrom<FungibleTokenPacketData> for Transfer {
+#[cfg(any(feature = "abciplus", feature = "abcipp"))]
+impl TryFrom<crate::ledger::ibc::data::FungibleTokenPacketData> for Transfer {
     type Error = TransferError;
 
-    fn try_from(data: FungibleTokenPacketData) -> Result<Self, Self::Error> {
+    fn try_from(
+        data: crate::ledger::ibc::data::FungibleTokenPacketData,
+    ) -> Result<Self, Self::Error> {
         let source =
             Address::decode(&data.sender).map_err(TransferError::Address)?;
         let target =
diff --git a/core/src/types/transaction/decrypted.rs b/core/src/types/transaction/decrypted.rs
index e0f13d65b9..d0697dc18f 100644
--- a/core/src/types/transaction/decrypted.rs
+++ b/core/src/types/transaction/decrypted.rs
@@ -1,4 +1,5 @@
 pub use ark_bls12_381::Bls12_381 as EllipticCurve;
+
 /// Integration of Ferveo cryptographic primitives
 /// to enable decrypting txs.
 /// *Not wasm compatible*
diff --git a/core/src/types/transaction/governance.rs b/core/src/types/transaction/governance.rs
index 31a11572fb..ba2bd5f933 100644
--- a/core/src/types/transaction/governance.rs
+++ b/core/src/types/transaction/governance.rs
@@ -1,9 +1,8 @@
 use borsh::{BorshDeserialize, BorshSerialize};
-use namada_core::types::governance::{Proposal, ProposalError};
 use serde::{Deserialize, Serialize};
 
 use crate::types::address::Address;
-use crate::types::governance::ProposalVote;
+use crate::types::governance::{Proposal, ProposalError, ProposalVote};
 use crate::types::storage::Epoch;
 
 /// A tx data type to hold proposal data
diff --git a/core/src/types/transaction/mod.rs b/core/src/types/transaction/mod.rs
index 3ee6ebd218..69b212f4c8 100644
--- a/core/src/types/transaction/mod.rs
+++ b/core/src/types/transaction/mod.rs
@@ -26,12 +26,12 @@ use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 pub use wrapper::*;
 
-use super::ibc::IbcEvent;
-use super::storage;
 use crate::ledger::gas::VpsGas;
 use crate::types::address::Address;
 use crate::types::hash::Hash;
+use crate::types::ibc::IbcEvent;
 use crate::types::key::*;
+use crate::types::storage;
 
 /// Get the hash of a transaction
 pub fn hash_tx(tx_bytes: &[u8]) -> Hash {
@@ -188,7 +188,7 @@ pub struct InitValidator {
     /// Public key used to sign protocol transactions
     pub protocol_key: common::PublicKey,
     /// Serialization of the public session key used in the DKG
-    pub dkg_key: DkgPublicKey,
+    pub dkg_key: crate::types::key::dkg_session_keys::DkgPublicKey,
     /// The initial commission rate charged for delegation rewards
     pub commission_rate: Decimal,
     /// The maximum change allowed per epoch to the commission rate. This is
@@ -532,5 +532,3 @@ pub mod tx_types {
 
 #[cfg(feature = "ferveo-tpke")]
 pub use tx_types::*;
-
-use crate::types::key::dkg_session_keys::DkgPublicKey;
diff --git a/proof_of_stake/Cargo.toml b/proof_of_stake/Cargo.toml
index 49efecfd2c..99d7744055 100644
--- a/proof_of_stake/Cargo.toml
+++ b/proof_of_stake/Cargo.toml
@@ -14,12 +14,14 @@ default = []
 testing = ["proptest"]
 
 [dependencies]
+namada_core = {path = "../core"}
 borsh = "0.9.1"
-thiserror = "1.0.30"
+derivative = "2.2.0"
 # A fork with state machine testing
 proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm", optional = true}
-derivative = "2.2.0"
 rust_decimal = { version = "1.26.1", features = ["borsh"] }
 rust_decimal_macros = "1.26.1"
+thiserror = "1.0.30"
+tracing = "0.1.30"
 
 [dev-dependencies]
diff --git a/proof_of_stake/src/lib.rs b/proof_of_stake/src/lib.rs
index 30e167a7d1..c77d120239 100644
--- a/proof_of_stake/src/lib.rs
+++ b/proof_of_stake/src/lib.rs
@@ -15,6 +15,7 @@
 pub mod btree_set;
 pub mod epoched;
 pub mod parameters;
+pub mod storage;
 pub mod types;
 pub mod validation;
 
@@ -30,7 +31,9 @@ use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use epoched::{
     DynEpochOffset, EpochOffset, Epoched, EpochedDelta, OffsetPipelineLen,
 };
-use parameters::PosParams;
+use namada_core::ledger::storage_api;
+use namada_core::types::address::{self, Address, InternalAddress};
+pub use parameters::PosParams;
 use rust_decimal::Decimal;
 use thiserror::Error;
 use types::{
@@ -45,6 +48,18 @@ use crate::types::{
     decimal_mult_i128, decimal_mult_u64, Bond, BondId, WeightedValidator,
 };
 
+/// Address of the PoS account implemented as a native VP
+pub const ADDRESS: Address = Address::Internal(InternalAddress::PoS);
+
+/// Address of the PoS slash pool account
+pub const SLASH_POOL_ADDRESS: Address =
+    Address::Internal(InternalAddress::PosSlashPool);
+
+/// Address of the staking token (NAM)
+pub fn staking_token_address() -> Address {
+    address::nam()
+}
+
 /// Read-only part of the PoS system
 pub trait PosReadOnly {
     /// Address type
@@ -2133,3 +2148,57 @@ where
         slashed,
     })
 }
+
+impl From<BecomeValidatorError<namada_core::types::address::Address>>
+    for storage_api::Error
+{
+    fn from(
+        err: BecomeValidatorError<namada_core::types::address::Address>,
+    ) -> Self {
+        Self::new(err)
+    }
+}
+
+impl From<BondError<namada_core::types::address::Address>>
+    for storage_api::Error
+{
+    fn from(err: BondError<namada_core::types::address::Address>) -> Self {
+        Self::new(err)
+    }
+}
+
+impl
+    From<
+        UnbondError<
+            namada_core::types::address::Address,
+            namada_core::types::token::Amount,
+        >,
+    > for storage_api::Error
+{
+    fn from(
+        err: UnbondError<
+            namada_core::types::address::Address,
+            namada_core::types::token::Amount,
+        >,
+    ) -> Self {
+        Self::new(err)
+    }
+}
+
+impl From<WithdrawError<namada_core::types::address::Address>>
+    for storage_api::Error
+{
+    fn from(err: WithdrawError<namada_core::types::address::Address>) -> Self {
+        Self::new(err)
+    }
+}
+
+impl From<CommissionRateChangeError<namada_core::types::address::Address>>
+    for storage_api::Error
+{
+    fn from(
+        err: CommissionRateChangeError<namada_core::types::address::Address>,
+    ) -> Self {
+        Self::new(err)
+    }
+}
diff --git a/proof_of_stake/src/storage.rs b/proof_of_stake/src/storage.rs
index 0be7600822..8903ebf6e2 100644
--- a/proof_of_stake/src/storage.rs
+++ b/proof_of_stake/src/storage.rs
@@ -1,18 +1,49 @@
 //! Proof-of-Stake storage keys and storage integration via [`PosBase`] trait.
 
-use namada_proof_of_stake::parameters::PosParams;
-use namada_proof_of_stake::types::ValidatorStates;
-use namada_proof_of_stake::{types, PosBase};
-
-use super::{
-    BondId, Bonds, CommissionRates, TotalDeltas, ValidatorConsensusKeys,
-    ValidatorDeltas, ValidatorSets, ADDRESS,
-};
-use crate::ledger::storage::types::{decode, encode};
-use crate::ledger::storage::{self, Storage, StorageHasher};
-use crate::types::address::Address;
-use crate::types::storage::{DbKeySeg, Key, KeySeg};
-use crate::types::{key, token};
+use namada_core::ledger::storage::types::{decode, encode};
+use namada_core::ledger::storage::{self, Storage, StorageHasher};
+use namada_core::ledger::storage_api;
+use namada_core::types::address::Address;
+use namada_core::types::storage::{DbKeySeg, Key, KeySeg};
+use namada_core::types::{key, token};
+use rust_decimal::Decimal;
+
+use super::ADDRESS;
+use crate::parameters::PosParams;
+pub use crate::types::{CommissionRates, ValidatorStates};
+use crate::{types, PosBase, PosReadOnly};
+
+// TODO: these are not needed anymore, remove together with all the generics in
+// this crate
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type ValidatorConsensusKeys =
+    crate::types::ValidatorConsensusKeys<key::common::PublicKey>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type ValidatorDeltas = crate::types::ValidatorDeltas<token::Change>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type Bonds = crate::types::Bonds<token::Amount>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type Unbonds = crate::types::Unbonds<token::Amount>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type ValidatorSets = crate::types::ValidatorSets<Address>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type BondId = crate::types::BondId<Address>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type GenesisValidator = crate::types::GenesisValidator<
+    Address,
+    token::Amount,
+    key::common::PublicKey,
+>;
+
+/// Alias for a PoS type with the same name with concrete type parameters
+pub type TotalDeltas = crate::types::TotalDeltas<token::Change>;
 
 const PARAMS_STORAGE_KEY: &str = "params";
 const VALIDATOR_STORAGE_PREFIX: &str = "validator";
@@ -423,7 +454,7 @@ where
     fn read_validator_max_commission_rate_change(
         &self,
         key: &Self::Address,
-    ) -> rust_decimal::Decimal {
+    ) -> Decimal {
         let (value, _gas) = self
             .read(&validator_max_commission_rate_change_key(key))
             .unwrap();
@@ -507,7 +538,7 @@ where
         validator: &Self::Address,
         value: types::Slash,
     ) {
-        let mut slashes = self.read_validator_slashes(validator);
+        let mut slashes = PosBase::read_validator_slashes(self, validator);
         slashes.push(value);
         self.write(&validator_slashes_key(validator), encode(&slashes))
             .unwrap();
@@ -588,3 +619,142 @@ where
         }
     }
 }
+
+/// Implement `PosReadOnly` for a type that implements
+/// [`trait@namada_core::ledger::storage_api::StorageRead`].
+///
+/// Excuse the horrible syntax - we haven't found a better way to use this
+/// for native_vp `CtxPreStorageRead`/`CtxPostStorageRead`, which have
+/// generics and explicit lifetimes.
+///
+/// # Examples
+///
+/// ```ignore
+/// impl_pos_read_only! { impl PosReadOnly for X }
+/// ```
+#[macro_export]
+macro_rules! impl_pos_read_only {
+    (
+        // Type error type has to be declared before the impl.
+        // This error type must `impl From<storage_api::Error> for $error`.
+        type $error:tt = $err_ty:ty ;
+        // Matches anything, so that we can use lifetimes and generic types.
+        // This expects `impl(<.*>)? PoSReadOnly for $ty(<.*>)?`.
+        $( $any:tt )* )
+    => {
+        $( $any )*
+        {
+            type Address = namada_core::types::address::Address;
+            type $error = $err_ty;
+            type PublicKey = namada_core::types::key::common::PublicKey;
+            type TokenAmount = namada_core::types::token::Amount;
+            type TokenChange = namada_core::types::token::Change;
+
+            const POS_ADDRESS: Self::Address = $crate::ADDRESS;
+
+            fn staking_token_address(&self) -> Self::Address {
+                namada_core::ledger::storage_api::StorageRead::get_native_token(self)
+                    .expect("Native token must be available")
+            }
+
+            fn read_pos_params(&self) -> std::result::Result<PosParams, Self::Error> {
+                let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &params_key())?.unwrap();
+                Ok(namada_core::ledger::storage::types::decode(value).unwrap())
+            }
+
+            fn read_validator_consensus_key(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Option<ValidatorConsensusKeys>, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_consensus_key_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_commission_rate(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Option<CommissionRates>, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_commission_rate_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_max_commission_rate_change(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Option<Decimal>, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_max_commission_rate_change_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_state(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Option<ValidatorStates>, Self::Error> {
+                let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_state_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_deltas(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Option<ValidatorDeltas>, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_deltas_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_slashes(
+                &self,
+                key: &Self::Address,
+            ) -> std::result::Result<Vec<types::Slash>, Self::Error> {
+                let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_slashes_key(key))?;
+                Ok(value
+                    .map(|value| namada_core::ledger::storage::types::decode(value).unwrap())
+                    .unwrap_or_default())
+            }
+
+            fn read_bond(
+                &self,
+                key: &BondId,
+            ) -> std::result::Result<Option<Bonds>, Self::Error> {
+                let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &bond_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_unbond(
+                &self,
+                key: &BondId,
+            ) -> std::result::Result<Option<Unbonds>, Self::Error> {
+                let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &unbond_key(key))?;
+                Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
+            }
+
+            fn read_validator_set(
+                &self,
+            ) -> std::result::Result<ValidatorSets, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_set_key())?.unwrap();
+                Ok(namada_core::ledger::storage::types::decode(value).unwrap())
+            }
+
+            fn read_total_deltas(
+                &self,
+            ) -> std::result::Result<TotalDeltas, Self::Error> {
+                let value =
+                    namada_core::ledger::storage_api::StorageRead::read_bytes(self, &total_deltas_key())?.unwrap();
+                Ok(namada_core::ledger::storage::types::decode(value).unwrap())
+            }
+        }
+    }
+}
+
+impl_pos_read_only! {
+    type Error = storage_api::Error;
+    impl<DB, H> PosReadOnly for Storage<DB, H>
+        where
+            DB: storage::DB + for<'iter> storage::DBIter<'iter> +'static,
+            H: StorageHasher +'static,
+}
diff --git a/proof_of_stake/src/types.rs b/proof_of_stake/src/types.rs
index b8e47193bd..480e06e9fa 100644
--- a/proof_of_stake/src/types.rs
+++ b/proof_of_stake/src/types.rs
@@ -8,6 +8,7 @@ use std::hash::Hash;
 use std::ops::{Add, AddAssign, Sub};
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
+pub use namada_core::types::storage::Epoch;
 use rust_decimal::prelude::{Decimal, ToPrimitive};
 
 use crate::epoched::{
@@ -39,27 +40,6 @@ pub type TotalDeltas<TokenChange> =
 /// Epoched validator commission rate
 pub type CommissionRates = Epoched<Decimal, OffsetPipelineLen>;
 
-/// Epoch identifier. Epochs are identified by consecutive natural numbers.
-///
-/// In the API functions, this type is wrapped in [`Into`]. When using this
-/// library, to replace [`Epoch`] with a custom type, simply implement [`From`]
-/// to and from the types here.
-#[derive(
-    Debug,
-    Default,
-    Clone,
-    Copy,
-    PartialEq,
-    Eq,
-    PartialOrd,
-    Ord,
-    Hash,
-    BorshDeserialize,
-    BorshSerialize,
-    BorshSchema,
-)]
-pub struct Epoch(u64);
-
 /// A genesis validator definition.
 #[derive(
     Debug,
@@ -306,98 +286,6 @@ pub enum SlashType {
     LightClientAttack,
 }
 
-/// Derive Tendermint raw hash from the public key
-pub trait PublicKeyTmRawHash {
-    /// Derive Tendermint raw hash from the public key
-    fn tm_raw_hash(&self) -> String;
-}
-
-impl Epoch {
-    /// Iterate a range of consecutive epochs starting from `self` of a given
-    /// length. Work-around for `Step` implementation pending on stabilization of <https://github.com/rust-lang/rust/issues/42168>.
-    pub fn iter_range(self, len: u64) -> impl Iterator<Item = Epoch> + Clone {
-        let start_ix: u64 = self.into();
-        let end_ix: u64 = start_ix + len;
-        (start_ix..end_ix).map(Epoch::from)
-    }
-
-    /// Checked epoch subtraction. Computes self - rhs, returning None if
-    /// overflow occurred.
-    #[must_use = "this returns the result of the operation, without modifying \
-                  the original"]
-    pub fn checked_sub(self, rhs: Epoch) -> Option<Self> {
-        if rhs.0 > self.0 {
-            None
-        } else {
-            Some(Self(self.0 - rhs.0))
-        }
-    }
-
-    /// Checked epoch subtraction. Computes self - rhs, returning default
-    /// `Epoch(0)` if overflow occurred.
-    #[must_use = "this returns the result of the operation, without modifying \
-                  the original"]
-    pub fn sub_or_default(self, rhs: Epoch) -> Self {
-        self.checked_sub(rhs).unwrap_or_default()
-    }
-}
-
-impl From<u64> for Epoch {
-    fn from(epoch: u64) -> Self {
-        Epoch(epoch)
-    }
-}
-
-impl From<Epoch> for u64 {
-    fn from(epoch: Epoch) -> Self {
-        epoch.0
-    }
-}
-
-impl From<Epoch> for usize {
-    fn from(epoch: Epoch) -> Self {
-        epoch.0 as usize
-    }
-}
-
-impl Add<u64> for Epoch {
-    type Output = Self;
-
-    fn add(self, rhs: u64) -> Self::Output {
-        Epoch(self.0 + rhs)
-    }
-}
-
-impl Add<usize> for Epoch {
-    type Output = Self;
-
-    fn add(self, rhs: usize) -> Self::Output {
-        Epoch(self.0 + rhs as u64)
-    }
-}
-
-impl Sub<u64> for Epoch {
-    type Output = Epoch;
-
-    fn sub(self, rhs: u64) -> Self::Output {
-        Epoch(self.0 - rhs)
-    }
-}
-
-impl Sub<Epoch> for Epoch {
-    type Output = Self;
-
-    fn sub(self, rhs: Epoch) -> Self::Output {
-        Epoch(self.0 - rhs.0)
-    }
-}
-
-impl Display for Epoch {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.0)
-    }
-}
-
 impl<Address> Display for BondId<Address>
 where
     Address: Display
diff --git a/proof_of_stake/src/validation.rs b/proof_of_stake/src/validation.rs
index 79749f9ab1..048004dc04 100644
--- a/proof_of_stake/src/validation.rs
+++ b/proof_of_stake/src/validation.rs
@@ -10,6 +10,7 @@ use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use derivative::Derivative;
+use namada_core::types::key::PublicKeyTmRawHash;
 use rust_decimal::Decimal;
 use thiserror::Error;
 
@@ -18,9 +19,9 @@ use crate::epoched::DynEpochOffset;
 use crate::parameters::PosParams;
 use crate::types::{
     decimal_mult_i128, decimal_mult_u64, BondId, Bonds, CommissionRates, Epoch,
-    PublicKeyTmRawHash, Slash, Slashes, TotalDeltas, Unbonds,
-    ValidatorConsensusKeys, ValidatorDeltas, ValidatorSets, ValidatorState,
-    ValidatorStates, WeightedValidator,
+    Slash, Slashes, TotalDeltas, Unbonds, ValidatorConsensusKeys,
+    ValidatorDeltas, ValidatorSets, ValidatorState, ValidatorStates,
+    WeightedValidator,
 };
 
 #[allow(missing_docs)]
diff --git a/shared/Cargo.toml b/shared/Cargo.toml
index 35b3865b5d..47bac417e3 100644
--- a/shared/Cargo.toml
+++ b/shared/Cargo.toml
@@ -13,28 +13,10 @@ default = ["abciplus"]
 # NOTE "dev" features that shouldn't be used in live networks are enabled by default for now
 dev = []
 ferveo-tpke = [
-  "ferveo",
-  "tpke",
-  "ark-ec",
-  "rand_core",
-  "rand",
-]
-ibc-mocks = [
-  "ibc/mocks",
-]
-ibc-mocks-abcipp = [
-  "ibc-abcipp/mocks",
-]
-# for integration tests and test utilies
-testing = [
-  "async-client",
-  "proptest",
-  "rand",
-  "rand_core",
-  "tempfile",
-  "namada_proof_of_stake/testing",
+  "namada_core/ferveo-tpke",
 ]
 wasm-runtime = [
+  "namada_core/wasm-runtime",
   "loupe",
   "parity-wasm",
   "pwasm-utils",
@@ -46,11 +28,6 @@ wasm-runtime = [
   "wasmer-vm",
   "wasmer",
 ]
-# secp256k1 key signing and verification, disabled in WASM build by default as 
-# it bloats the build a lot
-secp256k1-sign-verify = [
-  "libsecp256k1/hmac",
-]
 # Enable queries support for an async client
 async-client = [
   "async-trait",
@@ -66,6 +43,7 @@ tendermint-rpc-abcipp = [
 ]
 
 abcipp = [
+  "namada_core/abcipp",
   "ibc-proto-abcipp",
   "ibc-abcipp",
   "tendermint-abcipp",
@@ -73,68 +51,61 @@ abcipp = [
   # it's OK to include the tendermint-rpc feature here, as we aren't currently building wasms with `abcipp`
   "tendermint-rpc-abcipp",
 ]
-
 abciplus = [
+  "namada_core/abciplus",
   "ibc",
   "ibc-proto",
   "tendermint",
   "tendermint-proto",
 ]
 
+ibc-mocks = [
+  "namada_core/ibc-mocks",
+]
+ibc-mocks-abcipp = [
+  "namada_core/ibc-mocks-abcipp",
+]
+
+# for integration tests and test utilies
+testing = [
+  "namada_core/testing",
+  "namada_proof_of_stake/testing",
+  "async-client",
+  "proptest",
+  "tempfile",
+]
+
 [dependencies]
-namada_core = {path = "../core"}
+namada_core = {path = "../core", features = ["secp256k1-sign-verify"]}
 namada_proof_of_stake = {path = "../proof_of_stake"}
-ark-bls12-381 = {version = "0.3"}
-ark-ec = {version = "0.3", optional = true}
-ark-serialize = "0.3"
-# We switch off "blake2b" because it cannot be compiled to wasm
-# branch = "bat/arse-merkle-tree"
-arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", rev = "04ad1eeb28901b57a7599bbe433b3822965dabe8", default-features = false, features = ["std", "borsh"]}
 async-trait = {version = "0.1.51", optional = true}
-bech32 = "0.8.0"
 bellman = "0.11.2"
-bit-vec = "0.6.3"
 bls12_381 = "0.6.1"
 borsh = "0.9.0"
 circular-queue = "0.2.6"
-chrono = {version = "0.4.22", default-features = false, features = ["clock", "std"]}
 # Using unreleased commit on top of version 0.5.0 that adds Sync to the CLruCache
 clru = {git = "https://github.com/marmeladema/clru-rs.git", rev = "71ca566"}
 data-encoding = "2.3.2"
 derivative = "2.2.0"
-ed25519-consensus = "1.2.0"
-ferveo = {optional = true, git = "https://github.com/anoma/ferveo"}
-ferveo-common = {git = "https://github.com/anoma/ferveo"}
-tpke = {package = "group-threshold-cryptography", optional = true, git = "https://github.com/anoma/ferveo"}
-hex = "0.4.3"
 # TODO using the same version of tendermint-rs as we do here.
 ibc-abcipp = {package = "ibc", git = "https://github.com/heliaxdev/ibc-rs", rev = "9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d", default-features = false, optional = true}
 ibc-proto-abcipp = {package = "ibc-proto", git = "https://github.com/heliaxdev/ibc-rs", rev = "9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d", default-features = false, optional = true}
 ibc = {version = "0.14.0", default-features = false, optional = true}
 ibc-proto = {version = "0.17.1", default-features = false, optional = true}
-ics23 = "0.7.0"
 itertools = "0.10.0"
 loupe = {version = "0.1.3", optional = true}
-libsecp256k1 = {git = "https://github.com/heliaxdev/libsecp256k1", rev = "bbb3bd44a49db361f21d9db80f9a087c194c0ae9", default-features = false, features = ["std", "static-context"]}
 parity-wasm = {version = "0.42.2", optional = true}
 paste = "1.0.9"
 # A fork with state machine testing
 proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm", optional = true}
 prost = "0.9.0"
-prost-types = "0.9.0"
 pwasm-utils = {version = "0.18.0", optional = true}
-rand = {version = "0.8", optional = true}
-# TODO proptest rexports the RngCore trait but the re-implementations only work for version `0.8`. *sigh*
-rand_core = {version = "0.6", optional = true}
 rayon = {version = "=1.5.3", optional = true}
 rust_decimal = "1.26.1"
-rust_decimal_macros = "1.26.1"
-serde = {version = "1.0.125", features = ["derive"]}
 serde_json = "1.0.62"
 sha2 = "0.9.3"
 # We switch off "blake2b" because it cannot be compiled to wasm
 tempfile = {version = "3.2.0", optional = true}
-# temporarily using fork work-around for https://github.com/informalsystems/tendermint-rs/issues/971
 tendermint-abcipp = {package = "tendermint", git = "https://github.com/heliaxdev/tendermint-rs", rev = "95c52476bc37927218374f94ac8e2a19bd35bec9", optional = true}
 tendermint-rpc-abcipp = {package = "tendermint-rpc", git = "https://github.com/heliaxdev/tendermint-rs", rev = "95c52476bc37927218374f94ac8e2a19bd35bec9", features = ["http-client"], optional = true}
 tendermint-proto-abcipp = {package = "tendermint-proto", git = "https://github.com/heliaxdev/tendermint-rs", rev = "95c52476bc37927218374f94ac8e2a19bd35bec9", optional = true}
@@ -166,6 +137,3 @@ proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"}
 test-log = {version = "0.2.7", default-features = false, features = ["trace"]}
 tokio = {version = "1.8.2", default-features = false, features = ["rt", "macros"]}
 tracing-subscriber = {version = "0.3.7", default-features = false, features = ["env-filter", "fmt"]}
-
-[build-dependencies]
-tonic-build = "0.6.0"
diff --git a/shared/src/ledger/events.rs b/shared/src/ledger/events.rs
index b17d2db83d..bc58d09aa5 100644
--- a/shared/src/ledger/events.rs
+++ b/shared/src/ledger/events.rs
@@ -7,10 +7,10 @@ use std::fmt::{self, Display};
 use std::ops::{Index, IndexMut};
 
 use borsh::{BorshDeserialize, BorshSerialize};
-use tendermint_proto::abci::EventAttribute;
 use thiserror::Error;
 
-use crate::ledger::governance::utils::ProposalEvent;
+use crate::ledger::native_vp::governance::utils::ProposalEvent;
+use crate::tendermint_proto::abci::EventAttribute;
 use crate::types::ibc::IbcEvent;
 #[cfg(feature = "ferveo-tpke")]
 use crate::types::transaction::{hash_tx, TxType};
@@ -162,7 +162,7 @@ impl From<ProposalEvent> for Event {
 }
 
 /// Convert our custom event into the necessary tendermint proto type
-impl From<Event> for tendermint_proto::abci::Event {
+impl From<Event> for crate::tendermint_proto::abci::Event {
     fn from(event: Event) -> Self {
         Self {
             r#type: event.event_type.to_string(),
diff --git a/shared/src/ledger/ibc/mod.rs b/shared/src/ledger/ibc/mod.rs
index 831b240a9a..6cf1d6c9f1 100644
--- a/shared/src/ledger/ibc/mod.rs
+++ b/shared/src/ledger/ibc/mod.rs
@@ -1,6 +1,6 @@
 //! IBC integration
 
-pub use namada_core::ledger::ibc::storage;
+pub use namada_core::ledger::ibc::{actions as handler, storage};
 pub mod vp;
 
 use namada_core::ledger::ibc::storage::{
diff --git a/shared/src/ledger/ibc/vp/channel.rs b/shared/src/ledger/ibc/vp/channel.rs
index 18994ae5f5..05cbf7ad15 100644
--- a/shared/src/ledger/ibc/vp/channel.rs
+++ b/shared/src/ledger/ibc/vp/channel.rs
@@ -2,35 +2,6 @@
 
 use core::time::Duration;
 
-use ibc::core::ics02_client::client_consensus::AnyConsensusState;
-use ibc::core::ics02_client::client_state::AnyClientState;
-use ibc::core::ics02_client::context::ClientReader;
-use ibc::core::ics02_client::height::Height;
-use ibc::core::ics03_connection::connection::ConnectionEnd;
-use ibc::core::ics03_connection::context::ConnectionReader;
-use ibc::core::ics03_connection::error::Error as Ics03Error;
-use ibc::core::ics04_channel::channel::{ChannelEnd, Counterparty, State};
-use ibc::core::ics04_channel::commitment::{
-    AcknowledgementCommitment, PacketCommitment,
-};
-use ibc::core::ics04_channel::context::ChannelReader;
-use ibc::core::ics04_channel::error::Error as Ics04Error;
-use ibc::core::ics04_channel::handler::verify::verify_channel_proofs;
-use ibc::core::ics04_channel::msgs::chan_close_confirm::MsgChannelCloseConfirm;
-use ibc::core::ics04_channel::msgs::chan_open_ack::MsgChannelOpenAck;
-use ibc::core::ics04_channel::msgs::chan_open_confirm::MsgChannelOpenConfirm;
-use ibc::core::ics04_channel::msgs::chan_open_try::MsgChannelOpenTry;
-use ibc::core::ics04_channel::msgs::{ChannelMsg, PacketMsg};
-use ibc::core::ics04_channel::packet::{Receipt, Sequence};
-use ibc::core::ics05_port::capabilities::{Capability, ChannelCapability};
-use ibc::core::ics05_port::context::PortReader;
-use ibc::core::ics24_host::identifier::{
-    ChannelId, ClientId, ConnectionId, PortChannelId, PortId,
-};
-use ibc::core::ics26_routing::context::ModuleId;
-use ibc::core::ics26_routing::msgs::Ics26Envelope;
-use ibc::proofs::Proofs;
-use ibc::timestamp::Timestamp;
 use namada_core::ledger::ibc::actions::{
     make_close_confirm_channel_event, make_close_init_channel_event,
     make_open_ack_channel_event, make_open_confirm_channel_event,
@@ -50,12 +21,45 @@ use namada_core::ledger::parameters;
 use namada_core::ledger::storage::{self as ledger_storage, StorageHasher};
 use namada_core::types::storage::Key;
 use sha2::Digest;
-use tendermint::Time;
-use tendermint_proto::Protobuf;
 use thiserror::Error;
 
 use super::{Ibc, StateChange};
+use crate::ibc::core::ics02_client::client_consensus::AnyConsensusState;
+use crate::ibc::core::ics02_client::client_state::AnyClientState;
+use crate::ibc::core::ics02_client::context::ClientReader;
+use crate::ibc::core::ics02_client::height::Height;
+use crate::ibc::core::ics03_connection::connection::ConnectionEnd;
+use crate::ibc::core::ics03_connection::context::ConnectionReader;
+use crate::ibc::core::ics03_connection::error::Error as Ics03Error;
+use crate::ibc::core::ics04_channel::channel::{
+    ChannelEnd, Counterparty, State,
+};
+use crate::ibc::core::ics04_channel::commitment::{
+    AcknowledgementCommitment, PacketCommitment,
+};
+use crate::ibc::core::ics04_channel::context::ChannelReader;
+use crate::ibc::core::ics04_channel::error::Error as Ics04Error;
+use crate::ibc::core::ics04_channel::handler::verify::verify_channel_proofs;
+use crate::ibc::core::ics04_channel::msgs::chan_close_confirm::MsgChannelCloseConfirm;
+use crate::ibc::core::ics04_channel::msgs::chan_open_ack::MsgChannelOpenAck;
+use crate::ibc::core::ics04_channel::msgs::chan_open_confirm::MsgChannelOpenConfirm;
+use crate::ibc::core::ics04_channel::msgs::chan_open_try::MsgChannelOpenTry;
+use crate::ibc::core::ics04_channel::msgs::{ChannelMsg, PacketMsg};
+use crate::ibc::core::ics04_channel::packet::{Receipt, Sequence};
+use crate::ibc::core::ics05_port::capabilities::{
+    Capability, ChannelCapability,
+};
+use crate::ibc::core::ics05_port::context::PortReader;
+use crate::ibc::core::ics24_host::identifier::{
+    ChannelId, ClientId, ConnectionId, PortChannelId, PortId,
+};
+use crate::ibc::core::ics26_routing::context::ModuleId;
+use crate::ibc::core::ics26_routing::msgs::Ics26Envelope;
+use crate::ibc::proofs::Proofs;
+use crate::ibc::timestamp::Timestamp;
 use crate::ledger::native_vp::{Error as NativeVpError, VpEnv};
+use crate::tendermint::Time;
+use crate::tendermint_proto::Protobuf;
 use crate::vm::WasmCacheAccess;
 
 #[allow(missing_docs)]
diff --git a/shared/src/ledger/ibc/vp/mod.rs b/shared/src/ledger/ibc/vp/mod.rs
index 3704f6131b..8a807ce754 100644
--- a/shared/src/ledger/ibc/vp/mod.rs
+++ b/shared/src/ledger/ibc/vp/mod.rs
@@ -12,8 +12,6 @@ mod token;
 use std::collections::{BTreeSet, HashSet};
 
 use borsh::BorshDeserialize;
-use ibc::core::ics02_client::context::ClientReader;
-use ibc::events::IbcEvent;
 use namada_core::ledger::ibc::storage::{
     client_id, ibc_prefix, is_client_counter_key, IbcPrefix,
 };
@@ -25,6 +23,8 @@ use namada_core::types::storage::Key;
 use thiserror::Error;
 pub use token::{Error as IbcTokenError, IbcToken};
 
+use crate::ibc::core::ics02_client::context::ClientReader;
+use crate::ibc::events::IbcEvent;
 use crate::ledger::native_vp::{self, Ctx, NativeVp, VpEnv};
 use crate::vm::WasmCacheAccess;
 
@@ -1521,7 +1521,7 @@ mod tests {
         let counterparty = get_channel_counterparty();
         let packet = packet_from_message(&msg, sequence, &counterparty);
         // insert a commitment
-        let commitment = handler::commitment(&packet);
+        let commitment = actions::commitment(&packet);
         let key = commitment_key(&get_port_id(), &get_channel_id(), sequence);
         write_log
             .write(&key, commitment.into_vec())
@@ -1683,7 +1683,7 @@ mod tests {
         let bytes = channel.encode_vec().expect("encoding failed");
         write_log.write(&channel_key, bytes).expect("write failed");
         // insert a commitment
-        let commitment = handler::commitment(&packet);
+        let commitment = actions::commitment(&packet);
         let commitment_key =
             commitment_key(&get_port_id(), &get_channel_id(), sequence);
         write_log
@@ -1784,7 +1784,7 @@ mod tests {
         let counterparty = get_channel_counterparty();
         let packet = packet_from_message(&msg, sequence, &counterparty);
         // insert a commitment
-        let commitment = handler::commitment(&packet);
+        let commitment = actions::commitment(&packet);
         let commitment_key = commitment_key(
             &packet.source_port,
             &packet.source_channel,
diff --git a/shared/src/ledger/ibc/vp/packet.rs b/shared/src/ledger/ibc/vp/packet.rs
index 7eecf92472..09346daf06 100644
--- a/shared/src/ledger/ibc/vp/packet.rs
+++ b/shared/src/ledger/ibc/vp/packet.rs
@@ -3,14 +3,13 @@
 use namada_core::ledger::ibc::actions::{
     self, make_send_packet_event, make_timeout_event, packet_from_message,
 };
-use namada_core::ledger::ibc::data::{Error as IbcDataError, IbcMessage};
+use namada_core::ledger::ibc::data::{
+    Error as IbcDataError, FungibleTokenPacketData, IbcMessage,
+};
 use namada_core::ledger::ibc::storage::{
     ibc_denom_key, port_channel_sequence_id, token_hash_from_denom,
     Error as IbcStorageError,
 };
-use namada_core::types::ibc::data::{
-    Error as IbcDataError, FungibleTokenPacketData, IbcMessage,
-};
 use thiserror::Error;
 
 use super::{Ibc, StateChange};
@@ -447,7 +446,7 @@ where
         packet: &Packet,
         commitment: PacketCommitment,
     ) -> Result<()> {
-        if commitment == namada_core::ledger::ibc::actions::commitment(packet) {
+        if commitment == actions::commitment(packet) {
             Ok(())
         } else {
             Err(Error::InvalidPacket(
diff --git a/shared/src/ledger/mod.rs b/shared/src/ledger/mod.rs
index 1eaee2b714..73f39dda05 100644
--- a/shared/src/ledger/mod.rs
+++ b/shared/src/ledger/mod.rs
@@ -9,8 +9,9 @@ pub mod pos;
 #[cfg(all(feature = "wasm-runtime", feature = "ferveo-tpke"))]
 pub mod protocol;
 pub mod queries;
-pub mod slash_fund;
+pub mod storage;
+pub mod vp_host_fns;
 
 pub use namada_core::ledger::{
-    gas, governance, parameters, storage, storage_api, tx_env, vp_env,
+    gas, governance, parameters, storage_api, tx_env, vp_env,
 };
diff --git a/shared/src/ledger/native_vp/governance/mod.rs b/shared/src/ledger/native_vp/governance/mod.rs
index 7ba1478ff3..f6b3187073 100644
--- a/shared/src/ledger/native_vp/governance/mod.rs
+++ b/shared/src/ledger/native_vp/governance/mod.rs
@@ -1,32 +1,27 @@
 //! Governance VP
 
-/// utility functions
 pub mod utils;
 
 use std::collections::BTreeSet;
 
-use namada_core::ledger::native_vp;
+use namada_core::ledger::governance::storage as gov_storage;
+use namada_core::ledger::storage;
 use namada_core::ledger::vp_env::VpEnv;
-pub use namada_core::ledger::{parameters, storage};
 use thiserror::Error;
 use utils::is_valid_validator_voting_period;
 
-use self::storage as gov_storage;
-use super::storage_api::StorageRead;
+use crate::ledger::native_vp;
 use crate::ledger::native_vp::{Ctx, NativeVp};
-use crate::ledger::pos::{self as pos_storage, BondId, Bonds};
-use crate::ledger::storage::{self as ledger_storage, StorageHasher};
+use crate::ledger::pos::{self, BondId, Bonds};
+use crate::ledger::storage_api::StorageRead;
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::{Epoch, Key};
-use crate::types::token as token_storage;
+use crate::types::token;
 use crate::vm::WasmCacheAccess;
 
 /// for handling Governance NativeVP errors
 pub type Result<T> = std::result::Result<T, Error>;
 
-/// The governance internal address
-pub const ADDRESS: Address = Address::Internal(InternalAddress::Governance);
-
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
 pub enum Error {
@@ -37,8 +32,8 @@ pub enum Error {
 /// Governance VP
 pub struct GovernanceVp<'a, DB, H, CA>
 where
-    DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>,
-    H: StorageHasher,
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: storage::StorageHasher,
     CA: WasmCacheAccess,
 {
     /// Context to interact with the host structures.
@@ -47,8 +42,8 @@ where
 
 impl<'a, DB, H, CA> NativeVp for GovernanceVp<'a, DB, H, CA>
 where
-    DB: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>,
-    H: 'static + StorageHasher,
+    DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: 'static + storage::StorageHasher,
     CA: 'static + WasmCacheAccess,
 {
     type Error = Error;
@@ -116,8 +111,8 @@ where
 
 impl<'a, DB, H, CA> GovernanceVp<'a, DB, H, CA>
 where
-    DB: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>,
-    H: 'static + StorageHasher,
+    DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: 'static + storage::StorageHasher,
     CA: 'static + WasmCacheAccess,
 {
     fn is_valid_key_set(&self, keys: &BTreeSet<Key>) -> Result<(bool, u64)> {
@@ -411,16 +406,16 @@ where
     ) -> Result<bool> {
         let funds_key = gov_storage::get_funds_key(proposal_id);
         let balance_key =
-            token_storage::balance_key(native_token_address, self.ctx.address);
+            token::balance_key(native_token_address, self.ctx.address);
         let min_funds_parameter_key = gov_storage::get_min_proposal_fund_key();
 
-        let min_funds_parameter: Option<token_storage::Amount> =
+        let min_funds_parameter: Option<token::Amount> =
             self.ctx.pre().read(&min_funds_parameter_key)?;
-        let pre_balance: Option<token_storage::Amount> =
+        let pre_balance: Option<token::Amount> =
             self.ctx.pre().read(&balance_key)?;
-        let post_balance: Option<token_storage::Amount> =
+        let post_balance: Option<token::Amount> =
             self.ctx.post().read(&balance_key)?;
-        let post_funds: Option<token_storage::Amount> =
+        let post_funds: Option<token::Amount> =
             self.ctx.post().read(&funds_key)?;
 
         match (min_funds_parameter, pre_balance, post_balance, post_funds) {
@@ -447,14 +442,14 @@ where
     /// Validate a balance key
     fn is_valid_balance(&self, native_token_address: &Address) -> Result<bool> {
         let balance_key =
-            token_storage::balance_key(native_token_address, self.ctx.address);
+            token::balance_key(native_token_address, self.ctx.address);
         let min_funds_parameter_key = gov_storage::get_min_proposal_fund_key();
 
-        let min_funds_parameter: Option<token_storage::Amount> =
+        let min_funds_parameter: Option<token::Amount> =
             self.ctx.pre().read(&min_funds_parameter_key)?;
-        let pre_balance: Option<token_storage::Amount> =
+        let pre_balance: Option<token::Amount> =
             self.ctx.pre().read(&balance_key)?;
-        let post_balance: Option<token_storage::Amount> =
+        let post_balance: Option<token::Amount> =
             self.ctx.post().read(&balance_key)?;
 
         match (min_funds_parameter, pre_balance, post_balance) {
@@ -549,14 +544,12 @@ where
         delegation_address: &Address,
     ) -> Result<bool>
     where
-        DB: 'static
-            + ledger_storage::DB
-            + for<'iter> ledger_storage::DBIter<'iter>,
-        H: 'static + StorageHasher,
+        DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>,
+        H: 'static + storage::StorageHasher,
         CA: 'static + WasmCacheAccess,
     {
-        let validator_set_key = pos_storage::validator_set_key();
-        let pre_validator_set: pos_storage::ValidatorSets =
+        let validator_set_key = pos::validator_set_key();
+        let pre_validator_set: pos::ValidatorSets =
             self.ctx.pre().read(&validator_set_key)?.unwrap();
 
         let validator_set = pre_validator_set.get(epoch);
@@ -588,7 +581,7 @@ where
         address: &Address,
         delegation_address: &Address,
     ) -> Result<bool> {
-        let bond_key = pos_storage::bond_key(&BondId {
+        let bond_key = pos::bond_key(&BondId {
             source: address.clone(),
             validator: delegation_address.clone(),
         });
@@ -658,7 +651,7 @@ impl KeyType {
             KeyType::COUNTER
         } else if gov_storage::is_parameter_key(key) {
             KeyType::PARAMETER
-        } else if token_storage::is_balance_key(native_token, key).is_some() {
+        } else if token::is_balance_key(native_token, key).is_some() {
             KeyType::BALANCE
         } else if gov_storage::is_governance_key(key) {
             KeyType::UNKNOWN_GOVERNANCE
diff --git a/shared/src/ledger/native_vp/governance/utils.rs b/shared/src/ledger/native_vp/governance/utils.rs
index f079f6a5ca..eb96948c49 100644
--- a/shared/src/ledger/native_vp/governance/utils.rs
+++ b/shared/src/ledger/native_vp/governance/utils.rs
@@ -1,3 +1,5 @@
+//! Governance utility functions
+
 use std::collections::HashMap;
 use std::str::FromStr;
 
diff --git a/shared/src/ledger/native_vp/mod.rs b/shared/src/ledger/native_vp/mod.rs
index 1755c52241..a2af3630cf 100644
--- a/shared/src/ledger/native_vp/mod.rs
+++ b/shared/src/ledger/native_vp/mod.rs
@@ -1,18 +1,21 @@
 //! Native validity predicate interface associated with internal accounts such
 //! as the PoS and IBC modules.
 
-pub mod parameters;
 pub mod governance;
+pub mod parameters;
+pub mod slash_fund;
 
 use std::cell::RefCell;
 use std::collections::BTreeSet;
 
+pub use namada_core::ledger::vp_env::VpEnv;
+
 use super::storage_api::{self, ResultExt, StorageRead};
-pub use super::vp_env::VpEnv;
+use super::vp_host_fns;
 use crate::ledger::gas::VpGasMeter;
+use crate::ledger::storage;
 use crate::ledger::storage::write_log::WriteLog;
 use crate::ledger::storage::{Storage, StorageHasher};
-use crate::ledger::{storage, vp_env};
 use crate::proto::Tx;
 use crate::types::address::{Address, InternalAddress};
 use crate::types::hash::Hash;
@@ -21,7 +24,7 @@ use crate::vm::prefix_iter::PrefixIterators;
 use crate::vm::WasmCacheAccess;
 
 /// Possible error in a native VP host function call
-/// The `storage_api::Error` may wrap the `vp_env::RuntimeError` and can
+/// The `storage_api::Error` may wrap the `vp_host_fns::RuntimeError` and can
 /// be extended with other custom errors when using `trait VpEnv`.
 pub type Error = storage_api::Error;
 
@@ -144,8 +147,11 @@ where
     }
 
     /// Add a gas cost incured in a validity predicate
-    pub fn add_gas(&self, used_gas: u64) -> Result<(), vp_env::RuntimeError> {
-        vp_env::add_gas(&mut self.gas_meter.borrow_mut(), used_gas)
+    pub fn add_gas(
+        &self,
+        used_gas: u64,
+    ) -> Result<(), vp_host_fns::RuntimeError> {
+        vp_host_fns::add_gas(&mut self.gas_meter.borrow_mut(), used_gas)
     }
 
     /// Read access to the prior storage (state before tx execution)
@@ -176,7 +182,7 @@ where
         &self,
         key: &crate::types::storage::Key,
     ) -> Result<Option<Vec<u8>>, storage_api::Error> {
-        vp_env::read_pre(
+        vp_host_fns::read_pre(
             &mut self.ctx.gas_meter.borrow_mut(),
             self.ctx.storage,
             self.ctx.write_log,
@@ -189,7 +195,7 @@ where
         &self,
         key: &crate::types::storage::Key,
     ) -> Result<bool, storage_api::Error> {
-        vp_env::has_key_pre(
+        vp_host_fns::has_key_pre(
             &mut self.ctx.gas_meter.borrow_mut(),
             self.ctx.storage,
             key,
@@ -208,8 +214,11 @@ where
         &self,
         iter: &mut Self::PrefixIter,
     ) -> Result<Option<(String, Vec<u8>)>, storage_api::Error> {
-        vp_env::iter_pre_next::<DB>(&mut self.ctx.gas_meter.borrow_mut(), iter)
-            .into_storage_result()
+        vp_host_fns::iter_pre_next::<DB>(
+            &mut self.ctx.gas_meter.borrow_mut(),
+            iter,
+        )
+        .into_storage_result()
     }
 
     // ---- Methods below are implemented in `self.ctx`, because they are
@@ -260,7 +269,7 @@ where
         &self,
         key: &crate::types::storage::Key,
     ) -> Result<Option<Vec<u8>>, storage_api::Error> {
-        vp_env::read_post(
+        vp_host_fns::read_post(
             &mut self.ctx.gas_meter.borrow_mut(),
             self.ctx.storage,
             self.ctx.write_log,
@@ -273,7 +282,7 @@ where
         &self,
         key: &crate::types::storage::Key,
     ) -> Result<bool, storage_api::Error> {
-        vp_env::has_key_post(
+        vp_host_fns::has_key_post(
             &mut self.ctx.gas_meter.borrow_mut(),
             self.ctx.storage,
             self.ctx.write_log,
@@ -293,7 +302,7 @@ where
         &self,
         iter: &mut Self::PrefixIter,
     ) -> Result<Option<(String, Vec<u8>)>, storage_api::Error> {
-        vp_env::iter_post_next::<DB>(
+        vp_host_fns::iter_post_next::<DB>(
             &mut self.ctx.gas_meter.borrow_mut(),
             self.ctx.write_log,
             iter,
@@ -358,56 +367,82 @@ where
         &self,
         key: &Key,
     ) -> Result<Option<T>, storage_api::Error> {
-        vp_env::read_temp(&mut self.gas_meter.borrow_mut(), self.write_log, key)
-            .map(|data| data.and_then(|t| T::try_from_slice(&t[..]).ok()))
-            .into_storage_result()
+        vp_host_fns::read_temp(
+            &mut self.gas_meter.borrow_mut(),
+            self.write_log,
+            key,
+        )
+        .map(|data| data.and_then(|t| T::try_from_slice(&t[..]).ok()))
+        .into_storage_result()
     }
 
     fn read_bytes_temp(
         &self,
         key: &Key,
     ) -> Result<Option<Vec<u8>>, storage_api::Error> {
-        vp_env::read_temp(&mut self.gas_meter.borrow_mut(), self.write_log, key)
-            .into_storage_result()
+        vp_host_fns::read_temp(
+            &mut self.gas_meter.borrow_mut(),
+            self.write_log,
+            key,
+        )
+        .into_storage_result()
     }
 
     fn get_chain_id(&'view self) -> Result<String, storage_api::Error> {
-        vp_env::get_chain_id(&mut self.gas_meter.borrow_mut(), self.storage)
-            .into_storage_result()
+        vp_host_fns::get_chain_id(
+            &mut self.gas_meter.borrow_mut(),
+            self.storage,
+        )
+        .into_storage_result()
     }
 
     fn get_block_height(
         &'view self,
     ) -> Result<BlockHeight, storage_api::Error> {
-        vp_env::get_block_height(&mut self.gas_meter.borrow_mut(), self.storage)
-            .into_storage_result()
+        vp_host_fns::get_block_height(
+            &mut self.gas_meter.borrow_mut(),
+            self.storage,
+        )
+        .into_storage_result()
     }
 
     fn get_block_hash(&'view self) -> Result<BlockHash, storage_api::Error> {
-        vp_env::get_block_hash(&mut self.gas_meter.borrow_mut(), self.storage)
-            .into_storage_result()
+        vp_host_fns::get_block_hash(
+            &mut self.gas_meter.borrow_mut(),
+            self.storage,
+        )
+        .into_storage_result()
     }
 
     fn get_block_epoch(&'view self) -> Result<Epoch, storage_api::Error> {
-        vp_env::get_block_epoch(&mut self.gas_meter.borrow_mut(), self.storage)
-            .into_storage_result()
+        vp_host_fns::get_block_epoch(
+            &mut self.gas_meter.borrow_mut(),
+            self.storage,
+        )
+        .into_storage_result()
     }
 
     fn get_tx_index(&'view self) -> Result<TxIndex, storage_api::Error> {
-        vp_env::get_tx_index(&mut self.gas_meter.borrow_mut(), self.tx_index)
-            .into_storage_result()
+        vp_host_fns::get_tx_index(
+            &mut self.gas_meter.borrow_mut(),
+            self.tx_index,
+        )
+        .into_storage_result()
     }
 
     fn get_native_token(&'view self) -> Result<Address, storage_api::Error> {
-        vp_env::get_native_token(&mut self.gas_meter.borrow_mut(), self.storage)
-            .into_storage_result()
+        vp_host_fns::get_native_token(
+            &mut self.gas_meter.borrow_mut(),
+            self.storage,
+        )
+        .into_storage_result()
     }
 
     fn iter_prefix(
         &'view self,
         prefix: &Key,
     ) -> Result<Self::PrefixIter, storage_api::Error> {
-        vp_env::iter_prefix(
+        vp_host_fns::iter_prefix(
             &mut self.gas_meter.borrow_mut(),
             self.storage,
             prefix,
@@ -419,7 +454,7 @@ where
         &self,
         prefix: &Key,
     ) -> Result<Self::PrefixIter, storage_api::Error> {
-        vp_env::rev_iter_prefix(
+        vp_host_fns::rev_iter_prefix(
             &mut self.gas_meter.borrow_mut(),
             self.storage,
             prefix,
@@ -499,8 +534,11 @@ where
     }
 
     fn get_tx_code_hash(&self) -> Result<Hash, storage_api::Error> {
-        vp_env::get_tx_code_hash(&mut self.gas_meter.borrow_mut(), self.tx)
-            .into_storage_result()
+        vp_host_fns::get_tx_code_hash(
+            &mut self.gas_meter.borrow_mut(),
+            self.tx,
+        )
+        .into_storage_result()
     }
 
     fn read_pre<T: borsh::BorshDeserialize>(
diff --git a/shared/src/ledger/native_vp/parameters.rs b/shared/src/ledger/native_vp/parameters.rs
index 1f05baa7eb..0a762ca4f6 100644
--- a/shared/src/ledger/native_vp/parameters.rs
+++ b/shared/src/ledger/native_vp/parameters.rs
@@ -2,13 +2,12 @@
 
 use std::collections::BTreeSet;
 
-use borsh::BorshDeserialize;
 use namada_core::ledger::storage;
 use namada_core::types::address::{Address, InternalAddress};
 use namada_core::types::storage::Key;
 use thiserror::Error;
 
-use crate::ledger::governance::vp::is_proposal_accepted;
+use super::governance;
 use crate::ledger::native_vp::{self, Ctx, NativeVp};
 use crate::vm::WasmCacheAccess;
 
@@ -52,13 +51,11 @@ where
         let result = keys_changed.iter().all(|key| {
             let key_type: KeyType = key.into();
             match key_type {
-                KeyType::PARAMETER => {
-                    let proposal_id = u64::try_from_slice(tx_data).ok();
-                    match proposal_id {
-                        Some(id) => is_proposal_accepted(&self.ctx, id),
-                        _ => false,
-                    }
-                }
+                KeyType::PARAMETER => governance::utils::is_proposal_accepted(
+                    self.ctx.storage,
+                    tx_data,
+                )
+                .unwrap_or(false),
                 KeyType::UNKNOWN_PARAMETER => false,
                 KeyType::UNKNOWN => true,
             }
diff --git a/shared/src/ledger/slash_fund/mod.rs b/shared/src/ledger/native_vp/slash_fund.rs
similarity index 87%
rename from shared/src/ledger/slash_fund/mod.rs
rename to shared/src/ledger/native_vp/slash_fund.rs
index c3bdd8976d..4950705647 100644
--- a/shared/src/ledger/slash_fund/mod.rs
+++ b/shared/src/ledger/native_vp/slash_fund.rs
@@ -2,24 +2,19 @@
 
 use std::collections::BTreeSet;
 
+use namada_core::ledger::slash_fund;
 /// SlashFund storage
-pub mod storage;
-
+pub use namada_core::ledger::slash_fund::storage;
 use thiserror::Error;
 
-use self::storage as slash_fund_storage;
-use super::governance::{self};
-use super::storage_api::StorageRead;
-use crate::ledger::native_vp::{self, Ctx, NativeVp};
+use crate::ledger::native_vp::{self, governance, Ctx, NativeVp};
 use crate::ledger::storage::{self as ledger_storage, StorageHasher};
+use crate::ledger::storage_api::StorageRead;
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::Key;
 use crate::types::token;
 use crate::vm::WasmCacheAccess;
 
-/// Internal SlashFund address
-pub const ADDRESS: Address = Address::Internal(InternalAddress::SlashFund);
-
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
 pub enum Error {
@@ -62,7 +57,7 @@ where
             let key_type: KeyType = get_key_type(key, &native_token);
             match key_type {
                 KeyType::BALANCE(addr) => {
-                    if addr.ne(&ADDRESS) {
+                    if addr.ne(&slash_fund::ADDRESS) {
                         return true;
                     }
                     governance::utils::is_proposal_accepted(
@@ -91,7 +86,7 @@ enum KeyType {
 }
 
 fn get_key_type(value: &Key, native_token: &Address) -> KeyType {
-    if slash_fund_storage::is_slash_fund_key(value) {
+    if storage::is_slash_fund_key(value) {
         KeyType::UNKNOWN_SLASH_FUND
     } else if token::is_any_token_balance_key(value).is_some() {
         match token::is_balance_key(native_token, value) {
diff --git a/shared/src/ledger/pos/mod.rs b/shared/src/ledger/pos/mod.rs
index a6900dd172..db16c3eebf 100644
--- a/shared/src/ledger/pos/mod.rs
+++ b/shared/src/ledger/pos/mod.rs
@@ -1,20 +1,18 @@
 //! Proof-of-Stake integration as a native validity predicate
 
-mod storage;
 pub mod vp;
 
 pub use namada_proof_of_stake;
 pub use namada_proof_of_stake::parameters::PosParams;
+pub use namada_proof_of_stake::storage::*;
 pub use namada_proof_of_stake::types::{
     self, decimal_mult_u64, Slash, Slashes, ValidatorStates,
 };
-use namada_proof_of_stake::{PosBase, PosReadOnly};
+use namada_proof_of_stake::PosBase;
 use rust_decimal::Decimal;
-pub use storage::*;
 pub use vp::PosVP;
 
 use crate::ledger::storage::{self as ledger_storage, Storage, StorageHasher};
-use crate::ledger::storage_api::{self, StorageRead};
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::Epoch;
 use crate::types::{key, token};
@@ -85,203 +83,3 @@ pub type CommissionRates = namada_proof_of_stake::types::CommissionRates;
 
 /// Alias for a PoS type with the same name with concrete type parameters
 pub type TotalDeltas = namada_proof_of_stake::types::TotalDeltas<token::Change>;
-
-// TODO pos will depend on core, so it will be able to `Epoch` type directly
-// impl From<Epoch> for namada_proof_of_stake::types::Epoch {
-//     fn from(epoch: Epoch) -> Self {
-//         let epoch: u64 = epoch.into();
-//         namada_proof_of_stake::types::Epoch::from(epoch)
-//     }
-// }
-
-// impl From<namada_proof_of_stake::types::Epoch> for Epoch {
-//     fn from(epoch: namada_proof_of_stake::types::Epoch) -> Self {
-//         let epoch: u64 = epoch.into();
-//         Epoch(epoch)
-//     }
-// }
-
-// The error conversions are needed to implement `PosActions` in
-// `tx_prelude/src/proof_of_stake.rs`
-// impl From<namada_proof_of_stake::BecomeValidatorError<Address>>
-//     for storage_api::Error
-// {
-//     fn from(err: namada_proof_of_stake::BecomeValidatorError<Address>) ->
-// Self {         Self::new(err)
-//     }
-// }
-
-// impl From<namada_proof_of_stake::BondError<Address>> for storage_api::Error {
-//     fn from(err: namada_proof_of_stake::BondError<Address>) -> Self {
-//         Self::new(err)
-//     }
-// }
-
-// impl From<namada_proof_of_stake::UnbondError<Address, token::Amount>>
-//     for storage_api::Error
-// {
-//     fn from(
-//         err: namada_proof_of_stake::UnbondError<Address, token::Amount>,
-//     ) -> Self {
-//         Self::new(err)
-//     }
-// }
-
-// impl From<namada_proof_of_stake::WithdrawError<Address>>
-//     for storage_api::Error
-// {
-//     fn from(err: namada_proof_of_stake::WithdrawError<Address>) -> Self {
-//         Self::new(err)
-//     }
-// }
-
-impl From<namada_proof_of_stake::CommissionRateChangeError<Address>>
-    for storage_api::Error
-{
-    fn from(
-        err: namada_proof_of_stake::CommissionRateChangeError<Address>,
-    ) -> Self {
-        Self::new(err)
-    }
-}
-
-#[macro_use]
-mod macros {
-    /// Implement `PosReadOnly` for a type that implements
-    /// [`trait@crate::ledger::storage_api::StorageRead`].
-    ///
-    /// Excuse the horrible syntax - we haven't found a better way to use this
-    /// for native_vp `CtxPreStorageRead`/`CtxPostStorageRead`, which have
-    /// generics and explicit lifetimes.
-    ///
-    /// # Examples
-    ///
-    /// ```ignore
-    /// impl_pos_read_only! { impl PosReadOnly for X }
-    /// ```
-    #[macro_export]
-    macro_rules! impl_pos_read_only {
-    (
-        // Type error type has to be declared before the impl.
-        // This error type must `impl From<storage_api::Error> for $error`.
-        type $error:tt = $err_ty:ty ;
-        // Matches anything, so that we can use lifetimes and generic types.
-        // This expects `impl(<.*>)? PoSReadOnly for $ty(<.*>)?`.
-        $( $any:tt )* )
-    => {
-        $( $any )*
-        {
-            type Address = $crate::types::address::Address;
-            type $error = $err_ty;
-            type PublicKey = $crate::types::key::common::PublicKey;
-            type TokenAmount = $crate::types::token::Amount;
-            type TokenChange = $crate::types::token::Change;
-
-            const POS_ADDRESS: Self::Address = $crate::ledger::pos::ADDRESS;
-
-            fn staking_token_address(&self) -> Self::Address {
-                self.get_native_token().expect("Native token must be available")
-            }
-
-            fn read_pos_params(&self) -> std::result::Result<PosParams, Self::Error> {
-                let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &params_key())?.unwrap();
-                Ok($crate::ledger::storage::types::decode(value).unwrap())
-            }
-
-            fn read_validator_consensus_key(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorConsensusKeys>, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_consensus_key_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_commission_rate(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<CommissionRates>, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_commission_rate_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_max_commission_rate_change(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<Decimal>, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_max_commission_rate_change_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_state(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorStates>, Self::Error> {
-                let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_state_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_deltas(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorDeltas>, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_deltas_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_slashes(
-                &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Vec<types::Slash>, Self::Error> {
-                let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_slashes_key(key))?;
-                Ok(value
-                    .map(|value| $crate::ledger::storage::types::decode(value).unwrap())
-                    .unwrap_or_default())
-            }
-
-            fn read_bond(
-                &self,
-                key: &BondId,
-            ) -> std::result::Result<Option<Bonds>, Self::Error> {
-                let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &bond_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_unbond(
-                &self,
-                key: &BondId,
-            ) -> std::result::Result<Option<Unbonds>, Self::Error> {
-                let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &unbond_key(key))?;
-                Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap()))
-            }
-
-            fn read_validator_set(
-                &self,
-            ) -> std::result::Result<ValidatorSets, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_set_key())?.unwrap();
-                Ok($crate::ledger::storage::types::decode(value).unwrap())
-            }
-
-            fn read_total_deltas(
-                &self,
-            ) -> std::result::Result<types::TotalDeltas<Self::TokenChange>, Self::Error> {
-                let value =
-                    $crate::ledger::storage_api::StorageRead::read_bytes(self, &total_deltas_key())?.unwrap();
-                Ok($crate::ledger::storage::types::decode(value).unwrap())
-            }
-        }
-    }
-}
-}
-
-impl_pos_read_only! {
-    type Error = storage_api::Error;
-    impl<DB, H> PosReadOnly for Storage<DB, H>
-        where
-            DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter> +'static,
-            H: StorageHasher +'static,
-}
diff --git a/shared/src/ledger/pos/vp.rs b/shared/src/ledger/pos/vp.rs
index 1f3faaf9c5..0094ad9b5c 100644
--- a/shared/src/ledger/pos/vp.rs
+++ b/shared/src/ledger/pos/vp.rs
@@ -20,12 +20,11 @@ use super::{
     validator_consensus_key_key, validator_deltas_key,
     validator_max_commission_rate_change_key, validator_set_key,
     validator_slashes_key, validator_state_key, BondId, Bonds, CommissionRates,
-    Unbonds, ValidatorConsensusKeys, ValidatorDeltas, ValidatorSets,
+    TotalDeltas, Unbonds, ValidatorConsensusKeys, ValidatorDeltas,
+    ValidatorSets,
 };
-use crate::impl_pos_read_only;
-use crate::ledger::governance;
 use crate::ledger::native_vp::{
-    self, Ctx, CtxPostStorageRead, CtxPreStorageRead, NativeVp,
+    self, governance, Ctx, CtxPostStorageRead, CtxPreStorageRead, NativeVp,
 };
 use crate::ledger::pos::{
     is_validator_address_raw_hash_key, is_validator_commission_rate_key,
diff --git a/shared/src/ledger/protocol/mod.rs b/shared/src/ledger/protocol/mod.rs
index 9041725738..edb801700b 100644
--- a/shared/src/ledger/protocol/mod.rs
+++ b/shared/src/ledger/protocol/mod.rs
@@ -7,12 +7,12 @@ use thiserror::Error;
 
 use crate::ledger::eth_bridge::vp::EthBridge;
 use crate::ledger::gas::{self, BlockGasMeter, VpGasMeter};
-use crate::ledger::governance::GovernanceVp;
 use crate::ledger::ibc::vp::{Ibc, IbcToken};
+use crate::ledger::native_vp::governance::GovernanceVp;
 use crate::ledger::native_vp::parameters::{self, ParametersVp};
+use crate::ledger::native_vp::slash_fund::SlashFundVp;
 use crate::ledger::native_vp::{self, NativeVp};
 use crate::ledger::pos::{self, PosVP};
-use crate::ledger::slash_fund::SlashFundVp;
 use crate::ledger::storage::write_log::WriteLog;
 use crate::ledger::storage::{DBIter, Storage, StorageHasher, DB};
 use crate::proto::{self, Tx};
@@ -51,9 +51,9 @@ pub enum Error {
     #[error("IBC Token native VP: {0}")]
     IbcTokenNativeVpError(crate::ledger::ibc::vp::IbcTokenError),
     #[error("Governance native VP error: {0}")]
-    GovernanceNativeVpError(crate::ledger::governance::Error),
+    GovernanceNativeVpError(crate::ledger::native_vp::governance::Error),
     #[error("SlashFund native VP error: {0}")]
-    SlashFundNativeVpError(crate::ledger::slash_fund::Error),
+    SlashFundNativeVpError(crate::ledger::native_vp::slash_fund::Error),
     #[error("Ethereum bridge native VP error: {0}")]
     EthBridgeNativeVpError(crate::ledger::eth_bridge::vp::Error),
     #[error("Access to an internal address {0} is forbidden")]
diff --git a/shared/src/ledger/queries/mod.rs b/shared/src/ledger/queries/mod.rs
index 5a9c97879c..4644909b1a 100644
--- a/shared/src/ledger/queries/mod.rs
+++ b/shared/src/ledger/queries/mod.rs
@@ -87,7 +87,7 @@ pub fn require_no_data(request: &RequestQuery) -> storage_api::Result<()> {
     Ok(())
 }
 
-#[cfg(any(test, feature = "tendermint-rpc"))]
+#[cfg(any(feature = "tendermint-rpc", feature = "tendermint-rpc-abcipp",))]
 /// Provides [`Client`] implementation for Tendermint RPC client
 pub mod tm {
     use thiserror::Error;
@@ -135,15 +135,14 @@ pub mod tm {
                 prove,
             )
             .await?;
+            use crate::tendermint::abci::Code;
             match response.code {
-                crate::tendermint::abci::Code::Ok => Ok(EncodedResponseQuery {
+                Code::Ok => Ok(EncodedResponseQuery {
                     data: response.value,
                     info: response.info,
                     proof: response.proof,
                 }),
-                crate::tendermint::abci::Code::Err(code) => {
-                    Err(Error::Query(response.info, code))
-                }
+                Code::Err(code) => Err(Error::Query(response.info, code)),
             }
         }
     }
diff --git a/shared/src/ledger/queries/shell.rs b/shared/src/ledger/queries/shell.rs
index 766f88c5a1..f428a2e572 100644
--- a/shared/src/ledger/queries/shell.rs
+++ b/shared/src/ledger/queries/shell.rs
@@ -1,10 +1,10 @@
-use borsh::BorshSerialize;
+use borsh::{BorshDeserialize, BorshSerialize};
 use masp_primitives::asset_type::AssetType;
 use masp_primitives::merkle_tree::MerklePath;
 use masp_primitives::sapling::Node;
-use namada_core::ledger::storage::merkle_tree;
-use prost::Message;
-use tendermint_proto::crypto::{ProofOp, ProofOps};
+use namada_core::types::address::Address;
+use namada_core::types::hash::Hash;
+use namada_core::types::storage::BlockResults;
 
 use crate::ledger::events::log::dumb_queries;
 use crate::ledger::events::Event;
@@ -14,9 +14,7 @@ use crate::ledger::storage::traits::StorageHasher;
 use crate::ledger::storage::{DBIter, DB};
 use crate::ledger::storage_api::{self, ResultExt, StorageRead};
 use crate::tendermint::merkle::proof::Proof;
-use crate::types::address::Address;
-use crate::types::hash::Hash;
-use crate::types::storage::{self, BlockResults, Epoch, PrefixValue};
+use crate::types::storage::{self, Epoch, PrefixValue};
 #[cfg(any(test, feature = "async-client"))]
 use crate::types::transaction::TxResult;
 
@@ -265,7 +263,7 @@ where
     let proof = if request.prove {
         let mut ops = vec![];
         for PrefixValue { key, value } in &data {
-            let mut proof = ctx
+            let mut proof: crate::tendermint::merkle::proof::Proof = ctx
                 .storage
                 .get_existence_proof(key, value.clone().into(), request.height)
                 .into_storage_result()?;
@@ -449,38 +447,3 @@ mod test {
         Ok(())
     }
 }
-
-/// Convert merkle tree proof to tendermint proof
-fn proof_to_tm_proof(
-    merkle_tree::Proof {
-        key,
-        sub_proof,
-        base_proof,
-    }: merkle_tree::Proof,
-) -> tendermint::merkle::proof::Proof {
-    use crate::tendermint::merkle::proof::{Proof, ProofOp};
-    let mut data = vec![];
-    sub_proof
-        .encode(&mut data)
-        .expect("Encoding proof shouldn't fail");
-    let sub_proof_op = ProofOp {
-        field_type: "ics23_CommitmentProof".to_string(),
-        key: key.to_string().as_bytes().to_vec(),
-        data,
-    };
-
-    let mut data = vec![];
-    base_proof
-        .encode(&mut data)
-        .expect("Encoding proof shouldn't fail");
-    let base_proof_op = ProofOp {
-        field_type: "ics23_CommitmentProof".to_string(),
-        key: key.to_string().as_bytes().to_vec(),
-        data,
-    };
-
-    // Set ProofOps from leaf to root
-    Proof {
-        ops: vec![sub_proof_op, base_proof_op],
-    }
-}
diff --git a/shared/src/ledger/storage/mod.rs b/shared/src/ledger/storage/mod.rs
new file mode 100644
index 0000000000..d55a96db39
--- /dev/null
+++ b/shared/src/ledger/storage/mod.rs
@@ -0,0 +1,7 @@
+//! Ledger's state storage with key-value backed store and a merkle tree
+
+pub mod write_log;
+
+#[cfg(any(test, feature = "testing"))]
+pub use namada_core::ledger::storage::mockdb;
+pub use namada_core::ledger::storage::{traits, *};
diff --git a/shared/src/ledger/storage/write_log.rs b/shared/src/ledger/storage/write_log.rs
index 098204b707..6e78612f56 100644
--- a/shared/src/ledger/storage/write_log.rs
+++ b/shared/src/ledger/storage/write_log.rs
@@ -682,12 +682,12 @@ mod tests {
 /// Helpers for testing with write log.
 #[cfg(any(test, feature = "testing"))]
 pub mod testing {
+    use namada_core::types::address::testing::arb_address;
+    use namada_core::types::storage::testing::arb_key;
     use proptest::collection;
     use proptest::prelude::{any, prop_oneof, Just, Strategy};
 
     use super::*;
-    use crate::types::address::testing::arb_address;
-    use crate::types::storage::testing::arb_key;
 
     /// Generate an arbitrary tx write log of [`HashMap<storage::Key,
     /// StorageModification>`].
diff --git a/shared/src/ledger/storage_api.rs b/shared/src/ledger/storage_api.rs
new file mode 100644
index 0000000000..1a377fefc8
--- /dev/null
+++ b/shared/src/ledger/storage_api.rs
@@ -0,0 +1,3 @@
+//! Re-export of core storage_api for backward compatibility
+
+pub use namada_core::ledger::storage_api::*;
diff --git a/shared/src/ledger/vp_host_fns.rs b/shared/src/ledger/vp_host_fns.rs
new file mode 100644
index 0000000000..8fc013075a
--- /dev/null
+++ b/shared/src/ledger/vp_host_fns.rs
@@ -0,0 +1,381 @@
+//! Host functions for VPs used for both native and WASM VPs.
+
+use std::num::TryFromIntError;
+
+use namada_core::types::address::Address;
+use namada_core::types::hash::Hash;
+use namada_core::types::storage::{
+    BlockHash, BlockHeight, Epoch, Key, TxIndex,
+};
+use thiserror::Error;
+
+use super::gas::MIN_STORAGE_GAS;
+use crate::ledger::gas;
+use crate::ledger::gas::VpGasMeter;
+use crate::ledger::storage::write_log::WriteLog;
+use crate::ledger::storage::{self, write_log, Storage, StorageHasher};
+use crate::proto::Tx;
+
+/// These runtime errors will abort VP execution immediately
+#[allow(missing_docs)]
+#[derive(Error, Debug)]
+pub enum RuntimeError {
+    #[error("Out of gas: {0}")]
+    OutOfGas(gas::Error),
+    #[error("Storage error: {0}")]
+    StorageError(storage::Error),
+    #[error("Storage data error: {0}")]
+    StorageDataError(crate::types::storage::Error),
+    #[error("Encoding error: {0}")]
+    EncodingError(std::io::Error),
+    #[error("Numeric conversion error: {0}")]
+    NumConversionError(TryFromIntError),
+    #[error("Memory error: {0}")]
+    MemoryError(Box<dyn std::error::Error + Sync + Send + 'static>),
+    #[error("Trying to read a temporary value with read_post")]
+    ReadTemporaryValueError,
+    #[error("Trying to read a permanent value with read_temp")]
+    ReadPermanentValueError,
+}
+
+/// VP environment function result
+pub type EnvResult<T> = std::result::Result<T, RuntimeError>;
+
+/// Add a gas cost incurred in a validity predicate
+pub fn add_gas(gas_meter: &mut VpGasMeter, used_gas: u64) -> EnvResult<()> {
+    let result = gas_meter.add(used_gas).map_err(RuntimeError::OutOfGas);
+    if let Err(err) = &result {
+        tracing::info!("Stopping VP execution because of gas error: {}", err);
+    }
+    result
+}
+
+/// Storage read prior state (before tx execution). It will try to read from the
+/// storage.
+pub fn read_pre<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+    write_log: &WriteLog,
+    key: &Key,
+) -> EnvResult<Option<Vec<u8>>>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (log_val, gas) = write_log.read_pre(key);
+    add_gas(gas_meter, gas)?;
+    match log_val {
+        Some(&write_log::StorageModification::Write { ref value }) => {
+            Ok(Some(value.clone()))
+        }
+        Some(&write_log::StorageModification::Delete) => {
+            // Given key has been deleted
+            Ok(None)
+        }
+        Some(&write_log::StorageModification::InitAccount {
+            ref vp, ..
+        }) => {
+            // Read the VP of a new account
+            Ok(Some(vp.clone()))
+        }
+        Some(&write_log::StorageModification::Temp { .. }) => {
+            Err(RuntimeError::ReadTemporaryValueError)
+        }
+        None => {
+            // When not found in write log, try to read from the storage
+            let (value, gas) =
+                storage.read(key).map_err(RuntimeError::StorageError)?;
+            add_gas(gas_meter, gas)?;
+            Ok(value)
+        }
+    }
+}
+
+/// Storage read posterior state (after tx execution). It will try to read from
+/// the write log first and if no entry found then from the storage.
+pub fn read_post<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+    write_log: &WriteLog,
+    key: &Key,
+) -> EnvResult<Option<Vec<u8>>>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    // Try to read from the write log first
+    let (log_val, gas) = write_log.read(key);
+    add_gas(gas_meter, gas)?;
+    match log_val {
+        Some(&write_log::StorageModification::Write { ref value }) => {
+            Ok(Some(value.clone()))
+        }
+        Some(&write_log::StorageModification::Delete) => {
+            // Given key has been deleted
+            Ok(None)
+        }
+        Some(&write_log::StorageModification::InitAccount {
+            ref vp, ..
+        }) => {
+            // Read the VP of a new account
+            Ok(Some(vp.clone()))
+        }
+        Some(&write_log::StorageModification::Temp { .. }) => {
+            Err(RuntimeError::ReadTemporaryValueError)
+        }
+        None => {
+            // When not found in write log, try to read from the storage
+            let (value, gas) =
+                storage.read(key).map_err(RuntimeError::StorageError)?;
+            add_gas(gas_meter, gas)?;
+            Ok(value)
+        }
+    }
+}
+
+/// Storage read temporary state (after tx execution). It will try to read from
+/// only the write log.
+pub fn read_temp(
+    gas_meter: &mut VpGasMeter,
+    write_log: &WriteLog,
+    key: &Key,
+) -> EnvResult<Option<Vec<u8>>> {
+    // Try to read from the write log first
+    let (log_val, gas) = write_log.read(key);
+    add_gas(gas_meter, gas)?;
+    match log_val {
+        Some(&write_log::StorageModification::Temp { ref value }) => {
+            Ok(Some(value.clone()))
+        }
+        None => Ok(None),
+        _ => Err(RuntimeError::ReadPermanentValueError),
+    }
+}
+
+/// Storage `has_key` in prior state (before tx execution). It will try to read
+/// from the storage.
+pub fn has_key_pre<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+    key: &Key,
+) -> EnvResult<bool>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (present, gas) =
+        storage.has_key(key).map_err(RuntimeError::StorageError)?;
+    add_gas(gas_meter, gas)?;
+    Ok(present)
+}
+
+/// Storage `has_key` in posterior state (after tx execution). It will try to
+/// check the write log first and if no entry found then the storage.
+pub fn has_key_post<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+    write_log: &WriteLog,
+    key: &Key,
+) -> EnvResult<bool>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    // Try to read from the write log first
+    let (log_val, gas) = write_log.read(key);
+    add_gas(gas_meter, gas)?;
+    match log_val {
+        Some(&write_log::StorageModification::Write { .. }) => Ok(true),
+        Some(&write_log::StorageModification::Delete) => {
+            // The given key has been deleted
+            Ok(false)
+        }
+        Some(&write_log::StorageModification::InitAccount { .. }) => Ok(true),
+        Some(&write_log::StorageModification::Temp { .. }) => Ok(true),
+        None => {
+            // When not found in write log, try to check the storage
+            let (present, gas) =
+                storage.has_key(key).map_err(RuntimeError::StorageError)?;
+            add_gas(gas_meter, gas)?;
+            Ok(present)
+        }
+    }
+}
+
+/// Getting the chain ID.
+pub fn get_chain_id<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+) -> EnvResult<String>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (chain_id, gas) = storage.get_chain_id();
+    add_gas(gas_meter, gas)?;
+    Ok(chain_id)
+}
+
+/// Getting the block height. The height is that of the block to which the
+/// current transaction is being applied.
+pub fn get_block_height<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+) -> EnvResult<BlockHeight>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (height, gas) = storage.get_block_height();
+    add_gas(gas_meter, gas)?;
+    Ok(height)
+}
+
+/// Getting the block hash. The hash is that of the block to which the
+/// current transaction is being applied.
+pub fn get_block_hash<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+) -> EnvResult<BlockHash>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (hash, gas) = storage.get_block_hash();
+    add_gas(gas_meter, gas)?;
+    Ok(hash)
+}
+
+/// Getting the transaction code hash. The hash is that of the code of the
+/// transaction currently being applied.
+pub fn get_tx_code_hash(
+    gas_meter: &mut VpGasMeter,
+    tx: &Tx,
+) -> EnvResult<Hash> {
+    let hash = Hash(tx.code_hash());
+    add_gas(gas_meter, MIN_STORAGE_GAS)?;
+    Ok(hash)
+}
+
+/// Getting the block epoch. The epoch is that of the block to which the
+/// current transaction is being applied.
+pub fn get_block_epoch<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+) -> EnvResult<Epoch>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (epoch, gas) = storage.get_current_epoch();
+    add_gas(gas_meter, gas)?;
+    Ok(epoch)
+}
+
+/// Getting the transaction index. The index is that of the transaction
+/// currently being applied within its block.
+pub fn get_tx_index(
+    gas_meter: &mut VpGasMeter,
+    tx_index: &TxIndex,
+) -> EnvResult<TxIndex> {
+    add_gas(gas_meter, MIN_STORAGE_GAS)?;
+    Ok(*tx_index)
+}
+
+/// Getting the native token address.
+pub fn get_native_token<DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &Storage<DB, H>,
+) -> EnvResult<Address>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    add_gas(gas_meter, MIN_STORAGE_GAS)?;
+    Ok(storage.native_token.clone())
+}
+
+/// Storage prefix iterator, ordered by storage keys. It will try to get an
+/// iterator from the storage.
+pub fn iter_prefix<'a, DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &'a Storage<DB, H>,
+    prefix: &Key,
+) -> EnvResult<<DB as storage::DBIter<'a>>::PrefixIter>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (iter, gas) = storage.iter_prefix(prefix);
+    add_gas(gas_meter, gas)?;
+    Ok(iter)
+}
+
+/// Storage prefix iterator, reverse ordered by storage keys. It will try to get
+/// an iterator from the storage.
+pub fn rev_iter_prefix<'a, DB, H>(
+    gas_meter: &mut VpGasMeter,
+    storage: &'a Storage<DB, H>,
+    prefix: &Key,
+) -> EnvResult<<DB as storage::DBIter<'a>>::PrefixIter>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+    H: StorageHasher,
+{
+    let (iter, gas) = storage.rev_iter_prefix(prefix);
+    add_gas(gas_meter, gas)?;
+    Ok(iter)
+}
+
+/// Storage prefix iterator for prior state (before tx execution). It will try
+/// to read from the storage.
+pub fn iter_pre_next<DB>(
+    gas_meter: &mut VpGasMeter,
+    iter: &mut <DB as storage::DBIter<'_>>::PrefixIter,
+) -> EnvResult<Option<(String, Vec<u8>)>>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+{
+    if let Some((key, val, gas)) = iter.next() {
+        add_gas(gas_meter, gas)?;
+        return Ok(Some((key, val)));
+    }
+    Ok(None)
+}
+
+/// Storage prefix iterator next for posterior state (after tx execution). It
+/// will try to read from the write log first and if no entry found then from
+/// the storage.
+pub fn iter_post_next<DB>(
+    gas_meter: &mut VpGasMeter,
+    write_log: &WriteLog,
+    iter: &mut <DB as storage::DBIter<'_>>::PrefixIter,
+) -> EnvResult<Option<(String, Vec<u8>)>>
+where
+    DB: storage::DB + for<'iter> storage::DBIter<'iter>,
+{
+    for (key, val, iter_gas) in iter {
+        let (log_val, log_gas) = write_log.read(
+            &Key::parse(key.clone()).map_err(RuntimeError::StorageDataError)?,
+        );
+        add_gas(gas_meter, iter_gas + log_gas)?;
+        match log_val {
+            Some(&write_log::StorageModification::Write { ref value }) => {
+                return Ok(Some((key, value.clone())));
+            }
+            Some(&write_log::StorageModification::Delete) => {
+                // check the next one because the key has already been deleted
+                continue;
+            }
+            Some(&write_log::StorageModification::InitAccount { .. }) => {
+                // a VP of a new account doesn't need to be iterated
+                continue;
+            }
+            Some(&write_log::StorageModification::Temp { .. }) => {
+                return Err(RuntimeError::ReadTemporaryValueError);
+            }
+            None => return Ok(Some((key, val))),
+        }
+    }
+    Ok(None)
+}
diff --git a/shared/src/lib.rs b/shared/src/lib.rs
index f37cb9e867..8c514b8f9d 100644
--- a/shared/src/lib.rs
+++ b/shared/src/lib.rs
@@ -6,11 +6,11 @@
 #![deny(rustdoc::broken_intra_doc_links)]
 #![deny(rustdoc::private_intra_doc_links)]
 
-#[cfg(all(not(feature = "abcipp"), feature = "ferveo-tpke"))]
+#[cfg(feature = "tendermint-rpc")]
 pub use tendermint_rpc;
-#[cfg(all(feature = "abcipp", feature = "ferveo-tpke"))]
+#[cfg(feature = "tendermint-rpc-abcipp")]
 pub use tendermint_rpc_abcipp as tendermint_rpc;
-#[cfg(not(feature = "abcipp"))]
+#[cfg(feature = "abciplus")]
 pub use {ibc, ibc_proto, tendermint, tendermint_proto};
 #[cfg(feature = "abcipp")]
 pub use {
@@ -18,7 +18,7 @@ pub use {
     tendermint_abcipp as tendermint,
     tendermint_proto_abcipp as tendermint_proto,
 };
-
+pub use {namada_core as core, namada_proof_of_stake as proof_of_stake};
 pub mod ledger;
 pub use namada_core::proto;
 pub mod types;
diff --git a/shared/src/types/key/mod.rs b/shared/src/types/key/mod.rs
new file mode 100644
index 0000000000..11a7af5533
--- /dev/null
+++ b/shared/src/types/key/mod.rs
@@ -0,0 +1,3 @@
+//! Cryptographic keys
+
+pub use namada_core::types::key::*;
diff --git a/shared/src/vm/host_env.rs b/shared/src/vm/host_env.rs
index abcc9e984d..90e6e4405f 100644
--- a/shared/src/vm/host_env.rs
+++ b/shared/src/vm/host_env.rs
@@ -5,6 +5,7 @@ use std::convert::TryInto;
 use std::num::TryFromIntError;
 
 use borsh::{BorshDeserialize, BorshSerialize};
+use namada_core::types::internal::KeyVal;
 use thiserror::Error;
 
 #[cfg(feature = "wasm-runtime")]
@@ -15,7 +16,7 @@ use super::WasmCacheAccess;
 use crate::ledger::gas::{self, BlockGasMeter, VpGasMeter, MIN_STORAGE_GAS};
 use crate::ledger::storage::write_log::{self, WriteLog};
 use crate::ledger::storage::{self, Storage, StorageHasher};
-use crate::ledger::vp_env;
+use crate::ledger::vp_host_fns;
 use crate::proto::Tx;
 use crate::types::address::{self, Address};
 use crate::types::ibc::IbcEvent;
@@ -24,7 +25,6 @@ use crate::types::key::*;
 use crate::types::storage::{Key, TxIndex};
 use crate::vm::memory::VmMemory;
 use crate::vm::prefix_iter::{PrefixIteratorId, PrefixIterators};
-use crate::vm::types::KeyVal;
 use crate::vm::{
     validate_untrusted_wasm, HostRef, MutHostRef, WasmValidationError,
 };
@@ -490,7 +490,7 @@ where
 pub fn vp_charge_gas<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     used_gas: i32,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -499,11 +499,11 @@ where
     CA: WasmCacheAccess,
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(
+    vp_host_fns::add_gas(
         gas_meter,
         used_gas
             .try_into()
-            .map_err(vp_env::RuntimeError::NumConversionError)?,
+            .map_err(vp_host_fns::RuntimeError::NumConversionError)?,
     )
 }
 
@@ -1015,7 +1015,7 @@ pub fn vp_read_pre<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     key_ptr: u64,
     key_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1026,16 +1026,16 @@ where
     let (key, gas) = env
         .memory
         .read_string(key_ptr, key_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     // try to read from the storage
     let key =
-        Key::parse(key).map_err(vp_env::RuntimeError::StorageDataError)?;
+        Key::parse(key).map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     let storage = unsafe { env.ctx.storage.get() };
     let write_log = unsafe { env.ctx.write_log.get() };
-    let value = vp_env::read_pre(gas_meter, storage, write_log, &key)?;
+    let value = vp_host_fns::read_pre(gas_meter, storage, write_log, &key)?;
     tracing::debug!(
         "vp_read_pre addr {}, key {}, value {:?}",
         unsafe { env.ctx.address.get() },
@@ -1047,7 +1047,7 @@ where
             let len: i64 = value
                 .len()
                 .try_into()
-                .map_err(vp_env::RuntimeError::NumConversionError)?;
+                .map_err(vp_host_fns::RuntimeError::NumConversionError)?;
             let result_buffer = unsafe { env.ctx.result_buffer.get() };
             result_buffer.replace(value);
             len
@@ -1066,7 +1066,7 @@ pub fn vp_read_post<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     key_ptr: u64,
     key_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1077,24 +1077,24 @@ where
     let (key, gas) = env
         .memory
         .read_string(key_ptr, key_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     tracing::debug!("vp_read_post {}, key {}", key, key_ptr,);
 
     // try to read from the write log first
     let key =
-        Key::parse(key).map_err(vp_env::RuntimeError::StorageDataError)?;
+        Key::parse(key).map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     let storage = unsafe { env.ctx.storage.get() };
     let write_log = unsafe { env.ctx.write_log.get() };
-    let value = vp_env::read_post(gas_meter, storage, write_log, &key)?;
+    let value = vp_host_fns::read_post(gas_meter, storage, write_log, &key)?;
     Ok(match value {
         Some(value) => {
             let len: i64 = value
                 .len()
                 .try_into()
-                .map_err(vp_env::RuntimeError::NumConversionError)?;
+                .map_err(vp_host_fns::RuntimeError::NumConversionError)?;
             let result_buffer = unsafe { env.ctx.result_buffer.get() };
             result_buffer.replace(value);
             len
@@ -1112,7 +1112,7 @@ pub fn vp_read_temp<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     key_ptr: u64,
     key_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1123,23 +1123,23 @@ where
     let (key, gas) = env
         .memory
         .read_string(key_ptr, key_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     tracing::debug!("vp_read_temp {}, key {}", key, key_ptr);
 
     // try to read from the write log
     let key =
-        Key::parse(key).map_err(vp_env::RuntimeError::StorageDataError)?;
+        Key::parse(key).map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     let write_log = unsafe { env.ctx.write_log.get() };
-    let value = vp_env::read_temp(gas_meter, write_log, &key)?;
+    let value = vp_host_fns::read_temp(gas_meter, write_log, &key)?;
     Ok(match value {
         Some(value) => {
             let len: i64 = value
                 .len()
                 .try_into()
-                .map_err(vp_env::RuntimeError::NumConversionError)?;
+                .map_err(vp_host_fns::RuntimeError::NumConversionError)?;
             let result_buffer = unsafe { env.ctx.result_buffer.get() };
             result_buffer.replace(value);
             len
@@ -1159,7 +1159,7 @@ where
 pub fn vp_result_buffer<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     result_ptr: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1172,9 +1172,9 @@ where
     let gas = env
         .memory
         .write_bytes(result_ptr, value)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)
+    vp_host_fns::add_gas(gas_meter, gas)
 }
 
 /// Storage `has_key` in prior state (before tx execution) function exposed to
@@ -1183,7 +1183,7 @@ pub fn vp_has_key_pre<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     key_ptr: u64,
     key_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1194,16 +1194,16 @@ where
     let (key, gas) = env
         .memory
         .read_string(key_ptr, key_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     tracing::debug!("vp_has_key_pre {}, key {}", key, key_ptr,);
 
     let key =
-        Key::parse(key).map_err(vp_env::RuntimeError::StorageDataError)?;
+        Key::parse(key).map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     let storage = unsafe { env.ctx.storage.get() };
-    let present = vp_env::has_key_pre(gas_meter, storage, &key)?;
+    let present = vp_host_fns::has_key_pre(gas_meter, storage, &key)?;
     Ok(HostEnvResult::from(present).to_i64())
 }
 
@@ -1214,7 +1214,7 @@ pub fn vp_has_key_post<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     key_ptr: u64,
     key_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1225,17 +1225,18 @@ where
     let (key, gas) = env
         .memory
         .read_string(key_ptr, key_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     tracing::debug!("vp_has_key_post {}, key {}", key, key_ptr,);
 
     let key =
-        Key::parse(key).map_err(vp_env::RuntimeError::StorageDataError)?;
+        Key::parse(key).map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     let storage = unsafe { env.ctx.storage.get() };
     let write_log = unsafe { env.ctx.write_log.get() };
-    let present = vp_env::has_key_post(gas_meter, storage, write_log, &key)?;
+    let present =
+        vp_host_fns::has_key_post(gas_meter, storage, write_log, &key)?;
     Ok(HostEnvResult::from(present).to_i64())
 }
 
@@ -1246,7 +1247,7 @@ pub fn vp_iter_prefix<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     prefix_ptr: u64,
     prefix_len: u64,
-) -> vp_env::EnvResult<u64>
+) -> vp_host_fns::EnvResult<u64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1257,16 +1258,16 @@ where
     let (prefix, gas) = env
         .memory
         .read_string(prefix_ptr, prefix_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
-    let prefix =
-        Key::parse(prefix).map_err(vp_env::RuntimeError::StorageDataError)?;
+    let prefix = Key::parse(prefix)
+        .map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     tracing::debug!("vp_iter_prefix {}", prefix);
 
     let storage = unsafe { env.ctx.storage.get() };
-    let iter = vp_env::iter_prefix(gas_meter, storage, &prefix)?;
+    let iter = vp_host_fns::iter_prefix(gas_meter, storage, &prefix)?;
     let iterators = unsafe { env.ctx.iterators.get() };
     Ok(iterators.insert(iter).id())
 }
@@ -1278,7 +1279,7 @@ pub fn vp_rev_iter_prefix<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     prefix_ptr: u64,
     prefix_len: u64,
-) -> vp_env::EnvResult<u64>
+) -> vp_host_fns::EnvResult<u64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1289,16 +1290,16 @@ where
     let (prefix, gas) = env
         .memory
         .read_string(prefix_ptr, prefix_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
-    let prefix =
-        Key::parse(prefix).map_err(vp_env::RuntimeError::StorageDataError)?;
+    let prefix = Key::parse(prefix)
+        .map_err(vp_host_fns::RuntimeError::StorageDataError)?;
     tracing::debug!("vp_rev_iter_prefix {}", prefix);
 
     let storage = unsafe { env.ctx.storage.get() };
-    let iter = vp_env::rev_iter_prefix(gas_meter, storage, &prefix)?;
+    let iter = vp_host_fns::rev_iter_prefix(gas_meter, storage, &prefix)?;
     let iterators = unsafe { env.ctx.iterators.get() };
     Ok(iterators.insert(iter).id())
 }
@@ -1311,7 +1312,7 @@ where
 pub fn vp_iter_pre_next<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     iter_id: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1325,15 +1326,16 @@ where
     let iter_id = PrefixIteratorId::new(iter_id);
     if let Some(iter) = iterators.get_mut(iter_id) {
         let gas_meter = unsafe { env.ctx.gas_meter.get() };
-        if let Some((key, val)) = vp_env::iter_pre_next::<DB>(gas_meter, iter)?
+        if let Some((key, val)) =
+            vp_host_fns::iter_pre_next::<DB>(gas_meter, iter)?
         {
             let key_val = KeyVal { key, val }
                 .try_to_vec()
-                .map_err(vp_env::RuntimeError::EncodingError)?;
+                .map_err(vp_host_fns::RuntimeError::EncodingError)?;
             let len: i64 = key_val
                 .len()
                 .try_into()
-                .map_err(vp_env::RuntimeError::NumConversionError)?;
+                .map_err(vp_host_fns::RuntimeError::NumConversionError)?;
             let result_buffer = unsafe { env.ctx.result_buffer.get() };
             result_buffer.replace(key_val);
             return Ok(len);
@@ -1351,7 +1353,7 @@ where
 pub fn vp_iter_post_next<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     iter_id: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1367,15 +1369,15 @@ where
         let gas_meter = unsafe { env.ctx.gas_meter.get() };
         let write_log = unsafe { env.ctx.write_log.get() };
         if let Some((key, val)) =
-            vp_env::iter_post_next::<DB>(gas_meter, write_log, iter)?
+            vp_host_fns::iter_post_next::<DB>(gas_meter, write_log, iter)?
         {
             let key_val = KeyVal { key, val }
                 .try_to_vec()
-                .map_err(vp_env::RuntimeError::EncodingError)?;
+                .map_err(vp_host_fns::RuntimeError::EncodingError)?;
             let len: i64 = key_val
                 .len()
                 .try_into()
-                .map_err(vp_env::RuntimeError::NumConversionError)?;
+                .map_err(vp_host_fns::RuntimeError::NumConversionError)?;
             let result_buffer = unsafe { env.ctx.result_buffer.get() };
             result_buffer.replace(key_val);
             return Ok(len);
@@ -1553,7 +1555,7 @@ where
 /// transaction is being applied.
 pub fn vp_get_tx_index<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
-) -> vp_env::EnvResult<u32>
+) -> vp_host_fns::EnvResult<u32>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1563,7 +1565,7 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let tx_index = unsafe { env.ctx.tx_index.get() };
-    let tx_idx = vp_env::get_tx_index(gas_meter, tx_index)?;
+    let tx_idx = vp_host_fns::get_tx_index(gas_meter, tx_index)?;
     Ok(tx_idx.0)
 }
 
@@ -1633,7 +1635,7 @@ where
 pub fn vp_get_chain_id<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     result_ptr: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1643,12 +1645,12 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let storage = unsafe { env.ctx.storage.get() };
-    let chain_id = vp_env::get_chain_id(gas_meter, storage)?;
+    let chain_id = vp_host_fns::get_chain_id(gas_meter, storage)?;
     let gas = env
         .memory
         .write_string(result_ptr, chain_id)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)
 }
 
 /// Getting the block height function exposed to the wasm VM VP
@@ -1656,7 +1658,7 @@ where
 /// transaction is being applied.
 pub fn vp_get_block_height<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
-) -> vp_env::EnvResult<u64>
+) -> vp_host_fns::EnvResult<u64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1666,7 +1668,7 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let storage = unsafe { env.ctx.storage.get() };
-    let height = vp_env::get_block_height(gas_meter, storage)?;
+    let height = vp_host_fns::get_block_height(gas_meter, storage)?;
     Ok(height.0)
 }
 
@@ -1711,7 +1713,7 @@ where
 pub fn vp_get_block_hash<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     result_ptr: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1721,19 +1723,19 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let storage = unsafe { env.ctx.storage.get() };
-    let hash = vp_env::get_block_hash(gas_meter, storage)?;
+    let hash = vp_host_fns::get_block_hash(gas_meter, storage)?;
     let gas = env
         .memory
         .write_bytes(result_ptr, hash.0)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)
 }
 
 /// Getting the transaction hash function exposed to the wasm VM VP environment.
 pub fn vp_get_tx_code_hash<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     result_ptr: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1743,12 +1745,12 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let tx = unsafe { env.ctx.tx.get() };
-    let hash = vp_env::get_tx_code_hash(gas_meter, tx)?;
+    let hash = vp_host_fns::get_tx_code_hash(gas_meter, tx)?;
     let gas = env
         .memory
         .write_bytes(result_ptr, hash.0)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)
 }
 
 /// Getting the block epoch function exposed to the wasm VM VP
@@ -1756,7 +1758,7 @@ where
 /// transaction is being applied.
 pub fn vp_get_block_epoch<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
-) -> vp_env::EnvResult<u64>
+) -> vp_host_fns::EnvResult<u64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1766,7 +1768,7 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let storage = unsafe { env.ctx.storage.get() };
-    let epoch = vp_env::get_block_epoch(gas_meter, storage)?;
+    let epoch = vp_host_fns::get_block_epoch(gas_meter, storage)?;
     Ok(epoch.0)
 }
 
@@ -1777,7 +1779,7 @@ pub fn vp_verify_tx_signature<MEM, DB, H, EVAL, CA>(
     pk_len: u64,
     sig_ptr: u64,
     sig_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1788,21 +1790,21 @@ where
     let (pk, gas) = env
         .memory
         .read_bytes(pk_ptr, pk_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
     let pk: common::PublicKey = BorshDeserialize::try_from_slice(&pk)
-        .map_err(vp_env::RuntimeError::EncodingError)?;
+        .map_err(vp_host_fns::RuntimeError::EncodingError)?;
 
     let (sig, gas) = env
         .memory
         .read_bytes(sig_ptr, sig_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
     let sig: common::Signature = BorshDeserialize::try_from_slice(&sig)
-        .map_err(vp_env::RuntimeError::EncodingError)?;
+        .map_err(vp_host_fns::RuntimeError::EncodingError)?;
 
-    vp_env::add_gas(gas_meter, VERIFY_TX_SIG_GAS_COST)?;
+    vp_host_fns::add_gas(gas_meter, VERIFY_TX_SIG_GAS_COST)?;
     let tx = unsafe { env.ctx.tx.get() };
     Ok(HostEnvResult::from(tx.verify_sig(&pk, &sig).is_ok()).to_i64())
 }
@@ -1812,7 +1814,7 @@ pub fn vp_verify_masp<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     tx_ptr: u64,
     tx_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1827,8 +1829,8 @@ where
     let (tx_bytes, gas) = env
         .memory
         .read_bytes(tx_ptr, tx_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
     let full_tx: Transfer =
         BorshDeserialize::try_from_slice(tx_bytes.as_slice()).unwrap();
     let shielded_tx: Transaction = full_tx.shielded.unwrap();
@@ -1882,7 +1884,7 @@ pub fn vp_eval<MEM, DB, H, EVAL, CA>(
     vp_code_len: u64,
     input_data_ptr: u64,
     input_data_len: u64,
-) -> vp_env::EnvResult<i64>
+) -> vp_host_fns::EnvResult<i64>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1893,15 +1895,15 @@ where
     let (vp_code, gas) =
         env.memory
             .read_bytes(vp_code_ptr, vp_code_len as _)
-            .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+            .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
-    vp_env::add_gas(gas_meter, gas)?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     let (input_data, gas) = env
         .memory
         .read_bytes(input_data_ptr, input_data_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)?;
 
     let eval_runner = unsafe { env.ctx.eval_runner.get() };
     Ok(eval_runner
@@ -1913,7 +1915,7 @@ where
 pub fn vp_get_native_token<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     result_ptr: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1923,13 +1925,13 @@ where
 {
     let gas_meter = unsafe { env.ctx.gas_meter.get() };
     let storage = unsafe { env.ctx.storage.get() };
-    let native_token = vp_env::get_native_token(gas_meter, storage)?;
+    let native_token = vp_host_fns::get_native_token(gas_meter, storage)?;
     let native_token_string = native_token.encode();
     let gas = env
         .memory
         .write_string(result_ptr, native_token_string)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
-    vp_env::add_gas(gas_meter, gas)
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
+    vp_host_fns::add_gas(gas_meter, gas)
 }
 
 /// Log a string from exposed to the wasm VM VP environment. The message will be
@@ -1939,7 +1941,7 @@ pub fn vp_log_string<MEM, DB, H, EVAL, CA>(
     env: &VpVmEnv<MEM, DB, H, EVAL, CA>,
     str_ptr: u64,
     str_len: u64,
-) -> vp_env::EnvResult<()>
+) -> vp_host_fns::EnvResult<()>
 where
     MEM: VmMemory,
     DB: storage::DB + for<'iter> storage::DBIter<'iter>,
@@ -1950,7 +1952,7 @@ where
     let (str, _gas) = env
         .memory
         .read_string(str_ptr, str_len as _)
-        .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?;
+        .map_err(|e| vp_host_fns::RuntimeError::MemoryError(Box::new(e)))?;
     tracing::info!("WASM Validity predicate log: {}", str);
     Ok(())
 }
diff --git a/shared/src/vm/types.rs b/shared/src/vm/types.rs
index 480190ad08..16e525f4be 100644
--- a/shared/src/vm/types.rs
+++ b/shared/src/vm/types.rs
@@ -11,10 +11,8 @@
 
 use std::collections::BTreeSet;
 
-use borsh::{BorshDeserialize, BorshSerialize};
-
 use crate::types::address::Address;
-use crate::types::storage::Key;
+use crate::types::storage;
 
 /// Input for validity predicate wasm module call
 pub struct VpInput<'a> {
@@ -24,16 +22,7 @@ pub struct VpInput<'a> {
     pub data: &'a [u8],
     /// The storage changed keys from the write log of storage updates
     /// performed by the transaction for the account associated with the VP
-    pub keys_changed: &'a BTreeSet<Key>,
+    pub keys_changed: &'a BTreeSet<storage::Key>,
     /// The verifiers to trigger VPs
     pub verifiers: &'a BTreeSet<Address>,
 }
-
-/// Key-value pair represents data from account's subspace
-#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
-pub struct KeyVal {
-    /// The storage key
-    pub key: String,
-    /// The value as arbitrary bytes
-    pub val: Vec<u8>,
-}
diff --git a/tests/src/e2e/ibc_tests.rs b/tests/src/e2e/ibc_tests.rs
index 2433d29186..c8e907b87d 100644
--- a/tests/src/e2e/ibc_tests.rs
+++ b/tests/src/e2e/ibc_tests.rs
@@ -64,7 +64,7 @@ use ibc_relayer::config::{AddressType, ChainConfig, GasPrice, PacketFilter};
 use ibc_relayer::keyring::Store;
 use ibc_relayer::light_client::tendermint::LightClient as TmLightClient;
 use ibc_relayer::light_client::{LightClient, Verified};
-use namada::ledger::ibc::handler::{commitment_prefix, port_channel_id};
+use namada::core::ledger::ibc::actions::{commitment_prefix, port_channel_id};
 use namada::ledger::ibc::storage::*;
 use namada::ledger::storage::ics23_specs::ibc_proof_specs;
 use namada::ledger::storage::Sha256Hasher;
diff --git a/tests/src/native_vp/pos.rs b/tests/src/native_vp/pos.rs
index ca8700dd3b..aff88abef9 100644
--- a/tests/src/native_vp/pos.rs
+++ b/tests/src/native_vp/pos.rs
@@ -99,8 +99,9 @@
 //! - add slashes
 
 use namada::ledger::pos::namada_proof_of_stake::PosBase;
+use namada::proof_of_stake::storage::GenesisValidator;
+use namada::proof_of_stake::PosParams;
 use namada::types::storage::Epoch;
-use namada_tx_prelude::proof_of_stake::{GenesisValidator, PosParams};
 
 use crate::tx::tx_host_env;
 
@@ -137,12 +138,11 @@ pub fn init_pos(
 #[cfg(test)]
 mod tests {
 
-    use namada::ledger::pos::PosParams;
+    use namada::ledger::pos::{PosParams, PosVP};
     use namada::types::key::common::PublicKey;
     use namada::types::storage::Epoch;
     use namada::types::{address, token};
     use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params;
-    use namada_tx_prelude::proof_of_stake::PosVP;
     use namada_tx_prelude::Address;
     use proptest::prelude::*;
     use proptest::prop_state_machine;
@@ -572,20 +572,19 @@ pub mod testing {
     use derivative::Derivative;
     use itertools::Either;
     use namada::ledger::pos::namada_proof_of_stake::btree_set::BTreeSetShims;
-    use namada::types::key::common::PublicKey;
-    use namada::types::key::RefTo;
-    use namada::types::storage::Epoch;
-    use namada::types::{address, key, token};
-    use namada_tx_prelude::proof_of_stake::epoched::{
+    use namada::proof_of_stake::epoched::{
         DynEpochOffset, Epoched, EpochedDelta,
     };
-    use namada_tx_prelude::proof_of_stake::parameters::testing::arb_rate;
-    use namada_tx_prelude::proof_of_stake::types::{
+    use namada::proof_of_stake::parameters::testing::arb_rate;
+    use namada::proof_of_stake::storage::{BondId, Bonds, Unbonds};
+    use namada::proof_of_stake::types::{
         Bond, Unbond, ValidatorState, WeightedValidator,
     };
-    use namada_tx_prelude::proof_of_stake::{
-        BondId, Bonds, PosParams, Unbonds,
-    };
+    use namada::proof_of_stake::PosParams;
+    use namada::types::key::common::PublicKey;
+    use namada::types::key::RefTo;
+    use namada::types::storage::Epoch;
+    use namada::types::{address, key, token};
     use namada_tx_prelude::{Address, StorageRead, StorageWrite};
     use proptest::prelude::*;
     use rust_decimal::Decimal;
@@ -1081,10 +1080,9 @@ pub mod testing {
                         pos_deltas: HashMap::default(),
                         neg_deltas: Default::default(),
                     };
-                    value.pos_deltas.insert(
-                        (current_epoch + offset.value(params)).into(),
-                        amount,
-                    );
+                    value
+                        .pos_deltas
+                        .insert(current_epoch + offset.value(params), amount);
                     match bonds {
                         Some(mut bonds) => {
                             // Resize the data if needed (the offset may be
@@ -1163,8 +1161,7 @@ pub mod testing {
                                 value.deltas.insert(
                                     (
                                         *start_epoch,
-                                        (current_epoch + offset.value(params))
-                                            .into(),
+                                        (current_epoch + offset.value(params)),
                                     ),
                                     to_unbond,
                                 );
@@ -1175,8 +1172,7 @@ pub mod testing {
                                 value.deltas.insert(
                                     (
                                         *start_epoch,
-                                        (current_epoch + offset.value(params))
-                                            .into(),
+                                        (current_epoch + offset.value(params)),
                                     ),
                                     *delta,
                                 );
diff --git a/tests/src/vm_host_env/ibc.rs b/tests/src/vm_host_env/ibc.rs
index bc8d490f52..88f2d205a9 100644
--- a/tests/src/vm_host_env/ibc.rs
+++ b/tests/src/vm_host_env/ibc.rs
@@ -2,6 +2,7 @@ use core::time::Duration;
 use std::collections::HashMap;
 use std::str::FromStr;
 
+pub use namada::core::ledger::ibc::actions::*;
 use namada::ibc::applications::ics20_fungible_token_transfer::msgs::transfer::MsgTransfer;
 use namada::ibc::core::ics02_client::client_consensus::ConsensusState;
 use namada::ibc::core::ics02_client::client_state::{
@@ -45,7 +46,6 @@ use namada::ibc_proto::cosmos::base::v1beta1::Coin;
 use namada::ibc_proto::ibc::core::commitment::v1::MerkleProof;
 use namada::ibc_proto::ics23::CommitmentProof;
 use namada::ledger::gas::VpGasMeter;
-pub use namada::ledger::ibc::handler::*;
 use namada::ledger::ibc::init_genesis_storage;
 pub use namada::ledger::ibc::storage::{
     ack_key, capability_index_key, capability_key, channel_counter_key,
diff --git a/tests/src/vm_host_env/mod.rs b/tests/src/vm_host_env/mod.rs
index f4bf4e7358..aa8c160339 100644
--- a/tests/src/vm_host_env/mod.rs
+++ b/tests/src/vm_host_env/mod.rs
@@ -21,8 +21,8 @@ mod tests {
     use std::panic;
 
     use itertools::Itertools;
+    use namada::core::ledger::ibc::actions::IbcActions;
     use namada::ibc::tx_msg::Msg;
-    use namada::ledger::ibc::handler::IbcActions;
     use namada::ledger::ibc::storage as ibc_storage;
     use namada::ledger::ibc::vp::{
         get_dummy_header as tm_dummy_header, Error as IbcError,
diff --git a/tx_prelude/Cargo.toml b/tx_prelude/Cargo.toml
index cbd4f82226..c15293016a 100644
--- a/tx_prelude/Cargo.toml
+++ b/tx_prelude/Cargo.toml
@@ -11,9 +11,10 @@ default = []
 
 [dependencies]
 masp_primitives = { git = "https://github.com/anoma/masp", rev = "bee40fc465f6afbd10558d12fe96eb1742eee45c" }
-namada = {path = "../shared"}
-namada_vm_env = {path = "../vm_env"}
+namada_core = {path = "../core", features = ["abciplus"]}
 namada_macros = {path = "../macros"}
+namada_proof_of_stake = {path = "../proof_of_stake"}
+namada_vm_env = {path = "../vm_env"}
 borsh = "0.9.0"
 sha2 = "0.10.1"
 thiserror = "1.0.30"
diff --git a/tx_prelude/src/governance.rs b/tx_prelude/src/governance.rs
index 47558d34a9..3e1afb6a68 100644
--- a/tx_prelude/src/governance.rs
+++ b/tx_prelude/src/governance.rs
@@ -1,8 +1,8 @@
 //! Governance
 
-use namada::ledger::governance::{storage, ADDRESS as governance_address};
-use namada::types::token::Amount;
-use namada::types::transaction::governance::{
+use namada_core::ledger::governance::{storage, ADDRESS as governance_address};
+use namada_core::types::token::Amount;
+use namada_core::types::transaction::governance::{
     InitProposalData, VoteProposalData,
 };
 
diff --git a/tx_prelude/src/ibc.rs b/tx_prelude/src/ibc.rs
index e17270cfb9..898988d35c 100644
--- a/tx_prelude/src/ibc.rs
+++ b/tx_prelude/src/ibc.rs
@@ -1,12 +1,12 @@
 //! IBC lower-level functions for transactions.
 
-pub use namada::ledger::ibc::handler::{Error, IbcActions, Result};
-use namada::ledger::storage_api::{StorageRead, StorageWrite};
-use namada::ledger::tx_env::TxEnv;
-pub use namada::types::ibc::IbcEvent;
-use namada::types::storage::{BlockHeight, Key};
-use namada::types::time::Rfc3339String;
-use namada::types::token::Amount;
+pub use namada_core::ledger::ibc::actions::{Error, IbcActions, Result};
+use namada_core::ledger::storage_api::{StorageRead, StorageWrite};
+use namada_core::ledger::tx_env::TxEnv;
+pub use namada_core::types::ibc::IbcEvent;
+use namada_core::types::storage::{BlockHeight, Key};
+use namada_core::types::time::Rfc3339String;
+use namada_core::types::token::Amount;
 
 use crate::token::transfer_with_keys;
 use crate::Ctx;
diff --git a/tx_prelude/src/key.rs b/tx_prelude/src/key.rs
index fa83f3d1b4..32a6c2089d 100644
--- a/tx_prelude/src/key.rs
+++ b/tx_prelude/src/key.rs
@@ -1,6 +1,6 @@
 //! Cryptographic signature keys
 
-pub use namada::types::key::*;
+pub use namada_core::types::key::*;
 
 use super::*;
 
diff --git a/tx_prelude/src/lib.rs b/tx_prelude/src/lib.rs
index 820c99b725..1899aff22d 100644
--- a/tx_prelude/src/lib.rs
+++ b/tx_prelude/src/lib.rs
@@ -16,25 +16,26 @@ use core::slice;
 use std::marker::PhantomData;
 
 pub use borsh::{BorshDeserialize, BorshSerialize};
-pub use namada::ledger::governance::storage as gov_storage;
-pub use namada::ledger::parameters::storage as parameters_storage;
-pub use namada::ledger::slash_fund::storage as slash_fund_storage;
-pub use namada::ledger::storage::types::encode;
-pub use namada::ledger::storage_api::{
+pub use namada_core::ledger::governance::storage as gov_storage;
+pub use namada_core::ledger::parameters::storage as parameters_storage;
+pub use namada_core::ledger::slash_fund::storage as slash_fund_storage;
+pub use namada_core::ledger::storage::types::encode;
+pub use namada_core::ledger::storage_api::{
     self, iter_prefix, iter_prefix_bytes, rev_iter_prefix,
     rev_iter_prefix_bytes, Error, OptionExt, ResultExt, StorageRead,
     StorageWrite,
 };
-pub use namada::ledger::tx_env::TxEnv;
-pub use namada::proto::{Signed, SignedTxData};
-pub use namada::types::address::Address;
-use namada::types::chain::CHAIN_ID_LENGTH;
-use namada::types::internal::HostEnvResult;
-use namada::types::storage::{
-    BlockHash, BlockHeight, Epoch, TxIndex, BLOCK_HASH_LENGTH,
+pub use namada_core::ledger::tx_env::TxEnv;
+pub use namada_core::proto::{Signed, SignedTxData};
+pub use namada_core::types::address::Address;
+use namada_core::types::chain::CHAIN_ID_LENGTH;
+use namada_core::types::internal::HostEnvResult;
+use namada_core::types::storage::TxIndex;
+pub use namada_core::types::storage::{
+    self, BlockHash, BlockHeight, Epoch, BLOCK_HASH_LENGTH,
 };
-use namada::types::time::Rfc3339String;
-pub use namada::types::*;
+use namada_core::types::time::Rfc3339String;
+pub use namada_core::types::*;
 pub use namada_macros::transaction;
 use namada_vm_env::tx::*;
 use namada_vm_env::{read_from_buffer, read_key_val_bytes_from_buffer};
@@ -114,20 +115,14 @@ pub struct KeyValIterator<T>(pub u64, pub PhantomData<T>);
 impl StorageRead<'_> for Ctx {
     type PrefixIter = KeyValIterator<(String, Vec<u8>)>;
 
-    fn read_bytes(
-        &self,
-        key: &namada::types::storage::Key,
-    ) -> Result<Option<Vec<u8>>, Error> {
+    fn read_bytes(&self, key: &storage::Key) -> Result<Option<Vec<u8>>, Error> {
         let key = key.to_string();
         let read_result =
             unsafe { anoma_tx_read(key.as_ptr() as _, key.len() as _) };
         Ok(read_from_buffer(read_result, anoma_tx_result_buffer))
     }
 
-    fn has_key(
-        &self,
-        key: &namada::types::storage::Key,
-    ) -> Result<bool, Error> {
+    fn has_key(&self, key: &storage::Key) -> Result<bool, Error> {
         let key = key.to_string();
         let found =
             unsafe { anoma_tx_has_key(key.as_ptr() as _, key.len() as _) };
@@ -147,13 +142,13 @@ impl StorageRead<'_> for Ctx {
 
     fn get_block_height(
         &self,
-    ) -> Result<namada::types::storage::BlockHeight, Error> {
+    ) -> Result<namada_core::types::storage::BlockHeight, Error> {
         Ok(BlockHeight(unsafe { anoma_tx_get_block_height() }))
     }
 
     fn get_block_hash(
         &self,
-    ) -> Result<namada::types::storage::BlockHash, Error> {
+    ) -> Result<namada_core::types::storage::BlockHash, Error> {
         let result = Vec::with_capacity(BLOCK_HASH_LENGTH);
         unsafe {
             anoma_tx_get_block_hash(result.as_ptr() as _);
@@ -164,7 +159,9 @@ impl StorageRead<'_> for Ctx {
         Ok(BlockHash::try_from(slice).expect("Cannot convert the hash"))
     }
 
-    fn get_block_epoch(&self) -> Result<namada::types::storage::Epoch, Error> {
+    fn get_block_epoch(
+        &self,
+    ) -> Result<namada_core::types::storage::Epoch, Error> {
         Ok(Epoch(unsafe { anoma_tx_get_block_epoch() }))
     }
 
@@ -184,7 +181,7 @@ impl StorageRead<'_> for Ctx {
 
     fn iter_prefix(
         &self,
-        prefix: &namada::types::storage::Key,
+        prefix: &storage::Key,
     ) -> Result<Self::PrefixIter, Error> {
         let prefix = prefix.to_string();
         let iter_id = unsafe {
@@ -215,9 +212,7 @@ impl StorageRead<'_> for Ctx {
         ))
     }
 
-    fn get_tx_index(
-        &self,
-    ) -> Result<namada::types::storage::TxIndex, storage_api::Error> {
+    fn get_tx_index(&self) -> Result<TxIndex, storage_api::Error> {
         let tx_index = unsafe { anoma_tx_get_tx_index() };
         Ok(TxIndex(tx_index))
     }
@@ -226,7 +221,7 @@ impl StorageRead<'_> for Ctx {
 impl StorageWrite for Ctx {
     fn write_bytes(
         &mut self,
-        key: &namada::types::storage::Key,
+        key: &storage::Key,
         val: impl AsRef<[u8]>,
     ) -> storage_api::Result<()> {
         let key = key.to_string();
@@ -241,10 +236,7 @@ impl StorageWrite for Ctx {
         Ok(())
     }
 
-    fn delete(
-        &mut self,
-        key: &namada::types::storage::Key,
-    ) -> storage_api::Result<()> {
+    fn delete(&mut self, key: &storage::Key) -> storage_api::Result<()> {
         let key = key.to_string();
         unsafe { anoma_tx_delete(key.as_ptr() as _, key.len() as _) };
         Ok(())
@@ -264,7 +256,7 @@ impl TxEnv<'_> for Ctx {
 
     fn write_temp<T: BorshSerialize>(
         &mut self,
-        key: &namada::types::storage::Key,
+        key: &storage::Key,
         val: T,
     ) -> Result<(), Error> {
         let buf = val.try_to_vec().unwrap();
@@ -273,7 +265,7 @@ impl TxEnv<'_> for Ctx {
 
     fn write_bytes_temp(
         &mut self,
-        key: &namada::types::storage::Key,
+        key: &storage::Key,
         val: impl AsRef<[u8]>,
     ) -> Result<(), Error> {
         let key = key.to_string();
diff --git a/tx_prelude/src/proof_of_stake.rs b/tx_prelude/src/proof_of_stake.rs
index 490f019052..003f7302e0 100644
--- a/tx_prelude/src/proof_of_stake.rs
+++ b/tx_prelude/src/proof_of_stake.rs
@@ -1,16 +1,17 @@
 //! Proof of Stake system integration with functions for transactions
 
-pub use namada::ledger::pos::*;
-use namada::ledger::pos::{
-    bond_key, namada_proof_of_stake, params_key, unbond_key,
+use namada_core::types::transaction::InitValidator;
+use namada_core::types::{key, token};
+pub use namada_proof_of_stake::parameters::PosParams;
+use namada_proof_of_stake::storage::{
+    bond_key, params_key, total_deltas_key, unbond_key,
     validator_address_raw_hash_key, validator_commission_rate_key,
     validator_consensus_key_key, validator_deltas_key,
     validator_max_commission_rate_change_key, validator_set_key,
-    validator_slashes_key, validator_state_key,
+    validator_slashes_key, validator_state_key, BondId, Bonds, TotalDeltas,
+    Unbonds, ValidatorConsensusKeys, ValidatorDeltas, ValidatorSets,
 };
-use namada::types::address::Address;
-use namada::types::transaction::InitValidator;
-use namada::types::{key, token};
+use namada_proof_of_stake::types::{CommissionRates, ValidatorStates};
 pub use namada_proof_of_stake::{
     epoched, parameters, types, PosActions as PosWrite, PosReadOnly as PosRead,
 };
@@ -125,7 +126,7 @@ impl Ctx {
     }
 }
 
-namada::impl_pos_read_only! {
+namada_proof_of_stake::impl_pos_read_only! {
     type Error = crate::Error;
     impl namada_proof_of_stake::PosReadOnly for Ctx
 }
diff --git a/tx_prelude/src/token.rs b/tx_prelude/src/token.rs
index 77f1a572b1..5ea8549554 100644
--- a/tx_prelude/src/token.rs
+++ b/tx_prelude/src/token.rs
@@ -1,8 +1,8 @@
 use masp_primitives::transaction::Transaction;
-use namada::types::address::{masp, Address, InternalAddress};
-use namada::types::storage::{Key, KeySeg};
-use namada::types::token;
-pub use namada::types::token::*;
+use namada_core::types::address::{Address, InternalAddress};
+use namada_core::types::storage::KeySeg;
+use namada_core::types::token;
+pub use namada_core::types::token::*;
 
 use super::*;
 
@@ -87,14 +87,14 @@ pub fn transfer(
     // If this transaction has a shielded component, then handle it
     // separately
     if let Some(shielded) = shielded {
-        let masp_addr = masp();
+        let masp_addr = address::masp();
         ctx.insert_verifier(&masp_addr)?;
-        let head_tx_key = Key::from(masp_addr.to_db_key())
+        let head_tx_key = storage::Key::from(masp_addr.to_db_key())
             .push(&HEAD_TX_KEY.to_owned())
             .expect("Cannot obtain a storage key");
         let current_tx_idx: u64 =
             ctx.read(&head_tx_key).unwrap_or(None).unwrap_or(0);
-        let current_tx_key = Key::from(masp_addr.to_db_key())
+        let current_tx_key = storage::Key::from(masp_addr.to_db_key())
             .push(&(TX_KEY_PREFIX.to_owned() + &current_tx_idx.to_string()))
             .expect("Cannot obtain a storage key");
         // Save the Transfer object and its location within the blockchain
@@ -122,7 +122,7 @@ pub fn transfer(
         ctx.write(&head_tx_key, current_tx_idx + 1)?;
         // If storage key has been supplied, then pin this transaction to it
         if let Some(key) = key {
-            let pin_key = Key::from(masp_addr.to_db_key())
+            let pin_key = storage::Key::from(masp_addr.to_db_key())
                 .push(&(PIN_KEY_PREFIX.to_owned() + key))
                 .expect("Cannot obtain a storage key");
             ctx.write(&pin_key, current_tx_idx)?;
diff --git a/vm_env/Cargo.toml b/vm_env/Cargo.toml
index 421fee8244..161f6079de 100644
--- a/vm_env/Cargo.toml
+++ b/vm_env/Cargo.toml
@@ -7,18 +7,10 @@ resolver = "2"
 version = "0.10.1"
 
 [features]
-default = ["abciplus"]
-
-abciplus = [
-  "namada/abciplus",
-]
-
-abcipp = [
-  "namada/abcipp",
-]
+default = []
 
 [dependencies]
-namada = {path = "../shared", default-features = false}
+namada_core = {path = "../core"}
 borsh = "0.9.0"
 #libmasp = { git = "https://github.com/anoma/masp", branch = "murisi/masp-incentive" }
 masp_primitives = { git = "https://github.com/anoma/masp", rev = "bee40fc465f6afbd10558d12fe96eb1742eee45c" }
diff --git a/vm_env/src/lib.rs b/vm_env/src/lib.rs
index 6ec880007c..05cc849349 100644
--- a/vm_env/src/lib.rs
+++ b/vm_env/src/lib.rs
@@ -8,8 +8,7 @@
 use std::mem::ManuallyDrop;
 
 use borsh::BorshDeserialize;
-use namada::types::internal::HostEnvResult;
-use namada::vm::types::KeyVal;
+use namada_core::types::internal::{HostEnvResult, KeyVal};
 
 /// Transaction environment imports
 pub mod tx {
diff --git a/vp_prelude/Cargo.toml b/vp_prelude/Cargo.toml
index 468ba5adfe..6909eb80f2 100644
--- a/vp_prelude/Cargo.toml
+++ b/vp_prelude/Cargo.toml
@@ -10,9 +10,10 @@ version = "0.10.1"
 default = []
 
 [dependencies]
-namada = {path = "../shared"}
-namada_vm_env = {path = "../vm_env"}
+namada_core = {path = "../core"}
 namada_macros = {path = "../macros"}
+namada_proof_of_stake = {path = "../proof_of_stake"}
+namada_vm_env = {path = "../vm_env"}
 borsh = "0.9.0"
 sha2 = "0.10.1"
 thiserror = "1.0.30"
diff --git a/vp_prelude/src/key.rs b/vp_prelude/src/key.rs
index 5cd034852f..95946ff268 100644
--- a/vp_prelude/src/key.rs
+++ b/vp_prelude/src/key.rs
@@ -1,7 +1,7 @@
 //! Cryptographic signature keys
 
-use namada::types::address::Address;
-pub use namada::types::key::*;
+use namada_core::types::address::Address;
+pub use namada_core::types::key::*;
 
 use super::*;
 
diff --git a/vp_prelude/src/lib.rs b/vp_prelude/src/lib.rs
index c5b919bdde..613311c0a5 100644
--- a/vp_prelude/src/lib.rs
+++ b/vp_prelude/src/lib.rs
@@ -17,24 +17,25 @@ use std::convert::TryFrom;
 use std::marker::PhantomData;
 
 pub use borsh::{BorshDeserialize, BorshSerialize};
-pub use namada::ledger::governance::storage as gov_storage;
-pub use namada::ledger::storage_api::{
+pub use namada_core::ledger::governance::storage as gov_storage;
+pub use namada_core::ledger::parameters;
+pub use namada_core::ledger::storage_api::{
     self, iter_prefix, iter_prefix_bytes, rev_iter_prefix,
     rev_iter_prefix_bytes, Error, OptionExt, ResultExt, StorageRead,
 };
-pub use namada::ledger::vp_env::VpEnv;
-pub use namada::ledger::{parameters, pos as proof_of_stake};
-pub use namada::proto::{Signed, SignedTxData};
-pub use namada::types::address::Address;
-use namada::types::chain::CHAIN_ID_LENGTH;
-use namada::types::hash::{Hash, HASH_LENGTH};
-use namada::types::internal::HostEnvResult;
-use namada::types::key::*;
-use namada::types::storage::{
+pub use namada_core::ledger::vp_env::VpEnv;
+pub use namada_core::proto::{Signed, SignedTxData};
+pub use namada_core::types::address::Address;
+use namada_core::types::chain::CHAIN_ID_LENGTH;
+use namada_core::types::hash::{Hash, HASH_LENGTH};
+use namada_core::types::internal::HostEnvResult;
+use namada_core::types::key::*;
+use namada_core::types::storage::{
     BlockHash, BlockHeight, Epoch, TxIndex, BLOCK_HASH_LENGTH,
 };
-pub use namada::types::*;
+pub use namada_core::types::*;
 pub use namada_macros::validity_predicate;
+pub use namada_proof_of_stake::storage as proof_of_stake;
 use namada_vm_env::vp::*;
 use namada_vm_env::{read_from_buffer, read_key_val_bytes_from_buffer};
 pub use sha2::{Digest, Sha256, Sha384, Sha512};
diff --git a/vp_prelude/src/token.rs b/vp_prelude/src/token.rs
index d74efb3d03..0785fbf97d 100644
--- a/vp_prelude/src/token.rs
+++ b/vp_prelude/src/token.rs
@@ -2,12 +2,12 @@
 
 use std::collections::BTreeSet;
 
-use namada::types::address::{masp, Address, InternalAddress};
-use namada::types::storage::Key;
+use namada_core::types::address::{self, Address, InternalAddress};
+use namada_core::types::storage::Key;
 /// Vp imports and functions.
-use namada::types::storage::KeySeg;
-use namada::types::token;
-pub use namada::types::token::*;
+use namada_core::types::storage::KeySeg;
+use namada_core::types::token;
+pub use namada_core::types::token::*;
 
 use super::*;
 
@@ -57,7 +57,7 @@ pub fn vp(
                 change += this_change;
                 // make sure that the spender approved the transaction
                 if this_change < 0
-                    && !(verifiers.contains(owner) || *owner == masp())
+                    && !(verifiers.contains(owner) || *owner == address::masp())
                 {
                     return reject();
                 }
diff --git a/wasm/Cargo.lock b/wasm/Cargo.lock
index 3163cde9e4..be208e97bd 100644
--- a/wasm/Cargo.lock
+++ b/wasm/Cargo.lock
@@ -1747,6 +1747,17 @@ dependencies = [
  "digest 0.9.0",
 ]
 
+[[package]]
+name = "hmac-drbg"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1"
+dependencies = [
+ "digest 0.9.0",
+ "generic-array",
+ "hmac 0.8.1",
+]
+
 [[package]]
 name = "http"
 version = "0.2.8"
@@ -2164,12 +2175,14 @@ dependencies = [
  "arrayref",
  "base64",
  "digest 0.9.0",
+ "hmac-drbg",
  "libsecp256k1-core",
  "libsecp256k1-gen-ecmult",
  "libsecp256k1-gen-genmult",
  "rand 0.8.5",
  "serde",
  "sha2 0.9.9",
+ "typenum",
 ]
 
 [[package]]
@@ -2436,51 +2449,35 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 name = "namada"
 version = "0.10.1"
 dependencies = [
- "ark-bls12-381",
- "ark-serialize",
  "async-trait",
- "bech32",
  "bellman",
- "bit-vec",
  "bls12_381",
  "borsh",
- "chrono",
  "circular-queue",
  "clru",
  "data-encoding",
  "derivative",
- "ed25519-consensus",
- "ferveo-common",
- "hex",
  "ibc",
  "ibc-proto",
- "ics23",
  "itertools",
- "libsecp256k1",
  "loupe",
  "masp_primitives",
  "masp_proofs",
+ "namada_core",
  "namada_proof_of_stake",
  "parity-wasm",
  "paste",
  "proptest",
  "prost",
- "prost-types",
  "pwasm-utils",
- "rand 0.8.5",
- "rand_core 0.6.4",
  "rayon",
  "rust_decimal",
- "rust_decimal_macros",
- "serde",
  "serde_json",
  "sha2 0.9.9",
- "sparse-merkle-tree",
  "tempfile",
  "tendermint",
  "tendermint-proto",
  "thiserror",
- "tonic-build",
  "tracing",
  "wasmer",
  "wasmer-cache",
@@ -2492,6 +2489,47 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "namada_core"
+version = "0.9.0"
+dependencies = [
+ "ark-bls12-381",
+ "ark-serialize",
+ "bech32",
+ "bellman",
+ "bit-vec",
+ "borsh",
+ "chrono",
+ "data-encoding",
+ "derivative",
+ "ed25519-consensus",
+ "ferveo-common",
+ "ibc",
+ "ibc-proto",
+ "ics23",
+ "itertools",
+ "libsecp256k1",
+ "masp_primitives",
+ "proptest",
+ "prost",
+ "prost-types",
+ "rand 0.8.5",
+ "rand_core 0.6.4",
+ "rayon",
+ "rust_decimal",
+ "rust_decimal_macros",
+ "serde",
+ "serde_json",
+ "sha2 0.9.9",
+ "sparse-merkle-tree",
+ "tendermint",
+ "tendermint-proto",
+ "thiserror",
+ "tonic-build",
+ "tracing",
+ "zeroize",
+]
+
 [[package]]
 name = "namada_macros"
 version = "0.10.1"
@@ -2506,10 +2544,12 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "derivative",
+ "namada_core",
  "proptest",
  "rust_decimal",
  "rust_decimal_macros",
  "thiserror",
+ "tracing",
 ]
 
 [[package]]
@@ -2547,8 +2587,9 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "masp_primitives",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "rust_decimal",
  "sha2 0.10.6",
@@ -2563,7 +2604,7 @@ dependencies = [
  "hex",
  "masp_primitives",
  "masp_proofs",
- "namada",
+ "namada_core",
 ]
 
 [[package]]
@@ -2571,8 +2612,9 @@ name = "namada_vp_prelude"
 version = "0.10.1"
 dependencies = [
  "borsh",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "sha2 0.10.6",
  "thiserror",
diff --git a/wasm/wasm_source/src/tx_bond.rs b/wasm/wasm_source/src/tx_bond.rs
index 1993369f67..894aa40763 100644
--- a/wasm/wasm_source/src/tx_bond.rs
+++ b/wasm/wasm_source/src/tx_bond.rs
@@ -17,7 +17,8 @@ fn apply_tx(ctx: &mut Ctx, tx_data: Vec<u8>) -> TxResult {
 mod tests {
     use std::collections::HashMap;
 
-    use namada::ledger::pos::PosParams;
+    use namada::ledger::pos::{BondId, GenesisValidator, PosParams, PosVP};
+    use namada::proof_of_stake::types::Bond;
     use namada::proto::Tx;
     use namada::types::storage::Epoch;
     use namada_tests::log::test;
@@ -32,8 +33,6 @@ mod tests {
     use namada_tx_prelude::key::RefTo;
     use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params;
     use namada_tx_prelude::token;
-    use namada_vp_prelude::proof_of_stake::types::Bond;
-    use namada_vp_prelude::proof_of_stake::{BondId, GenesisValidator, PosVP};
     use proptest::prelude::*;
     use rust_decimal;
 
diff --git a/wasm/wasm_source/src/tx_change_validator_commission.rs b/wasm/wasm_source/src/tx_change_validator_commission.rs
index 0f72355af5..f32d83f34e 100644
--- a/wasm/wasm_source/src/tx_change_validator_commission.rs
+++ b/wasm/wasm_source/src/tx_change_validator_commission.rs
@@ -18,7 +18,7 @@ fn apply_tx(ctx: &mut Ctx, tx_data: Vec<u8>) -> TxResult {
 
 #[cfg(test)]
 mod tests {
-    use namada::ledger::pos::PosParams;
+    use namada::ledger::pos::{PosParams, PosVP};
     use namada::proto::Tx;
     use namada::types::storage::Epoch;
     use namada_tests::log::test;
@@ -31,7 +31,7 @@ mod tests {
     use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params;
     use namada_tx_prelude::token;
     use namada_vp_prelude::proof_of_stake::{
-        CommissionRates, GenesisValidator, PosVP,
+        CommissionRates, GenesisValidator,
     };
     use proptest::prelude::*;
     use rust_decimal::prelude::ToPrimitive;
diff --git a/wasm/wasm_source/src/tx_unbond.rs b/wasm/wasm_source/src/tx_unbond.rs
index f7b6c21a1a..dbc650d056 100644
--- a/wasm/wasm_source/src/tx_unbond.rs
+++ b/wasm/wasm_source/src/tx_unbond.rs
@@ -18,7 +18,8 @@ fn apply_tx(ctx: &mut Ctx, tx_data: Vec<u8>) -> TxResult {
 mod tests {
     use std::collections::HashMap;
 
-    use namada::ledger::pos::PosParams;
+    use namada::ledger::pos::{BondId, GenesisValidator, PosParams, PosVP};
+    use namada::proof_of_stake::types::{Bond, Unbond};
     use namada::proto::Tx;
     use namada::types::storage::Epoch;
     use namada_tests::log::test;
@@ -30,8 +31,6 @@ mod tests {
     use namada_tx_prelude::key::RefTo;
     use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params;
     use namada_tx_prelude::token;
-    use namada_vp_prelude::proof_of_stake::types::{Bond, Unbond};
-    use namada_vp_prelude::proof_of_stake::{BondId, GenesisValidator, PosVP};
     use proptest::prelude::*;
 
     use super::*;
diff --git a/wasm/wasm_source/src/tx_withdraw.rs b/wasm/wasm_source/src/tx_withdraw.rs
index 2aea074f01..8819063a0c 100644
--- a/wasm/wasm_source/src/tx_withdraw.rs
+++ b/wasm/wasm_source/src/tx_withdraw.rs
@@ -21,7 +21,7 @@ fn apply_tx(ctx: &mut Ctx, tx_data: Vec<u8>) -> TxResult {
 
 #[cfg(test)]
 mod tests {
-    use namada::ledger::pos::PosParams;
+    use namada::ledger::pos::{BondId, GenesisValidator, PosParams, PosVP};
     use namada::proto::Tx;
     use namada::types::storage::Epoch;
     use namada_tests::log::test;
@@ -35,7 +35,6 @@ mod tests {
     use namada_tx_prelude::key::testing::arb_common_keypair;
     use namada_tx_prelude::key::RefTo;
     use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params;
-    use namada_vp_prelude::proof_of_stake::{BondId, GenesisValidator, PosVP};
     use proptest::prelude::*;
 
     use super::*;
diff --git a/wasm_for_tests/wasm_source/Cargo.lock b/wasm_for_tests/wasm_source/Cargo.lock
index e143060bc9..21c207b531 100644
--- a/wasm_for_tests/wasm_source/Cargo.lock
+++ b/wasm_for_tests/wasm_source/Cargo.lock
@@ -1747,6 +1747,17 @@ dependencies = [
  "digest 0.9.0",
 ]
 
+[[package]]
+name = "hmac-drbg"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1"
+dependencies = [
+ "digest 0.9.0",
+ "generic-array",
+ "hmac 0.8.1",
+]
+
 [[package]]
 name = "http"
 version = "0.2.8"
@@ -2164,12 +2175,14 @@ dependencies = [
  "arrayref",
  "base64",
  "digest 0.9.0",
+ "hmac-drbg",
  "libsecp256k1-core",
  "libsecp256k1-gen-ecmult",
  "libsecp256k1-gen-genmult",
  "rand 0.8.5",
  "serde",
  "sha2 0.9.9",
+ "typenum",
 ]
 
 [[package]]
@@ -2436,51 +2449,35 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 name = "namada"
 version = "0.10.1"
 dependencies = [
- "ark-bls12-381",
- "ark-serialize",
  "async-trait",
- "bech32",
  "bellman",
- "bit-vec",
  "bls12_381",
  "borsh",
- "chrono",
  "circular-queue",
  "clru",
  "data-encoding",
  "derivative",
- "ed25519-consensus",
- "ferveo-common",
- "hex",
  "ibc",
  "ibc-proto",
- "ics23",
  "itertools",
- "libsecp256k1",
  "loupe",
  "masp_primitives",
  "masp_proofs",
+ "namada_core",
  "namada_proof_of_stake",
  "parity-wasm",
  "paste",
  "proptest",
  "prost",
- "prost-types",
  "pwasm-utils",
- "rand 0.8.5",
- "rand_core 0.6.4",
  "rayon",
  "rust_decimal",
- "rust_decimal_macros",
- "serde",
  "serde_json",
  "sha2 0.9.9",
- "sparse-merkle-tree",
  "tempfile",
  "tendermint",
  "tendermint-proto",
  "thiserror",
- "tonic-build",
  "tracing",
  "wasmer",
  "wasmer-cache",
@@ -2492,6 +2489,47 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "namada_core"
+version = "0.9.0"
+dependencies = [
+ "ark-bls12-381",
+ "ark-serialize",
+ "bech32",
+ "bellman",
+ "bit-vec",
+ "borsh",
+ "chrono",
+ "data-encoding",
+ "derivative",
+ "ed25519-consensus",
+ "ferveo-common",
+ "ibc",
+ "ibc-proto",
+ "ics23",
+ "itertools",
+ "libsecp256k1",
+ "masp_primitives",
+ "proptest",
+ "prost",
+ "prost-types",
+ "rand 0.8.5",
+ "rand_core 0.6.4",
+ "rayon",
+ "rust_decimal",
+ "rust_decimal_macros",
+ "serde",
+ "serde_json",
+ "sha2 0.9.9",
+ "sparse-merkle-tree",
+ "tendermint",
+ "tendermint-proto",
+ "thiserror",
+ "tonic-build",
+ "tracing",
+ "zeroize",
+]
+
 [[package]]
 name = "namada_macros"
 version = "0.10.1"
@@ -2506,10 +2544,12 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "derivative",
+ "namada_core",
  "proptest",
  "rust_decimal",
  "rust_decimal_macros",
  "thiserror",
+ "tracing",
 ]
 
 [[package]]
@@ -2547,8 +2587,9 @@ version = "0.10.1"
 dependencies = [
  "borsh",
  "masp_primitives",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "rust_decimal",
  "sha2 0.10.6",
@@ -2563,7 +2604,7 @@ dependencies = [
  "hex",
  "masp_primitives",
  "masp_proofs",
- "namada",
+ "namada_core",
 ]
 
 [[package]]
@@ -2571,8 +2612,9 @@ name = "namada_vp_prelude"
 version = "0.10.1"
 dependencies = [
  "borsh",
- "namada",
+ "namada_core",
  "namada_macros",
+ "namada_proof_of_stake",
  "namada_vm_env",
  "sha2 0.10.6",
  "thiserror",

From 8e40ba593a4d0194fb302bf68c752405315e05e3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Zemanovi=C4=8D?= <tomas@heliax.dev>
Date: Mon, 21 Nov 2022 16:21:50 +0100
Subject: [PATCH 3/9] pos: replace generic types with concrete types from core

---
 apps/src/lib/client/rpc.rs        |   13 +-
 proof_of_stake/src/epoched.rs     |    2 +-
 proof_of_stake/src/lib.rs         | 1062 +++++++----------------------
 proof_of_stake/src/storage.rs     |  155 ++---
 proof_of_stake/src/types.rs       |  154 ++---
 proof_of_stake/src/validation.rs  |  654 +++++-------------
 shared/src/ledger/pos/mod.rs      |   40 +-
 shared/src/ledger/pos/vp.rs       |    6 +-
 tests/src/native_vp/pos.rs        |    6 +-
 tx_prelude/src/proof_of_stake.rs  |   58 +-
 wasm/wasm_source/src/tx_bond.rs   |   21 +-
 wasm/wasm_source/src/tx_unbond.rs |   18 +-
 12 files changed, 575 insertions(+), 1614 deletions(-)

diff --git a/apps/src/lib/client/rpc.rs b/apps/src/lib/client/rpc.rs
index b496c0fe4d..3fb4d8eea8 100644
--- a/apps/src/lib/client/rpc.rs
+++ b/apps/src/lib/client/rpc.rs
@@ -27,9 +27,7 @@ use namada::ledger::governance::parameters::GovParams;
 use namada::ledger::governance::storage as gov_storage;
 use namada::ledger::native_vp::governance::utils::Votes;
 use namada::ledger::parameters::{storage as param_storage, EpochDuration};
-use namada::ledger::pos::types::{
-    decimal_mult_u64, Epoch as PosEpoch, WeightedValidator,
-};
+use namada::ledger::pos::types::{decimal_mult_u64, WeightedValidator};
 use namada::ledger::pos::{
     self, is_validator_slashes_key, BondId, Bonds, PosParams, Slash, Unbonds,
 };
@@ -2012,8 +2010,8 @@ pub async fn known_address(
 fn apply_slashes(
     slashes: &[Slash],
     mut delta: token::Amount,
-    epoch_start: PosEpoch,
-    withdraw_epoch: Option<PosEpoch>,
+    epoch_start: Epoch,
+    withdraw_epoch: Option<Epoch>,
     mut w: Option<&mut std::io::StdoutLock>,
 ) -> token::Amount {
     let mut slashed = token::Amount::default();
@@ -2638,11 +2636,8 @@ pub async fn get_proposal_offline_votes(
                     .await
                     .unwrap_or_default();
                     let mut delegated_amount: token::Amount = 0.into();
-                    let epoch = namada::ledger::pos::types::Epoch::from(
-                        proposal.tally_epoch.0,
-                    );
                     let bond = epoched_bonds
-                        .get(epoch)
+                        .get(proposal.tally_epoch)
                         .expect("Delegation bond should be defined.");
                     let mut to_deduct = bond.neg_deltas;
                     for (start_epoch, &(mut delta)) in
diff --git a/proof_of_stake/src/epoched.rs b/proof_of_stake/src/epoched.rs
index 96debb1c9a..2bf58d9f82 100644
--- a/proof_of_stake/src/epoched.rs
+++ b/proof_of_stake/src/epoched.rs
@@ -5,8 +5,8 @@ use core::marker::PhantomData;
 use core::{cmp, fmt, ops};
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
+use namada_core::types::storage::Epoch;
 
-use crate::types::Epoch;
 use crate::PosParams;
 
 /// Data that may have values set for future epochs, up to an epoch at offset as
diff --git a/proof_of_stake/src/lib.rs b/proof_of_stake/src/lib.rs
index c77d120239..08540e1e72 100644
--- a/proof_of_stake/src/lib.rs
+++ b/proof_of_stake/src/lib.rs
@@ -22,22 +22,21 @@ pub mod validation;
 use core::fmt::Debug;
 use std::collections::{BTreeSet, HashMap, HashSet};
 use std::convert::TryFrom;
-use std::fmt::Display;
-use std::hash::Hash;
 use std::num::TryFromIntError;
-use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};
 
-use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
 use epoched::{
     DynEpochOffset, EpochOffset, Epoched, EpochedDelta, OffsetPipelineLen,
 };
 use namada_core::ledger::storage_api;
 use namada_core::types::address::{self, Address, InternalAddress};
+use namada_core::types::key::common;
+use namada_core::types::storage::Epoch;
+use namada_core::types::token;
 pub use parameters::PosParams;
 use rust_decimal::Decimal;
 use thiserror::Error;
 use types::{
-    ActiveValidator, Bonds, CommissionRates, Epoch, GenesisValidator, Slash,
+    ActiveValidator, Bonds, CommissionRates, GenesisValidator, Slash,
     SlashType, Slashes, TotalDeltas, Unbond, Unbonds, ValidatorConsensusKeys,
     ValidatorDeltas, ValidatorSet, ValidatorSetUpdate, ValidatorSets,
     ValidatorState, ValidatorStates,
@@ -62,122 +61,69 @@ pub fn staking_token_address() -> Address {
 
 /// Read-only part of the PoS system
 pub trait PosReadOnly {
-    /// Address type
-    type Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Token amount type
-    type TokenAmount: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = Self::TokenAmount>
-        + AddAssign
-        + Sub<Output = Self::TokenAmount>
-        + PartialOrd
-        + Into<u64>
-        + From<u64>
-        + Into<Self::TokenChange>
-        + SubAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Token change type
-    type TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = Self::TokenChange>
-        + Sub<Output = Self::TokenChange>
-        + From<Self::TokenAmount>
-        + Into<i128>
-        + Neg<Output = Self::TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Cryptographic public key type
-    type PublicKey: Debug
-        + Clone
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-
-    /// Underlying read (and write in [`PosActions`]) interface errors
-    type Error;
-
     /// Address of the PoS account
-    const POS_ADDRESS: Self::Address;
+    const POS_ADDRESS: Address;
 
     /// Address of the staking token
-    fn staking_token_address(&self) -> Self::Address;
+    fn staking_token_address(&self) -> Address;
 
     /// Read PoS parameters.
-    fn read_pos_params(&self) -> Result<PosParams, Self::Error>;
+    fn read_pos_params(&self) -> Result<PosParams, storage_api::Error>;
     /// Read PoS validator's consensus key (used for signing block votes).
     fn read_validator_consensus_key(
         &self,
-        key: &Self::Address,
-    ) -> Result<Option<ValidatorConsensusKeys<Self::PublicKey>>, Self::Error>;
+        key: &Address,
+    ) -> Result<Option<ValidatorConsensusKeys>, storage_api::Error>;
     /// Read PoS validator's state.
     fn read_validator_state(
         &self,
-        key: &Self::Address,
-    ) -> Result<Option<ValidatorStates>, Self::Error>;
+        key: &Address,
+    ) -> Result<Option<ValidatorStates>, storage_api::Error>;
     /// Read PoS validator's total deltas of their bonds (validator self-bonds
     /// and delegations).
     fn read_validator_deltas(
         &self,
-        key: &Self::Address,
-    ) -> Result<Option<ValidatorDeltas<Self::TokenChange>>, Self::Error>;
+        key: &Address,
+    ) -> Result<Option<ValidatorDeltas>, storage_api::Error>;
 
     /// Read PoS slashes applied to a validator.
     fn read_validator_slashes(
         &self,
-        key: &Self::Address,
-    ) -> Result<Vec<Slash>, Self::Error>;
+        key: &Address,
+    ) -> Result<Vec<Slash>, storage_api::Error>;
     /// Read PoS validator's commission rate for delegation rewards
     fn read_validator_commission_rate(
         &self,
-        key: &Self::Address,
-    ) -> Result<Option<CommissionRates>, Self::Error>;
+        key: &Address,
+    ) -> Result<Option<CommissionRates>, storage_api::Error>;
     /// Read PoS validator's maximum change in the commission rate for
     /// delegation rewards
     fn read_validator_max_commission_rate_change(
         &self,
-        key: &Self::Address,
-    ) -> Result<Option<Decimal>, Self::Error>;
+        key: &Address,
+    ) -> Result<Option<Decimal>, storage_api::Error>;
     /// Read PoS bond (validator self-bond or a delegation).
     fn read_bond(
         &self,
-        key: &BondId<Self::Address>,
-    ) -> Result<Option<Bonds<Self::TokenAmount>>, Self::Error>;
+        key: &BondId,
+    ) -> Result<Option<Bonds>, storage_api::Error>;
     /// Read PoS unbond (unbonded tokens from validator self-bond or a
     /// delegation).
     fn read_unbond(
         &self,
-        key: &BondId<Self::Address>,
-    ) -> Result<Option<Unbonds<Self::TokenAmount>>, Self::Error>;
+        key: &BondId,
+    ) -> Result<Option<Unbonds>, storage_api::Error>;
     /// Read PoS validator set (active and inactive).
-    fn read_validator_set(
-        &self,
-    ) -> Result<ValidatorSets<Self::Address>, Self::Error>;
+    fn read_validator_set(&self) -> Result<ValidatorSets, storage_api::Error>;
+    /// Read PoS total deltas for all validators (active and inactive)
+    fn read_total_deltas(&self) -> Result<TotalDeltas, storage_api::Error>;
 
     /// Check if the given address is a validator by checking that it has some
     /// state.
     fn is_validator(
         &self,
-        address: &Self::Address,
-    ) -> Result<bool, Self::Error> {
+        address: &Address,
+    ) -> Result<bool, storage_api::Error> {
         let state = self.read_validator_state(address)?;
         Ok(state.is_some())
     }
@@ -185,9 +131,9 @@ pub trait PosReadOnly {
     /// Get the total bond amount for the given bond ID at the given epoch.
     fn bond_amount(
         &self,
-        bond_id: &BondId<Self::Address>,
-        epoch: impl Into<Epoch>,
-    ) -> Result<Self::TokenAmount, Self::Error> {
+        bond_id: &BondId,
+        epoch: Epoch,
+    ) -> Result<token::Amount, storage_api::Error> {
         // TODO new slash logic
         let slashes = self.read_validator_slashes(&bond_id.validator)?;
         // TODO apply rewards, if any
@@ -210,7 +156,7 @@ pub trait PosReadOnly {
                         }
                     }
                     let neg_deltas: u64 = bond.neg_deltas.into();
-                    Self::TokenAmount::from(total - neg_deltas)
+                    token::Amount::from(total - neg_deltas)
                 })
             })
             .unwrap_or_default())
@@ -220,8 +166,8 @@ pub trait PosReadOnly {
     /// state, e.g. active, inactive or jailed.
     fn validator_addresses(
         &self,
-        epoch: impl Into<Epoch>,
-    ) -> Result<HashSet<Self::Address>, Self::Error> {
+        epoch: Epoch,
+    ) -> Result<HashSet<Address>, storage_api::Error> {
         let validator_sets = self.read_validator_set()?;
         let validator_set = validator_sets.get(epoch).unwrap();
 
@@ -237,14 +183,13 @@ pub trait PosReadOnly {
     /// delegations to their address.
     fn validator_stake(
         &self,
-        validator: &Self::Address,
-        epoch: impl Into<Epoch>,
-    ) -> Result<Self::TokenAmount, Self::Error> {
-        let total_deltas = self.read_validator_deltas(validator)?;
-        let total_stake = total_deltas
-            .and_then(|total_deltas| total_deltas.get(epoch))
-            .and_then(|total_stake| {
-                let sum: i128 = total_stake.into();
+        validator: &Address,
+        epoch: Epoch,
+    ) -> Result<token::Amount, storage_api::Error> {
+        let deltas = self.read_validator_deltas(validator)?;
+        let total_stake = deltas.and_then(|deltas| deltas.get(epoch)).and_then(
+            |total_stake| {
+                let sum: i128 = total_stake;
                 let sum: u64 = sum.try_into().ok()?;
                 Some(sum.into())
             });
@@ -255,145 +200,119 @@ pub trait PosReadOnly {
     /// `None`.
     fn total_stake(
         &self,
-        epoch: impl Into<Epoch>,
-    ) -> Result<Self::TokenAmount, Self::Error> {
-        let epoch = epoch.into();
+        epoch: Epoch,
+    ) -> Result<token::Amount, storage_api::Error> {
+        let epoch = epoch;
         // TODO read total stake from storage once added
         self.validator_addresses(epoch)?
             .into_iter()
-            .try_fold(Self::TokenAmount::default(), |acc, validator| {
+            .try_fold(token::Amount::default(), |acc, validator| {
                 Ok(acc + self.validator_stake(&validator, epoch)?)
             })
     }
-
-    /// Read PoS total deltas for all validators (active and inactive)
-    fn read_total_deltas(
-        &self,
-    ) -> Result<TotalDeltas<Self::TokenChange>, Self::Error>;
 }
 
 /// PoS system trait to be implemented in integration that can read and write
 /// PoS data.
 pub trait PosActions: PosReadOnly {
-    /// Error in `PosActions::become_validator`
-    type BecomeValidatorError: From<Self::Error>
-        + From<BecomeValidatorError<Self::Address>>;
-
-    /// Error in `PosActions::bond_tokens`
-    type BondError: From<Self::Error> + From<BondError<Self::Address>>;
-
-    /// Error in `PosActions::unbond_tokens`
-    type UnbondError: From<Self::Error>
-        + From<UnbondError<Self::Address, Self::TokenAmount>>;
-
-    /// Error in `PosActions::withdraw_tokens`
-    type WithdrawError: From<Self::Error> + From<WithdrawError<Self::Address>>;
-
-    /// Error in `PosActions::change_commission_rate`
-    type CommissionRateChangeError: From<Self::Error>
-        + From<CommissionRateChangeError<Self::Address>>;
-
     /// Write PoS parameters.
     fn write_pos_params(
         &mut self,
         params: &PosParams,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's raw hash of its consensus key.
     fn write_validator_address_raw_hash(
         &mut self,
-        address: &Self::Address,
-        consensus_key: &Self::PublicKey,
-    ) -> Result<(), Self::Error>;
+        address: &Address,
+        consensus_key: &common::PublicKey,
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's consensus key (used for signing block votes).
     fn write_validator_consensus_key(
         &mut self,
-        key: &Self::Address,
-        value: ValidatorConsensusKeys<Self::PublicKey>,
-    ) -> Result<(), Self::Error>;
+        key: &Address,
+        value: ValidatorConsensusKeys,
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's state.
     fn write_validator_state(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: ValidatorStates,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's commission rate for delegator rewards
     fn write_validator_commission_rate(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: CommissionRates,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's maximum change in the commission rate per epoch
     fn write_validator_max_commission_rate_change(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: Decimal,
-    ) -> Result<(), Self::Error>;
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator's total deltas of their bonds (validator self-bonds
     /// and delegations).
     fn write_validator_deltas(
         &mut self,
-        key: &Self::Address,
-        value: ValidatorDeltas<Self::TokenChange>,
-    ) -> Result<(), Self::Error>;
+        key: &Address,
+        value: ValidatorDeltas,
+    ) -> Result<(), storage_api::Error>;
 
     /// Write PoS bond (validator self-bond or a delegation).
     fn write_bond(
         &mut self,
-        key: &BondId<Self::Address>,
-        value: Bonds<Self::TokenAmount>,
-    ) -> Result<(), Self::Error>;
+        key: &BondId,
+        value: Bonds,
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS unbond (unbonded tokens from validator self-bond or a
     /// delegation).
     fn write_unbond(
         &mut self,
-        key: &BondId<Self::Address>,
-        value: Unbonds<Self::TokenAmount>,
-    ) -> Result<(), Self::Error>;
+        key: &BondId,
+        value: Unbonds,
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS validator set (active and inactive).
     fn write_validator_set(
         &mut self,
-        value: ValidatorSets<Self::Address>,
-    ) -> Result<(), Self::Error>;
+        value: ValidatorSets,
+    ) -> Result<(), storage_api::Error>;
     /// Write PoS total deltas of all validators (active and inactive).
     fn write_total_deltas(
         &mut self,
-        value: TotalDeltas<Self::TokenChange>,
-    ) -> Result<(), Self::Error>;
+        value: TotalDeltas,
+    ) -> Result<(), storage_api::Error>;
     /// Delete an emptied PoS bond (validator self-bond or a delegation).
-    fn delete_bond(
-        &mut self,
-        key: &BondId<Self::Address>,
-    ) -> Result<(), Self::Error>;
+    fn delete_bond(&mut self, key: &BondId) -> Result<(), storage_api::Error>;
     /// Delete an emptied PoS unbond (unbonded tokens from validator self-bond
     /// or a delegation).
-    fn delete_unbond(
-        &mut self,
-        key: &BondId<Self::Address>,
-    ) -> Result<(), Self::Error>;
+    fn delete_unbond(&mut self, key: &BondId)
+    -> Result<(), storage_api::Error>;
 
     /// Transfer tokens from the `src` to the `dest`.
     fn transfer(
         &mut self,
-        token: &Self::Address,
-        amount: Self::TokenAmount,
-        src: &Self::Address,
-        dest: &Self::Address,
-    ) -> Result<(), Self::Error>;
+        token: &Address,
+        amount: token::Amount,
+        src: &Address,
+        dest: &Address,
+    ) -> Result<(), storage_api::Error>;
 
     /// Attempt to update the given account to become a validator.
     fn become_validator(
         &mut self,
-        address: &Self::Address,
-        consensus_key: &Self::PublicKey,
-        current_epoch: impl Into<Epoch>,
+        address: &Address,
+        consensus_key: &common::PublicKey,
+        current_epoch: Epoch,
         commission_rate: Decimal,
         max_commission_rate_change: Decimal,
-    ) -> Result<(), Self::BecomeValidatorError> {
-        let current_epoch = current_epoch.into();
+    ) -> Result<(), storage_api::Error> {
         let params = self.read_pos_params()?;
         let mut validator_set = self.read_validator_set()?;
         if self.is_validator(address)? {
-            Err(BecomeValidatorError::AlreadyValidator(address.clone()))?;
+            return Err(BecomeValidatorError::AlreadyValidator(
+                address.clone(),
+            )
+            .into());
         }
         let consensus_key_clone = consensus_key.clone();
         let BecomeValidatorData {
@@ -434,15 +353,17 @@ pub trait PosActions: PosReadOnly {
     /// `validator`.
     fn bond_tokens(
         &mut self,
-        source: Option<&Self::Address>,
-        validator: &Self::Address,
-        amount: Self::TokenAmount,
-        current_epoch: impl Into<Epoch>,
-    ) -> Result<(), Self::BondError> {
-        let current_epoch = current_epoch.into();
+        source: Option<&Address>,
+        validator: &Address,
+        amount: token::Amount,
+        current_epoch: Epoch,
+    ) -> Result<(), storage_api::Error> {
         if let Some(source) = source {
             if source != validator && self.is_validator(source)? {
-                Err(BondError::SourceMustNotBeAValidator(source.clone()))?;
+                return Err(BondError::SourceMustNotBeAValidator(
+                    source.clone(),
+                )
+                .into());
             }
         }
         let params = self.read_pos_params()?;
@@ -491,12 +412,11 @@ pub trait PosActions: PosReadOnly {
     /// the `source` to the `validator`.
     fn unbond_tokens(
         &mut self,
-        source: Option<&Self::Address>,
-        validator: &Self::Address,
-        amount: Self::TokenAmount,
-        current_epoch: impl Into<Epoch>,
-    ) -> Result<(), Self::UnbondError> {
-        let current_epoch = current_epoch.into();
+        source: Option<&Address>,
+        validator: &Address,
+        amount: token::Amount,
+        current_epoch: Epoch,
+    ) -> Result<(), storage_api::Error> {
         let params = self.read_pos_params()?;
         let source = source.unwrap_or(validator);
         let bond_id = BondId {
@@ -556,11 +476,10 @@ pub trait PosActions: PosReadOnly {
     /// tokens delegated to the `validator` to the `source`.
     fn withdraw_tokens(
         &mut self,
-        source: Option<&Self::Address>,
-        validator: &Self::Address,
-        current_epoch: impl Into<Epoch>,
-    ) -> Result<Self::TokenAmount, Self::WithdrawError> {
-        let current_epoch = current_epoch.into();
+        source: Option<&Address>,
+        validator: &Address,
+        current_epoch: Epoch,
+    ) -> Result<token::Amount, storage_api::Error> {
         let params = self.read_pos_params()?;
         let source = source.unwrap_or(validator);
         let bond_id = BondId {
@@ -612,10 +531,10 @@ pub trait PosActions: PosReadOnly {
     /// Change the commission rate of a validator
     fn change_validator_commission_rate(
         &mut self,
-        validator: &Self::Address,
+        validator: &Address,
         new_rate: Decimal,
-        current_epoch: impl Into<Epoch>,
-    ) -> Result<(), Self::CommissionRateChangeError> {
+        current_epoch: Epoch,
+    ) -> Result<(), storage_api::Error> {
         if new_rate < Decimal::ZERO {
             return Err(CommissionRateChangeError::NegativeRate(
                 new_rate,
@@ -623,7 +542,7 @@ pub trait PosActions: PosReadOnly {
             )
             .into());
         }
-        let current_epoch = current_epoch.into();
+
         let max_change = self
             .read_validator_max_commission_rate_change(validator)
             .map_err(|_| {
@@ -689,68 +608,12 @@ pub trait PosActions: PosReadOnly {
 /// PoS system base trait for system initialization on genesis block, updating
 /// the validator on a new epoch and applying slashes.
 pub trait PosBase {
-    /// Address type
-    type Address: 'static
-        + Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Token amount type
-    type TokenAmount: 'static
-        + Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = Self::TokenAmount>
-        + AddAssign
-        + Sub
-        + PartialOrd
-        + Into<u64>
-        + From<u64>
-        + Into<Self::TokenChange>
-        + SubAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Token change type
-    type TokenChange: 'static
-        + Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + PartialOrd
-        + Add<Output = Self::TokenChange>
-        + Sub<Output = Self::TokenChange>
-        + From<Self::TokenAmount>
-        + From<i128>
-        + Into<i128>
-        + Neg<Output = Self::TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-    /// Cryptographic public key type
-    type PublicKey: 'static
-        + Debug
-        + Clone
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema;
-
     /// Address of the PoS account
-    const POS_ADDRESS: Self::Address;
+    const POS_ADDRESS: Address;
     /// Address of the staking token
-    fn staking_token_address(&self) -> Self::Address;
+    fn staking_token_address(&self) -> Address;
     /// Address of the slash pool, into which slashed tokens are transferred.
-    const POS_SLASH_POOL_ADDRESS: Self::Address;
+    const POS_SLASH_POOL_ADDRESS: Address;
 
     /// Read PoS parameters.
     fn read_pos_params(&self) -> PosParams;
@@ -758,111 +621,90 @@ pub trait PosBase {
     fn read_validator_address_raw_hash(
         &self,
         raw_hash: impl AsRef<str>,
-    ) -> Option<Self::Address>;
+    ) -> Option<Address>;
     /// Read PoS validator's consensus key (used for signing block votes).
     fn read_validator_consensus_key(
         &self,
-        key: &Self::Address,
-    ) -> Option<ValidatorConsensusKeys<Self::PublicKey>>;
+        key: &Address,
+    ) -> Option<ValidatorConsensusKeys>;
     /// Read PoS validator's state.
-    fn read_validator_state(
-        &self,
-        key: &Self::Address,
-    ) -> Option<ValidatorStates>;
+    fn read_validator_state(&self, key: &Address) -> Option<ValidatorStates>;
     /// Read PoS validator's total deltas of their bonds (validator self-bonds
     /// and delegations).
-    fn read_validator_deltas(
-        &self,
-        key: &Self::Address,
-    ) -> Option<ValidatorDeltas<Self::TokenChange>>;
+    fn read_validator_deltas(&self, key: &Address) -> Option<ValidatorDeltas>;
 
     /// Read PoS slashes applied to a validator.
-    fn read_validator_slashes(&self, key: &Self::Address) -> Slashes;
+    fn read_validator_slashes(&self, key: &Address) -> Slashes;
     /// Read PoS validator's commission rate
-    fn read_validator_commission_rate(
-        &self,
-        key: &Self::Address,
-    ) -> CommissionRates;
+    fn read_validator_commission_rate(&self, key: &Address) -> CommissionRates;
     /// Read PoS validator's maximum commission rate change per epoch
     fn read_validator_max_commission_rate_change(
         &self,
-        key: &Self::Address,
+        key: &Address,
     ) -> Decimal;
     /// Read PoS validator set (active and inactive).
-    fn read_validator_set(&self) -> ValidatorSets<Self::Address>;
+    fn read_validator_set(&self) -> ValidatorSets;
     /// Read PoS total deltas of all validators (active and inactive).
-    fn read_total_deltas(&self) -> TotalDeltas<Self::TokenChange>;
+    fn read_total_deltas(&self) -> TotalDeltas;
 
     /// Write PoS parameters.
     fn write_pos_params(&mut self, params: &PosParams);
     /// Write PoS validator's raw hash of its consensus key.
     fn write_validator_address_raw_hash(
         &mut self,
-        address: &Self::Address,
-        consensus_key: &Self::PublicKey,
+        address: &Address,
+        consensus_key: &common::PublicKey,
     );
     /// Write PoS validator's consensus key (used for signing block votes).
     fn write_validator_consensus_key(
         &mut self,
-        key: &Self::Address,
-        value: &ValidatorConsensusKeys<Self::PublicKey>,
+        key: &Address,
+        value: &ValidatorConsensusKeys,
     );
     /// Write PoS validator's state.
-    fn write_validator_state(
-        &mut self,
-        key: &Self::Address,
-        value: &ValidatorStates,
-    );
+    fn write_validator_state(&mut self, key: &Address, value: &ValidatorStates);
     /// Write PoS validator's total deltas of their bonds (validator self-bonds
     /// and delegations).
     fn write_validator_deltas(
         &mut self,
-        key: &Self::Address,
-        value: &ValidatorDeltas<Self::TokenChange>,
+        key: &Address,
+        value: &ValidatorDeltas,
     );
     /// Write PoS validator's commission rate.
     fn write_validator_commission_rate(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: &CommissionRates,
     );
     /// Write PoS validator's maximum change in the commission rate.
     fn write_validator_max_commission_rate_change(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: &Decimal,
     );
     /// Write (append) PoS slash applied to a validator.
-    fn write_validator_slash(
-        &mut self,
-        validator: &Self::Address,
-        value: Slash,
-    );
+    fn write_validator_slash(&mut self, validator: &Address, value: Slash);
     /// Write PoS bond (validator self-bond or a delegation).
-    fn write_bond(
-        &mut self,
-        key: &BondId<Self::Address>,
-        value: &Bonds<Self::TokenAmount>,
-    );
+    fn write_bond(&mut self, key: &BondId, value: &Bonds);
     /// Write PoS validator set (active and inactive).
-    fn write_validator_set(&mut self, value: &ValidatorSets<Self::Address>);
+    fn write_validator_set(&mut self, value: &ValidatorSets);
     /// Write total deltas in PoS for all validators (active and inactive)
-    fn write_total_deltas(&mut self, value: &TotalDeltas<Self::TokenChange>);
+    fn write_total_deltas(&mut self, value: &TotalDeltas);
     /// Credit tokens to the `target` account. This should only be used at
     /// genesis.
     fn credit_tokens(
         &mut self,
-        token: &Self::Address,
-        target: &Self::Address,
-        amount: Self::TokenAmount,
+        token: &Address,
+        target: &Address,
+        amount: token::Amount,
     );
     /// Transfer tokens from the `src` to the `dest`.
     fn transfer(
         &mut self,
-        token: &Self::Address,
-        amount: Self::TokenAmount,
-        src: &Self::Address,
-        dest: &Self::Address,
+        token: &Address,
+        amount: token::Amount,
+        src: &Address,
+        dest: &Address,
     );
 
     /// Initialize the PoS system storage data in the genesis block for the
@@ -872,17 +714,9 @@ pub trait PosBase {
     fn init_genesis<'a>(
         &mut self,
         params: &'a PosParams,
-        validators: impl Iterator<
-            Item = &'a GenesisValidator<
-                Self::Address,
-                Self::TokenAmount,
-                Self::PublicKey,
-            >,
-        > + Clone
-        + 'a,
-        current_epoch: impl Into<Epoch>,
+        validators: impl Iterator<Item = &'a GenesisValidator> + Clone + 'a,
+        current_epoch: Epoch,
     ) -> Result<(), GenesisError> {
-        let current_epoch = current_epoch.into();
         self.write_pos_params(params);
 
         let GenesisData {
@@ -935,10 +769,10 @@ pub trait PosBase {
     /// Calls a closure on each validator update element.
     fn validator_set_update(
         &self,
-        current_epoch: impl Into<Epoch>,
-        f: impl FnMut(ValidatorSetUpdate<Self::PublicKey>),
+        current_epoch: Epoch,
+        f: impl FnMut(ValidatorSetUpdate),
     ) {
-        let current_epoch: Epoch = current_epoch.into();
+        let current_epoch: Epoch = current_epoch;
         let current_epoch_u64: u64 = current_epoch.into();
         // INVARIANT: We can only access the previous epochs data, because
         // this function is called on a beginning of a new block, before
@@ -964,7 +798,7 @@ pub trait PosBase {
         // validator slots are filled with non-0 voting power validators, but we
         // still need to guard against it.
         let active_validators = cur_validators.active.iter().filter_map(
-            |validator: &WeightedValidator<_>| {
+            |validator: &WeightedValidator| {
                 // If the validators set from previous epoch contains the same
                 // validator, it means its voting power hasn't changed and hence
                 // doesn't need to updated.
@@ -1010,7 +844,7 @@ pub trait PosBase {
             },
         );
         let inactive_validators = cur_validators.inactive.iter().filter_map(
-            |validator: &WeightedValidator<Self::Address>| {
+            |validator: &WeightedValidator| {
                 // If the validators set from previous epoch contains the same
                 // validator, it means its voting power hasn't changed and hence
                 // doesn't need to updated.
@@ -1051,14 +885,13 @@ pub trait PosBase {
     fn slash(
         &mut self,
         params: &PosParams,
-        current_epoch: impl Into<Epoch>,
-        evidence_epoch: impl Into<Epoch>,
+        current_epoch: Epoch,
+        evidence_epoch: Epoch,
         evidence_block_height: impl Into<u64>,
         slash_type: SlashType,
-        validator: &Self::Address,
-    ) -> Result<(), SlashError<Self::Address>> {
-        let current_epoch = current_epoch.into();
-        let evidence_epoch = evidence_epoch.into();
+        validator: &Address,
+    ) -> Result<(), SlashError> {
+        let evidence_epoch = evidence_epoch;
         let rate = slash_type.get_slash_rate(params);
         let validator_slash = Slash {
             epoch: evidence_epoch,
@@ -1083,10 +916,10 @@ pub trait PosBase {
             &mut validator_set,
             &mut total_deltas,
         )?;
-        let slashed_change: i128 = slashed_change.into();
+        let slashed_change: i128 = slashed_change;
         let slashed_amount = u64::try_from(slashed_change)
             .map_err(|_err| SlashError::InvalidSlashChange(slashed_change))?;
-        let slashed_amount = Self::TokenAmount::from(slashed_amount);
+        let slashed_amount = token::Amount::from(slashed_amount);
 
         self.write_validator_deltas(validator, &deltas);
         self.write_validator_slash(validator, validator_slash);
@@ -1115,14 +948,14 @@ pub enum GenesisError {
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum BecomeValidatorError<Address: Display + Debug> {
+pub enum BecomeValidatorError {
     #[error("The given address {0} is already a validator")]
     AlreadyValidator(Address),
 }
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum BondError<Address: Display + Debug> {
+pub enum BondError {
     #[error("The given address {0} is not a validator address")]
     NotAValidator(Address),
     #[error(
@@ -1140,13 +973,13 @@ pub enum BondError<Address: Display + Debug> {
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum UnbondError<Address: Display + Debug, TokenAmount: Display + Debug> {
+pub enum UnbondError {
     #[error("No bond could be found")]
     NoBondFound,
     #[error(
         "Trying to withdraw more tokens ({0}) than the amount bonded ({0})"
     )]
-    UnbondAmountGreaterThanBond(TokenAmount, TokenAmount),
+    UnbondAmountGreaterThanBond(token::Amount, token::Amount),
     #[error("No bonds found for the validator {0}")]
     ValidatorHasNoBonds(Address),
     #[error("Voting power not found for the validator {0}")]
@@ -1159,30 +992,16 @@ pub enum UnbondError<Address: Display + Debug, TokenAmount: Display + Debug> {
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum WithdrawError<Address>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
+pub enum WithdrawError {
     #[error("No unbond could be found for {0}")]
-    NoUnbondFound(BondId<Address>),
+    NoUnbondFound(BondId),
     #[error("No unbond may be withdrawn yet for {0}")]
-    NoWithdrawableUnbond(BondId<Address>),
+    NoWithdrawableUnbond(BondId),
 }
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum SlashError<Address>
-where
-    Address: Display + Debug + Clone + PartialOrd + Ord + Hash,
-{
+pub enum SlashError {
     #[error("The validator {0} has no total deltas value")]
     ValidatorHasNoTotalDeltas(Address),
     #[error("The validator {0} has no voting power")]
@@ -1197,18 +1016,7 @@ where
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum CommissionRateChangeError<Address>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
+pub enum CommissionRateChangeError {
     #[error("Unexpected negative commission rate {0} for validator {1}")]
     NegativeRate(Decimal, Address),
     #[error("Rate change of {0} is too large for validator {1}")]
@@ -1225,148 +1033,52 @@ where
     CannotRead(Address),
 }
 
-struct GenesisData<Validators, Address, TokenAmount, TokenChange, PK>
+struct GenesisData<Validators>
 where
-    Validators: Iterator<
-        Item = Result<
-            GenesisValidatorData<Address, TokenAmount, TokenChange, PK>,
-            GenesisError,
-        >,
-    >,
-    Address: Display
-        + Debug
-        + Clone
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Debug
-        + Default
-        + Clone
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Debug
-        + Copy
-        + Add<Output = TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PK: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
+    Validators: Iterator<Item = Result<GenesisValidatorData, GenesisError>>,
 {
     validators: Validators,
     /// Active and inactive validator sets
-    validator_set: ValidatorSets<Address>,
+    validator_set: ValidatorSets,
     /// The sum of all active and inactive validators' bonded deltas
-    total_deltas: TotalDeltas<TokenChange>,
+    total_deltas: TotalDeltas,
     /// The sum of all active and inactive validators' bonded tokens
-    total_bonded_balance: TokenAmount,
+    total_bonded_balance: token::Amount,
 }
-struct GenesisValidatorData<Address, TokenAmount, TokenChange, PK>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Debug
-        + Default
-        + Clone
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Debug
-        + Copy
-        + Add<Output = TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PK: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-{
+struct GenesisValidatorData {
     address: Address,
-    consensus_key: ValidatorConsensusKeys<PK>,
+    consensus_key: ValidatorConsensusKeys,
     commission_rate: CommissionRates,
     max_commission_rate_change: Decimal,
     state: ValidatorStates,
-    deltas: ValidatorDeltas<TokenChange>,
-    bond: (BondId<Address>, Bonds<TokenAmount>),
+    deltas: ValidatorDeltas,
+    bond: (BondId, Bonds),
 }
 
 /// A function that returns genesis data created from the initial validator set.
-fn init_genesis<'a, Address, TokenAmount, TokenChange, PK>(
+fn init_genesis<'a>(
     params: &'a PosParams,
-    validators: impl Iterator<Item = &'a GenesisValidator<Address, TokenAmount, PK>>
-    + Clone
-    + 'a,
+    validators: impl Iterator<Item = &'a GenesisValidator> + Clone + 'a,
     current_epoch: Epoch,
 ) -> Result<
     GenesisData<
-        impl Iterator<
-            Item = Result<
-                GenesisValidatorData<Address, TokenAmount, TokenChange, PK>,
-                GenesisError,
-            >,
-        > + 'a,
-        Address,
-        TokenAmount,
-        TokenChange,
-        PK,
+        impl Iterator<Item = Result<GenesisValidatorData, GenesisError>> + 'a,
     >,
     GenesisError,
->
-where
-    Address: 'a
-        + Display
-        + Debug
-        + Clone
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: 'a
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + Into<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: 'a
-        + Debug
-        + Default
-        + Copy
-        + Add<Output = TokenChange>
-        + From<TokenAmount>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PK: 'a + Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-{
+> {
     // Accumulate the validator set and total bonded token balance
-    let mut active: BTreeSet<WeightedValidator<Address>> = BTreeSet::default();
-    let mut total_bonded_delta = TokenChange::default();
-    let mut total_bonded_balance = TokenAmount::default();
+    let mut active: BTreeSet<WeightedValidator> = BTreeSet::default();
+    let mut total_bonded_delta = token::Change::default();
+    let mut total_bonded_balance = token::Amount::default();
     for GenesisValidator {
         address, tokens, ..
     } in validators.clone()
     {
         total_bonded_balance += *tokens;
         // is some extra error handling needed here for casting the delta as
-        // i64? (TokenChange)
-        let delta = TokenChange::from(*tokens);
-        total_bonded_delta = total_bonded_delta + delta;
+        // i64? (token::Change)
+        let delta = token::Change::from(*tokens);
+        total_bonded_delta += delta;
         active.insert(WeightedValidator {
             bonded_stake: (*tokens).into(),
             address: address.clone(),
@@ -1374,8 +1086,7 @@ where
     }
     // Pop the smallest validators from the active set until its size is under
     // the limit and insert them into the inactive set
-    let mut inactive: BTreeSet<WeightedValidator<Address>> =
-        BTreeSet::default();
+    let mut inactive: BTreeSet<WeightedValidator> = BTreeSet::default();
     while active.len() > params.max_validator_slots as usize {
         match active.pop_first_shim() {
             Some(first) => {
@@ -1406,7 +1117,7 @@ where
                 ValidatorState::Candidate,
                 current_epoch,
             );
-            let token_delta = TokenChange::from(*tokens);
+            let token_delta = token::Change::from(*tokens);
             let deltas =
                 EpochedDelta::init_at_genesis(token_delta, current_epoch);
             let bond_id = BondId {
@@ -1445,49 +1156,26 @@ where
 
 /// A function to apply a slash to byzantine validator.
 #[allow(clippy::too_many_arguments)]
-fn slash<Address, TokenChange>(
+fn slash(
     params: &PosParams,
     current_epoch: Epoch,
     validator: &Address,
     slash: &Slash,
-    validator_deltas: &mut ValidatorDeltas<TokenChange>,
-    validator_set: &mut ValidatorSets<Address>,
-    total_deltas: &mut TotalDeltas<TokenChange>,
-) -> Result<TokenChange, SlashError<Address>>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Copy
-        + Default
-        + Neg<Output = TokenChange>
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + From<i128>
-        + Into<i128>
-        + PartialOrd
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    let current_stake: TokenChange =
+    validator_deltas: &mut ValidatorDeltas,
+    validator_set: &mut ValidatorSets,
+    total_deltas: &mut TotalDeltas,
+) -> Result<token::Change, SlashError> {
+    let current_stake: token::Change =
         validator_deltas.get(current_epoch).unwrap_or_default();
-    if current_stake < TokenChange::default() {
+    if current_stake < token::Change::default() {
         return Err(SlashError::NegativeStake(
-            current_stake.into(),
+            current_stake,
             validator.clone(),
         ));
     }
-    let raw_current_stake: i128 = current_stake.into();
-    let slashed_amount: TokenChange =
-        decimal_mult_i128(slash.rate, raw_current_stake).into();
+    let raw_current_stake: i128 = current_stake;
+    let slashed_amount: token::Change =
+        decimal_mult_i128(slash.rate, raw_current_stake);
     let token_change = -slashed_amount;
 
     // Apply slash at pipeline offset
@@ -1525,53 +1213,24 @@ where
     Ok(slashed_amount)
 }
 
-struct BecomeValidatorData<PK, TokenChange>
-where
-    PK: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-    TokenChange: Default
-        + Debug
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    consensus_key: ValidatorConsensusKeys<PK>,
+struct BecomeValidatorData {
+    consensus_key: ValidatorConsensusKeys,
     state: ValidatorStates,
-    deltas: ValidatorDeltas<TokenChange>,
+    deltas: ValidatorDeltas,
     commission_rate: Decimal,
     max_commission_rate_change: Decimal,
 }
 
 /// A function that initialized data for a new validator.
-fn become_validator<Address, PK, TokenChange>(
+fn become_validator(
     params: &PosParams,
     address: &Address,
-    consensus_key: &PK,
-    validator_set: &mut ValidatorSets<Address>,
+    consensus_key: &common::PublicKey,
+    validator_set: &mut ValidatorSets,
     current_epoch: Epoch,
     commission_rate: Decimal,
     max_commission_rate_change: Decimal,
-) -> BecomeValidatorData<PK, TokenChange>
-where
-    Address: Debug
-        + Clone
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PK: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-    TokenChange: Default
-        + Debug
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
+) -> BecomeValidatorData {
     let consensus_key =
         Epoched::init(consensus_key.clone(), current_epoch, params);
 
@@ -1613,81 +1272,25 @@ where
     }
 }
 
-struct BondData<TokenAmount, TokenChange>
-where
-    TokenAmount: Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Debug
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    pub bond: Bonds<TokenAmount>,
-    pub validator_deltas: ValidatorDeltas<TokenChange>,
+struct BondData {
+    pub bond: Bonds,
+    pub validator_deltas: ValidatorDeltas,
 }
 
 /// Bond tokens to a validator (self-bond or delegation).
 #[allow(clippy::too_many_arguments)]
-fn bond_tokens<Address, TokenAmount, TokenChange>(
+fn bond_tokens(
     params: &PosParams,
     validator_state: Option<ValidatorStates>,
-    bond_id: &BondId<Address>,
-    current_bond: Option<Bonds<TokenAmount>>,
-    amount: TokenAmount,
-    validator_deltas: Option<ValidatorDeltas<TokenChange>>,
-    total_deltas: &mut TotalDeltas<TokenChange>,
-    validator_set: &mut ValidatorSets<Address>,
+    bond_id: &BondId,
+    current_bond: Option<Bonds>,
+    amount: token::Amount,
+    validator_deltas: Option<ValidatorDeltas>,
+    total_deltas: &mut TotalDeltas,
+    validator_set: &mut ValidatorSets,
     current_epoch: Epoch,
-) -> Result<BondData<TokenAmount, TokenChange>, BondError<Address>>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + PartialEq
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + Into<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Neg
-        + Add<Output = TokenChange>
-        + Sub
-        + From<TokenAmount>
-        + Into<i128>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    if amount == TokenAmount::default() {
+) -> Result<BondData, BondError> {
+    if amount == token::Amount::default() {
         return Err(BondError::ZeroAmount);
     }
     // Check the validator state
@@ -1716,7 +1319,7 @@ where
     //
     let mut value = Bond {
         pos_deltas: HashMap::default(),
-        neg_deltas: TokenAmount::default(),
+        neg_deltas: token::Amount::default(),
     };
     // Initialize the bond at the pipeline offset
     let update_offset = DynEpochOffset::PipelineLen;
@@ -1739,7 +1342,7 @@ where
     // Update validator set. This has to be done before we update the
     // `validator_deltas`, because we need to look-up the validator with
     // its voting power before the change.
-    let token_change = TokenChange::from(amount);
+    let token_change = token::Change::from(amount);
     update_validator_set(
         params,
         &bond_id.validator,
@@ -1751,7 +1354,7 @@ where
     );
 
     // Update validator's total deltas and total staked token deltas
-    let delta = TokenChange::from(amount);
+    let delta = token::Change::from(amount);
     let validator_deltas = match validator_deltas {
         Some(mut validator_deltas) => {
             validator_deltas.add_at_offset(
@@ -1778,77 +1381,25 @@ where
     })
 }
 
-struct UnbondData<TokenAmount>
-where
-    TokenAmount: Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    pub unbond: Unbonds<TokenAmount>,
+struct UnbondData {
+    pub unbond: Unbonds,
 }
 
 /// Unbond tokens from a validator's bond (self-bond or delegation).
 #[allow(clippy::too_many_arguments)]
-fn unbond_tokens<Address, TokenAmount, TokenChange>(
+fn unbond_tokens(
     params: &PosParams,
-    bond_id: &BondId<Address>,
-    bond: &mut Bonds<TokenAmount>,
-    unbond: Option<Unbonds<TokenAmount>>,
-    amount: TokenAmount,
+    bond_id: &BondId,
+    bond: &mut Bonds,
+    unbond: Option<Unbonds>,
+    amount: token::Amount,
     slashes: Slashes,
-    validator_deltas: &mut ValidatorDeltas<TokenChange>,
-    total_deltas: &mut TotalDeltas<TokenChange>,
-    validator_set: &mut ValidatorSets<Address>,
+    validator_deltas: &mut ValidatorDeltas,
+    total_deltas: &mut TotalDeltas,
+    validator_set: &mut ValidatorSets,
     current_epoch: Epoch,
-) -> Result<UnbondData<TokenAmount>, UnbondError<Address, TokenAmount>>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + PartialOrd
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + Into<u64>
-        + From<u64>
-        + Sub<Output = TokenAmount>
-        + SubAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + From<TokenAmount>
-        + Neg<Output = TokenChange>
-        + Into<i128>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    if amount == TokenAmount::default() {
+) -> Result<UnbondData, UnbondError> {
+    if amount == token::Amount::default() {
         return Err(UnbondError::ZeroAmount);
     }
     // We can unbond tokens that are bonded for a future epoch (not yet
@@ -1872,7 +1423,7 @@ where
     let update_offset = DynEpochOffset::UnbondingLen;
     let mut to_unbond = amount;
     let to_unbond = &mut to_unbond;
-    let mut slashed_amount = TokenAmount::default();
+    let mut slashed_amount = token::Amount::default();
     // Decrement the bond deltas starting from the rightmost value (a bond in a
     // future-most epoch) until whole amount is decremented
     bond.rev_while(
@@ -1906,7 +1457,7 @@ where
                         let raw_slashed_delta =
                             decimal_mult_u64(slash.rate, raw_delta);
                         let slashed_delta =
-                            TokenAmount::from(raw_slashed_delta);
+                            token::Amount::from(raw_slashed_delta);
                         slashed_bond_delta -= slashed_delta;
                     }
                 }
@@ -1935,7 +1486,7 @@ where
     // Update validator set. This has to be done before we update the
     // `validator_deltas`, because we need to look-up the validator with
     // its voting power before the change.
-    let token_change = -TokenChange::from(slashed_amount);
+    let token_change = -token::Change::from(slashed_amount);
     update_validator_set(
         params,
         &bond_id.validator,
@@ -1958,38 +1509,15 @@ where
 
 /// Update validator set when a validator's receives a new bond and when its
 /// bond is unbonded (self-bond or delegation).
-fn update_validator_set<Address, TokenChange>(
+fn update_validator_set(
     params: &PosParams,
     validator: &Address,
-    token_change: TokenChange,
+    token_change: token::Change,
     change_offset: DynEpochOffset,
-    validator_set: &mut ValidatorSets<Address>,
-    validator_deltas: Option<&ValidatorDeltas<TokenChange>>,
+    validator_set: &mut ValidatorSets,
+    validator_deltas: Option<&ValidatorDeltas>,
     current_epoch: Epoch,
-) where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Default
-        + Debug
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub
-        + Into<i128>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
+) {
     validator_set.update_from_offset(
         |validator_set, epoch| {
             // Find the validator's bonded stake at the epoch that's being
@@ -1998,8 +1526,8 @@ fn update_validator_set<Address, TokenChange>(
                 .and_then(|d| d.get(epoch))
                 .unwrap_or_default();
             let tokens_post = tokens_pre + token_change;
-            let tokens_pre: i128 = tokens_pre.into();
-            let tokens_post: i128 = tokens_post.into();
+            let tokens_pre: i128 = tokens_pre;
+            let tokens_post: i128 = tokens_post;
             let tokens_pre: u64 = TryFrom::try_from(tokens_pre).unwrap();
             let tokens_post: u64 = TryFrom::try_from(tokens_post).unwrap();
 
@@ -2065,75 +1593,37 @@ fn update_validator_set<Address, TokenChange>(
     )
 }
 
-struct WithdrawData<TokenAmount>
-where
-    TokenAmount: Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
-    pub unbond: Unbonds<TokenAmount>,
-    pub withdrawn: TokenAmount,
-    pub slashed: TokenAmount,
+struct WithdrawData {
+    pub unbond: Unbonds,
+    pub withdrawn: token::Amount,
+    pub slashed: token::Amount,
 }
 
 /// Withdraw tokens from unbonds of self-bonds or delegations.
-fn withdraw_unbonds<Address, TokenAmount>(
+fn withdraw_unbonds(
     params: &PosParams,
-    bond_id: &BondId<Address>,
-    unbond: Option<Unbonds<TokenAmount>>,
+    bond_id: &BondId,
+    unbond: Option<Unbonds>,
     slashes: Vec<Slash>,
     current_epoch: Epoch,
-) -> Result<WithdrawData<TokenAmount>, WithdrawError<Address>>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + PartialOrd
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + Into<u64>
-        + From<u64>
-        + SubAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-{
+) -> Result<WithdrawData, WithdrawError> {
     let mut unbond =
         unbond.ok_or_else(|| WithdrawError::NoUnbondFound(bond_id.clone()))?;
     let withdrawable_unbond = unbond
         .get(current_epoch)
         .ok_or_else(|| WithdrawError::NoWithdrawableUnbond(bond_id.clone()))?;
-    let mut slashed = TokenAmount::default();
+    let mut slashed = token::Amount::default();
     let withdrawn_amount = withdrawable_unbond.deltas.iter().fold(
-        TokenAmount::default(),
+        token::Amount::default(),
         |sum, ((epoch_start, epoch_end), delta)| {
             let mut delta = *delta;
             // Check and apply slashes, if any
             for slash in &slashes {
                 if slash.epoch >= *epoch_start && slash.epoch <= *epoch_end {
                     let raw_delta: u64 = delta.into();
-                    let current_slashed = TokenAmount::from(decimal_mult_u64(
-                        slash.rate, raw_delta,
-                    ));
+                    let current_slashed = token::Amount::from(
+                        decimal_mult_u64(slash.rate, raw_delta),
+                    );
                     slashed += current_slashed;
                     delta -= current_slashed;
                 }
@@ -2149,56 +1639,32 @@ where
     })
 }
 
-impl From<BecomeValidatorError<namada_core::types::address::Address>>
-    for storage_api::Error
-{
-    fn from(
-        err: BecomeValidatorError<namada_core::types::address::Address>,
-    ) -> Self {
+impl From<BecomeValidatorError> for storage_api::Error {
+    fn from(err: BecomeValidatorError) -> Self {
         Self::new(err)
     }
 }
 
-impl From<BondError<namada_core::types::address::Address>>
-    for storage_api::Error
-{
-    fn from(err: BondError<namada_core::types::address::Address>) -> Self {
+impl From<BondError> for storage_api::Error {
+    fn from(err: BondError) -> Self {
         Self::new(err)
     }
 }
 
-impl
-    From<
-        UnbondError<
-            namada_core::types::address::Address,
-            namada_core::types::token::Amount,
-        >,
-    > for storage_api::Error
-{
-    fn from(
-        err: UnbondError<
-            namada_core::types::address::Address,
-            namada_core::types::token::Amount,
-        >,
-    ) -> Self {
+impl From<UnbondError> for storage_api::Error {
+    fn from(err: UnbondError) -> Self {
         Self::new(err)
     }
 }
 
-impl From<WithdrawError<namada_core::types::address::Address>>
-    for storage_api::Error
-{
-    fn from(err: WithdrawError<namada_core::types::address::Address>) -> Self {
+impl From<WithdrawError> for storage_api::Error {
+    fn from(err: WithdrawError) -> Self {
         Self::new(err)
     }
 }
 
-impl From<CommissionRateChangeError<namada_core::types::address::Address>>
-    for storage_api::Error
-{
-    fn from(
-        err: CommissionRateChangeError<namada_core::types::address::Address>,
-    ) -> Self {
+impl From<CommissionRateChangeError> for storage_api::Error {
+    fn from(err: CommissionRateChangeError) -> Self {
         Self::new(err)
     }
 }
diff --git a/proof_of_stake/src/storage.rs b/proof_of_stake/src/storage.rs
index 8903ebf6e2..5e11165c55 100644
--- a/proof_of_stake/src/storage.rs
+++ b/proof_of_stake/src/storage.rs
@@ -2,7 +2,6 @@
 
 use namada_core::ledger::storage::types::{decode, encode};
 use namada_core::ledger::storage::{self, Storage, StorageHasher};
-use namada_core::ledger::storage_api;
 use namada_core::types::address::Address;
 use namada_core::types::storage::{DbKeySeg, Key, KeySeg};
 use namada_core::types::{key, token};
@@ -10,41 +9,9 @@ use rust_decimal::Decimal;
 
 use super::ADDRESS;
 use crate::parameters::PosParams;
-pub use crate::types::{CommissionRates, ValidatorStates};
+pub use crate::types::*;
 use crate::{types, PosBase, PosReadOnly};
 
-// TODO: these are not needed anymore, remove together with all the generics in
-// this crate
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorConsensusKeys =
-    crate::types::ValidatorConsensusKeys<key::common::PublicKey>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorDeltas = crate::types::ValidatorDeltas<token::Change>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type Bonds = crate::types::Bonds<token::Amount>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type Unbonds = crate::types::Unbonds<token::Amount>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorSets = crate::types::ValidatorSets<Address>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type BondId = crate::types::BondId<Address>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type GenesisValidator = crate::types::GenesisValidator<
-    Address,
-    token::Amount,
-    key::common::PublicKey,
->;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type TotalDeltas = crate::types::TotalDeltas<token::Change>;
-
 const PARAMS_STORAGE_KEY: &str = "params";
 const VALIDATOR_STORAGE_PREFIX: &str = "validator";
 const VALIDATOR_ADDRESS_RAW_HASH: &str = "address_raw_hash";
@@ -383,15 +350,11 @@ where
     D: storage::DB + for<'iter> storage::DBIter<'iter>,
     H: StorageHasher,
 {
-    type Address = Address;
-    type PublicKey = key::common::PublicKey;
-    type TokenAmount = token::Amount;
-    type TokenChange = token::Change;
+    const POS_ADDRESS: namada_core::types::address::Address = super::ADDRESS;
+    const POS_SLASH_POOL_ADDRESS: namada_core::types::address::Address =
+        super::SLASH_POOL_ADDRESS;
 
-    const POS_ADDRESS: Self::Address = super::ADDRESS;
-    const POS_SLASH_POOL_ADDRESS: Self::Address = super::SLASH_POOL_ADDRESS;
-
-    fn staking_token_address(&self) -> Self::Address {
+    fn staking_token_address(&self) -> namada_core::types::address::Address {
         self.native_token.clone()
     }
 
@@ -403,7 +366,7 @@ where
     fn read_validator_address_raw_hash(
         &self,
         raw_hash: impl AsRef<str>,
-    ) -> Option<Self::Address> {
+    ) -> Option<namada_core::types::address::Address> {
         let (value, _gas) = self
             .read(&validator_address_raw_hash_key(raw_hash))
             .unwrap();
@@ -412,7 +375,7 @@ where
 
     fn read_validator_consensus_key(
         &self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
     ) -> Option<ValidatorConsensusKeys> {
         let (value, _gas) =
             self.read(&validator_consensus_key_key(key)).unwrap();
@@ -421,7 +384,7 @@ where
 
     fn read_validator_state(
         &self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
     ) -> Option<ValidatorStates> {
         let (value, _gas) = self.read(&validator_state_key(key)).unwrap();
         value.map(|value| decode(value).unwrap())
@@ -429,13 +392,16 @@ where
 
     fn read_validator_deltas(
         &self,
-        key: &Self::Address,
-    ) -> Option<types::ValidatorDeltas<Self::TokenChange>> {
+        key: &namada_core::types::address::Address,
+    ) -> Option<types::ValidatorDeltas> {
         let (value, _gas) = self.read(&validator_deltas_key(key)).unwrap();
         value.map(|value| decode(value).unwrap())
     }
 
-    fn read_validator_slashes(&self, key: &Self::Address) -> types::Slashes {
+    fn read_validator_slashes(
+        &self,
+        key: &namada_core::types::address::Address,
+    ) -> types::Slashes {
         let (value, _gas) = self.read(&validator_slashes_key(key)).unwrap();
         value
             .map(|value| decode(value).unwrap())
@@ -444,7 +410,7 @@ where
 
     fn read_validator_commission_rate(
         &self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
     ) -> CommissionRates {
         let (value, _gas) =
             self.read(&validator_commission_rate_key(key)).unwrap();
@@ -453,7 +419,7 @@ where
 
     fn read_validator_max_commission_rate_change(
         &self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
     ) -> Decimal {
         let (value, _gas) = self
             .read(&validator_max_commission_rate_change_key(key))
@@ -477,8 +443,8 @@ where
 
     fn write_validator_address_raw_hash(
         &mut self,
-        address: &Self::Address,
-        consensus_key: &Self::PublicKey,
+        address: &namada_core::types::address::Address,
+        consensus_key: &namada_core::types::key::common::PublicKey,
     ) {
         let raw_hash = key::tm_consensus_key_raw_hash(consensus_key);
         self.write(&validator_address_raw_hash_key(raw_hash), encode(address))
@@ -487,7 +453,7 @@ where
 
     fn write_validator_commission_rate(
         &mut self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
         value: &CommissionRates,
     ) {
         self.write(&validator_commission_rate_key(key), encode(value))
@@ -496,7 +462,7 @@ where
 
     fn write_validator_max_commission_rate_change(
         &mut self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
         value: &rust_decimal::Decimal,
     ) {
         self.write(
@@ -508,7 +474,7 @@ where
 
     fn write_validator_consensus_key(
         &mut self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
         value: &ValidatorConsensusKeys,
     ) {
         self.write(&validator_consensus_key_key(key), encode(value))
@@ -517,7 +483,7 @@ where
 
     fn write_validator_state(
         &mut self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
         value: &ValidatorStates,
     ) {
         self.write(&validator_state_key(key), encode(value))
@@ -526,7 +492,7 @@ where
 
     fn write_validator_deltas(
         &mut self,
-        key: &Self::Address,
+        key: &namada_core::types::address::Address,
         value: &ValidatorDeltas,
     ) {
         self.write(&validator_deltas_key(key), encode(value))
@@ -535,7 +501,7 @@ where
 
     fn write_validator_slash(
         &mut self,
-        validator: &Self::Address,
+        validator: &namada_core::types::address::Address,
         value: types::Slash,
     ) {
         let mut slashes = PosBase::read_validator_slashes(self, validator);
@@ -558,9 +524,9 @@ where
 
     fn credit_tokens(
         &mut self,
-        token: &Self::Address,
-        target: &Self::Address,
-        amount: Self::TokenAmount,
+        token: &namada_core::types::address::Address,
+        target: &namada_core::types::address::Address,
+        amount: namada_core::types::token::Amount,
     ) {
         let key = token::balance_key(token, target);
         let new_balance = match self
@@ -568,7 +534,7 @@ where
             .expect("Unable to read token balance for PoS system")
         {
             (Some(balance), _gas) => {
-                let balance: Self::TokenAmount =
+                let balance: namada_core::types::token::Amount =
                     decode(balance).unwrap_or_default();
                 balance + amount
             }
@@ -580,10 +546,10 @@ where
 
     fn transfer(
         &mut self,
-        token: &Self::Address,
-        amount: Self::TokenAmount,
-        src: &Self::Address,
-        dest: &Self::Address,
+        token: &namada_core::types::address::Address,
+        amount: namada_core::types::token::Amount,
+        src: &namada_core::types::address::Address,
+        dest: &namada_core::types::address::Address,
     ) {
         let src_key = token::balance_key(token, src);
         let dest_key = token::balance_key(token, dest);
@@ -591,7 +557,7 @@ where
             .read(&src_key)
             .expect("Unable to read token balance for PoS system")
         {
-            let mut src_balance: Self::TokenAmount =
+            let mut src_balance: namada_core::types::token::Amount =
                 decode(src_balance).unwrap_or_default();
             if src_balance < amount {
                 tracing::error!(
@@ -604,9 +570,10 @@ where
             }
             src_balance.spend(&amount);
             let (dest_balance, _gas) = self.read(&dest_key).unwrap_or_default();
-            let mut dest_balance: Self::TokenAmount = dest_balance
-                .and_then(|b| decode(b).ok())
-                .unwrap_or_default();
+            let mut dest_balance: namada_core::types::token::Amount =
+                dest_balance
+                    .and_then(|b| decode(b).ok())
+                    .unwrap_or_default();
             dest_balance.receive(&amount);
             self.write(&src_key, encode(&src_balance))
                 .expect("Unable to write token balance for PoS system");
@@ -635,37 +602,28 @@ where
 #[macro_export]
 macro_rules! impl_pos_read_only {
     (
-        // Type error type has to be declared before the impl.
-        // This error type must `impl From<storage_api::Error> for $error`.
-        type $error:tt = $err_ty:ty ;
         // Matches anything, so that we can use lifetimes and generic types.
         // This expects `impl(<.*>)? PoSReadOnly for $ty(<.*>)?`.
         $( $any:tt )* )
     => {
         $( $any )*
         {
-            type Address = namada_core::types::address::Address;
-            type $error = $err_ty;
-            type PublicKey = namada_core::types::key::common::PublicKey;
-            type TokenAmount = namada_core::types::token::Amount;
-            type TokenChange = namada_core::types::token::Change;
-
-            const POS_ADDRESS: Self::Address = $crate::ADDRESS;
+            const POS_ADDRESS: namada_core::types::address::Address = $crate::ADDRESS;
 
-            fn staking_token_address(&self) -> Self::Address {
+            fn staking_token_address(&self) -> namada_core::types::address::Address {
                 namada_core::ledger::storage_api::StorageRead::get_native_token(self)
                     .expect("Native token must be available")
             }
 
-            fn read_pos_params(&self) -> std::result::Result<PosParams, Self::Error> {
+            fn read_pos_params(&self) -> namada_core::ledger::storage_api::Result<PosParams> {
                 let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &params_key())?.unwrap();
                 Ok(namada_core::ledger::storage::types::decode(value).unwrap())
             }
 
             fn read_validator_consensus_key(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorConsensusKeys>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Option<ValidatorConsensusKeys>> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_consensus_key_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
@@ -673,8 +631,8 @@ macro_rules! impl_pos_read_only {
 
             fn read_validator_commission_rate(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<CommissionRates>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Option<CommissionRates>> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_commission_rate_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
@@ -682,8 +640,8 @@ macro_rules! impl_pos_read_only {
 
             fn read_validator_max_commission_rate_change(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<Decimal>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Option<Decimal>> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_max_commission_rate_change_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
@@ -691,16 +649,16 @@ macro_rules! impl_pos_read_only {
 
             fn read_validator_state(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorStates>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Option<ValidatorStates>> {
                 let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_state_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
             }
 
             fn read_validator_deltas(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Option<ValidatorDeltas>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Option<ValidatorDeltas>> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_deltas_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
@@ -708,8 +666,8 @@ macro_rules! impl_pos_read_only {
 
             fn read_validator_slashes(
                 &self,
-                key: &Self::Address,
-            ) -> std::result::Result<Vec<types::Slash>, Self::Error> {
+                key: &namada_core::types::address::Address,
+            ) -> namada_core::ledger::storage_api::Result<Vec<types::Slash>> {
                 let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_slashes_key(key))?;
                 Ok(value
                     .map(|value| namada_core::ledger::storage::types::decode(value).unwrap())
@@ -719,7 +677,7 @@ macro_rules! impl_pos_read_only {
             fn read_bond(
                 &self,
                 key: &BondId,
-            ) -> std::result::Result<Option<Bonds>, Self::Error> {
+            ) -> namada_core::ledger::storage_api::Result<Option<Bonds>> {
                 let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &bond_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
             }
@@ -727,14 +685,14 @@ macro_rules! impl_pos_read_only {
             fn read_unbond(
                 &self,
                 key: &BondId,
-            ) -> std::result::Result<Option<Unbonds>, Self::Error> {
+            ) -> namada_core::ledger::storage_api::Result<Option<Unbonds>> {
                 let value = namada_core::ledger::storage_api::StorageRead::read_bytes(self, &unbond_key(key))?;
                 Ok(value.map(|value| namada_core::ledger::storage::types::decode(value).unwrap()))
             }
 
             fn read_validator_set(
                 &self,
-            ) -> std::result::Result<ValidatorSets, Self::Error> {
+            ) -> namada_core::ledger::storage_api::Result<ValidatorSets> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &validator_set_key())?.unwrap();
                 Ok(namada_core::ledger::storage::types::decode(value).unwrap())
@@ -742,7 +700,7 @@ macro_rules! impl_pos_read_only {
 
             fn read_total_deltas(
                 &self,
-            ) -> std::result::Result<TotalDeltas, Self::Error> {
+            ) -> namada_core::ledger::storage_api::Result<TotalDeltas> {
                 let value =
                     namada_core::ledger::storage_api::StorageRead::read_bytes(self, &total_deltas_key())?.unwrap();
                 Ok(namada_core::ledger::storage::types::decode(value).unwrap())
@@ -752,7 +710,6 @@ macro_rules! impl_pos_read_only {
 }
 
 impl_pos_read_only! {
-    type Error = storage_api::Error;
     impl<DB, H> PosReadOnly for Storage<DB, H>
         where
             DB: storage::DB + for<'iter> storage::DBIter<'iter> +'static,
diff --git a/proof_of_stake/src/types.rs b/proof_of_stake/src/types.rs
index 480e06e9fa..089da88c86 100644
--- a/proof_of_stake/src/types.rs
+++ b/proof_of_stake/src/types.rs
@@ -5,10 +5,13 @@ use std::collections::{BTreeSet, HashMap};
 use std::convert::TryFrom;
 use std::fmt::Display;
 use std::hash::Hash;
-use std::ops::{Add, AddAssign, Sub};
+use std::ops::Add;
 
 use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
-pub use namada_core::types::storage::Epoch;
+use namada_core::types::address::Address;
+use namada_core::types::key::common;
+use namada_core::types::storage::Epoch;
+use namada_core::types::token;
 use rust_decimal::prelude::{Decimal, ToPrimitive};
 
 use crate::epoched::{
@@ -17,26 +20,20 @@ use crate::epoched::{
 use crate::parameters::PosParams;
 
 /// Epoched validator's consensus key.
-pub type ValidatorConsensusKeys<PublicKey> =
-    Epoched<PublicKey, OffsetPipelineLen>;
+pub type ValidatorConsensusKeys = Epoched<common::PublicKey, OffsetPipelineLen>;
 /// Epoched validator's state.
 pub type ValidatorStates = Epoched<ValidatorState, OffsetPipelineLen>;
 /// Epoched validator's total deltas.
-pub type ValidatorDeltas<TokenChange> =
-    EpochedDelta<TokenChange, OffsetUnbondingLen>;
+pub type ValidatorDeltas = EpochedDelta<token::Change, OffsetUnbondingLen>;
 
 /// Epoched bond.
-pub type Bonds<TokenAmount> =
-    EpochedDelta<Bond<TokenAmount>, OffsetUnbondingLen>;
+pub type Bonds = EpochedDelta<Bond, OffsetUnbondingLen>;
 /// Epoched unbond.
-pub type Unbonds<TokenAmount> =
-    EpochedDelta<Unbond<TokenAmount>, OffsetUnbondingLen>;
+pub type Unbonds = EpochedDelta<Unbond, OffsetUnbondingLen>;
 /// Epoched validator set.
-pub type ValidatorSets<Address> =
-    Epoched<ValidatorSet<Address>, OffsetUnbondingLen>;
+pub type ValidatorSets = Epoched<ValidatorSet, OffsetUnbondingLen>;
 /// Epoched total deltas.
-pub type TotalDeltas<TokenChange> =
-    EpochedDelta<TokenChange, OffsetUnbondingLen>;
+pub type TotalDeltas = EpochedDelta<token::Change, OffsetUnbondingLen>;
 /// Epoched validator commission rate
 pub type CommissionRates = Epoched<Decimal, OffsetPipelineLen>;
 
@@ -52,13 +49,13 @@ pub type CommissionRates = Epoched<Decimal, OffsetPipelineLen>;
     PartialOrd,
     Ord,
 )]
-pub struct GenesisValidator<Address, Token, PK> {
+pub struct GenesisValidator {
     /// Validator's address
     pub address: Address,
     /// Staked tokens are put into a self-bond
-    pub tokens: Token,
+    pub tokens: token::Amount,
     /// A public key used for signing validator's consensus actions
-    pub consensus_key: PK,
+    pub consensus_key: common::PublicKey,
     /// Commission rate charged on rewards for delegators (bounded inside 0-1)
     pub commission_rate: Decimal,
     /// Maximum change in commission rate permitted per epoch
@@ -67,18 +64,18 @@ pub struct GenesisValidator<Address, Token, PK> {
 
 /// An update of the active and inactive validator set.
 #[derive(Debug, Clone)]
-pub enum ValidatorSetUpdate<PK> {
+pub enum ValidatorSetUpdate {
     /// A validator is active
-    Active(ActiveValidator<PK>),
+    Active(ActiveValidator),
     /// A validator who was active in the last update and is now inactive
-    Deactivated(PK),
+    Deactivated(common::PublicKey),
 }
 
 /// Active validator's consensus key and its bonded stake.
 #[derive(Debug, Clone)]
-pub struct ActiveValidator<PK> {
+pub struct ActiveValidator {
     /// A public key used for signing validator's consensus actions
-    pub consensus_key: PK,
+    pub consensus_key: common::PublicKey,
     /// Total bonded stake of the validator
     pub bonded_stake: u64,
 }
@@ -96,20 +93,7 @@ pub struct ActiveValidator<PK> {
     BorshSerialize,
     BorshSchema,
 )]
-pub struct BondId<Address>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshSerialize
-        + BorshSchema
-        + BorshDeserialize,
-{
+pub struct BondId {
     /// (Un)bond's source address is the owner of the bonded tokens.
     pub source: Address,
     /// (Un)bond's validator address.
@@ -128,19 +112,7 @@ where
     BorshSerialize,
     BorshSchema,
 )]
-pub struct WeightedValidator<Address>
-where
-    Address: Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSchema
-        + BorshSerialize,
-{
+pub struct WeightedValidator {
     /// The `total_stake` field must be on top, because lexicographic ordering
     /// is based on the top-to-bottom declaration order and in the
     /// `ValidatorSet` the `WeightedValidator`s these need to be sorted by
@@ -150,20 +122,7 @@ where
     pub address: Address,
 }
 
-impl<Address> Display for WeightedValidator<Address>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSchema
-        + BorshSerialize,
-{
+impl Display for WeightedValidator {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
@@ -185,24 +144,12 @@ where
     BorshSerialize,
     BorshSchema,
 )]
-pub struct ValidatorSet<Address>
-where
-    Address: Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSchema
-        + BorshSerialize,
-{
+pub struct ValidatorSet {
     /// Active validator set with maximum size equal to `max_validator_slots`
     /// in [`PosParams`].
-    pub active: BTreeSet<WeightedValidator<Address>>,
+    pub active: BTreeSet<WeightedValidator>,
     /// All the other validators that are not active
-    pub inactive: BTreeSet<WeightedValidator<Address>>,
+    pub inactive: BTreeSet<WeightedValidator>,
 }
 
 /// Validator's state.
@@ -232,7 +179,7 @@ pub enum ValidatorState {
 #[derive(
     Debug, Clone, Default, BorshDeserialize, BorshSerialize, BorshSchema,
 )]
-pub struct Bond<Token: Default> {
+pub struct Bond {
     /// Bonded positive deltas. A key is the epoch set for the bond. This is
     /// used in unbonding, where it's needed for slash epoch range check.
     ///
@@ -240,11 +187,11 @@ pub struct Bond<Token: Default> {
     /// We only need to keep the start `Epoch` for the Epoched head element
     /// (i.e. the current epoch data), the rest of the array can be calculated
     /// from the offset from the head
-    pub pos_deltas: HashMap<Epoch, Token>,
+    pub pos_deltas: HashMap<Epoch, token::Amount>,
     /// Unbonded negative deltas. The values are recorded as positive, but
     /// should be subtracted when we're finding the total for some given
     /// epoch.
-    pub neg_deltas: Token,
+    pub neg_deltas: token::Amount,
 }
 
 /// An unbond contains unbonded tokens from a validator's self-bond or a
@@ -252,11 +199,11 @@ pub struct Bond<Token: Default> {
 #[derive(
     Debug, Clone, Default, BorshDeserialize, BorshSerialize, BorshSchema,
 )]
-pub struct Unbond<Token: Default> {
+pub struct Unbond {
     /// A key is a pair of the epoch of the bond from which a unbond was
     /// created the epoch of unbonding. This is needed for slash epoch range
     /// check.
-    pub deltas: HashMap<(Epoch, Epoch), Token>,
+    pub deltas: HashMap<(Epoch, Epoch), token::Amount>,
 }
 
 /// A slash applied to validator, to punish byzantine behavior by removing
@@ -286,20 +233,7 @@ pub enum SlashType {
     LightClientAttack,
 }
 
-impl<Address> Display for BondId<Address>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshSerialize
-        + BorshDeserialize
-        + BorshSchema,
-{
+impl Display for BondId {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
@@ -309,13 +243,10 @@ where
     }
 }
 
-impl<Token> Bond<Token>
-where
-    Token: Clone + Copy + Add<Output = Token> + Sub<Output = Token> + Default,
-{
+impl Bond {
     /// Find the sum of all the bonds amounts.
-    pub fn sum(&self) -> Token {
-        let pos_deltas_sum: Token = self
+    pub fn sum(&self) -> token::Amount {
+        let pos_deltas_sum: token::Amount = self
             .pos_deltas
             .iter()
             .fold(Default::default(), |acc, (_epoch, amount)| acc + *amount);
@@ -323,10 +254,7 @@ where
     }
 }
 
-impl<Token> Add for Bond<Token>
-where
-    Token: Clone + AddAssign + Default,
-{
+impl Add for Bond {
     type Output = Self;
 
     fn add(mut self, rhs: Self) -> Self::Output {
@@ -354,22 +282,16 @@ where
     }
 }
 
-impl<Token> Unbond<Token>
-where
-    Token: Clone + Copy + Add<Output = Token> + Default,
-{
+impl Unbond {
     /// Find the sum of all the unbonds amounts.
-    pub fn sum(&self) -> Token {
+    pub fn sum(&self) -> token::Amount {
         self.deltas
             .iter()
             .fold(Default::default(), |acc, (_epoch, amount)| acc + *amount)
     }
 }
 
-impl<Token> Add for Unbond<Token>
-where
-    Token: Clone + AddAssign + Default,
-{
+impl Add for Unbond {
     type Output = Self;
 
     fn add(mut self, rhs: Self) -> Self::Output {
diff --git a/proof_of_stake/src/validation.rs b/proof_of_stake/src/validation.rs
index 048004dc04..f6fa285b2b 100644
--- a/proof_of_stake/src/validation.rs
+++ b/proof_of_stake/src/validation.rs
@@ -3,14 +3,13 @@
 use std::borrow::Cow;
 use std::collections::HashMap;
 use std::convert::TryFrom;
-use std::fmt::{Debug, Display};
-use std::hash::Hash;
+use std::fmt::Debug;
 use std::marker::PhantomData;
-use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};
 
-use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
-use derivative::Derivative;
-use namada_core::types::key::PublicKeyTmRawHash;
+use namada_core::types::address::Address;
+use namada_core::types::key::{common, PublicKeyTmRawHash};
+use namada_core::types::storage::Epoch;
+use namada_core::types::token;
 use rust_decimal::Decimal;
 use thiserror::Error;
 
@@ -18,30 +17,14 @@ use crate::btree_set::BTreeSetShims;
 use crate::epoched::DynEpochOffset;
 use crate::parameters::PosParams;
 use crate::types::{
-    decimal_mult_i128, decimal_mult_u64, BondId, Bonds, CommissionRates, Epoch,
-    Slash, Slashes, TotalDeltas, Unbonds, ValidatorConsensusKeys,
-    ValidatorDeltas, ValidatorSets, ValidatorState, ValidatorStates,
-    WeightedValidator,
+    decimal_mult_i128, decimal_mult_u64, BondId, Bonds, CommissionRates, Slash,
+    Slashes, TotalDeltas, Unbonds, ValidatorConsensusKeys, ValidatorDeltas,
+    ValidatorSets, ValidatorState, ValidatorStates, WeightedValidator,
 };
 
 #[allow(missing_docs)]
 #[derive(Error, Debug)]
-pub enum Error<Address, TokenChange, PublicKey>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshSerialize
-        + BorshSchema
-        + BorshDeserialize,
-    TokenChange: Debug + Display,
-    PublicKey: Debug,
-{
+pub enum Error {
     #[error("Unexpectedly missing state value for validator {0}")]
     ValidatorStateIsRequired(Address),
     #[error("Invalid new validator state in epoch {0}")]
@@ -73,9 +56,9 @@ where
          {bond_delta}, unbonds Δ {unbond_delta}"
     )]
     InvalidBalances {
-        balance_delta: TokenChange,
-        bond_delta: TokenChange,
-        unbond_delta: TokenChange,
+        balance_delta: token::Change,
+        bond_delta: token::Change,
+        unbond_delta: token::Change,
     },
     #[error(
         "Data must be set or updated in the correct epoch. Got epoch {got}, \
@@ -83,35 +66,23 @@ where
     )]
     EpochedDataWrongEpoch { got: u64, expected: Vec<u64> },
     #[error("Empty bond {0} must be deleted")]
-    EmptyBond(BondId<Address>),
+    EmptyBond(BondId),
     #[error(
         "Bond ID {id} must start at the correct epoch. Got epoch {got}, \
          expected {expected}"
     )]
-    InvalidBondStartEpoch {
-        id: BondId<Address>,
-        got: u64,
-        expected: u64,
-    },
+    InvalidBondStartEpoch { id: BondId, got: u64, expected: u64 },
     #[error(
         "Bond ID {id} must be added at the correct epoch. Got epoch {got}, \
          expected {expected}"
     )]
-    InvalidNewBondEpoch {
-        id: BondId<Address>,
-        got: u64,
-        expected: u64,
-    },
+    InvalidNewBondEpoch { id: BondId, got: u64, expected: u64 },
 
     #[error(
         "Bond ID {id} must be subtracted at the correct epoch. Got epoch \
          {got}, expected {expected}"
     )]
-    InvalidNegDeltaEpoch {
-        id: BondId<Address>,
-        got: u64,
-        expected: u64,
-    },
+    InvalidNegDeltaEpoch { id: BondId, got: u64, expected: u64 },
 
     #[error(
         "Invalid validator {address} sum of total deltas. Total Δ \
@@ -119,15 +90,15 @@ where
     )]
     InvalidValidatorTotalDeltasSum {
         address: Address,
-        total_delta: TokenChange,
-        bond_delta: TokenChange,
+        total_delta: token::Change,
+        bond_delta: token::Change,
     },
     #[error("Unexpectedly missing validator set value")]
     MissingValidatorSet,
     #[error("Validator {0} not found in the validator set in epoch {1}")]
-    WeightedValidatorNotFound(WeightedValidator<Address>, u64),
+    WeightedValidatorNotFound(WeightedValidator, u64),
     #[error("Duplicate validator {0} in the validator set in epoch {1}")]
-    ValidatorSetDuplicate(WeightedValidator<Address>, u64),
+    ValidatorSetDuplicate(WeightedValidator, u64),
     #[error("Validator {0} has an invalid total deltas value {1}")]
     InvalidValidatorTotalDeltas(Address, i128),
     #[error("There are too many active validators in the validator set")]
@@ -136,14 +107,11 @@ where
         "An inactive validator {0} has voting power greater than an active \
          validator {1}"
     )]
-    ValidatorSetOutOfOrder(
-        WeightedValidator<Address>,
-        WeightedValidator<Address>,
-    ),
+    ValidatorSetOutOfOrder(WeightedValidator, WeightedValidator),
     #[error("Invalid active validator {0}")]
-    InvalidActiveValidator(WeightedValidator<Address>),
+    InvalidActiveValidator(WeightedValidator),
     #[error("Invalid inactive validator {0}")]
-    InvalidInactiveValidator(WeightedValidator<Address>),
+    InvalidInactiveValidator(WeightedValidator),
     #[error("Unexpectedly missing voting power value for validator {0}")]
     MissingValidatorVotingPower(Address),
     #[error("Validator {0} has an invalid voting power value {1}")]
@@ -161,7 +129,7 @@ where
     #[error("Invalid address raw hash update")]
     InvalidRawHashUpdate,
     #[error("Invalid new validator {0}, some fields are missing: {1:?}.")]
-    InvalidNewValidator(Address, NewValidator<PublicKey>),
+    InvalidNewValidator(Address, NewValidator),
     #[error("New validator {0} has not been added to the validator set.")]
     NewValidatorMissingInValidatorSet(Address),
     #[error("Validator set has not been updated for new validators.")]
@@ -180,62 +148,24 @@ where
 
 /// An update of PoS data.
 #[derive(Clone, Debug)]
-pub enum DataUpdate<Address, TokenAmount, TokenChange, PublicKey>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Clone
-        + Debug
-        + Default
-        + Eq
-        + Sub
-        + Add<Output = TokenAmount>
-        + AddAssign
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + From<TokenAmount>
-        + Into<i128>
-        + PartialEq
-        + Eq
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-{
+pub enum DataUpdate {
     /// PoS account's balance update
-    Balance(Data<TokenAmount>),
+    Balance(Data<token::Amount>),
     /// Bond update
     Bond {
         /// Bond ID
-        id: BondId<Address>,
+        id: BondId,
         /// Bond prior and posterior state
-        data: Data<Bonds<TokenAmount>>,
+        data: Data<Bonds>,
         /// List of slashes applied to the bond's validator
         slashes: Slashes,
     },
     /// Unbond update
     Unbond {
         /// Unbond ID
-        id: BondId<Address>,
+        id: BondId,
         /// Unbond prior and posterior state
-        data: Data<Unbonds<TokenAmount>>,
+        data: Data<Unbonds>,
         /// List of slashes applied to the bond's validator
         slashes: Slashes,
     },
@@ -244,12 +174,12 @@ where
         /// Validator's address
         address: Address,
         /// Validator's data update
-        update: ValidatorUpdate<TokenChange, PublicKey>,
+        update: ValidatorUpdate,
     },
     /// Validator set update
-    ValidatorSet(Data<ValidatorSets<Address>>),
+    ValidatorSet(Data<ValidatorSets>),
     /// Total deltas update
-    TotalDeltas(Data<TotalDeltas<TokenChange>>),
+    TotalDeltas(Data<TotalDeltas>),
     /// Validator's address raw hash
     ValidatorAddressRawHash {
         /// Raw hash value
@@ -261,28 +191,13 @@ where
 
 /// An update of a validator's data.
 #[derive(Clone, Debug)]
-pub enum ValidatorUpdate<TokenChange, PublicKey>
-where
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + PartialEq
-        + Eq
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug + Clone + BorshDeserialize + BorshSerialize + BorshSchema,
-{
+pub enum ValidatorUpdate {
     /// Validator's state update
     State(Data<ValidatorStates>),
     /// Consensus key update
-    ConsensusKey(Data<ValidatorConsensusKeys<PublicKey>>),
+    ConsensusKey(Data<ValidatorConsensusKeys>),
     /// Validator deltas update
-    ValidatorDeltas(Data<ValidatorDeltas<TokenChange>>),
+    ValidatorDeltas(Data<ValidatorDeltas>),
     /// Commission rate update
     CommissionRate(Data<CommissionRates>, Option<Decimal>),
     /// Maximum commission rate change update
@@ -303,12 +218,10 @@ where
 
 /// A new validator account initialized in a transaction, which is used to check
 /// that all the validator's required fields have been written.
-#[derive(Clone, Debug, Derivative)]
-// https://mcarton.github.io/rust-derivative/latest/Default.html#custom-bound
-#[derivative(Default(bound = ""))]
-pub struct NewValidator<PublicKey> {
+#[derive(Clone, Debug, Default)]
+pub struct NewValidator {
     has_state: bool,
-    has_consensus_key: Option<PublicKey>,
+    has_consensus_key: Option<common::PublicKey>,
     has_total_deltas: bool,
     has_address_raw_hash: Option<String>,
     bonded_stake: u64,
@@ -329,67 +242,11 @@ struct Constants {
 /// Validate the given list of PoS data `changes`. Returns empty list, if all
 /// the changes are valid.
 #[must_use]
-pub fn validate<Address, TokenAmount, TokenChange, PublicKey>(
+pub fn validate(
     params: &PosParams,
-    changes: Vec<DataUpdate<Address, TokenAmount, TokenChange, PublicKey>>,
-    current_epoch: impl Into<Epoch>,
-) -> Vec<Error<Address, TokenChange, PublicKey>>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Clone
-        + Copy
-        + Debug
-        + Default
-        + Eq
-        + Add<Output = TokenAmount>
-        + Sub<Output = TokenAmount>
-        + AddAssign
-        + SubAssign
-        + Into<u64>
-        + From<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + Neg<Output = TokenChange>
-        + SubAssign
-        + AddAssign
-        + From<TokenAmount>
-        + Into<i128>
-        + From<i128>
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug
-        + Clone
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema
-        + PartialEq
-        + PublicKeyTmRawHash,
-{
-    let current_epoch: Epoch = current_epoch.into();
+    changes: Vec<DataUpdate>,
+    current_epoch: Epoch,
+) -> Vec<Error> {
     let pipeline_offset = DynEpochOffset::PipelineLen.value(params);
     let unbonding_offset = DynEpochOffset::UnbondingLen.value(params);
     let pipeline_epoch = current_epoch + pipeline_offset;
@@ -405,19 +262,17 @@ where
     let mut errors = vec![];
 
     let Accumulator {
-            balance_delta,
-            bond_delta,
-            unbond_delta,
-            total_deltas,
-            total_stake_by_epoch,
-            validator_set_pre,
-            validator_set_post,
-            total_deltas_by_epoch: _,
-            bonded_stake_by_epoch,
-            new_validators,
-        } = Validate::<Address, TokenAmount, TokenChange, PublicKey>::accumulate_changes(
-            changes, params, &constants, &mut errors
-        );
+        balance_delta,
+        bond_delta,
+        unbond_delta,
+        total_deltas,
+        total_stake_by_epoch,
+        validator_set_pre,
+        validator_set_post,
+        total_deltas_by_epoch: _,
+        bonded_stake_by_epoch,
+        new_validators,
+    } = Validate::accumulate_changes(changes, params, &constants, &mut errors);
 
     // Check total deltas against bonds
     for (validator, total_delta) in total_deltas.iter() {
@@ -441,7 +296,7 @@ where
     // Negative unbond delta is from withdrawing, which removes tokens from
     // unbond, but doesn't affect total deltas.
     for (validator, delta) in &unbond_delta {
-        if *delta > TokenChange::default()
+        if *delta > token::Change::default()
             && !total_deltas.contains_key(validator)
         {
             errors.push(Error::MissingValidatorDeltas(validator.clone()));
@@ -734,12 +589,12 @@ where
     let bond_delta = bond_delta
         .values()
         .into_iter()
-        .fold(TokenChange::default(), |acc, delta| acc + (*delta));
+        .fold(token::Change::default(), |acc, delta| acc + (*delta));
     // Sum the unbond totals
     let unbond_delta = unbond_delta
         .values()
         .into_iter()
-        .fold(TokenChange::default(), |acc, delta| acc + (*delta));
+        .fold(token::Change::default(), |acc, delta| acc + (*delta));
 
     if balance_delta != bond_delta + unbond_delta {
         errors.push(Error::InvalidBalances {
@@ -752,216 +607,44 @@ where
     errors
 }
 
-#[derive(Clone, Debug)]
-struct Accumulator<Address, TokenAmount, TokenChange, PublicKey>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Clone
-        + Copy
-        + Debug
-        + Default
-        + Eq
-        + Add<Output = TokenAmount>
-        + Sub<Output = TokenAmount>
-        + AddAssign
-        + SubAssign
-        + Into<u64>
-        + From<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + Neg<Output = TokenChange>
-        + SubAssign
-        + AddAssign
-        + From<TokenAmount>
-        + Into<i128>
-        + From<i128>
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug,
-{
-    balance_delta: TokenChange,
+#[derive(Clone, Debug, Default)]
+struct Accumulator {
+    balance_delta: token::Change,
     /// Changes of validators' bonds
-    bond_delta: HashMap<Address, TokenChange>,
+    bond_delta: HashMap<Address, token::Change>,
     /// Changes of validators' unbonds
-    unbond_delta: HashMap<Address, TokenChange>,
+    unbond_delta: HashMap<Address, token::Change>,
 
     /// Changes of all validator total deltas (up to `unbonding_epoch`)
-    total_deltas: HashMap<Address, TokenChange>,
+    total_deltas: HashMap<Address, token::Change>,
     /// Stake calculated from validator total deltas for each epoch
     /// in which it has changed (the tuple of values are in pre and post state)
     total_stake_by_epoch:
-        HashMap<Epoch, HashMap<Address, (TokenAmount, TokenAmount)>>,
+        HashMap<Epoch, HashMap<Address, (token::Amount, token::Amount)>>,
     /// Total voting power delta calculated from validators' total deltas
-    total_deltas_by_epoch: HashMap<Epoch, TokenChange>,
-    bonded_stake_by_epoch: HashMap<Epoch, HashMap<Address, TokenChange>>,
-    validator_set_pre: Option<ValidatorSets<Address>>,
-    validator_set_post: Option<ValidatorSets<Address>>,
-    new_validators: HashMap<Address, NewValidator<PublicKey>>,
-}
-
-/// Accumulator of storage changes
-impl<Address, TokenAmount, TokenChange, PublicKey> Default
-    for Accumulator<Address, TokenAmount, TokenChange, PublicKey>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Clone
-        + Copy
-        + Debug
-        + Default
-        + Eq
-        + Add<Output = TokenAmount>
-        + Sub<Output = TokenAmount>
-        + AddAssign
-        + SubAssign
-        + Into<u64>
-        + From<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + Neg<Output = TokenChange>
-        + SubAssign
-        + AddAssign
-        + From<TokenAmount>
-        + Into<i128>
-        + From<i128>
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug,
-{
-    fn default() -> Self {
-        Self {
-            balance_delta: Default::default(),
-            bond_delta: Default::default(),
-            unbond_delta: Default::default(),
-            total_deltas: Default::default(),
-            total_stake_by_epoch: Default::default(),
-            total_deltas_by_epoch: Default::default(),
-            bonded_stake_by_epoch: Default::default(),
-            validator_set_pre: Default::default(),
-            validator_set_post: Default::default(),
-            new_validators: Default::default(),
-        }
-    }
+    total_deltas_by_epoch: HashMap<Epoch, token::Change>,
+    bonded_stake_by_epoch: HashMap<Epoch, HashMap<Address, token::Change>>,
+    validator_set_pre: Option<ValidatorSets>,
+    validator_set_post: Option<ValidatorSets>,
+    new_validators: HashMap<Address, NewValidator>,
 }
 
 /// An empty local type to re-use trait bounds for the functions associated with
 /// `Validate` in the `impl` below
-struct Validate<Address, TokenAmount, TokenChange, PublicKey> {
+struct Validate {
     address: PhantomData<Address>,
-    token_amount: PhantomData<TokenAmount>,
-    token_change: PhantomData<TokenChange>,
-    public_key: PhantomData<PublicKey>,
+    token_amount: PhantomData<token::Amount>,
+    token_change: PhantomData<token::Change>,
+    public_key: PhantomData<common::PublicKey>,
 }
 
-impl<Address, TokenAmount, TokenChange, PublicKey>
-    Validate<Address, TokenAmount, TokenChange, PublicKey>
-where
-    Address: Display
-        + Debug
-        + Clone
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + Hash
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenAmount: Display
-        + Clone
-        + Copy
-        + Debug
-        + Default
-        + Eq
-        + Add<Output = TokenAmount>
-        + Sub<Output = TokenAmount>
-        + AddAssign
-        + SubAssign
-        + Into<u64>
-        + From<u64>
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    TokenChange: Display
-        + Debug
-        + Default
-        + Clone
-        + Copy
-        + Add<Output = TokenChange>
-        + Sub<Output = TokenChange>
-        + Neg<Output = TokenChange>
-        + SubAssign
-        + AddAssign
-        + From<TokenAmount>
-        + Into<i128>
-        + From<i128>
-        + PartialEq
-        + Eq
-        + PartialOrd
-        + Ord
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema,
-    PublicKey: Debug
-        + Clone
-        + BorshDeserialize
-        + BorshSerialize
-        + BorshSchema
-        + PartialEq,
-{
+impl Validate {
     fn accumulate_changes(
-        changes: Vec<DataUpdate<Address, TokenAmount, TokenChange, PublicKey>>,
+        changes: Vec<DataUpdate>,
         _params: &PosParams,
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-    ) -> Accumulator<Address, TokenAmount, TokenChange, PublicKey> {
+        errors: &mut Vec<Error>,
+    ) -> Accumulator {
         use DataUpdate::*;
         use ValidatorUpdate::*;
 
@@ -1065,8 +748,8 @@ where
 
     fn validator_state(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        errors: &mut Vec<Error>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         address: Address,
         data: Data<ValidatorStates>,
     ) {
@@ -1140,10 +823,10 @@ where
 
     fn validator_consensus_key(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        errors: &mut Vec<Error>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         address: Address,
-        data: Data<ValidatorConsensusKeys<PublicKey>>,
+        data: Data<ValidatorConsensusKeys>,
     ) {
         match (data.pre, data.post) {
             (None, Some(post)) => {
@@ -1195,15 +878,15 @@ where
 
     fn validator_deltas(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        total_deltas: &mut HashMap<Address, TokenChange>,
+        errors: &mut Vec<Error>,
+        total_deltas: &mut HashMap<Address, token::Change>,
         total_stake_by_epoch: &mut HashMap<
             Epoch,
-            HashMap<Address, (TokenAmount, TokenAmount)>,
+            HashMap<Address, (token::Amount, token::Amount)>,
         >,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         address: Address,
-        data: Data<ValidatorDeltas<TokenChange>>,
+        data: Data<ValidatorDeltas>,
     ) {
         match (data.pre, data.post) {
             (Some(pre), Some(post)) => {
@@ -1211,11 +894,11 @@ where
                     errors.push(Error::InvalidLastUpdate)
                 }
                 // Changes of all total deltas (up to `unbonding_epoch`)
-                let mut deltas = TokenChange::default();
+                let mut deltas = token::Change::default();
                 // Sum of pre total deltas
-                let mut pre_deltas_sum = TokenChange::default();
+                let mut pre_deltas_sum = token::Change::default();
                 // Sum of post total deltas
-                let mut post_deltas_sum = TokenChange::default();
+                let mut post_deltas_sum = token::Change::default();
                 // Iter from the first epoch to the last epoch of `post`
                 for epoch in Epoch::iter_range(
                     constants.current_epoch,
@@ -1223,7 +906,7 @@ where
                 ) {
                     // Changes of all total deltas (up to
                     // `unbonding_epoch`)
-                    let mut delta = TokenChange::default();
+                    let mut delta = token::Change::default();
                     // Find the delta in `pre`
                     if let Some(change) = {
                         if epoch == constants.current_epoch {
@@ -1250,8 +933,9 @@ where
                             u64::try_from(stake_post),
                         ) {
                             (Ok(stake_pre), Ok(stake_post)) => {
-                                let stake_pre = TokenAmount::from(stake_pre);
-                                let stake_post = TokenAmount::from(stake_post);
+                                let stake_pre = token::Amount::from(stake_pre);
+                                let stake_post =
+                                    token::Amount::from(stake_post);
                                 total_stake_by_epoch
                                     .entry(epoch)
                                     .or_insert_with(HashMap::default)
@@ -1272,7 +956,7 @@ where
                     // A total delta can only be increased at
                     // `pipeline_offset` from bonds and decreased at
                     // `unbonding_offset` from unbonding
-                    if delta > TokenChange::default()
+                    if delta > token::Change::default()
                         && epoch != constants.pipeline_epoch
                     {
                         errors.push(Error::EpochedDataWrongEpoch {
@@ -1280,7 +964,7 @@ where
                             expected: vec![constants.pipeline_epoch.into()],
                         })
                     }
-                    if delta < TokenChange::default()
+                    if delta < token::Change::default()
                         && epoch != constants.unbonding_epoch
                     {
                         errors.push(Error::EpochedDataWrongEpoch {
@@ -1289,12 +973,12 @@ where
                         })
                     }
                 }
-                if post_deltas_sum < TokenChange::default() {
+                if post_deltas_sum < token::Change::default() {
                     errors.push(Error::NegativeValidatorDeltasSum(
                         address.clone(),
                     ))
                 }
-                if deltas != TokenChange::default() {
+                if deltas != token::Change::default() {
                     let deltas_entry = total_deltas.entry(address).or_default();
                     *deltas_entry += deltas;
                 }
@@ -1304,7 +988,7 @@ where
                     errors.push(Error::InvalidLastUpdate)
                 }
                 // Changes of all total deltas (up to `unbonding_epoch`)
-                let mut deltas = TokenChange::default();
+                let mut deltas = token::Change::default();
                 for epoch in Epoch::iter_range(
                     constants.current_epoch,
                     constants.unbonding_offset + 1,
@@ -1325,7 +1009,7 @@ where
                         let stake: i128 = Into::into(deltas);
                         match u64::try_from(stake) {
                             Ok(stake) => {
-                                let stake = TokenAmount::from(stake);
+                                let stake = token::Amount::from(stake);
                                 total_stake_by_epoch
                                     .entry(epoch)
                                     .or_insert_with(HashMap::default)
@@ -1340,12 +1024,12 @@ where
                         }
                     }
                 }
-                if deltas < TokenChange::default() {
+                if deltas < token::Change::default() {
                     errors.push(Error::NegativeValidatorDeltasSum(
                         address.clone(),
                     ))
                 }
-                if deltas != TokenChange::default() {
+                if deltas != token::Change::default() {
                     let deltas_entry =
                         total_deltas.entry(address.clone()).or_default();
                     *deltas_entry += deltas;
@@ -1365,8 +1049,8 @@ where
 
     fn validator_commission_rate(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        errors: &mut Vec<Error>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         address: Address,
         data: Data<CommissionRates>,
         max_change: Option<Decimal>,
@@ -1444,8 +1128,8 @@ where
     }
 
     fn validator_max_commission_rate_change(
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        errors: &mut Vec<Error>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         address: Address,
         data: Data<Decimal>,
     ) {
@@ -1468,15 +1152,15 @@ where
     }
 
     fn balance(
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        balance_delta: &mut TokenChange,
-        data: Data<TokenAmount>,
+        errors: &mut Vec<Error>,
+        balance_delta: &mut token::Change,
+        data: Data<token::Amount>,
     ) {
         match (data.pre, data.post) {
-            (None, Some(post)) => *balance_delta += TokenChange::from(post),
+            (None, Some(post)) => *balance_delta += token::Change::from(post),
             (Some(pre), Some(post)) => {
                 *balance_delta +=
-                    TokenChange::from(post) - TokenChange::from(pre);
+                    token::Change::from(post) - token::Change::from(pre);
             }
             (Some(_), None) => errors.push(Error::MissingBalance),
             (None, None) => {}
@@ -1485,10 +1169,10 @@ where
 
     fn bond(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        bond_delta: &mut HashMap<Address, TokenChange>,
-        id: BondId<Address>,
-        data: Data<Bonds<TokenAmount>>,
+        errors: &mut Vec<Error>,
+        bond_delta: &mut HashMap<Address, token::Change>,
+        id: BondId,
+        data: Data<Bonds>,
         slashes: Vec<Slash>,
     ) {
         match (data.pre, data.post) {
@@ -1511,13 +1195,13 @@ where
                 };
 
                 // Pre-bonds keyed by their `start_epoch`
-                let mut pre_bonds: HashMap<Epoch, TokenChange> =
+                let mut pre_bonds: HashMap<Epoch, token::Change> =
                     HashMap::default();
                 // We have to slash only the difference between post and
                 // pre, not both pre and post to avoid rounding errors
-                let mut slashed_deltas: HashMap<Epoch, TokenChange> =
+                let mut slashed_deltas: HashMap<Epoch, token::Change> =
                     HashMap::default();
-                let mut neg_deltas: HashMap<Epoch, TokenChange> =
+                let mut neg_deltas: HashMap<Epoch, token::Change> =
                     Default::default();
                 // Iter from the first epoch of `pre` to the last epoch of
                 // `post`
@@ -1527,7 +1211,7 @@ where
                 ) {
                     if let Some(bond) = pre.get_delta_at_epoch(epoch) {
                         for (start_epoch, delta) in bond.pos_deltas.iter() {
-                            let delta = TokenChange::from(*delta);
+                            let delta = token::Change::from(*delta);
                             slashed_deltas.insert(*start_epoch, -delta);
                             pre_bonds.insert(*start_epoch, delta);
                         }
@@ -1537,12 +1221,12 @@ where
                             epoch
                         };
                         let entry = neg_deltas.entry(ins_epoch).or_default();
-                        *entry -= TokenChange::from(bond.neg_deltas);
+                        *entry -= token::Change::from(bond.neg_deltas);
                     }
                     if let Some(bond) = post.get_delta_at_epoch(epoch) {
                         for (start_epoch, delta) in bond.pos_deltas.iter() {
                             // An empty bond must be deleted
-                            if *delta == TokenAmount::default() {
+                            if *delta == token::Amount::default() {
                                 errors.push(Error::EmptyBond(id.clone()))
                             }
                             // On the current epoch, all bond's
@@ -1561,10 +1245,10 @@ where
                                     expected: epoch.into(),
                                 })
                             }
-                            let delta = TokenChange::from(*delta);
+                            let delta = token::Change::from(*delta);
                             match slashed_deltas.get_mut(start_epoch) {
                                 Some(pre_delta) => {
-                                    if *pre_delta + delta == 0_i128.into() {
+                                    if *pre_delta + delta == 0_i128 {
                                         slashed_deltas.remove(start_epoch);
                                     } else {
                                         *pre_delta += delta;
@@ -1617,7 +1301,7 @@ where
                             match neg_deltas.get(&epoch) {
                                 Some(deltas) => {
                                     if -*deltas
-                                        != TokenChange::from(bond.neg_deltas)
+                                        != token::Change::from(bond.neg_deltas)
                                     {
                                         errors.push(
                                             Error::InvalidNegDeltaEpoch {
@@ -1646,31 +1330,30 @@ where
                             }
                         }
                         let entry = neg_deltas.entry(epoch).or_default();
-                        *entry += TokenChange::from(bond.neg_deltas);
+                        *entry += token::Change::from(bond.neg_deltas);
                     }
                 }
                 // Check slashes
                 for (start_epoch, delta) in slashed_deltas.iter_mut() {
                     for slash in &slashes {
                         if slash.epoch >= *start_epoch {
-                            let raw_delta: i128 = (*delta).into();
-                            let current_slashed = TokenChange::from(
-                                decimal_mult_i128(slash.rate, raw_delta),
-                            );
+                            let raw_delta: i128 = *delta;
+                            let current_slashed =
+                                decimal_mult_i128(slash.rate, raw_delta);
                             *delta -= current_slashed;
                         }
                     }
                 }
                 let total = slashed_deltas
                     .values()
-                    .fold(TokenChange::default(), |acc, delta| acc + *delta)
+                    .fold(token::Change::default(), |acc, delta| acc + *delta)
                     - neg_deltas
                         .values()
-                        .fold(TokenChange::default(), |acc, delta| {
+                        .fold(token::Change::default(), |acc, delta| {
                             acc + *delta
                         });
 
-                if total != TokenChange::default() {
+                if total != token::Change::default() {
                     let bond_entry =
                         bond_delta.entry(id.validator).or_default();
                     *bond_entry += total;
@@ -1681,7 +1364,7 @@ where
                 if post.last_update() != constants.current_epoch {
                     errors.push(Error::InvalidLastUpdate)
                 }
-                let mut total_delta = TokenChange::default();
+                let mut total_delta = token::Change::default();
                 for epoch in Epoch::iter_range(
                     constants.current_epoch,
                     constants.unbonding_offset + 1,
@@ -1720,20 +1403,20 @@ where
                             for slash in &slashes {
                                 if slash.epoch >= *start_epoch {
                                     let raw_delta: u64 = delta.into();
-                                    let current_slashed = TokenAmount::from(
+                                    let current_slashed = token::Amount::from(
                                         decimal_mult_u64(slash.rate, raw_delta),
                                     );
                                     delta -= current_slashed;
                                 }
                             }
-                            let delta = TokenChange::from(delta);
+                            let delta = token::Change::from(delta);
                             total_delta += delta
                         }
-                        total_delta -= TokenChange::from(bond.neg_deltas)
+                        total_delta -= token::Change::from(bond.neg_deltas)
                     }
                 }
                 // An empty bond must be deleted
-                if total_delta == TokenChange::default() {
+                if total_delta == token::Change::default() {
                     errors.push(Error::EmptyBond(id.clone()))
                 }
                 let bond_entry = bond_delta.entry(id.validator).or_default();
@@ -1741,7 +1424,7 @@ where
             }
             // Bond may be deleted when all the tokens are unbonded
             (Some(pre), None) => {
-                let mut total_delta = TokenChange::default();
+                let mut total_delta = token::Change::default();
                 for index in 0..constants.pipeline_offset + 1 {
                     let index = index as usize;
                     let epoch = pre.last_update() + index;
@@ -1752,16 +1435,16 @@ where
                             for slash in &slashes {
                                 if slash.epoch >= *start_epoch {
                                     let raw_delta: u64 = delta.into();
-                                    let current_slashed = TokenAmount::from(
+                                    let current_slashed = token::Amount::from(
                                         decimal_mult_u64(slash.rate, raw_delta),
                                     );
                                     delta -= current_slashed;
                                 }
                             }
-                            let delta = TokenChange::from(delta);
+                            let delta = token::Change::from(delta);
                             total_delta -= delta
                         }
-                        total_delta += TokenChange::from(bond.neg_deltas)
+                        total_delta += token::Change::from(bond.neg_deltas)
                     }
                 }
                 let bond_entry = bond_delta.entry(id.validator).or_default();
@@ -1773,10 +1456,10 @@ where
 
     fn unbond(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        unbond_delta: &mut HashMap<Address, TokenChange>,
-        id: BondId<Address>,
-        data: Data<Unbonds<TokenAmount>>,
+        errors: &mut Vec<Error>,
+        unbond_delta: &mut HashMap<Address, token::Change>,
+        id: BondId,
+        data: Data<Unbonds>,
         slashes: Vec<Slash>,
     ) {
         match (data.pre, data.post) {
@@ -1800,7 +1483,7 @@ where
 
                 // We have to slash only the difference between post and
                 // pre, not both pre and post to avoid rounding errors
-                let mut slashed_deltas: HashMap<(Epoch, Epoch), TokenChange> =
+                let mut slashed_deltas: HashMap<(Epoch, Epoch), token::Change> =
                     HashMap::default();
                 // Iter from the first epoch of `pre` to the last epoch of
                 // `post`
@@ -1812,7 +1495,7 @@ where
                         for ((start_epoch, end_epoch), delta) in
                             unbond.deltas.iter()
                         {
-                            let delta = TokenChange::from(*delta);
+                            let delta = token::Change::from(*delta);
                             slashed_deltas
                                 .insert((*start_epoch, *end_epoch), -delta);
                         }
@@ -1821,11 +1504,11 @@ where
                         for ((start_epoch, end_epoch), delta) in
                             unbond.deltas.iter()
                         {
-                            let delta = TokenChange::from(*delta);
+                            let delta = token::Change::from(*delta);
                             let key = (*start_epoch, *end_epoch);
                             match slashed_deltas.get_mut(&key) {
                                 Some(pre_delta) => {
-                                    if *pre_delta + delta == 0_i128.into() {
+                                    if *pre_delta + delta == 0_i128 {
                                         slashed_deltas.remove(&key);
                                     } else {
                                         *pre_delta += delta;
@@ -1846,18 +1529,17 @@ where
                         if slash.epoch >= *start_epoch
                             && slash.epoch <= *end_epoch
                         {
-                            let raw_delta: i128 = (*delta).into();
-                            let current_slashed = TokenChange::from(
-                                decimal_mult_i128(slash.rate, raw_delta),
-                            );
+                            let raw_delta: i128 = *delta;
+                            let current_slashed =
+                                decimal_mult_i128(slash.rate, raw_delta);
                             *delta -= current_slashed;
                         }
                     }
                 }
                 let total = slashed_deltas
                     .values()
-                    .fold(TokenChange::default(), |acc, delta| acc + *delta);
-                if total != TokenChange::default() {
+                    .fold(token::Change::default(), |acc, delta| acc + *delta);
+                if total != token::Change::default() {
                     let unbond_entry =
                         unbond_delta.entry(id.validator).or_default();
                     *unbond_entry += total;
@@ -1868,7 +1550,7 @@ where
                 if post.last_update() != constants.current_epoch {
                     errors.push(Error::InvalidLastUpdate)
                 }
-                let mut total_delta = TokenChange::default();
+                let mut total_delta = token::Change::default();
                 for epoch in Epoch::iter_range(
                     post.last_update(),
                     constants.unbonding_offset + 1,
@@ -1884,13 +1566,13 @@ where
                                     && slash.epoch <= *end_epoch
                                 {
                                     let raw_delta: u64 = delta.into();
-                                    let current_slashed = TokenAmount::from(
+                                    let current_slashed = token::Amount::from(
                                         decimal_mult_u64(slash.rate, raw_delta),
                                     );
                                     delta -= current_slashed;
                                 }
                             }
-                            let delta = TokenChange::from(delta);
+                            let delta = token::Change::from(delta);
                             total_delta += delta;
                         }
                     }
@@ -1901,7 +1583,7 @@ where
             }
             // Unbond may be deleted when all the tokens are withdrawn
             (Some(pre), None) => {
-                let mut total_delta = TokenChange::default();
+                let mut total_delta = token::Change::default();
                 for epoch in Epoch::iter_range(
                     pre.last_update(),
                     constants.unbonding_offset + 1,
@@ -1917,13 +1599,13 @@ where
                                     && slash.epoch <= *end_epoch
                                 {
                                     let raw_delta: u64 = delta.into();
-                                    let current_slashed = TokenAmount::from(
+                                    let current_slashed = token::Amount::from(
                                         decimal_mult_u64(slash.rate, raw_delta),
                                     );
                                     delta -= current_slashed;
                                 }
                             }
-                            let delta = TokenChange::from(delta);
+                            let delta = token::Change::from(delta);
                             total_delta -= delta;
                         }
                     }
@@ -1938,10 +1620,10 @@ where
 
     fn validator_set(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        validator_set_pre: &mut Option<ValidatorSets<Address>>,
-        validator_set_post: &mut Option<ValidatorSets<Address>>,
-        data: Data<ValidatorSets<Address>>,
+        errors: &mut Vec<Error>,
+        validator_set_pre: &mut Option<ValidatorSets>,
+        validator_set_post: &mut Option<ValidatorSets>,
+        data: Data<ValidatorSets>,
     ) {
         match (data.pre, data.post) {
             (Some(pre), Some(post)) => {
@@ -1957,9 +1639,9 @@ where
 
     fn total_deltas(
         constants: &Constants,
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        total_delta_by_epoch: &mut HashMap<Epoch, TokenChange>,
-        data: Data<TotalDeltas<TokenChange>>,
+        errors: &mut Vec<Error>,
+        total_delta_by_epoch: &mut HashMap<Epoch, token::Change>,
+        data: Data<TotalDeltas>,
     ) {
         match (data.pre, data.post) {
             (Some(pre), Some(post)) => {
@@ -1998,8 +1680,8 @@ where
     }
 
     fn validator_address_raw_hash(
-        errors: &mut Vec<Error<Address, TokenChange, PublicKey>>,
-        new_validators: &mut HashMap<Address, NewValidator<PublicKey>>,
+        errors: &mut Vec<Error>,
+        new_validators: &mut HashMap<Address, NewValidator>,
         raw_hash: String,
         data: Data<Address>,
     ) {
diff --git a/shared/src/ledger/pos/mod.rs b/shared/src/ledger/pos/mod.rs
index db16c3eebf..2878f89fb2 100644
--- a/shared/src/ledger/pos/mod.rs
+++ b/shared/src/ledger/pos/mod.rs
@@ -5,9 +5,7 @@ pub mod vp;
 pub use namada_proof_of_stake;
 pub use namada_proof_of_stake::parameters::PosParams;
 pub use namada_proof_of_stake::storage::*;
-pub use namada_proof_of_stake::types::{
-    self, decimal_mult_u64, Slash, Slashes, ValidatorStates,
-};
+pub use namada_proof_of_stake::types;
 use namada_proof_of_stake::PosBase;
 use rust_decimal::Decimal;
 pub use vp::PosVP;
@@ -15,7 +13,6 @@ pub use vp::PosVP;
 use crate::ledger::storage::{self as ledger_storage, Storage, StorageHasher};
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::Epoch;
-use crate::types::{key, token};
 
 /// Address of the PoS account implemented as a native VP
 pub const ADDRESS: Address = Address::Internal(InternalAddress::PoS);
@@ -48,38 +45,3 @@ pub fn init_genesis_storage<'a, DB, H>(
         .init_genesis(params, validators, current_epoch)
         .expect("Initialize PoS genesis storage")
 }
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorConsensusKeys =
-    namada_proof_of_stake::types::ValidatorConsensusKeys<
-        key::common::PublicKey,
-    >;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorDeltas =
-    namada_proof_of_stake::types::ValidatorDeltas<token::Change>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type Bonds = namada_proof_of_stake::types::Bonds<token::Amount>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type Unbonds = namada_proof_of_stake::types::Unbonds<token::Amount>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type ValidatorSets = namada_proof_of_stake::types::ValidatorSets<Address>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type BondId = namada_proof_of_stake::types::BondId<Address>;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type GenesisValidator = namada_proof_of_stake::types::GenesisValidator<
-    Address,
-    token::Amount,
-    key::common::PublicKey,
->;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type CommissionRates = namada_proof_of_stake::types::CommissionRates;
-
-/// Alias for a PoS type with the same name with concrete type parameters
-pub type TotalDeltas = namada_proof_of_stake::types::TotalDeltas<token::Change>;
diff --git a/shared/src/ledger/pos/vp.rs b/shared/src/ledger/pos/vp.rs
index 0094ad9b5c..e1b13648d0 100644
--- a/shared/src/ledger/pos/vp.rs
+++ b/shared/src/ledger/pos/vp.rs
@@ -32,7 +32,7 @@ use crate::ledger::pos::{
     is_validator_max_commission_rate_change_key, is_validator_state_key,
 };
 use crate::ledger::storage::{self as ledger_storage, StorageHasher};
-use crate::ledger::storage_api::{self, StorageRead};
+use crate::ledger::storage_api::StorageRead;
 use crate::types::address::{Address, InternalAddress};
 use crate::types::storage::{Key, KeySeg};
 use crate::types::token;
@@ -116,7 +116,7 @@ where
         use validation::ValidatorUpdate::*;
 
         let addr = Address::Internal(Self::ADDR);
-        let mut changes: Vec<DataUpdate<_, _, _, _>> = vec![];
+        let mut changes: Vec<DataUpdate> = vec![];
         let current_epoch = self.ctx.pre().get_block_epoch()?;
         let staking_token_address = self.ctx.pre().get_native_token()?;
 
@@ -305,7 +305,6 @@ where
 }
 
 impl_pos_read_only! {
-    type Error = storage_api::Error;
     impl<'f, 'a, DB, H, CA> PosReadOnly for CtxPreStorageRead<'f, 'a, DB, H, CA>
         where
             DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter> +'static,
@@ -314,7 +313,6 @@ impl_pos_read_only! {
 }
 
 impl_pos_read_only! {
-    type Error = storage_api::Error;
     impl<'f, 'a, DB, H, CA> PosReadOnly for CtxPostStorageRead<'f, 'a, DB, H, CA>
         where
             DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter> +'static,
diff --git a/tests/src/native_vp/pos.rs b/tests/src/native_vp/pos.rs
index aff88abef9..69b7502a50 100644
--- a/tests/src/native_vp/pos.rs
+++ b/tests/src/native_vp/pos.rs
@@ -126,11 +126,7 @@ pub fn init_pos(
         // Initialize PoS storage
         tx_env
             .storage
-            .init_genesis(
-                params,
-                genesis_validators.iter(),
-                u64::from(start_epoch),
-            )
+            .init_genesis(params, genesis_validators.iter(), start_epoch)
             .unwrap();
     });
 }
diff --git a/tx_prelude/src/proof_of_stake.rs b/tx_prelude/src/proof_of_stake.rs
index 003f7302e0..c611c6780e 100644
--- a/tx_prelude/src/proof_of_stake.rs
+++ b/tx_prelude/src/proof_of_stake.rs
@@ -1,5 +1,6 @@
 //! Proof of Stake system integration with functions for transactions
 
+use namada_core::types::key::common;
 use namada_core::types::transaction::InitValidator;
 use namada_core::types::{key, token};
 pub use namada_proof_of_stake::parameters::PosParams;
@@ -127,70 +128,63 @@ impl Ctx {
 }
 
 namada_proof_of_stake::impl_pos_read_only! {
-    type Error = crate::Error;
     impl namada_proof_of_stake::PosReadOnly for Ctx
 }
 
 impl namada_proof_of_stake::PosActions for Ctx {
-    type BecomeValidatorError = crate::Error;
-    type BondError = crate::Error;
-    type CommissionRateChangeError = crate::Error;
-    type UnbondError = crate::Error;
-    type WithdrawError = crate::Error;
-
     fn write_pos_params(
         &mut self,
         params: &PosParams,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&params_key(), params)
     }
 
     fn write_validator_address_raw_hash(
         &mut self,
-        address: &Self::Address,
-        consensus_key: &Self::PublicKey,
-    ) -> Result<(), Self::Error> {
+        address: &Address,
+        consensus_key: &common::PublicKey,
+    ) -> storage_api::Result<()> {
         let raw_hash = key::tm_consensus_key_raw_hash(consensus_key);
         self.write(&validator_address_raw_hash_key(raw_hash), address)
     }
 
     fn write_validator_consensus_key(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: ValidatorConsensusKeys,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_consensus_key_key(key), &value)
     }
 
     fn write_validator_state(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: ValidatorStates,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_state_key(key), &value)
     }
 
     fn write_validator_commission_rate(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: CommissionRates,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_commission_rate_key(key), &value)
     }
 
     fn write_validator_max_commission_rate_change(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: Decimal,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_max_commission_rate_change_key(key), value)
     }
 
     fn write_validator_deltas(
         &mut self,
-        key: &Self::Address,
+        key: &Address,
         value: ValidatorDeltas,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_deltas_key(key), &value)
     }
 
@@ -198,7 +192,7 @@ impl namada_proof_of_stake::PosActions for Ctx {
         &mut self,
         key: &BondId,
         value: Bonds,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&bond_key(key), &value)
     }
 
@@ -206,39 +200,39 @@ impl namada_proof_of_stake::PosActions for Ctx {
         &mut self,
         key: &BondId,
         value: Unbonds,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&unbond_key(key), &value)
     }
 
     fn write_validator_set(
         &mut self,
         value: ValidatorSets,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&validator_set_key(), &value)
     }
 
     fn write_total_deltas(
         &mut self,
         value: TotalDeltas,
-    ) -> Result<(), Self::Error> {
+    ) -> storage_api::Result<()> {
         self.write(&total_deltas_key(), &value)
     }
 
-    fn delete_bond(&mut self, key: &BondId) -> Result<(), Self::Error> {
+    fn delete_bond(&mut self, key: &BondId) -> storage_api::Result<()> {
         self.delete(&bond_key(key))
     }
 
-    fn delete_unbond(&mut self, key: &BondId) -> Result<(), Self::Error> {
+    fn delete_unbond(&mut self, key: &BondId) -> storage_api::Result<()> {
         self.delete(&unbond_key(key))
     }
 
     fn transfer(
         &mut self,
-        token: &Self::Address,
-        amount: Self::TokenAmount,
-        src: &Self::Address,
-        dest: &Self::Address,
-    ) -> Result<(), Self::Error> {
+        token: &Address,
+        amount: token::Amount,
+        src: &Address,
+        dest: &Address,
+    ) -> storage_api::Result<()> {
         crate::token::transfer(
             self, src, dest, token, None, amount, &None, &None,
         )
diff --git a/wasm/wasm_source/src/tx_bond.rs b/wasm/wasm_source/src/tx_bond.rs
index 894aa40763..5e5eeb474a 100644
--- a/wasm/wasm_source/src/tx_bond.rs
+++ b/wasm/wasm_source/src/tx_bond.rs
@@ -203,7 +203,7 @@ mod tests {
             // A delegation is applied at pipeline offset
             // Check that bond is empty before pipeline offset
             for epoch in 0..pos_params.pipeline_len {
-                let bond: Option<Bond<token::Amount>> = bonds_post.get(epoch);
+                let bond: Option<Bond> = bonds_post.get(epoch);
                 assert!(
                     bond.is_none(),
                     "Delegation before pipeline offset should be empty - \
@@ -212,13 +212,10 @@ mod tests {
             }
             // Check that bond is updated after the pipeline length
             for epoch in pos_params.pipeline_len..=pos_params.unbonding_len {
-                let start_epoch =
-                    namada_tx_prelude::proof_of_stake::types::Epoch::from(
-                        pos_params.pipeline_len,
-                    );
+                let start_epoch = Epoch::from(pos_params.pipeline_len);
                 let expected_bond =
                     HashMap::from_iter([(start_epoch, bond.amount)]);
-                let bond: Bond<token::Amount> = bonds_post.get(epoch).unwrap();
+                let bond: Bond = bonds_post.get(epoch).unwrap();
                 assert_eq!(
                     bond.pos_deltas, expected_bond,
                     "Delegation at and after pipeline offset should be equal \
@@ -229,12 +226,11 @@ mod tests {
             // This is a self-bond
             // Check that a bond already exists from genesis with initial stake
             // for the validator
-            let genesis_epoch =
-                namada_tx_prelude::proof_of_stake::types::Epoch::from(0);
+            let genesis_epoch = Epoch::from(0);
             for epoch in 0..pos_params.pipeline_len {
                 let expected_bond =
                     HashMap::from_iter([(genesis_epoch, initial_stake)]);
-                let bond: Bond<token::Amount> = bonds_post
+                let bond: Bond = bonds_post
                     .get(epoch)
                     .expect("Genesis validator should already have self-bond");
                 assert_eq!(
@@ -245,15 +241,12 @@ mod tests {
             }
             // Check that the bond is updated after the pipeline length
             for epoch in pos_params.pipeline_len..=pos_params.unbonding_len {
-                let start_epoch =
-                    namada_tx_prelude::proof_of_stake::types::Epoch::from(
-                        pos_params.pipeline_len,
-                    );
+                let start_epoch = Epoch::from(pos_params.pipeline_len);
                 let expected_bond = HashMap::from_iter([
                     (genesis_epoch, initial_stake),
                     (start_epoch, bond.amount),
                 ]);
-                let bond: Bond<token::Amount> = bonds_post.get(epoch).unwrap();
+                let bond: Bond = bonds_post.get(epoch).unwrap();
                 assert_eq!(
                     bond.pos_deltas, expected_bond,
                     "Self-bond at and after pipeline offset should contain \
diff --git a/wasm/wasm_source/src/tx_unbond.rs b/wasm/wasm_source/src/tx_unbond.rs
index dbc650d056..d7a6431243 100644
--- a/wasm/wasm_source/src/tx_unbond.rs
+++ b/wasm/wasm_source/src/tx_unbond.rs
@@ -250,7 +250,7 @@ mod tests {
         let unbonds_post = ctx().read_unbond(&unbond_id)?.unwrap();
         let bonds_post = ctx().read_bond(&unbond_id)?.unwrap();
         for epoch in 0..pos_params.unbonding_len {
-            let unbond: Option<Unbond<token::Amount>> = unbonds_post.get(epoch);
+            let unbond: Option<Unbond> = unbonds_post.get(epoch);
 
             assert!(
                 unbond.is_none(),
@@ -262,22 +262,18 @@ mod tests {
         let start_epoch = match &unbond.source {
             Some(_) => {
                 // This bond was a delegation
-                namada_tx_prelude::proof_of_stake::types::Epoch::from(
-                    pos_params.pipeline_len,
-                )
+                Epoch::from(pos_params.pipeline_len)
             }
             None => {
                 // This bond was a genesis validator self-bond
-                namada_tx_prelude::proof_of_stake::types::Epoch::default()
+                Epoch::default()
             }
         };
-        let end_epoch = namada_tx_prelude::proof_of_stake::types::Epoch::from(
-            pos_params.unbonding_len - 1,
-        );
+        let end_epoch = Epoch::from(pos_params.unbonding_len - 1);
 
         let expected_unbond =
             HashMap::from_iter([((start_epoch, end_epoch), unbond.amount)]);
-        let actual_unbond: Unbond<token::Amount> =
+        let actual_unbond: Unbond =
             unbonds_post.get(pos_params.unbonding_len).unwrap();
         assert_eq!(
             actual_unbond.deltas, expected_unbond,
@@ -286,7 +282,7 @@ mod tests {
         );
 
         for epoch in pos_params.pipeline_len..pos_params.unbonding_len {
-            let bond: Bond<token::Amount> = bonds_post.get(epoch).unwrap();
+            let bond: Bond = bonds_post.get(epoch).unwrap();
             let expected_bond =
                 HashMap::from_iter([(start_epoch, initial_stake)]);
             assert_eq!(
@@ -297,7 +293,7 @@ mod tests {
         }
         {
             let epoch = pos_params.unbonding_len + 1;
-            let bond: Bond<token::Amount> = bonds_post.get(epoch).unwrap();
+            let bond: Bond = bonds_post.get(epoch).unwrap();
             let expected_bond =
                 HashMap::from_iter([(start_epoch, initial_stake)]);
             assert_eq!(

From cbec11a170639cd621a5d1f08c722c490ddf697c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Zemanovi=C4=8D?= <tomas@heliax.dev>
Date: Mon, 21 Nov 2022 16:22:12 +0100
Subject: [PATCH 4/9] docs/overview: update deps graph and add a generated one
 too

---
 .../dev/src/explore/design/overview.md        |  12 +
 .../explore/design/overview/crates.excalidraw | 907 +++++++++++++-----
 .../src/explore/design/overview/crates.svg    |   4 +-
 .../overview/root-workspace-cargo-udeps.png   | Bin 0 -> 34486 bytes
 4 files changed, 696 insertions(+), 227 deletions(-)
 create mode 100644 documentation/dev/src/explore/design/overview/root-workspace-cargo-udeps.png

diff --git a/documentation/dev/src/explore/design/overview.md b/documentation/dev/src/explore/design/overview.md
index 9e6d419774..936529521e 100644
--- a/documentation/dev/src/explore/design/overview.md
+++ b/documentation/dev/src/explore/design/overview.md
@@ -8,3 +8,15 @@ The Rust crates internal dependency graph:
 
 ![crates](./overview/crates.svg  "crates")
 [Diagram on Excalidraw](https://excalidraw.com/#room=e32fc914de750ed4f5e4,6CWRFjnmCoiFR4BQ6i9K4g)
+
+## Graph for root workspace
+
+![Workspace dependency graph](./overview/root-workspace-cargo-udeps.png  "Workspace dependency graph")
+
+Generated with:
+
+```shell
+# Uses https://crates.io/crates/cargo-depgraph
+# From root dir:
+cargo depgraph --workspace-only --dedup-transitive-deps --dev-deps | dot -Tpng > documentation/dev/src/explore/design/overview/root-workspace-cargo-udeps.png
+```
diff --git a/documentation/dev/src/explore/design/overview/crates.excalidraw b/documentation/dev/src/explore/design/overview/crates.excalidraw
index e305dfa088..bfe54c6199 100644
--- a/documentation/dev/src/explore/design/overview/crates.excalidraw
+++ b/documentation/dev/src/explore/design/overview/crates.excalidraw
@@ -5,8 +5,8 @@
   "elements": [
     {
       "type": "arrow",
-      "version": 940,
-      "versionNonce": 1634924291,
+      "version": 1755,
+      "versionNonce": 948744878,
       "isDeleted": false,
       "id": "XW8p0b2UGBcU4qhuM50S5",
       "fillStyle": "hachure",
@@ -15,26 +15,28 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 234.4292897735185,
-      "y": 27.074492275773665,
+      "x": 371.50540630660794,
+      "y": -65.3168707300475,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 46.74897051875004,
-      "height": 31.84747245633222,
+      "width": 98.62862494125517,
+      "height": 133.51589623592426,
       "seed": 244640995,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978981591,
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "TPinNTC84gdo4Heiyfrcl",
-        "gap": 7.842259549321174,
-        "focus": -0.23457593568181878
+        "gap": 4.184665686509593,
+        "focus": -0.24051015553247612
       },
       "endBinding": {
-        "elementId": "Vv4I15UDLDULEN3MM7cWR",
-        "gap": 6.876060164133929,
-        "focus": 0.014193953434318363
+        "elementId": "sXto6tFtVz5VWUddCdZEa",
+        "gap": 2.0846931092448173,
+        "focus": -0.6672657348088921
       },
       "lastCommittedPoint": null,
       "startArrowhead": null,
@@ -45,15 +47,15 @@
           0
         ],
         [
-          -46.74897051875004,
-          -31.84747245633222
+          -98.62862494125517,
+          -133.51589623592426
         ]
       ]
     },
     {
       "type": "text",
-      "version": 562,
-      "versionNonce": 1411527693,
+      "version": 598,
+      "versionNonce": 1489948718,
       "isDeleted": false,
       "id": "lZbFKRT7NFNBN-N0PEGt4",
       "fillStyle": "hachure",
@@ -62,8 +64,8 @@
       "roughness": 1,
       "opacity": 100,
       "angle": 0,
-      "x": 282.7198361155589,
-      "y": 168.66666666666663,
+      "x": 388.15489582721045,
+      "y": 125.41125755419421,
       "strokeColor": "#0008",
       "backgroundColor": "transparent",
       "width": 225,
@@ -89,7 +91,9 @@
           "id": "j6R5PVZmpe0pg3dobMg_R"
         }
       ],
-      "updated": 1638978994165,
+      "updated": 1668096815149,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "sdk\n(generated by the node) \nnot yet implemented",
@@ -97,12 +101,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "sdk\n(generated by the node) \nnot yet implemented"
     },
     {
       "type": "arrow",
-      "version": 762,
-      "versionNonce": 1801214061,
+      "version": 763,
+      "versionNonce": 234761454,
       "isDeleted": false,
       "id": "N3WOXF2nAY4GBP26E12jj",
       "fillStyle": "hachure",
@@ -121,7 +125,9 @@
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645308,
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "XCGaurOdV80qWvLWgvMKA",
         "focus": -0.3858314613263561,
@@ -148,8 +154,8 @@
     },
     {
       "type": "rectangle",
-      "version": 118,
-      "versionNonce": 1233481955,
+      "version": 119,
+      "versionNonce": 2046419186,
       "isDeleted": false,
       "id": "XCGaurOdV80qWvLWgvMKA",
       "fillStyle": "hachure",
@@ -173,12 +179,14 @@
           "id": "N3WOXF2nAY4GBP26E12jj"
         }
       ],
-      "updated": 1638978645308
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false
     },
     {
       "type": "text",
-      "version": 288,
-      "versionNonce": 275825357,
+      "version": 289,
+      "versionNonce": 600203054,
       "isDeleted": false,
       "id": "2Uv1LCKqBFINap90QX39G",
       "fillStyle": "hachure",
@@ -206,7 +214,9 @@
           "id": "N3WOXF2nAY4GBP26E12jj"
         }
       ],
-      "updated": 1638978645308,
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "apps \n(node/client/broadcaster)",
@@ -214,12 +224,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "apps \n(node/client/broadcaster)"
     },
     {
       "type": "text",
-      "version": 430,
-      "versionNonce": 2114673795,
+      "version": 467,
+      "versionNonce": 699724466,
       "isDeleted": false,
       "id": "q1u2GL7wVw4Exe4X5PaPJ",
       "fillStyle": "hachure",
@@ -228,12 +238,12 @@
       "roughness": 1,
       "opacity": 100,
       "angle": 0,
-      "x": -30,
-      "y": -61,
+      "x": -44.449427927178476,
+      "y": -22.73741825201381,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 328,
-      "height": 39,
+      "width": 58,
+      "height": 20,
       "seed": 1199188794,
       "groupIds": [],
       "strokeSharpness": "sharp",
@@ -243,20 +253,22 @@
           "id": "XW8p0b2UGBcU4qhuM50S5"
         }
       ],
-      "updated": 1638978645308,
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
-      "text": "shared\n(has to be able to compile to wasm)",
-      "baseline": 35,
+      "text": "shared",
+      "baseline": 16,
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "shared"
     },
     {
       "type": "text",
-      "version": 380,
-      "versionNonce": 1078518061,
+      "version": 382,
+      "versionNonce": 776882542,
       "isDeleted": false,
       "id": "iXim05PslRfMMcHgO5M77",
       "fillStyle": "hachure",
@@ -265,7 +277,7 @@
       "roughness": 1,
       "opacity": 100,
       "angle": 0,
-      "x": 262.28822012513956,
+      "x": 263.5595295334808,
       "y": 409.9572198364548,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
@@ -284,7 +296,9 @@
           "id": "vjztA9aT9wXo-Mz6v8PpC"
         }
       ],
-      "updated": 1638978645308,
+      "updated": 1667837597361,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "wasm\n(tx/vp/mm/mm_filter)",
@@ -292,12 +306,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "wasm\n(tx/vp/mm/mm_filter)"
     },
     {
       "type": "text",
-      "version": 392,
-      "versionNonce": 1880902221,
+      "version": 472,
+      "versionNonce": 1537723506,
       "isDeleted": false,
       "id": "7hkmbpKqpRG1mw7i72ETt",
       "fillStyle": "hachure",
@@ -306,8 +320,8 @@
       "roughness": 1,
       "opacity": 100,
       "angle": 0,
-      "x": 250.93816429817616,
-      "y": 43.98982838512771,
+      "x": 347.7305845731653,
+      "y": -51.493336313717776,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
       "width": 131,
@@ -321,7 +335,9 @@
           "id": "qGwRshNFhRxZVbc_9XoOK"
         }
       ],
-      "updated": 1638978958291,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "vm_env\n(wasm imports)",
@@ -329,12 +345,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "vm_env\n(wasm imports)"
     },
     {
       "type": "arrow",
-      "version": 1508,
-      "versionNonce": 1553870509,
+      "version": 1575,
+      "versionNonce": 267273010,
       "isDeleted": false,
       "id": "qGwRshNFhRxZVbc_9XoOK",
       "fillStyle": "hachure",
@@ -343,17 +359,19 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 262.5545524579702,
-      "y": 396.8364074603423,
+      "x": 264.4192089775041,
+      "y": 396.83640746034223,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 31.03570718753258,
-      "height": 149.5928584864882,
+      "width": 110.76901556861083,
+      "height": 190.05066613958203,
       "seed": 1454454153,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978981591,
+      "updated": 1668096811050,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "Wzbm5P1iAViA47cEtf8fo",
         "gap": 2.0964817197048013,
@@ -373,15 +391,15 @@
           0
         ],
         [
-          31.03570718753258,
-          -149.5928584864882
+          110.76901556861083,
+          -190.05066613958203
         ]
       ]
     },
     {
       "type": "arrow",
-      "version": 1768,
-      "versionNonce": 1721966243,
+      "version": 2121,
+      "versionNonce": 1563640178,
       "isDeleted": false,
       "id": "vjztA9aT9wXo-Mz6v8PpC",
       "fillStyle": "hachure",
@@ -390,26 +408,28 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 5.789250827564958,
-      "x": 321.9782879333155,
-      "y": 154.088665021562,
+      "x": 421.4883473764835,
+      "y": 112.31311341902148,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 1.9590387446170325,
-      "height": 60.524866420498,
+      "width": 41.19080273092942,
+      "height": 106.34212703358264,
       "seed": 2120846791,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978981592,
+      "updated": 1668096811050,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "ar_8ezfEs1dcv1WTLbxpz",
-        "focus": -0.20986675637201763,
-        "gap": 7.748380459840945
+        "focus": -0.3542919748085393,
+        "gap": 3.7621189181977
       },
       "endBinding": {
         "elementId": "TPinNTC84gdo4Heiyfrcl",
-        "focus": 0.2559196066333591,
-        "gap": 3.3655914385186065
+        "focus": -0.11786163970350894,
+        "gap": 4.694345285733192
       },
       "lastCommittedPoint": null,
       "startArrowhead": null,
@@ -420,15 +440,15 @@
           0
         ],
         [
-          1.9590387446170325,
-          -60.524866420498
+          41.19080273092942,
+          -106.34212703358264
         ]
       ]
     },
     {
       "type": "text",
-      "version": 429,
-      "versionNonce": 297558701,
+      "version": 430,
+      "versionNonce": 1302414830,
       "isDeleted": false,
       "id": "5P-9jtz0VpZvFi_qlrfFI",
       "fillStyle": "hachure",
@@ -460,7 +480,9 @@
           "id": "N3WOXF2nAY4GBP26E12jj"
         }
       ],
-      "updated": 1638978645308,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "apps lib",
@@ -468,12 +490,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "apps lib"
     },
     {
       "type": "arrow",
-      "version": 772,
-      "versionNonce": 1085418147,
+      "version": 948,
+      "versionNonce": 1282463730,
       "isDeleted": false,
       "id": "lLOOrmC-Vjj7oIfMlYSb8",
       "fillStyle": "hachure",
@@ -482,26 +504,28 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": -28.858815305624397,
-      "y": 93.34045003053473,
+      "x": -38.54189360985094,
+      "y": 103.78123845818921,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 27.182921232842055,
-      "height": 99.9429435430508,
+      "width": 7.177806850155868,
+      "height": 90.8090017075307,
       "seed": 959014413,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645308,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "EbkJ-mbPprZhdzO3Ukel2",
-        "gap": 10.542003765397801,
-        "focus": -0.28644471223848134
+        "focus": -0.30160028144413814,
+        "gap": 1
       },
       "endBinding": {
         "elementId": "Vv4I15UDLDULEN3MM7cWR",
-        "gap": 5.046546832176402,
-        "focus": 0.7161156739729357
+        "focus": 0.7145906691885945,
+        "gap": 3.8928697489783417
       },
       "lastCommittedPoint": null,
       "startArrowhead": null,
@@ -512,15 +536,15 @@
           0
         ],
         [
-          27.182921232842055,
-          -99.9429435430508
+          -7.177806850155868,
+          -90.8090017075307
         ]
       ]
     },
     {
       "type": "rectangle",
-      "version": 194,
-      "versionNonce": 489430499,
+      "version": 250,
+      "versionNonce": 295589934,
       "isDeleted": false,
       "id": "Vv4I15UDLDULEN3MM7cWR",
       "fillStyle": "hachure",
@@ -529,12 +553,12 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": -35.64597821935695,
-      "y": -70.64904034469248,
+      "x": -60.88566731675917,
+      "y": -36.43280653554018,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 346,
-      "height": 59,
+      "width": 100.52155837741054,
+      "height": 45.51217353722035,
       "seed": 479498130,
       "groupIds": [],
       "strokeSharpness": "sharp",
@@ -550,14 +574,20 @@
         {
           "type": "arrow",
           "id": "gjvzsG78lEToLfHSwix2l"
+        },
+        {
+          "id": "1PrWlfvRp8Ry560A9Glzp",
+          "type": "arrow"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "rectangle",
-      "version": 97,
-      "versionNonce": 832005581,
+      "version": 99,
+      "versionNonce": 540346802,
       "isDeleted": false,
       "id": "EbkJ-mbPprZhdzO3Ukel2",
       "fillStyle": "hachure",
@@ -589,12 +619,14 @@
           "id": "Q-Lc8vIaRv7dEQF96Es40"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "rectangle",
-      "version": 186,
-      "versionNonce": 1802791885,
+      "version": 290,
+      "versionNonce": 1891574382,
       "isDeleted": false,
       "id": "TPinNTC84gdo4Heiyfrcl",
       "fillStyle": "hachure",
@@ -603,8 +635,8 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 237.35402178064305,
-      "y": 34.35095965530752,
+      "x": 340.870326487958,
+      "y": -61.13220504353791,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
       "width": 160.00000000000003,
@@ -630,12 +662,14 @@
           "id": "VyqgKewhv649Rl_VgfMCi"
         }
       ],
-      "updated": 1638978965417
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "rectangle",
-      "version": 260,
-      "versionNonce": 1071718765,
+      "version": 285,
+      "versionNonce": 1738723890,
       "isDeleted": false,
       "id": "ar_8ezfEs1dcv1WTLbxpz",
       "fillStyle": "hachure",
@@ -644,8 +678,8 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 270.4373551139763,
-      "y": 158.68429298864083,
+      "x": 379.9276094299221,
+      "y": 119.48407848046276,
       "strokeColor": "#0008",
       "backgroundColor": "transparent",
       "width": 237,
@@ -679,12 +713,14 @@
           "id": "i1YmU9V2mNKEn1n-x42MI"
         }
       ],
-      "updated": 1638978965416
+      "updated": 1668096819675,
+      "link": null,
+      "locked": false
     },
     {
       "type": "rectangle",
-      "version": 258,
-      "versionNonce": 1605517603,
+      "version": 259,
+      "versionNonce": 1036821678,
       "isDeleted": false,
       "id": "Wzbm5P1iAViA47cEtf8fo",
       "fillStyle": "hachure",
@@ -712,12 +748,14 @@
           "id": "qTCZ_7N0fuYegT9jZLwYS"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "text",
-      "version": 560,
-      "versionNonce": 1068083432,
+      "version": 750,
+      "versionNonce": 1312802098,
       "isDeleted": false,
       "id": "3K5BlHfHmWQqJECqYjhrw",
       "fillStyle": "hachure",
@@ -726,11 +764,11 @@
       "roughness": 1,
       "opacity": 100,
       "angle": 0,
-      "x": 327.6412405007913,
-      "y": -61.5,
+      "x": 343.82663225612686,
+      "y": -242.2368746012472,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 179,
+      "width": 180,
       "height": 39,
       "seed": 1395949267,
       "groupIds": [],
@@ -739,9 +777,15 @@
         {
           "type": "arrow",
           "id": "XW8p0b2UGBcU4qhuM50S5"
+        },
+        {
+          "id": "aFB1JfXdwInJtc3gPYN7J",
+          "type": "arrow"
         }
       ],
-      "updated": 1640603531311,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "macros\n(procedural macros)",
@@ -753,8 +797,8 @@
     },
     {
       "type": "rectangle",
-      "version": 278,
-      "versionNonce": 246686915,
+      "version": 505,
+      "versionNonce": 175103726,
       "isDeleted": false,
       "id": "NsweUiJ4jKgjdA0qcz00O",
       "fillStyle": "hachure",
@@ -763,8 +807,8 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 321.99526228143435,
-      "y": -71.14904034469248,
+      "x": 339.54946548211154,
+      "y": -252.67389121843763,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
       "width": 186.00000000000003,
@@ -786,12 +830,14 @@
           "id": "aFB1JfXdwInJtc3gPYN7J"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "arrow",
-      "version": 1011,
-      "versionNonce": 482577251,
+      "version": 1973,
+      "versionNonce": 2083802866,
       "isDeleted": false,
       "id": "aFB1JfXdwInJtc3gPYN7J",
       "fillStyle": "hachure",
@@ -800,26 +846,28 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 349.1354167882846,
-      "y": 23.491799798579976,
+      "x": 445.0792502855618,
+      "y": -63.670865022407355,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 46.682726805305435,
-      "height": 28.281680286544898,
+      "width": 3.2579955879015756,
+      "height": 129.00302619603028,
       "seed": 190431859,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978958370,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "TPinNTC84gdo4Heiyfrcl",
-        "focus": -0.2707765433519321,
-        "gap": 10.859159856727544
+        "focus": 0.3098397968260942,
+        "gap": 2.5386599788694326
       },
       "endBinding": {
         "elementId": "NsweUiJ4jKgjdA0qcz00O",
-        "focus": -0.2940419127098849,
-        "gap": 7.359159856727558
+        "focus": -0.09068754025912654,
+        "gap": 1
       },
       "lastCommittedPoint": null,
       "startArrowhead": null,
@@ -830,15 +878,15 @@
           0
         ],
         [
-          46.682726805305435,
-          -28.281680286544898
+          -3.2579955879015756,
+          -129.00302619603028
         ]
       ]
     },
     {
       "type": "rectangle",
-      "version": 127,
-      "versionNonce": 922260579,
+      "version": 128,
+      "versionNonce": 1261556014,
       "isDeleted": false,
       "id": "4VBZ0MkHPtewJl2Z2t0Bg",
       "fillStyle": "hachure",
@@ -874,12 +922,14 @@
           "id": "_2-3bn8mf08UmbZ6BXIFS"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "text",
-      "version": 142,
-      "versionNonce": 1104385869,
+      "version": 144,
+      "versionNonce": 376545458,
       "isDeleted": false,
       "id": "rBCHOnrbLb_daQkqQR3iM",
       "fillStyle": "hachure",
@@ -901,9 +951,15 @@
         {
           "type": "arrow",
           "id": "_2-3bn8mf08UmbZ6BXIFS"
+        },
+        {
+          "id": "VyqgKewhv649Rl_VgfMCi",
+          "type": "arrow"
         }
       ],
-      "updated": 1638978645309,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 2,
       "text": "tests\n(integration tests &\nwasm test helpers)",
@@ -911,12 +967,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "tests\n(integration tests &\nwasm test helpers)"
     },
     {
       "type": "arrow",
-      "version": 309,
-      "versionNonce": 676204547,
+      "version": 472,
+      "versionNonce": 2071690094,
       "isDeleted": false,
       "id": "gjvzsG78lEToLfHSwix2l",
       "fillStyle": "hachure",
@@ -925,25 +981,27 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 121.80008801370579,
-      "y": 137.1918687462166,
+      "x": 94.57792085663155,
+      "y": 138.77133686820622,
       "strokeColor": "#2b8a3e",
       "backgroundColor": "transparent",
-      "width": 31.75599726538033,
-      "height": 135.45454545454544,
+      "width": 99.6640019562013,
+      "height": 119.36582571651728,
       "seed": 789434278,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645309,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "4VBZ0MkHPtewJl2Z2t0Bg",
-        "gap": 6.909090909090908,
+        "gap": 5.329622787101304,
         "focus": -0.06885109912899212
       },
       "endBinding": {
         "elementId": "Vv4I15UDLDULEN3MM7cWR",
-        "gap": 13.386363636363635,
+        "gap": 10.326144150008776,
         "focus": 0.31883892934573677
       },
       "lastCommittedPoint": null,
@@ -955,15 +1013,15 @@
           0
         ],
         [
-          -31.75599726538033,
-          -135.45454545454544
+          -99.6640019562013,
+          -119.36582571651728
         ]
       ]
     },
     {
       "type": "arrow",
-      "version": 359,
-      "versionNonce": 664938243,
+      "version": 619,
+      "versionNonce": 681491058,
       "isDeleted": false,
       "id": "VyqgKewhv649Rl_VgfMCi",
       "fillStyle": "hachure",
@@ -972,26 +1030,28 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 217.6291874855842,
-      "y": 152.50812625104635,
+      "x": 190.98928227815566,
+      "y": 146.14641420076202,
       "strokeColor": "#2b8a3e",
       "backgroundColor": "transparent",
-      "width": 50.95431878398884,
-      "height": 57.42989386846611,
+      "width": 178.9705519462201,
+      "height": 146.55134651702718,
       "seed": 1052874982,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978958370,
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false,
       "startBinding": {
-        "elementId": "4VBZ0MkHPtewJl2Z2t0Bg",
-        "gap": 3.454545454545454,
-        "focus": 0.48484848484848486
+        "elementId": "rBCHOnrbLb_daQkqQR3iM",
+        "focus": 0.0748046726595633,
+        "gap": 11.454545454545496
       },
       "endBinding": {
         "elementId": "TPinNTC84gdo4Heiyfrcl",
-        "gap": 1.727272727272727,
-        "focus": 0.1983948188896127
+        "focus": 0.11010748617593992,
+        "gap": 1.727272727272748
       },
       "lastCommittedPoint": null,
       "startArrowhead": null,
@@ -1002,15 +1062,15 @@
           0
         ],
         [
-          50.95431878398884,
-          -57.42989386846611
+          178.9705519462201,
+          -146.55134651702718
         ]
       ]
     },
     {
       "type": "rectangle",
-      "version": 213,
-      "versionNonce": 1969612813,
+      "version": 214,
+      "versionNonce": 1100340654,
       "isDeleted": false,
       "id": "5bb86M_XNHtgqUWdz-kf-",
       "fillStyle": "hachure",
@@ -1038,12 +1098,14 @@
           "id": "j6R5PVZmpe0pg3dobMg_R"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597362,
+      "link": null,
+      "locked": false
     },
     {
       "type": "text",
-      "version": 189,
-      "versionNonce": 1784032067,
+      "version": 190,
+      "versionNonce": 830643250,
       "isDeleted": false,
       "id": "8rk_Ui9gDtaE7NNb8u2Ir",
       "fillStyle": "hachure",
@@ -1067,7 +1129,9 @@
           "id": "_2-3bn8mf08UmbZ6BXIFS"
         }
       ],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 2,
       "text": "wasm tests",
@@ -1075,12 +1139,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "wasm tests"
     },
     {
       "type": "arrow",
-      "version": 692,
-      "versionNonce": 770137709,
+      "version": 693,
+      "versionNonce": 313128942,
       "isDeleted": false,
       "id": "_2-3bn8mf08UmbZ6BXIFS",
       "fillStyle": "hachure",
@@ -1099,7 +1163,9 @@
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "8rk_Ui9gDtaE7NNb8u2Ir",
         "gap": 15,
@@ -1126,8 +1192,8 @@
     },
     {
       "type": "rectangle",
-      "version": 491,
-      "versionNonce": 227920611,
+      "version": 492,
+      "versionNonce": 1057244658,
       "isDeleted": false,
       "id": "c9FTgvEkL5qGqshm1XkEc",
       "fillStyle": "hachure",
@@ -1171,12 +1237,14 @@
           "id": "6kR5qmpuk9pmD6Oi1l544"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false
     },
     {
       "type": "text",
-      "version": 441,
-      "versionNonce": 397084877,
+      "version": 442,
+      "versionNonce": 1455475246,
       "isDeleted": false,
       "id": "CTPEGHIc-rJMuCSTtfFbf",
       "fillStyle": "hachure",
@@ -1200,7 +1268,9 @@
           "id": "j6R5PVZmpe0pg3dobMg_R"
         }
       ],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 2,
       "text": "wasm_for_tests\n(pre-build scripts\nused for testing)",
@@ -1208,12 +1278,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "wasm_for_tests\n(pre-build scripts\nused for testing)"
     },
     {
       "type": "arrow",
-      "version": 1118,
-      "versionNonce": 530436365,
+      "version": 1185,
+      "versionNonce": 1063455986,
       "isDeleted": false,
       "id": "j6R5PVZmpe0pg3dobMg_R",
       "fillStyle": "hachure",
@@ -1222,17 +1292,19 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 547.2189008950138,
+      "x": 566.5341925316106,
       "y": 396.7891966896191,
       "strokeColor": "#2b8a3e",
       "backgroundColor": "transparent",
-      "width": 45.12760305424797,
-      "height": 156.0355604263109,
+      "width": 22.78354967823168,
+      "height": 195.23577493448897,
       "seed": 1559261453,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978981592,
+      "updated": 1668096811050,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "c9FTgvEkL5qGqshm1XkEc",
         "gap": 8.020096299021654,
@@ -1252,15 +1324,15 @@
           0
         ],
         [
-          -45.12760305424797,
-          -156.0355604263109
+          22.78354967823168,
+          -195.23577493448897
         ]
       ]
     },
     {
       "type": "text",
-      "version": 550,
-      "versionNonce": 1498721827,
+      "version": 551,
+      "versionNonce": 214127726,
       "isDeleted": false,
       "id": "dqZ0GfpvY8Ewz9AEmMCWa",
       "fillStyle": "hachure",
@@ -1300,7 +1372,9 @@
           "id": "qTCZ_7N0fuYegT9jZLwYS"
         }
       ],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "fontSize": 16,
       "fontFamily": 3,
       "text": "tx/vp_prelude",
@@ -1308,12 +1382,12 @@
       "textAlign": "left",
       "verticalAlign": "top",
       "containerId": null,
-      "originalText": ""
+      "originalText": "tx/vp_prelude"
     },
     {
       "type": "rectangle",
-      "version": 281,
-      "versionNonce": 372177293,
+      "version": 284,
+      "versionNonce": 1565813998,
       "isDeleted": false,
       "id": "fjybklv3t7WGXd-_o4IGU",
       "fillStyle": "hachure",
@@ -1359,14 +1433,20 @@
         {
           "type": "arrow",
           "id": "i1YmU9V2mNKEn1n-x42MI"
+        },
+        {
+          "id": "Mj92y8jQAhAp2FAk2eDPa",
+          "type": "arrow"
         }
       ],
-      "updated": 1638978645309
+      "updated": 1668096821612,
+      "link": null,
+      "locked": false
     },
     {
       "type": "arrow",
-      "version": 1483,
-      "versionNonce": 1386463587,
+      "version": 1484,
+      "versionNonce": 1853527726,
       "isDeleted": false,
       "id": "qTCZ_7N0fuYegT9jZLwYS",
       "fillStyle": "hachure",
@@ -1385,7 +1465,9 @@
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "Wzbm5P1iAViA47cEtf8fo",
         "focus": -0.9194768569701585,
@@ -1412,8 +1494,8 @@
     },
     {
       "type": "arrow",
-      "version": 1662,
-      "versionNonce": 1619056269,
+      "version": 1729,
+      "versionNonce": 1092132530,
       "isDeleted": false,
       "id": "ZIRL-fdZPjVJvZGV2ldOy",
       "fillStyle": "hachure",
@@ -1422,17 +1504,19 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 381.5826013057739,
-      "y": 292.4896723063988,
+      "x": 404.23462696568436,
+      "y": 292.48967230639886,
       "strokeColor": "#000000",
       "backgroundColor": "transparent",
-      "width": 27.405787418156137,
-      "height": 51.49682634384905,
+      "width": 27.550876861996244,
+      "height": 90.69704085202719,
       "seed": 349033581,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978962317,
+      "updated": 1668096811051,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "fjybklv3t7WGXd-_o4IGU",
         "gap": 4.902954015575294,
@@ -1452,15 +1536,15 @@
           0
         ],
         [
-          -27.405787418156137,
-          -51.49682634384905
+          27.550876861996244,
+          -90.69704085202719
         ]
       ]
     },
     {
       "type": "arrow",
-      "version": 1085,
-      "versionNonce": 1611373827,
+      "version": 1086,
+      "versionNonce": 344953070,
       "isDeleted": false,
       "id": "6kR5qmpuk9pmD6Oi1l544",
       "fillStyle": "hachure",
@@ -1479,7 +1563,9 @@
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978645309,
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "c9FTgvEkL5qGqshm1XkEc",
         "focus": 0.08731195893821785,
@@ -1506,8 +1592,8 @@
     },
     {
       "type": "arrow",
-      "version": 1222,
-      "versionNonce": 77626605,
+      "version": 1289,
+      "versionNonce": 565573746,
       "isDeleted": false,
       "id": "i1YmU9V2mNKEn1n-x42MI",
       "fillStyle": "hachure",
@@ -1516,17 +1602,19 @@
       "roughness": 0,
       "opacity": 100,
       "angle": 0,
-      "x": 436.8729532949412,
-      "y": 295.6281799726297,
+      "x": 453.9052924435333,
+      "y": 295.62817997262965,
       "strokeColor": "#2b8a3e",
       "backgroundColor": "transparent",
-      "width": 32.396136366778364,
-      "height": 56.23087292631101,
+      "width": 116.88603369223176,
+      "height": 95.43108743448903,
       "seed": 1441329475,
       "groupIds": [],
       "strokeSharpness": "round",
       "boundElements": [],
-      "updated": 1638978962317,
+      "updated": 1668096811051,
+      "link": null,
+      "locked": false,
       "startBinding": {
         "elementId": "fjybklv3t7WGXd-_o4IGU",
         "gap": 1.7644463493444311,
@@ -1546,8 +1634,377 @@
           0
         ],
         [
-          32.396136366778364,
-          -56.23087292631101
+          116.88603369223176,
+          -95.43108743448903
+        ]
+      ]
+    },
+    {
+      "type": "text",
+      "version": 3,
+      "versionNonce": 445334318,
+      "isDeleted": false,
+      "id": "snE5rQoYffGo1nNrkicz7",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 1,
+      "opacity": 100,
+      "angle": 0,
+      "x": 72.68099857250752,
+      "y": 104.64262632197415,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 11,
+      "height": 25,
+      "seed": 107469989,
+      "groupIds": [],
+      "strokeSharpness": "sharp",
+      "boundElements": [],
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
+      "fontSize": 20,
+      "fontFamily": 1,
+      "text": "",
+      "baseline": 18,
+      "textAlign": "left",
+      "verticalAlign": "top",
+      "containerId": null,
+      "originalText": ""
+    },
+    {
+      "type": "text",
+      "version": 580,
+      "versionNonce": 421547698,
+      "isDeleted": false,
+      "id": "rXDjoawThAsgINC3dYskv",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 1,
+      "opacity": 100,
+      "angle": 0,
+      "x": -66.20111453872039,
+      "y": -142.01868555754857,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 133,
+      "height": 20,
+      "seed": 1686954135,
+      "groupIds": [],
+      "strokeSharpness": "sharp",
+      "boundElements": [
+        {
+          "type": "arrow",
+          "id": "XW8p0b2UGBcU4qhuM50S5"
+        },
+        {
+          "id": "RSaOI_j676g1pzgh68PGT",
+          "type": "arrow"
+        },
+        {
+          "id": "1PrWlfvRp8Ry560A9Glzp",
+          "type": "arrow"
+        }
+      ],
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
+      "fontSize": 16,
+      "fontFamily": 3,
+      "text": "proof_of_stake",
+      "baseline": 16,
+      "textAlign": "left",
+      "verticalAlign": "top",
+      "containerId": null,
+      "originalText": "proof_of_stake"
+    },
+    {
+      "type": "rectangle",
+      "version": 344,
+      "versionNonce": 1081994798,
+      "isDeleted": false,
+      "id": "BTWJfUkextn5AFNkxH0zw",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 0,
+      "opacity": 100,
+      "angle": 0,
+      "x": -77.14160994875033,
+      "y": -151.5204673069046,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 151,
+      "height": 40,
+      "seed": 530602073,
+      "groupIds": [],
+      "strokeSharpness": "sharp",
+      "boundElements": [
+        {
+          "type": "arrow",
+          "id": "lLOOrmC-Vjj7oIfMlYSb8"
+        },
+        {
+          "type": "arrow",
+          "id": "XW8p0b2UGBcU4qhuM50S5"
+        },
+        {
+          "type": "arrow",
+          "id": "gjvzsG78lEToLfHSwix2l"
+        },
+        {
+          "id": "1PrWlfvRp8Ry560A9Glzp",
+          "type": "arrow"
+        },
+        {
+          "id": "RSaOI_j676g1pzgh68PGT",
+          "type": "arrow"
+        },
+        {
+          "id": "Mj92y8jQAhAp2FAk2eDPa",
+          "type": "arrow"
+        }
+      ],
+      "updated": 1668096823718,
+      "link": null,
+      "locked": false
+    },
+    {
+      "type": "arrow",
+      "version": 1316,
+      "versionNonce": 2101848178,
+      "isDeleted": false,
+      "id": "1PrWlfvRp8Ry560A9Glzp",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 0,
+      "opacity": 100,
+      "angle": 0,
+      "x": -34.83434658237711,
+      "y": -37.43280653554018,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 3.545409484342265,
+      "height": 70.05629000440024,
+      "seed": 1540235929,
+      "groupIds": [],
+      "strokeSharpness": "round",
+      "boundElements": [],
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
+      "startBinding": {
+        "elementId": "Vv4I15UDLDULEN3MM7cWR",
+        "focus": -0.4475029650922005,
+        "gap": 1
+      },
+      "endBinding": {
+        "elementId": "BTWJfUkextn5AFNkxH0zw",
+        "focus": 0.4960561286307573,
+        "gap": 4.031370766964187
+      },
+      "lastCommittedPoint": null,
+      "startArrowhead": null,
+      "endArrowhead": "arrow",
+      "points": [
+        [
+          0,
+          0
+        ],
+        [
+          -3.545409484342265,
+          -70.05629000440024
+        ]
+      ]
+    },
+    {
+      "type": "text",
+      "version": 679,
+      "versionNonce": 448450478,
+      "isDeleted": false,
+      "id": "HCnc-kGdrOmv66h53nUVE",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 1,
+      "opacity": 100,
+      "angle": 0,
+      "x": -67.30268218444724,
+      "y": -246.60346391844524,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 349,
+      "height": 39,
+      "seed": 1355025177,
+      "groupIds": [],
+      "strokeSharpness": "sharp",
+      "boundElements": [
+        {
+          "type": "arrow",
+          "id": "XW8p0b2UGBcU4qhuM50S5"
+        },
+        {
+          "id": "RSaOI_j676g1pzgh68PGT",
+          "type": "arrow"
+        }
+      ],
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false,
+      "fontSize": 16,
+      "fontFamily": 3,
+      "text": "core\n(basic types, storage, tx and VP api)",
+      "baseline": 35,
+      "textAlign": "left",
+      "verticalAlign": "top",
+      "containerId": null,
+      "originalText": "core\n(basic types, storage, tx and VP api)"
+    },
+    {
+      "type": "rectangle",
+      "version": 435,
+      "versionNonce": 517851698,
+      "isDeleted": false,
+      "id": "sXto6tFtVz5VWUddCdZEa",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 0,
+      "opacity": 100,
+      "angle": 0,
+      "x": -79.89486434732021,
+      "y": -251.87740247708948,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 383.6822949125378,
+      "height": 50.95994240187291,
+      "seed": 363368183,
+      "groupIds": [],
+      "strokeSharpness": "sharp",
+      "boundElements": [
+        {
+          "type": "arrow",
+          "id": "lLOOrmC-Vjj7oIfMlYSb8"
+        },
+        {
+          "type": "arrow",
+          "id": "XW8p0b2UGBcU4qhuM50S5"
+        },
+        {
+          "type": "arrow",
+          "id": "gjvzsG78lEToLfHSwix2l"
+        },
+        {
+          "id": "1PrWlfvRp8Ry560A9Glzp",
+          "type": "arrow"
+        },
+        {
+          "id": "RSaOI_j676g1pzgh68PGT",
+          "type": "arrow"
+        }
+      ],
+      "updated": 1667837597363,
+      "link": null,
+      "locked": false
+    },
+    {
+      "type": "arrow",
+      "version": 1716,
+      "versionNonce": 2030577454,
+      "isDeleted": false,
+      "id": "RSaOI_j676g1pzgh68PGT",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 0,
+      "opacity": 100,
+      "angle": 0,
+      "x": -36.723031505650525,
+      "y": -150.96698192339844,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 0.9289040894322254,
+      "height": 50.09773674715467,
+      "seed": 540356215,
+      "groupIds": [],
+      "strokeSharpness": "round",
+      "boundElements": [],
+      "updated": 1667837599964,
+      "link": null,
+      "locked": false,
+      "startBinding": {
+        "elementId": "rXDjoawThAsgINC3dYskv",
+        "focus": -0.5604411595305062,
+        "gap": 8.94829636584987
+      },
+      "endBinding": {
+        "elementId": "HCnc-kGdrOmv66h53nUVE",
+        "focus": 0.8149797719982349,
+        "gap": 6.5387452478921375
+      },
+      "lastCommittedPoint": null,
+      "startArrowhead": null,
+      "endArrowhead": "arrow",
+      "points": [
+        [
+          0,
+          0
+        ],
+        [
+          0.9289040894322254,
+          -50.09773674715467
+        ]
+      ]
+    },
+    {
+      "type": "arrow",
+      "version": 1836,
+      "versionNonce": 908201778,
+      "isDeleted": false,
+      "id": "Mj92y8jQAhAp2FAk2eDPa",
+      "fillStyle": "hachure",
+      "strokeWidth": 1,
+      "strokeStyle": "solid",
+      "roughness": 0,
+      "opacity": 100,
+      "angle": 0,
+      "x": 363.10096424865014,
+      "y": 291.4865667122689,
+      "strokeColor": "#000000",
+      "backgroundColor": "transparent",
+      "width": 329.30624831590114,
+      "height": 396.18836770886367,
+      "seed": 1218382706,
+      "groupIds": [],
+      "strokeSharpness": "round",
+      "boundElements": [],
+      "updated": 1668096824328,
+      "link": null,
+      "locked": false,
+      "startBinding": {
+        "elementId": "fjybklv3t7WGXd-_o4IGU",
+        "focus": -0.2598321416788019,
+        "gap": 5.906059609705238
+      },
+      "endBinding": {
+        "elementId": "BTWJfUkextn5AFNkxH0zw",
+        "focus": -0.14268881162945926,
+        "gap": 6.818666310309823
+      },
+      "lastCommittedPoint": null,
+      "startArrowhead": null,
+      "endArrowhead": "arrow",
+      "points": [
+        [
+          0,
+          0
+        ],
+        [
+          -329.30624831590114,
+          -396.18836770886367
         ]
       ]
     }
diff --git a/documentation/dev/src/explore/design/overview/crates.svg b/documentation/dev/src/explore/design/overview/crates.svg
index 49f36fd6bc..c08993547b 100644
--- a/documentation/dev/src/explore/design/overview/crates.svg
+++ b/documentation/dev/src/explore/design/overview/crates.svg
@@ -1,4 +1,4 @@
-<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 774.6539535837289 602.5833333333333" width="1549.3079071674579" height="1205.1666666666665">
+<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 774.6539535837289 784.1081842070785" width="1549.3079071674579" height="1568.216368414157">
   <!-- svg-source:excalidraw -->
   
   <defs>
@@ -13,4 +13,4 @@
       }
     </style>
   </defs>
-  <rect x="0" y="0" width="774.6539535837289" height="602.5833333333333" fill="#ffffff"/><g stroke-linecap="round"><g transform="translate(356.0752679928754 108.22353262046614) rotate(0 -23.37448525937502 -15.92373622816611)"><path d="M0 0 C-7.79 -5.31, -38.96 -26.54, -46.75 -31.85 M0 0 C-7.79 -5.31, -38.96 -26.54, -46.75 -31.85" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(356.0752679928754 108.22353262046614) rotate(0 -23.37448525937502 -15.92373622816611)"><path d="M-19.34 -24.88 C-24.96 -26.31, -30.57 -27.73, -46.75 -31.85 M-19.34 -24.88 C-26.66 -26.74, -33.98 -28.6, -46.75 -31.85" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(356.0752679928754 108.22353262046614) rotate(0 -23.37448525937502 -15.92373622816611)"><path d="M-30.23 -8.89 C-33.62 -13.59, -37 -18.3, -46.75 -31.85 M-30.23 -8.89 C-34.64 -15.02, -39.06 -21.15, -46.75 -31.85" stroke="#000000" stroke-width="1" fill="none"/></g></g><g transform="translate(404.36581433491585 249.8157070113591) rotate(0 112.5 29.5)"><text x="0" y="15.666666666666668" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">sdk</text><text x="0" y="35.333333333333336" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">(generated by the node) </text><text x="0" y="55" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">not yet implemented</text></g><g stroke-linecap="round"><g transform="translate(89.85557184907933 368.2643521822306) rotate(0 -1.9465594668322836 -67.15395524069419)"><path d="M0 0 C-0.65 -22.38, -3.24 -111.92, -3.89 -134.31 M0 0 C-0.65 -22.38, -3.24 -111.92, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(89.85557184907933 368.2643521822306) rotate(0 -1.9465594668322836 
-67.15395524069419)"><path d="M7.18 -106.43 C3.6 -115.44, 0.02 -124.45, -3.89 -134.31 M7.18 -106.43 C3.06 -116.8, -1.06 -127.17, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(89.85557184907933 368.2643521822306) rotate(0 -1.9465594668322836 -67.15395524069419)"><path d="M-13.33 -105.83 C-10.28 -115.03, -7.23 -124.24, -3.89 -134.31 M-13.33 -105.83 C-9.82 -116.43, -6.31 -127.02, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round" transform="translate(10 377.5) rotate(0 133 36.5)"><path d="M0 0 C85.3 0, 170.6 0, 266 0 M0 0 C73.98 0, 147.95 0, 266 0 M266 0 C266 14.91, 266 29.81, 266 73 M266 0 C266 24.81, 266 49.62, 266 73 M266 73 C170.17 73, 74.34 73, 0 73 M266 73 C179.02 73, 92.03 73, 0 73 M0 73 C0 52.03, 0 31.06, 0 0 M0 73 C0 57.58, 0 42.15, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(28.55372978461105 391.8509596553075) rotate(0 117.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">apps </text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(node/client/broadcaster)</text></g><g transform="translate(91.64597821935695 20.14904034469248) rotate(0 164 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">shared</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(has to be able to compile to wasm)</text></g><g transform="translate(383.9341983444965 491.10626018114726) rotate(0 94 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" 
direction="ltr">wasm</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(tx/vp/mm/mm_filter)</text></g><g transform="translate(372.5841425175331 125.13886872982019) rotate(0 65.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">vm_env</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(wasm imports)</text></g><g stroke-linecap="round"><g transform="translate(384.20053067732715 477.98544780503477) rotate(0 15.51785359376629 -74.7964292432441)"><path d="M0 0 C5.17 -24.93, 25.86 -124.66, 31.04 -149.59 M0 0 C5.17 -24.93, 25.86 -124.66, 31.04 -149.59" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(384.20053067732715 477.98544780503477) rotate(0 15.51785359376629 -74.7964292432441)"><path d="M35.36 -119.91 C34.38 -126.6, 33.41 -133.29, 31.04 -149.59 M35.36 -119.91 C33.74 -131.02, 32.12 -142.14, 31.04 -149.59" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(384.20053067732715 477.98544780503477) rotate(0 15.51785359376629 -74.7964292432441)"><path d="M15.26 -124.07 C18.82 -129.83, 22.38 -135.58, 31.04 -149.59 M15.26 -124.07 C21.17 -133.63, 27.08 -143.19, 31.04 -149.59" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round"><g transform="translate(443.6242661526725 235.2377053662545) rotate(331.69963896209117 0.9795193723085163 -30.262433210249)"><path d="M0 0 C0.33 -10.09, 1.63 -50.44, 1.96 -60.52 M0 0 C0.33 -10.09, 1.63 -50.44, 1.96 -60.52" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(443.6242661526725 235.2377053662545) rotate(331.69963896209117 0.9795193723085163 -30.262433210249)"><path d="M11.3 -32.02 C8.95 -39.19, 6.6 -46.37, 1.96 -60.52 
M11.3 -32.02 C8.4 -40.86, 5.51 -49.7, 1.96 -60.52" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(443.6242661526725 235.2377053662545) rotate(331.69963896209117 0.9795193723085163 -30.262433210249)"><path d="M-9.21 -32.68 C-6.4 -39.69, -3.59 -46.7, 1.96 -60.52 M-9.21 -32.68 C-5.74 -41.32, -2.28 -49.95, 1.96 -60.52" stroke="#000000" stroke-width="1" fill="none"/></g></g><g transform="translate(63.285314376454416 196.11946653897442) rotate(0 37.5 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">apps lib</text></g><g stroke-linecap="round"><g transform="translate(92.78716291373254 174.4894903752272) rotate(0 13.591460616421028 -49.9714717715254)"><path d="M0 0 C4.53 -16.66, 22.65 -83.29, 27.18 -99.94 M0 0 C4.53 -16.66, 22.65 -83.29, 27.18 -99.94" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(92.78716291373254 174.4894903752272) rotate(0 13.591460616421028 -49.9714717715254)"><path d="M29.69 -70.05 C28.85 -80, 28.02 -89.95, 27.18 -99.94 M29.69 -70.05 C29.11 -76.87, 28.54 -83.69, 27.18 -99.94" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(92.78716291373254 174.4894903752272) rotate(0 13.591460616421028 -49.9714717715254)"><path d="M9.88 -75.43 C15.64 -83.59, 21.4 -91.75, 27.18 -99.94 M9.88 -75.43 C13.83 -81.03, 17.78 -86.62, 27.18 -99.94" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round" transform="translate(86 10.5) rotate(0 173 29.5)"><path d="M0 0 C78.08 0, 156.15 0, 346 0 M0 0 C73.92 0, 147.85 0, 346 0 M346 0 C346 20.39, 346 40.79, 346 59 M346 0 C346 18.93, 346 37.86, 346 59 M346 59 C217.09 59, 88.19 59, 0 59 M346 59 C215.78 59, 85.56 59, 0 59 M0 59 C0 41.8, 0 24.6, 0 0 M0 59 C0 43.79, 0 28.59, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g stroke-linecap="round" transform="translate(43.2657470703125 185.031494140625) rotate(0 60 
20)"><path d="M0 0 C33.42 0, 66.84 0, 120 0 M0 0 C33.47 0, 66.94 0, 120 0 M120 0 C120 15.73, 120 31.47, 120 40 M120 0 C120 10.12, 120 20.24, 120 40 M120 40 C83.84 40, 47.68 40, 0 40 M120 40 C92.29 40, 64.58 40, 0 40 M0 40 C0 24.58, 0 9.16, 0 0 M0 40 C0 27.62, 0 15.25, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g stroke-linecap="round" transform="translate(359 115.5) rotate(0 80 29.5)"><path d="M0 0 C56.95 0, 113.91 0, 160 0 M0 0 C45.05 0, 90.1 0, 160 0 M160 0 C160 23.18, 160 46.36, 160 59 M160 0 C160 20.16, 160 40.33, 160 59 M160 59 C113.93 59, 67.87 59, 0 59 M160 59 C109.1 59, 58.21 59, 0 59 M0 59 C0 41.17, 0 23.33, 0 0 M0 59 C0 39.04, 0 19.07, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g stroke-linecap="round" transform="translate(392.08333333333326 239.83333333333331) rotate(0 118.5 39.5)"><path d="M0 0 C52.53 0, 105.05 0, 237 0 M0 0 C84.23 0, 168.45 0, 237 0 M237 0 C237 30.53, 237 61.06, 237 79 M237 0 C237 21, 237 41.99, 237 79 M237 79 C143.23 79, 49.46 79, 0 79 M237 79 C159.99 79, 82.98 79, 0 79 M0 79 C0 59.05, 0 39.11, 0 0 M0 79 C0 48.65, 0 18.3, 0 0" stroke="#0008" stroke-width="1" fill="none"/></g><g stroke-linecap="round" transform="translate(373.08467610677076 480.0819295247395) rotate(0 109.5 31.5)"><path d="M0 0 C47.7 0, 95.4 0, 219 0 M0 0 C80.86 0, 161.71 0, 219 0 M219 0 C219 19.5, 219 39, 219 63 M219 0 C219 20.44, 219 40.89, 219 63 M219 63 C164.8 63, 110.61 63, 0 63 M219 63 C166.94 63, 114.88 63, 0 63 M0 63 C0 47.72, 0 32.45, 0 0 M0 63 C0 47.95, 0 32.91, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(449.28721872014825 19.64904034469248) rotate(0 89.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">macros</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(procedural 
macros)</text></g><g stroke-linecap="round" transform="translate(443.6412405007913 10) rotate(0 93.00000000000003 29.5)"><path d="M0 0 C50.14 0, 100.27 0, 186 0 M0 0 C37.6 0, 75.21 0, 186 0 M186 0 C186 16.88, 186 33.75, 186 59 M186 0 C186 18.69, 186 37.37, 186 59 M186 59 C143.03 59, 100.05 59, 0 59 M186 59 C123.49 59, 60.97 59, 0 59 M0 59 C0 44.11, 0 29.22, 0 0 M0 59 C0 39.12, 0 19.24, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g stroke-linecap="round"><g transform="translate(470.78139500764155 104.64084014327246) rotate(0 23.341363402652718 -14.140840143272449)"><path d="M0 0 C7.78 -4.71, 38.9 -23.57, 46.68 -28.28 M0 0 C7.78 -4.71, 38.9 -23.57, 46.68 -28.28" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(470.78139500764155 104.64084014327246) rotate(0 23.341363402652718 -14.140840143272449)"><path d="M29.59 -7.01 C34.77 -13.46, 39.95 -19.9, 46.68 -28.28 M29.59 -7.01 C34 -12.5, 38.41 -17.99, 46.68 -28.28" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(470.78139500764155 104.64084014327246) rotate(0 23.341363402652718 -14.140840143272449)"><path d="M19.91 -22.98 C28.02 -24.58, 36.14 -26.19, 46.68 -28.28 M19.91 -22.98 C26.82 -24.35, 33.73 -25.71, 46.68 -28.28" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round" transform="translate(183.82062025039568 225.25) rotate(0 76 38)"><path d="M0 0 C57.61 0, 115.21 0, 152 0 M152 0 C152 19.78, 152 39.56, 152 76 M152 76 C99.22 76, 46.45 76, 0 76 M0 76 C0 49.51, 0 23.01, 0 0" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(193.82062025039568 238.75) rotate(0 67 25)"><text x="0" y="12.666666666666668" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">tests</text><text x="0" y="29.333333333333336" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" 
style="white-space: pre;" direction="ltr">(integration tests &amp;</text><text x="0" y="46" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">wasm test helpers)</text></g><g stroke-linecap="round"><g transform="translate(243.44606623306274 218.3409090909091) rotate(0 -15.877998632690165 -67.72727272727272)"><path d="M0 0 C-5.29 -22.58, -26.46 -112.88, -31.76 -135.45" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(243.44606623306274 218.3409090909091) rotate(0 -15.877998632690165 -67.72727272727272)"><path d="M-15.33 -110.35 C-21.42 -119.65, -27.51 -128.96, -31.76 -135.45" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(243.44606623306274 218.3409090909091) rotate(0 -15.877998632690165 -67.72727272727272)"><path d="M-35.31 -105.67 C-33.99 -116.71, -32.68 -127.75, -31.76 -135.45" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g><g stroke-linecap="round"><g transform="translate(339.2751657049412 233.65716659573883) rotate(0 25.47715939199442 -28.714946934233055)"><path d="M0 0 C8.49 -9.57, 42.46 -47.86, 50.95 -57.43" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(339.2751657049412 233.65716659573883) rotate(0 25.47715939199442 -28.714946934233055)"><path d="M39.92 -29.53 C43.13 -37.66, 46.35 -45.79, 50.95 -57.43" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(339.2751657049412 233.65716659573883) rotate(0 25.47715939199442 -28.714946934233055)"><path d="M24.57 -43.15 C32.26 -47.31, 39.94 -51.47, 50.95 -57.43" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g><g stroke-linecap="round" transform="translate(375.15395358372893 543.5833333333333) rotate(0 108.5 24.5)"><path d="M0 0 C65.9 0, 131.81 0, 217 0 M217 0 C217 19.29, 217 38.59, 217 49 M217 49 C171.53 49, 126.07 49, 0 49 M0 49 C0 31.08, 0 13.15, 0 0" 
stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(388.15395358372893 560.0833333333333) rotate(0 40 8.5)"><text x="0" y="13" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">wasm tests</text></g><g stroke-linecap="round"><g transform="translate(373.15395358372893 572.9232319121936) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M0 0 C-19.18 -44.03, -95.91 -220.16, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(373.15395358372893 572.9232319121936) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M-94.43 -242.44 C-100.7 -249.04, -106.98 -255.64, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(373.15395358372893 572.9232319121936) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M-113.24 -234.25 C-113.81 -243.33, -114.37 -252.42, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g><g stroke-linecap="round" transform="translate(621.6539535837289 485.95833333333326) rotate(0 71.5 38)"><path d="M0 0 C46.19 0, 92.39 0, 143 0 M143 0 C143 27.92, 143 55.84, 143 76 M143 76 C105.78 76, 68.57 76, 0 76 M0 76 C0 49.05, 0 22.09, 0 0" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(633.1539535837289 500.45833333333326) rotate(0 59 25)"><text x="0" y="12.666666666666668" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">wasm_for_tests</text><text x="0" y="29.333333333333336" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">(pre-build scripts</text><text x="0" y="46" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" 
style="white-space: pre;" direction="ltr">used for testing)</text></g><g stroke-linecap="round"><g transform="translate(668.8648791143708 477.93823703431156) rotate(0 -22.563801527123985 -78.01778021315545)"><path d="M0 0 C-7.52 -26.01, -37.61 -130.03, -45.13 -156.04" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(668.8648791143708 477.93823703431156) rotate(0 -22.563801527123985 -78.01778021315545)"><path d="M-27.44 -131.81 C-34.44 -141.4, -41.44 -150.99, -45.13 -156.04" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(668.8648791143708 477.93823703431156) rotate(0 -22.563801527123985 -78.01778021315545)"><path d="M-47.15 -126.1 C-46.35 -137.95, -45.55 -149.8, -45.13 -156.04" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g><g transform="translate(470.46821729265594 391.52404034469237) rotate(0 61 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">tx/vp_prelude</text></g><g stroke-linecap="round" transform="translate(453.9357362910732 378.54166666666663) rotate(0 78.5 21.37500000000003)"><path d="M0 0 C46.04 0, 92.09 0, 157 0 M0 0 C59.78 0, 119.55 0, 157 0 M157 0 C157 14.39, 157 28.79, 157 42.75 M157 0 C157 10.79, 157 21.58, 157 42.75 M157 42.75 C120.75 42.75, 84.51 42.75, 0 42.75 M157 42.75 C112 42.75, 67 42.75, 0 42.75 M0 42.75 C0 33.86, 0 24.97, 0 0 M0 42.75 C0 33.93, 0 25.12, 0 0" stroke="#000000" stroke-width="1" fill="none"/></g><g stroke-linecap="round"><g transform="translate(389.99713813092177 478.9446532869598) rotate(0 58.45483866094273 -26.527986620293262)"><path d="M0 0 C19.48 -8.84, 97.42 -44.21, 116.91 -53.06 M0 0 C19.48 -8.84, 97.42 -44.21, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(389.99713813092177 478.9446532869598) rotate(0 58.45483866094273 -26.527986620293262)"><path d="M95.48 -32.06 C103.87 -40.28, 
112.26 -48.5, 116.91 -53.06 M95.48 -32.06 C102.24 -38.69, 109 -45.31, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(389.99713813092177 478.9446532869598) rotate(0 58.45483866094273 -26.527986620293262)"><path d="M87 -50.75 C98.71 -51.65, 110.42 -52.56, 116.91 -53.06 M87 -50.75 C96.44 -51.48, 105.87 -52.2, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round"><g transform="translate(503.22857952513084 373.6387126510913) rotate(0 -13.702893709078069 -25.748413171924525)"><path d="M0 0 C-4.57 -8.58, -22.84 -42.91, -27.41 -51.5 M0 0 C-4.57 -8.58, -22.84 -42.91, -27.41 -51.5" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(503.22857952513084 373.6387126510913) rotate(0 -13.702893709078069 -25.748413171924525)"><path d="M-5.72 -31.99 C-12.46 -38.05, -19.21 -44.12, -27.41 -51.5 M-5.72 -31.99 C-11.65 -37.32, -17.57 -42.65, -27.41 -51.5" stroke="#000000" stroke-width="1" fill="none"/></g><g transform="translate(503.22857952513084 373.6387126510913) rotate(0 -13.702893709078069 -25.748413171924525)"><path d="M-23.34 -22.61 C-24.6 -31.59, -25.87 -40.57, -27.41 -51.5 M-23.34 -22.61 C-24.45 -30.51, -25.56 -38.4, -27.41 -51.5" stroke="#000000" stroke-width="1" fill="none"/></g></g><g stroke-linecap="round"><g transform="translate(647.9923407574266 479.61224177605845) rotate(0 -33.29036396556205 -26.570575109391825)"><path d="M0 0 C-11.1 -8.86, -55.48 -44.28, -66.58 -53.14" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(647.9923407574266 479.61224177605845) rotate(0 -33.29036396556205 -26.570575109391825)"><path d="M-38.15 -43.57 C-46.59 -46.42, -55.04 -49.26, -66.58 -53.14" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(647.9923407574266 479.61224177605845) rotate(0 -33.29036396556205 -26.570575109391825)"><path d="M-50.95 -27.54 C-55.59 -35.14, -60.23 -42.74, -66.58 -53.14" 
stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g><g stroke-linecap="round"><g transform="translate(558.5189315142982 376.7772203173222) rotate(0 16.198068183389182 -28.115436463155504)"><path d="M0 0 C5.4 -9.37, 27 -46.86, 32.4 -56.23" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"/></g><g transform="translate(558.5189315142982 376.7772203173222) rotate(0 16.198068183389182 -28.115436463155504)"><path d="M27.21 -26.68 C28.37 -33.25, 29.52 -39.82, 32.4 -56.23" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g><g transform="translate(558.5189315142982 376.7772203173222) rotate(0 16.198068183389182 -28.115436463155504)"><path d="M9.43 -36.93 C14.54 -41.22, 19.64 -45.51, 32.4 -56.23" stroke="#2b8a3e" stroke-width="1.5" fill="none"/></g></g></svg>
\ No newline at end of file
+  <rect x="0" y="0" width="774.6539535837289" height="784.1081842070785" fill="#ffffff"></rect><g stroke-linecap="round"><g transform="translate(493.1513845259649 197.35702048839013) rotate(0 -49.314312470627584 -66.75794811796213)"><path d="M0 0 C-16.44 -22.25, -82.19 -111.26, -98.63 -133.52 M0 0 C-16.44 -22.25, -82.19 -111.26, -98.63 -133.52" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(493.1513845259649 197.35702048839013) rotate(0 -49.314312470627584 -66.75794811796213)"><path d="M-73.63 -116.94 C-78.75 -120.33, -83.87 -123.73, -98.63 -133.52 M-73.63 -116.94 C-80.3 -121.37, -86.98 -125.79, -98.63 -133.52" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(493.1513845259649 197.35702048839013) rotate(0 -49.314312470627584 -66.75794811796213)"><path d="M-90.13 -104.74 C-91.87 -110.64, -93.61 -116.54, -98.63 -133.52 M-90.13 -104.74 C-92.4 -112.43, -94.67 -120.12, -98.63 -133.52" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g transform="translate(509.8008740465674 388.08514877263184) rotate(0 112.5 29.5)"><text x="0" y="15.666666666666668" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">sdk</text><text x="0" y="35.333333333333336" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">(generated by the node) </text><text x="0" y="55" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#0008" text-anchor="start" style="white-space: pre;" direction="ltr">not yet implemented</text></g><g stroke-linecap="round"><g transform="translate(89.85557184907933 549.7892030559758) rotate(0 -1.9465594668322836 -67.15395524069419)"><path d="M0 0 C-0.65 -22.38, -3.24 -111.92, -3.89 -134.31 M0 0 C-0.65 -22.38, -3.24 -111.92, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"></path></g><g 
transform="translate(89.85557184907933 549.7892030559758) rotate(0 -1.9465594668322836 -67.15395524069419)"><path d="M7.18 -106.43 C3.6 -115.44, 0.02 -124.45, -3.89 -134.31 M7.18 -106.43 C3.06 -116.8, -1.06 -127.17, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(89.85557184907933 549.7892030559758) rotate(0 -1.9465594668322836 -67.15395524069419)"><path d="M-13.33 -105.83 C-10.28 -115.03, -7.23 -124.24, -3.89 -134.31 M-13.33 -105.83 C-9.82 -116.43, -6.31 -127.02, -3.89 -134.31" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round" transform="translate(10 559.0248508737452) rotate(0 133 36.5)"><path d="M0 0 C85.3 0, 170.6 0, 266 0 M0 0 C73.98 0, 147.95 0, 266 0 M266 0 C266 14.91, 266 29.81, 266 73 M266 0 C266 24.81, 266 49.62, 266 73 M266 73 C170.17 73, 74.34 73, 0 73 M266 73 C179.02 73, 92.03 73, 0 73 M0 73 C0 52.03, 0 31.06, 0 0 M0 73 C0 57.58, 0 42.15, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(28.55372978461105 573.3758105290526) rotate(0 117.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">apps </text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(node/client/broadcaster)</text></g><g transform="translate(77.19655029217847 239.93647296642382) rotate(0 29 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">shared</text></g><g transform="translate(385.2055077528378 672.6311110548925) rotate(0 94 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">wasm</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" 
font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(tx/vp/mm/mm_filter)</text></g><g transform="translate(469.37656279252224 211.18055490471986) rotate(0 65.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">vm_env</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(wasm imports)</text></g><g stroke-linecap="round"><g transform="translate(386.0651871968611 659.5102986787799) rotate(0 55.38450778430541 -95.02533306979102)"><path d="M0 0 C18.46 -31.68, 92.31 -158.38, 110.77 -190.05 M0 0 C18.46 -31.68, 92.31 -158.38, 110.77 -190.05" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(386.0651871968611 659.5102986787799) rotate(0 55.38450778430541 -95.02533306979102)"><path d="M105.44 -160.53 C106.64 -167.18, 107.84 -173.84, 110.77 -190.05 M105.44 -160.53 C107.43 -171.58, 109.43 -182.64, 110.77 -190.05" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(386.0651871968611 659.5102986787799) rotate(0 55.38450778430541 -95.02533306979102)"><path d="M87.71 -170.86 C92.91 -175.19, 98.11 -179.51, 110.77 -190.05 M87.71 -170.86 C96.34 -178.05, 104.98 -185.23, 110.77 -190.05" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(543.1343255958404 374.9870046374591) rotate(331.69963896209117 20.59540136546471 -53.17106351679132)"><path d="M0 0 C6.87 -17.72, 34.33 -88.62, 41.19 -106.34 M0 0 C6.87 -17.72, 34.33 -88.62, 41.19 -106.34" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(543.1343255958404 374.9870046374591) rotate(331.69963896209117 20.59540136546471 -53.17106351679132)"><path d="M40.58 -76.35 C40.73 -83.9, 40.89 -91.45, 41.19 -106.34 M40.58 -76.35 C40.77 -85.65, 
40.96 -94.95, 41.19 -106.34" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(543.1343255958404 374.9870046374591) rotate(331.69963896209117 20.59540136546471 -53.17106351679132)"><path d="M21.44 -83.76 C26.41 -89.44, 31.38 -95.13, 41.19 -106.34 M21.44 -83.76 C27.57 -90.76, 33.69 -97.77, 41.19 -106.34" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g transform="translate(63.285314376454416 377.6443174127196) rotate(0 37.5 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">apps lib</text></g><g stroke-linecap="round"><g transform="translate(83.104084609506 366.4551296766268) rotate(0 -3.588903425077934 -45.40450085376535)"><path d="M0 0 C-1.2 -15.13, -5.98 -75.67, -7.18 -90.81 M0 0 C-1.2 -15.13, -5.98 -75.67, -7.18 -90.81" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(83.104084609506 366.4551296766268) rotate(0 -3.588903425077934 -45.40450085376535)"><path d="M5.27 -63.51 C1.13 -72.6, -3.02 -81.69, -7.18 -90.81 M5.27 -63.51 C2.43 -69.74, -0.41 -75.97, -7.18 -90.81" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(83.104084609506 366.4551296766268) rotate(0 -3.588903425077934 -45.40450085376535)"><path d="M-15.19 -61.9 C-12.52 -71.52, -9.85 -81.15, -7.18 -90.81 M-15.19 -61.9 C-13.36 -68.49, -11.53 -75.09, -7.18 -90.81" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round" transform="translate(60.76031090259778 226.24108468289745) rotate(0 50.26077918870527 22.756086768610174)"><path d="M0 0 C22.68 0, 45.37 0, 100.52 0 M0 0 C21.48 0, 42.95 0, 100.52 0 M100.52 0 C100.52 15.73, 100.52 31.46, 100.52 45.51 M100.52 0 C100.52 14.6, 100.52 29.2, 100.52 45.51 M100.52 45.51 C63.07 45.51, 25.62 45.51, 0 45.51 M100.52 45.51 C62.69 45.51, 24.86 45.51, 0 45.51 M0 45.51 C0 32.25, 0 18.98, 0 0 M0 45.51 C0 33.78, 0 22.05, 0 0" 
stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round" transform="translate(43.2657470703125 366.55634501437015) rotate(0 60 20)"><path d="M0 0 C33.42 0, 66.84 0, 120 0 M0 0 C33.47 0, 66.94 0, 120 0 M120 0 C120 15.73, 120 31.47, 120 40 M120 0 C120 10.12, 120 20.24, 120 40 M120 40 C83.84 40, 47.68 40, 0 40 M120 40 C92.29 40, 64.58 40, 0 40 M0 40 C0 24.58, 0 9.16, 0 0 M0 40 C0 27.62, 0 15.25, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round" transform="translate(462.51630470731493 201.54168617489972) rotate(0 80 29.5)"><path d="M0 0 C56.95 0, 113.91 0, 160 0 M0 0 C45.05 0, 90.1 0, 160 0 M160 0 C160 23.18, 160 46.36, 160 59 M160 0 C160 20.16, 160 40.33, 160 59 M160 59 C113.93 59, 67.87 59, 0 59 M160 59 C109.1 59, 58.21 59, 0 59 M0 59 C0 41.17, 0 23.33, 0 0 M0 59 C0 39.04, 0 19.07, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round" transform="translate(501.57358764927903 382.1579696989004) rotate(0 118.5 39.5)"><path d="M0 0 C52.53 0, 105.05 0, 237 0 M0 0 C84.23 0, 168.45 0, 237 0 M237 0 C237 30.53, 237 61.06, 237 79 M237 0 C237 21, 237 41.99, 237 79 M237 79 C143.23 79, 49.46 79, 0 79 M237 79 C159.99 79, 82.98 79, 0 79 M0 79 C0 59.05, 0 39.11, 0 0 M0 79 C0 48.65, 0 18.3, 0 0" stroke="#0008" stroke-width="1" fill="none"></path></g><g stroke-linecap="round" transform="translate(373.08467610677076 661.6067803984847) rotate(0 109.5 31.5)"><path d="M0 0 C47.7 0, 95.4 0, 219 0 M0 0 C80.86 0, 161.71 0, 219 0 M219 0 C219 19.5, 219 39, 219 63 M219 0 C219 20.44, 219 40.89, 219 63 M219 63 C164.8 63, 110.61 63, 0 63 M219 63 C166.94 63, 114.88 63, 0 63 M0 63 C0 47.72, 0 32.45, 0 0 M0 63 C0 47.95, 0 32.91, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(465.4726104754838 20.437016617190437) rotate(0 90.00000000000003 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" 
style="white-space: pre;" direction="ltr">macros</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(procedural macros)</text></g><g stroke-linecap="round" transform="translate(461.1954437014685 10) rotate(0 93.00000000000003 29.5)"><path d="M0 0 C50.14 0, 100.27 0, 186 0 M0 0 C37.6 0, 75.21 0, 186 0 M186 0 C186 16.88, 186 33.75, 186 59 M186 0 C186 18.69, 186 37.37, 186 59 M186 59 C143.03 59, 100.05 59, 0 59 M186 59 C123.49 59, 60.97 59, 0 59 M0 59 C0 44.11, 0 29.22, 0 0 M0 59 C0 39.12, 0 19.24, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round"><g transform="translate(566.7252285049187 199.00302619603028) rotate(0 -1.6289977939507878 -64.50151309801514)"><path d="M0 0 C-0.54 -21.5, -2.71 -107.5, -3.26 -129 M0 0 C-0.54 -21.5, -2.71 -107.5, -3.26 -129" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(566.7252285049187 199.00302619603028) rotate(0 -1.6289977939507878 -64.50151309801514)"><path d="M7.71 -101.08 C4.39 -109.54, 1.06 -118, -3.26 -129 M7.71 -101.08 C4.88 -108.28, 2.05 -115.49, -3.26 -129" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(566.7252285049187 199.00302619603028) rotate(0 -1.6289977939507878 -64.50151309801514)"><path d="M-12.8 -100.56 C-9.91 -109.18, -7.02 -117.8, -3.26 -129 M-12.8 -100.56 C-10.34 -107.9, -7.88 -115.24, -3.26 -129" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round" transform="translate(183.82062025039568 406.77485087374515) rotate(0 76 38)"><path d="M0 0 C57.61 0, 115.21 0, 152 0 M152 0 C152 19.78, 152 39.56, 152 76 M152 76 C99.22 76, 46.45 76, 0 76 M0 76 C0 49.51, 0 23.01, 0 0" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(193.82062025039568 420.27485087374515) rotate(0 67 25)"><text x="0" y="12.666666666666668" 
font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">tests</text><text x="0" y="29.333333333333336" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">(integration tests &amp;</text><text x="0" y="46" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">wasm test helpers)</text></g><g stroke-linecap="round"><g transform="translate(216.2238990759885 401.44522808664385) rotate(0 -49.83200097810065 -59.68291285825864)"><path d="M0 0 C-16.61 -19.89, -83.05 -99.47, -99.66 -119.37" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(216.2238990759885 401.44522808664385) rotate(0 -49.83200097810065 -59.68291285825864)"><path d="M-73.72 -104.3 C-83.34 -109.89, -92.95 -115.47, -99.66 -119.37" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(216.2238990759885 401.44522808664385) rotate(0 -49.83200097810065 -59.68291285825864)"><path d="M-89.47 -91.15 C-93.25 -101.61, -97.03 -112.07, -99.66 -119.37" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(312.6352604975126 408.82030541919966) rotate(0 89.48527597311005 -73.27567325851359)"><path d="M0 0 C29.83 -24.43, 149.14 -122.13, 178.97 -146.55" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(312.6352604975126 408.82030541919966) rotate(0 89.48527597311005 -73.27567325851359)"><path d="M163.66 -120.75 C168.12 -128.27, 172.58 -135.78, 178.97 -146.55" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(312.6352604975126 408.82030541919966) rotate(0 89.48527597311005 -73.27567325851359)"><path d="M150.66 -136.63 C158.91 -139.52, 167.15 -142.41, 178.97 
-146.55" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g stroke-linecap="round" transform="translate(375.15395358372893 725.1081842070785) rotate(0 108.5 24.5)"><path d="M0 0 C65.9 0, 131.81 0, 217 0 M217 0 C217 19.29, 217 38.59, 217 49 M217 49 C171.53 49, 126.07 49, 0 49 M0 49 C0 31.08, 0 13.15, 0 0" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(388.15395358372893 741.6081842070785) rotate(0 40 8.5)"><text x="0" y="13" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">wasm tests</text></g><g stroke-linecap="round"><g transform="translate(373.15395358372893 754.4480827859388) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M0 0 C-19.18 -44.03, -95.91 -220.16, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(373.15395358372893 754.4480827859388) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M-94.43 -242.44 C-100.7 -249.04, -106.98 -255.64, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(373.15395358372893 754.4480827859388) rotate(0 -57.54880231942792 -132.09518869145919)"><path d="M-113.24 -234.25 C-113.81 -243.33, -114.37 -252.42, -115.1 -264.19" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g stroke-linecap="round" transform="translate(621.6539535837289 667.4831842070785) rotate(0 71.5 38)"><path d="M0 0 C46.19 0, 92.39 0, 143 0 M143 0 C143 27.92, 143 55.84, 143 76 M143 76 C105.78 76, 68.57 76, 0 76 M0 76 C0 49.05, 0 22.09, 0 0" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(633.1539535837289 681.9831842070785) rotate(0 59 25)"><text x="0" y="12.666666666666668" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" 
direction="ltr">wasm_for_tests</text><text x="0" y="29.333333333333336" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">(pre-build scripts</text><text x="0" y="46" font-family="Helvetica, Segoe UI Emoji" font-size="16px" fill="#2b8a3e" text-anchor="start" style="white-space: pre;" direction="ltr">used for testing)</text></g><g stroke-linecap="round"><g transform="translate(688.1801707509676 659.4630879080567) rotate(0 11.39177483911584 -97.61788746724449)"><path d="M0 0 C3.8 -32.54, 18.99 -162.7, 22.78 -195.24" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(688.1801707509676 659.4630879080567) rotate(0 11.39177483911584 -97.61788746724449)"><path d="M29.71 -166.05 C26.97 -177.6, 24.23 -189.15, 22.78 -195.24" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(688.1801707509676 659.4630879080567) rotate(0 11.39177483911584 -97.61788746724449)"><path d="M9.32 -168.42 C14.65 -179.04, 19.98 -189.65, 22.78 -195.24" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g transform="translate(470.46821729265594 573.0488912184376) rotate(0 61 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">tx/vp_prelude</text></g><g stroke-linecap="round" transform="translate(453.9357362910732 560.0665175404117) rotate(0 78.5 21.37500000000003)"><path d="M0 0 C46.04 0, 92.09 0, 157 0 M0 0 C59.78 0, 119.55 0, 157 0 M157 0 C157 14.39, 157 28.79, 157 42.75 M157 0 C157 10.79, 157 21.58, 157 42.75 M157 42.75 C120.75 42.75, 84.51 42.75, 0 42.75 M157 42.75 C112 42.75, 67 42.75, 0 42.75 M0 42.75 C0 33.86, 0 24.97, 0 0 M0 42.75 C0 33.93, 0 25.12, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round"><g transform="translate(389.99713813092177 660.4695041607049) rotate(0 
58.45483866094273 -26.527986620293262)"><path d="M0 0 C19.48 -8.84, 97.42 -44.21, 116.91 -53.06 M0 0 C19.48 -8.84, 97.42 -44.21, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(389.99713813092177 660.4695041607049) rotate(0 58.45483866094273 -26.527986620293262)"><path d="M95.48 -32.06 C103.87 -40.28, 112.26 -48.5, 116.91 -53.06 M95.48 -32.06 C102.24 -38.69, 109 -45.31, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(389.99713813092177 660.4695041607049) rotate(0 58.45483866094273 -26.527986620293262)"><path d="M87 -50.75 C98.71 -51.65, 110.42 -52.56, 116.91 -53.06 M87 -50.75 C96.44 -51.48, 105.87 -52.2, 116.91 -53.06" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(525.8806051850413 555.1635635248365) rotate(0 13.775438430998122 -45.34852042601359)"><path d="M0 0 C4.59 -15.12, 22.96 -75.58, 27.55 -90.7 M0 0 C4.59 -15.12, 22.96 -75.58, 27.55 -90.7" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(525.8806051850413 555.1635635248365) rotate(0 13.775438430998122 -45.34852042601359)"><path d="M29.17 -60.74 C28.67 -70.05, 28.17 -79.37, 27.55 -90.7 M29.17 -60.74 C28.73 -68.93, 28.29 -77.11, 27.55 -90.7" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(525.8806051850413 555.1635635248365) rotate(0 13.775438430998122 -45.34852042601359)"><path d="M9.54 -66.71 C15.14 -74.16, 20.74 -81.62, 27.55 -90.7 M9.54 -66.71 C14.46 -73.26, 19.38 -79.82, 27.55 -90.7" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(647.9923407574266 661.1370926498037) rotate(0 -33.29036396556205 -26.570575109391825)"><path d="M0 0 C-11.1 -8.86, -55.48 -44.28, -66.58 -53.14" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(647.9923407574266 661.1370926498037) rotate(0 
-33.29036396556205 -26.570575109391825)"><path d="M-38.15 -43.57 C-46.59 -46.42, -55.04 -49.26, -66.58 -53.14" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(647.9923407574266 661.1370926498037) rotate(0 -33.29036396556205 -26.570575109391825)"><path d="M-50.95 -27.54 C-55.59 -35.14, -60.23 -42.74, -66.58 -53.14" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(575.5512706628903 558.3020711910673) rotate(0 58.44301684611588 -47.715543717244515)"><path d="M0 0 C19.48 -15.91, 97.41 -79.53, 116.89 -95.43" stroke="#2b8a3e" stroke-width="1.5" fill="none" stroke-dasharray="8 9"></path></g><g transform="translate(575.5512706628903 558.3020711910673) rotate(0 58.44301684611588 -47.715543717244515)"><path d="M101.54 -69.65 C104.95 -75.39, 108.36 -81.12, 116.89 -95.43" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g><g transform="translate(575.5512706628903 558.3020711910673) rotate(0 58.44301684611588 -47.715543717244515)"><path d="M88.56 -85.55 C94.86 -87.75, 101.16 -89.94, 116.89 -95.43" stroke="#2b8a3e" stroke-width="1.5" fill="none"></path></g></g><g transform="translate(194.32697679186447 367.3165175404118) rotate(0 5.5 12.5)"><text x="0" y="18" font-family="Virgil, Segoe UI Emoji" font-size="20px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr"></text></g><g transform="translate(55.44486368063656 120.65520566088907) rotate(0 66.5 10)"><text x="0" y="16" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">proof_of_stake</text></g><g stroke-linecap="round" transform="translate(44.50436827060662 111.15342391153303) rotate(0 75.5 20)"><path d="M0 0 C55.55 0, 111.1 0, 151 0 M0 0 C41.93 0, 83.85 0, 151 0 M151 0 C151 8.38, 151 16.77, 151 40 M151 0 C151 11, 151 22, 151 40 M151 40 C102.97 40, 54.93 40, 0 40 M151 40 C92.22 40, 33.43 40, 0 40 M0 40 C0 27.45, 0 
14.9, 0 0 M0 40 C0 28.71, 0 17.41, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round"><g transform="translate(86.81163163697984 225.24108468289745) rotate(0 -1.7727047421711326 -35.02814500220012)"><path d="M0 0 C-0.59 -11.68, -2.95 -58.38, -3.55 -70.06 M0 0 C-0.59 -11.68, -2.95 -58.38, -3.55 -70.06" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(86.81163163697984 225.24108468289745) rotate(0 -1.7727047421711326 -35.02814500220012)"><path d="M8.13 -42.42 C5.03 -49.74, 1.94 -57.06, -3.55 -70.06 M8.13 -42.42 C3.71 -52.87, -0.7 -63.33, -3.55 -70.06" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(86.81163163697984 225.24108468289745) rotate(0 -1.7727047421711326 -35.02814500220012)"><path d="M-12.37 -41.38 C-10.03 -48.98, -7.69 -56.58, -3.55 -70.06 M-12.37 -41.38 C-9.03 -52.23, -5.69 -63.07, -3.55 -70.06" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g transform="translate(54.34329603490971 16.07042729999239) rotate(0 174.5 19.5)"><text x="0" y="15.5" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">core</text><text x="0" y="35" font-family="Cascadia, Segoe UI Emoji" font-size="16px" fill="#000000" text-anchor="start" style="white-space: pre;" direction="ltr">(basic types, storage, tx and VP api)</text></g><g stroke-linecap="round" transform="translate(41.75111387203674 10.796488741348156) rotate(0 191.8411474562689 25.479971200936447)"><path d="M0 0 C135.58 0, 271.15 0, 383.68 0 M0 0 C132.31 0, 264.63 0, 383.68 0 M383.68 0 C383.68 12.32, 383.68 24.63, 383.68 50.96 M383.68 0 C383.68 17.44, 383.68 34.88, 383.68 50.96 M383.68 50.96 C265.64 50.96, 147.59 50.96, 0 50.96 M383.68 50.96 C291.71 50.96, 199.74 50.96, 0 50.96 M0 50.96 C0 39.2, 0 27.43, 0 0 M0 50.96 C0 38.94, 0 26.93, 0 0" stroke="#000000" stroke-width="1" fill="none"></path></g><g stroke-linecap="round"><g 
transform="translate(84.92294671370642 111.7069092950392) rotate(0 0.46445204471611135 -25.048868373577335)"><path d="M0 0 C0.15 -8.35, 0.77 -41.75, 0.93 -50.1 M0 0 C0.15 -8.35, 0.77 -41.75, 0.93 -50.1" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(84.92294671370642 111.7069092950392) rotate(0 0.46445204471611135 -25.048868373577335)"><path d="M9.06 -26.4 C7.28 -31.58, 5.51 -36.76, 0.93 -50.1 M9.06 -26.4 C6.92 -32.63, 4.79 -38.85, 0.93 -50.1" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(84.92294671370642 111.7069092950392) rotate(0 0.46445204471611135 -25.048868373577335)"><path d="M-8.07 -26.72 C-6.11 -31.83, -4.14 -36.93, 0.93 -50.1 M-8.07 -26.72 C-5.71 -32.86, -3.34 -39, 0.93 -50.1" stroke="#000000" stroke-width="1" fill="none"></path></g></g><g stroke-linecap="round"><g transform="translate(484.7469424680071 554.1604579307066) rotate(0 -164.65312415795057 -198.09418385443183)"><path d="M0 0 C-54.88 -66.03, -274.42 -330.16, -329.31 -396.19 M0 0 C-54.88 -66.03, -274.42 -330.16, -329.31 -396.19" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(484.7469424680071 554.1604579307066) rotate(0 -164.65312415795057 -198.09418385443183)"><path d="M-303.4 -381.07 C-312.35 -386.29, -321.31 -391.52, -329.31 -396.19 M-303.4 -381.07 C-312.97 -386.65, -322.54 -392.24, -329.31 -396.19" stroke="#000000" stroke-width="1" fill="none"></path></g><g transform="translate(484.7469424680071 554.1604579307066) rotate(0 -164.65312415795057 -198.09418385443183)"><path d="M-319.18 -367.95 C-322.68 -377.71, -326.18 -387.47, -329.31 -396.19 M-319.18 -367.95 C-322.92 -378.38, -326.66 -388.81, -329.31 -396.19" stroke="#000000" stroke-width="1" fill="none"></path></g></g></svg>
\ No newline at end of file
diff --git a/documentation/dev/src/explore/design/overview/root-workspace-cargo-udeps.png b/documentation/dev/src/explore/design/overview/root-workspace-cargo-udeps.png
new file mode 100644
index 0000000000000000000000000000000000000000..6eef3287eb645fb3893efc6102cae33690fa959f
GIT binary patch
literal 34486
zcmce;c{tba+BW({B$>w$LK+YeDpEob$~>epN61hpB$OdTlnPM@nF*mJ^Vq1&qQOw+
ziYO}do_D|X?DtvEexBpl``E|3TYs#zitqRS-1mK5*Lj`id40n54rwqk@G(#*6s7~3
z>IM`FbtQ#D^_ZRxe<N(V-yZ)(XQQp5PWeOrbEhinI)$>1azK5rk@xMXcRpquqkVMK
zN~`XKm78`tHJR)0GCz6Dd<7RLC#R%YZAeo_R(D2(pu_`XiSK5Qdz&(n)623%x@k}G
zsME5Ee|cOs6wOuoC2aMRxcT2fSBy9{1J`q0`+fG<QI`$Z9lq$;Uu$`Ki#LH)H82c+
zo@hSJU{+DbpS#*8y~D}hWET&LiQrGV{RS_Jh=`&4KrPkXtSoJTAkXZ5k=*W{o;MWf
z*kV<ig$mf%#*=Gm_H-9oN_#Q33-8&pM_oe$KNzM}Ct9hl?pq)F#QW#(-`WBf!k*R1
z;BVB`?K*p>+1c3GV)Shrw8>xWk7AO>|7YWsSNK2k1LiZMDFHUW=EjT4%3>86*z{MC
z2i26gcl6Q4yLZ>y+1Z__I^*hkZ*PyNNJDdId{|g%eODLPp+kohbCl2j^4t`w6;z<1
z9{P8kj!};uJ?eH|UjF^n?7m%iT{nKm;qN|)zst~$XJuo%@TSCJ_}i!FW)EEj__n)V
zW)t0i<AkKNMP=CM{)fYLK`Y0)3;1^_D;v)rtMcV~9xvU}A{21>$U|3Q5fN$y1%)+=
zo|`7R@-E?DL=_eJxVgF4>^!}GqNmVOM9rs+YTceaLbnuL>ccrU4h#($Oz0bW$v-P`
z`>>mlUx86Kc~@OunX|R4D|0|V0HwC!5(^s_S4d~4QRT0Ryfrd5E1Y`n75JXi(qb4I
z8hV|!Z}pQ;r!NaCSG@Jjc&V%2d_e;{!}vOaOJZPZ>auk80(Eh5@xsFJl~cF__&L5m
z%kwqHvj=bQVEbA%FL?3d#n)Bm&N8mqO0f)BlBZaG?kgK>OJ8aKDmkjR*Uaz78;5ew
zarO;5Hv{M2pJS<6`oVAi;^u{I4y{&wrH=Fr4B|1Vm3@?}v9Ywey1I8WGi@e1bIQFY
zxvT#y|7PgY)1bc~sO(FjOmr7SR(MTPZ*oct!dKouuFWX4d#!f7)V{1kNz(&$G?dy>
z#}4<;eeyafy9K^abT!=AZ1}9ew0lwAZv6^e(%UI1DbusFd9Q8j>+4rgijP0%?vgV#
zHMMqd$f`K@(Dj&%L+jeE&p$r*M?U=Ef5hwe?5jNism<l5hF4J7czJUY6>OUssZ{Ka
zA9p-@?rr(0`j(a~b$e-P=~TT<a`#fhheq!hi5qvkefO>@^WZI`hr>fdD=0D^BRVnh
z@f-ORPHt*V*;81tM^3Kvh3~{jkNR;7OObXVX2*3RvU(;aCX~SCrQdm(USqGRE-?!j
zyo-*C3aP46rVLCEHn6I$+qhA~D`3WSQTf~tqk|sq?i9)nC8fN~GUpy<iu=+pug)Ap
z4HsN4H`_P&kDC=)?G1lXShK8%6P9Z=<qcT+&NDPT+?Hdwv8%#+aY3fSbNu4e)XAH-
zZZ-7v@wvLXnpJpi#)XtQ|7%Wm(%92qB-ek(-3wt)RGxm4uHJX#@Zr=ucT~LkpFUmn
z^yyQ2X6ELQ)jU?t&Z~tRSIgVUCODZ#@yT0cJ6l^*-8rFiZ$p`Gl48i>qMw{&`(5=U
zhxqgZ)z8YkXGAFLBqbRw{eOy8FMiXOG%uysh+bQV?Osh;m}wE3S(ve%`}#rX-Q%+w
z_V)IV<RujMuo+8}P|YnaD%!gE?YXqsgOj|tb6PI;Ui!b=(lsa%(GMn+EKhm(FD}gR
ze359Cx2mNat?*oReZ#@}V%wJ28QMI!RdI()(q=jIzt<zSvlA81OsyGtdF#^5a7()V
z%f#GqW95sv2R;u8v%X@s++^aHN#`;uAt||nQtHyr%@9IeV*gUr!XkNMS?TN-tFByQ
z?{6Ph#ceqfO2@=Ea7oBt#lV1FaF73GdFSpM=DHm}!?S<cAhFrq`SPKLg(B8m$<4`0
zVisv5+c=M#Vl_MRa?_?wDypi~hp<mCUg#XH^k#E(bR<U;n3j3ZY^Eq=baJ;+N=08>
zT-@~Zw2F=nBhoR2;^X79!{^)jU{+0CT~mSZEz&6)EPThsDEpY^-dCNoc61Dvv27k0
zs0$kF%HuI9w1^4|qhCQsXEXLXRZl$jro1zE^6v8t6eMtUWDp~-<ihx*q+?$e)s2=m
z*EcjYWH9elx0^~!Pp6_#QBm=|tU;K(NZBLABDhQC%o&Bg2QHDv9-pypOWXJ4#S3*;
zSE;ve-x`f}AH(%oyLRpCd&iVczN_Fu!pgR+*^LlB_qO^x|C={&ib_fv1{*HTygU10
zusMp~dtq7_;U84u@M`FmTXh}oF)~0%w186m`}cyT_l|BZIisRNK`@;fdo3(^w4Bws
zJHIYj`J7&%h4Rryr)iPpE<Ap$^yTd%LszDQa!#G}&OP_`+1agEJp1Lc>${4&rY2fm
zUf!^%s3v?}vBN9D7fDJyNs3-LN5PR72|-N!3U}_@p(vf1Qo-L&{~S$GOVz8?dsSDL
z`u3;S%rEvB|EIwU)xSkV(oS<(zP(c})1HabzhhD5n;WTbXK!EaPPOV-ceUS7S%%fC
z+cFRGA4Mw1ry3=V?>+MK+w82qo*qS8`#F2Z*Y{PCSY6%w_ZD83GYRYP^QUHp_VvDb
z6I;HnY4*EZf~CN*>S}c(FLKl80-0h?4Nq#@_3j{bC@kzUpY*Q1D5UYp$<Z;Q8o5bs
z3)6!L!EQAX5n21cb?j^6cDHC`+p^h&gc;k2u-xdc>ru_m$|mUK@jP88BF@tCf4<<p
zxC;<rCoU^1TjcsqF~{~QGs@~6pV<+EmUU^eMF14-*MnBF*dST^9!(Psl%jBtpGwGl
z{_2(P1iiWu9pH$jz(rJE4SK6O>UENg)6*0MH@c6$g0<YNtQ3mk&a|YYq^Ps+eO78*
z5p245?D6X|XVU`dps>rU$>Z_HXj};tRtvncn|k+>QKG$Rk5gA}NWqbZY1sU+{)f`c
z0*dRs){Drx@Bg8oscu(sD0#(uKMqYT>N8W6`i7{&H(TrxKQ8?bQt|m?J%vKdg1a`6
zU$EN$%LKn*E+R^`apS<J_((p>?b~xW#x0Qaiu{*+=O(&V3knMEa32Wlj6u#M;3>EH
z^y$F)4=eXyrY!%|T_;R+qR(i;=$Eav^@Xvq6Xg{Z_3z#Z95`^mKtn`?rtzPw6d>q5
zxy7nu!#afrW0#!lH+X*$SiO3+`o4XsZXc?rzkRDm0NjmMSEmaFT+){>b>e!Rv05*k
z&iy0(!1!h_5l1eeae*D~>nbZNi_Xov=7_SzP+21p8AW{@p-xRz(PQ0ua!rw&(CggT
z&K$-d)zsQ&u@ZxmlaUu_R*DD<@14LyuG5)*E-jKaG2SP?ZXIREIqt9s@m~Sr2U7K{
znj*NaKL144@$38K;HOVwUXmi$)?C5X8`JbZph{BY5*SqWzx~qHhm(OePTDdWK&Ro=
zE0+ETF4q;=*$!OBs$-}mHf>T&xp#$S;FGaj@L*Zb-9yJ0@bmsZKk+aMD02Gy`}bCO
z<zXK+RoAxTLfyG%xsr}WN-^!Rr(>IAi>61@!NQCTvBjk&z#3X1A)y>wwwM(JQ1aeU
zJ{RGC!{niq`xxu%;`Gk&+>Hr3p-quIY8$Q#<7xEu^{2mok5%Mm8|S?w$I8abDjZl?
zBUNEw{`GxrTl&c}d0HE;4+4rct`zd406pbfRGDK_?mFzMM>=2k<^2}LcV-6c(@XRF
zvV=rGUwvw4R_-qR@$^nrRaI1fHcrm<*pqe+f@0~czFJs4xtb=CA-0j{*xsm8?G4wh
zmluCx3ke7dW<?0ck)=@aTZo9PT16#!lwK!^!P}c6VM6&nsh#x9`>CDm;&cN`QE6%8
zlNixdBXJqO1$Xke(d33*d(v>JHe0_oZcFXlc&$SU)#>H?s?sB^Nvj_njn+@!znA_3
zaHpm~c$jAx+4&XGLMs=SBBj>H+*q+==Ygarb1xFMX>`qveEeuN)muz1q5!!qcOCXD
zt<bqm@A;KE--1cbQN&B6L;l>4lLRIT?Ge<<U>|O|5|0BNr;wE^`z)^U{&RkvI0OA^
z8lXkJO?MCN8yp-Ys}5epu`ak46_SKeyDZrj->Zq{IQQ-?>!g^+dUPk$=kgspGWTL{
zh8n|3iP~{0jU0X?InnXnXR)=t#kIHGs2`u9dYO12DNKulo0|@&df@toy*WlU?RWo1
z(3Zao7u8Ln?#QHy<fh`-Kmn+@c}qJf;@LK@2_vjsCwUhy3emz`hi(R9$h;OYr@2zf
zLXxlkY8;)q2F=%M2``cMPQb=>@y}(BKjQ@Ge{oYr4Hv*@A4#*NCui-gK57nhnO@-7
z7_qF~_59=0>!jqK8g9mBCv0=j!e-|NlFg3e<fLq~2P9^?EO^Y5wv106yAmsJ|6&c<
zMgb*mY$H&`HEg3XJK4r9n!32=bpC$?%u49i$f{4M-(^bIxDu+fq2Z>C?bYkog9@vE
zX_2-3ZC!FVDR9%TqB?mpFlgn4fJHU@_`$C=e|F<8?!O|)B)p<}`Ij>ifIjWtWOTP)
zb>PDvsb8xZS1kXwtbL|6VRUyjf$x%Awy?3YQ)d{2BT*l~^2N#W^~K&+AM5;>d6)7b
zfF(FoD=Ey=rMJi$R~MHy)F7wv?1%Fk$&s>R>1hmFhv-6?X`Onc`057+)?pLlh1Eh;
z5ObS0(eK>3)2z~41{krkI82j?Tme4zw6Pbfh0d?=_Mv(nPxF1^KvKk<3(5lGiJp-$
z7@r^T=eMOol{#HC*-N@F?`SRtU0Cq8YQ3E%ksUM6Ku;fdRH~)Ox&gO~{3!MmVWH29
zI}`Ktu{T?}zkvIx%}dMa8j<%BP0Z*QfA9~CW%qd7-C0~*)GM(+xbX8Qxjp0U87F<i
z89C>5L}VSc#J2i*>#Loc9f_R(GsiLT(a1Bm`1-~-Qw9etEG+Q&G$!|shR=Tp5Yoyp
zAP2#Khp}ttmZoDCpcdRkSzL^W9->CSwU6i8n(fRBA33ZhP44WaaV)uJ&9~KxqTtdO
z+?8i4x?{(hkS2aKyNm?<U-$a%)Mj=x<Nny!fxQDIuOgy<{QAu!U!`=0;_7O!QA}-g
zd_4T%Erq(C9v<8o3DbMQ3A}r3!Wn6icw-K*Q?HR{*tBWkoVm5|*0WO6-A8IA3MpB-
zC2z!D`gnU=J&)Vcd;j=qY8o2->8a*@>emP`-Qf82TH5UR$Y}g+sq0jyecF1D@$)<0
z*Z<!2(9N>3smc2HkI!oqPOkZu@}V|NlO1>e$;ZW?4sDM-wSS+FVcn<UOG6!4xogQG
zeWK8`&;lT9-_fIMZ&`@Y)FG&JB3Ntp{P{T)^UmSZC=ZhZ{pt=&DOo9WF6#KHGhg0@
z?+v08-oD)wd$@r#)Co5%{i)7;m8uz}jcEQMb(H>O@12yRc7C^$lGu27E`9uH$H*r~
zubZ+vRuRYh_W?vjY3P}@C_X#F?J=A7$NUISQc8T5NP4dO>C^Q9kyo!>tIv$rR3$67
zyDV_zSNYqa4QTO5KKA(X{_#plOXl*5iS44iDPP__7L}1<L(g@wuu%H+>C-5%cBjcj
zIuLewg^kU!;4{6qANzVK!!0!D=Beku^1K}VyfnY0-uL>EheKma%P<<W>6sbp4(+=R
z(zm&ku(aRvU*knq$J9BtrO~eS^4{{sYqEPhUtwv>wvr7SbiZ5~dRVzX>i*zIPbRmj
z*bAnXRV)dN<)>)OYAm)t@A-a4y{iG3I6OFbMfu~$4ILdEGBPsR#~wdG0O?%R3A>j5
zrn}(CB@|cf*h^3D;3^9%Dw;cdKQ5cl>eNx9KvP6hBv?yF$(pP1Jm#hmx(St!@<-n^
z@%lzx0y=*D_|aQ^UJ1vgylJ-LQd*^@7i$8kc2$xa#l^L{)?WRg?7QM<4RfjUO3JoB
ze{=)p$^`Vkqj}6?*YJJl_JP^i*%{%+D<6$2ly2{G?ClggFZ0TkE7xz_NLY{Qxw%HL
z4#=|qB6<$g0@R0xhihtU+hUt+Niq#v-;QQs{e}%gAUek0J>HI9Ng637onRr<e!#FM
zz#aG5k%QQUVKg@=P_l~u{Hnc!!=)QHIN#O;q#TrvVJ6s8M3(a4!2?Y#t$JY0fsv8W
z$7jDH9X~no&p)zmIr5+ppEo@{s5UqChGV<i`y04T`c?=Bbvu=aTkk6?cg=ly$B$My
z9`z?znPe(;x{+8@3(|L7O-Z?eH4LMfz&difNp%9?;ND_x1$d9dh&+-@$}C&V1*i7A
ztJpa>zzYmaOhiof6lx2EXPc9It8Q0&^5n^@H*eAd*P($Q#<P=;JM+)S?cnF<pL$=#
z-`(Bqe(Ka-bMr*960W~1=~BUW7X#IG798P`l9DPv{V9U{E7SFp-hUQnbqVW%4KUDX
zi)>GJv$x;S_t5PMmxPHX&*jU>IiH=?D}J<VOABw_yef)+duU#sgp_5qq}Td1r@Hn0
z?~zs3;a|`&{qy^U2?@NQEW*GeY2UnjIf?xKFTEQ7^SJK~&CPW8?%e~aa}D3mn0!E?
z68{&0cTF+${f!@f1qB7gXTMrh{{HfqJwwFM%IP;>Om%0TDa+$CpH(|D529yhxvhN8
z1kq3Tay`EVW$X6s@roBM68dJv8tPoGXl{?VWpU)lk-vwrICF`7NnC<L1nKEP5#Cfj
zr+}qjMo6Wi?gC1whqFA6<dL>P((;}jpaMxm2PUcOJTg-gZsQDyg<%8?I;p^@D5g+m
zfe7F46NC7du;}Q}kPzDa(QCs2Xyc#gS-4Q1yn59F`o?{zaaDPFxk`>v_4KaJuGi!~
z|2vBo?9>~WIXOPwk%f<G77AFT($LVD``piihCTPUhlfYZ?c0};y5yX@8POY@cvs;S
zlI%7ki}Qbt7-IPJytTC%@cY#7A1=N9{pm<=*@junsNX1+&6DC{Vzj4DojQRG@~@Ik
zDMzdMz@?ucpPZ56k`gs{_pKkl)%=Mvd0)|Or);9}wnr{6^{K+$@iQjfXSD3-31udr
zbfy*=1hmdxunmfyW9tA=8$^>ixVXf$5{_nq9<a8x6{UosPl`ZZ4yU4_qo>%QL(csP
z&TgIW+?eo}_f-;SKKF@XVG9#^g~6<<`G-sP%elQjN?~nlYZKYA!weJyLx`#{_>lG2
zu3vAuDy*h%Wp!ukwrw0szvrb1!>39=1Cp*SOIL_ZP*4h4ccV$ZnXsg!WUy-L>+;YI
zQb)_zRaR9swI(SwJ-xnh9}=?qWOu<nd;84h$#KL<8k#a2kXs_k%E#u$J2VKJDFRxI
zUReFmp=iVl8`?asgoFeekQ4QNedZ^7i}ntW-bzdq_w(~xL7Dm9V`)0(Rp~Jrjl(E2
zE4B%F^87i6wY4>MAXs6p*49?O!aT4{#ZFz~GasLF6x+Y#(|>%%ML3u>l6wm~C<<=W
zVXi!r0(R^wC(fBV(0l&fS$XrG#C#0`DOQ2}jv6gwq=+Doru#f9bc|EG?xNZ8IY0k%
zsmNTh0Ivwr?BLh0V9Ku(vTJZ?NCf<_<e`+^TfEka1Q~t`HKk8=@_lgfO<Ll}#Dw_N
z)RfUfY=Z<2B)#HY_4#u%Kl<JBTmzPWj!Ji~+3tFqvQ9)))VKqjsxT!iKVLG+`3nE`
zLkkNFH=ay=`eZz$W_FB;WY9>gJ;$}BsVVlve!0WCU%q^?nHy`%?MH1<-Ech|Nr6E)
z7)Adkm}a~1PrvXq+S=MN$;tO-W~<QShKHJV_<>Vn_nPQr`}y;yD!p)fM+YrsW;`p!
zsUlGw^w^2sqSPG$%au;%Cr)Hf{8`B&D4Cz1-v}O3JeYL|nX|sFO`pOF7R9I_>sxnO
z#rN!0YR=vv1J#+1Oa~tbd(CF|AChkhji37jVh;SfaphdQ_7JE|gBD@gP3F7}&c~0_
zQpP^i)Hs>rerPSqDk&&%+qOj0Qr7PFTcb+<@#DwrFVf566%MbGa~+wPnJE+p2Zydw
z++He`@aGb7rbrU~pb!%#oPXP3VZP!06W?-Xdn^NlckdQN-EP^K;&)MZ_s<m|y~o-!
zR^Klvp~|+@jh8y6>gTr$7s+Uz1ync6M=;<5{*_HYAZq)2@ASs|Eztszw{NeNw5&cl
zJbI+>4X>P!04RSmn=w`JeKUQ|g~DQD`x@kp<uW_|Azi~FBWWl|+*Rt)ie3}zwr*vm
zye@S#^!xcKo)WmQ;9WLp{?0eQwY<>LMD3lz-KV=I9vF(K*&aV0a`WaI&#7KE)PWto
zbJ-+ChMMrbcW8OcRdWBn>fXH<CLUbB&Vf*5Tf6r1Y96WD?rv_?FdnB}N_3_Bqxhsd
z%ZiF9dUA*KuJ?gHSM~SbohavMkk&eDd$8-1;pk9PBqb1`KvuE<(zW<Ro5<Ror&oON
z|Lqb!x{&YCW9Z~09>{a64W~*&`SJC`6_C9*6Jpo;z5^3RsRcVM=U<(1yzQfx=b1Cj
zD9P>%)7F%I!{Kgk%P(yI;5Qr65;saZZS<0DH30#hU*8KLc0Y|kK9HcSLyUi}FnK7L
zx9y8_V$UWw{&r76ul~(3sX5?JNfJbwXaJT0eH?=_zV~(iVLs6Mpe|~xkRyn!@SVnl
zr%VV)=FS*>zSJz&_LVJHgaQVuhZrX~|J-pJZB3GXdv>%{fRmdWqJm#aSGK+=B@hf#
z^Q(vVH|QoaBAA0iLWG5dsf5nYu~nS?8Vyy)G`lsBipG7sT}&rYZWWGP4Ohb`%E=L#
zOfP*uQ{}g35>7q1+9#$ipSR3A8RhU$_~2;H7|F-~JstIQJY!J#zw*&s%DILKeNC3n
zxdX(0<_}q3aTc`%jRHuq)wx^p<EKwKnI9f`tVAwq&)$zaEgb*6*ri{=*kd*4QsN@?
z7w_Sy?PNVu{SONpha+y4Lqs_-=a+|APd!{>zc6@72!w@<|Drc3>ByGDd_*|9fBgAt
zv@C>mAW#a2L$YAkE>x#2GdGdTMYnI~K`AqQe6OoO!`7bRcVedev;I)I1dZ#TZ#>aE
zY;JCYbrJyxCG`@{9T-g0z}(UM>pxWZYbcoiFQ{<)Y_l2DoL4>P&+PUp+p=W~c!dkN
z`#@$C%C~Rd+`oN1Kreh1NlMYi%wo3}6tXM-Bmd1;kOi(^c>VenS-L5qkSu}GioIuS
zUfkZvK`Qjyav7phm6pm)y(!T!wnflR&(CWVT2#G8-Yt9e5PWgx_W~&)ir?(Z?V5)U
z?f3OneDU&S42Wy+*l`P5#{|$vV|Q~BnE(bLN6|@F&0TX!Mp{1l_{>4CbGR=gsk}cw
zf9(H??VMNoN7E}^vdw&EPux#p=e^G-?_{;~>pLOvckS9mlvnie2S$Hd78}n=GTqwl
z#tocD9Z1i_#08@KZ`CU>FJq@PB2hM>jH|o57XgsgH#hH>JbYiBkUZ#*XWBG`!a?1K
zg9Sx_2eC1<_;Z$kCPI(m;}{s&u`k?!>Gf~k9Hy{>h#SI2sM71Zq6C)v%x2<LFHzQ^
zBsJDvpxLLVca=0H3%Dx#_U{kv<4c4p5>CHr6&qy*ML+A1Kv%*FxnsJA4<D}Y=`l5K
z=<YT_3Q;e+Twkv`Qy8$MO_I9}L;xGm*&^%K-Dw<u(5d`^W+ihNWE%!b+WFr<M5Lq+
z4g29jaX?xz`XsS@?o+I3BbZ+~b6pmp-J}MmA^qD7)`y7f+I95u+MNc7ja4XQ@=iC3
z?%!v}r*Q%sa)U|XMu8XEvgKg9*Op+t$Aa?KCmb9Hs()<vl9ziE+W9iwRjG^fRlAD8
z^OrBVcJJO@|LmC-xd#(F)URD#rT^_*EfA+QNDA|x40Aqbzr5XB<@?=e<C9k}Uib~(
z{ZWyB{^z&j$R%m0v*o_u#R+^yQa_>m0czGM$dUkgroU8IZg+iKo1}CWSL(Mxp;0V&
z9gopgNCVZ4Jr%DE=KUmWca6Dg|A!X<_W(jsAW=HrJ-)P2H+h|e#A=7u+jXH#{90>$
zq#_soSa$Moa}Ob7Lt>Gbp(!CMD^WGf&G}lBm5FAWW}Ih|(QWV$A=~n8;rDME3iKU`
zx$+3`EnyK6swyghCX<Qv)g~7Iy9#Gq`TT^H%dG?0<NA(ORQo<WVC0ou0SI4@?2EQW
z9IaYa`?J_?x$ldn7y5otx=VkQn2JgiTxd6`Muo}o*EBi&GErU)B-gXb5AE&WLOQN1
z>%N;U$cAQX4Xv)+e@XGhix;nptkt^CL%B3fdbC#{K_~N<c|~Q3l9`#=9mA|cpZ;04
zhV5}=nhoTno+WB@e7=#bSX5M`;^`@OF(?R3^k0@AIm)0#q@BMiDdR$J&D@+Pk)G=6
z>J~mf{D2e@jlWsHe!UfFM#x}sCUMsxeA)PI*5BuNpXrp6sA0O^rlQlIv=)B-vKgqo
zpsA^O6_w@v*mrt2uN(h&>17-gz72{<Sa^6HA?V`bLP7A{v8?f503srC_V(@D%e<bM
zn*uBm(GtaYW~@z=WE&yBX(}i-_ad{@==SsD!TMPrZj(I@p;5vlAI#RJ_;?=pDOeyb
zqt70{ZP7AqqM;hbo>eU#iO!6ZAw&=jx3GFR$2@<3MQ*FDcb=$izkkFjM(LJmzBB~b
zWj=WZu-&A?dHC=l@PTK9hIbdv+g$lnjf~H-xbaa_Bd)jpVRIU(dIRy6{ul7ADJep@
z7&7hyD&$)k2`eeWVf1#++}ynW!v`V4p3lt0D00Uf`lqMXmrY<qaDk#!gI1T8mNqas
zDS`H);IFI0uHH_T*MD(_Bp+NlQCV5`hES&K_;nLPNs7px!mk6G1Rs^s(A1>Ap(O@c
z_ipsQyYhc~e0u!r?C7gY;Q7MXMArpx{ng=eZTxS$u1<+ad@tm`K!xxpz6ktkD2tGk
zmt>mwKYkK1qV=7Av^ITz)J0Zm(fyZklKQcK7oEoQUrFy=xxf9jQ7)>xnpC_kt|n;c
zHGBLkv2EfZq@`zz-MnkpT728ht=81RtR&@u+%U5KyU^^TXljE%(>8{45Q_fPkUH|U
zG5z1|Gk}WBh$jJXz6e_L?t$x|m+ltJ^1mmP(6*JvqN?;y>|}Fn$BypbPe<;=?5DLm
z+*L-OuNxn8RuXBy1lC0rtNQlw=@T@JSt<S`@}h3MnT9xJma;R9@fv&$t?@De%YM*l
zEk~crNFJidQWiIRu~xlqIir48+AvcGr`rNvn^b(57_hvY@2U;_tgL~7ft1bn_2jR{
z$6s=*S>i@2+J~nh9|l8*%7j^09INUCeQY~~Hi9>6pxS2${+Y;w?m*eIF+4X5ty)8S
zJ3F|EAXaLaIzX!EKl*5}^3Ft4eLZ!G|F6xkJ+Pn}dY?N-02?Cax+35BQ}|@u=f?D5
zSjjiI5-?3E_vP{)X8JrLBC^D{&7Lt%^6<s!X?H)gIl%tnJp2~9H*iaNV`5@7b#xkE
z-r7DmFcA3qwcalOMYD+$cWrBye@OtsQk$CgXelp$JfK34Q*E|Fivx?!Dq=>tvS-;0
z$^1BY7-QHXE^=%<G(e<h7+r{Lt-gQ1mO!NE3FNFbvUc>G4Er(!1BO<-E|+<opAZ+s
z7PJ0+z@PJrP!DcycM}9fu)%9>nnCekhWNH^+tf~+*aSp+3C*+B$0sp_&T75AQ`AeY
z{l2J1^gw;c>R7>63a5PKt_p3tqwmjsRn9;9i2cL)d4bGBN#gh*9$d}>cqLZFNgay-
zY7$XW;&-e5xj|J;Z3c{0PI^onv?#v9svmr?ZWxU7;p$!P00BNdSy*EORq3Ye@rce-
zVOj_f^B$bm)Q1|OJ%1Lrb>|;W=Nr*G_DWM_AL0b@&9-KZ+4$_%J?9y2?_u$rxckXn
z-HtVsk@q6(J6woIqoleEPgt>@BtrMET2=RRb`&k^6(~xB1rOiXL&eXC-XB{EN&RSz
zf0b4PZ|iNUZ?kWv*?CLAN!r35LDF4ww7~Psct&b|i}oBqLEE~Ev^!3Jqz7Ia92*O}
zdX@EU)j89N6GnUax4WvN2eEQ+FdgSx$xQh^LFLkSLS_vc?`_;pFb#CLb*pzT4A4N2
z$q|hSM~wT<H1oTy14TltQ$z@U;BrLLtcVg45@Mhs%0w?*<TYu69@r?Sb$KZ;U3A^3
z5=%_m?Cfj<`W2XYo@U0cZ@D7S&X0P-H!=+)mnh;CxV8Z%96X&9X(z#k8M}JVf2OH8
z+hedvMArM?RRA!L2cW}_AH%QBbeUQXf|NMXnaxI6Yj*aFFmoLch<aA!Gi%@5*H;f)
zLAyMZ%=QjQ$Xn<{MS`}&r*g8dbXA^d;eIeVHE%=@VAG!x#|b**M%YWH``v0%0oh>Z
zp<-ZQxEsCy)tgG6ZGPv^=NXBL(Qe&FQ*ovXE6Y`YbEnv`W5da}<!pQR?)6LAScOfx
zHv8j8Gm;t`A72C{wp0M6uqzPmxR+{h%mZ&9d6@II4h&E_?-gWNsT&n||C?O!X6~8=
z@^p}1p2?QrOP7X`HbI-n|N8NHwVRt8fwL1QHn<_9qw5#0Fg-Q>d3kAVtBTzOej>8G
ze8<TT)dKieHGlu=&X_0ac(kBzqy^rvfeu^KgmqcFxm`hwe;gUn5_oCn^5#D6*UDLD
zKmze&!%sN|IyWy{MTG^zZy}5(A6(c;-ve&F)7SQX!(%~fn||YvGWhswwHiQm@%i6p
za%>ZB<C6!Wl&;y~ZWTMRb{9P-C%!jL<lm)-nLBjjYxyl?%}7F5aN#pBG$f>oZEYac
z^p79=3G3RX(~NMowzm)6eg2nG&LJC-={{$bNN#h6{pwxGB_-RCK(2z{17S7@1`rgk
zC`^Nl7HAo8yTXC^M7L~V`m@lohgDFpb-RXg?<0?mpwce%882qM`SMPM>zuuatSan<
zy~VaG4Gau==dK+>E^$gf`}O^fQ$s6=J&H78Cpt2j!0%*4A1_7pva_?lF0`x>?|S--
zI$H2J2<42)_f<;hV~9%zz=iN(6Pk9xz0(F|Hm<JGNRYRc&N73aC6Y6&I_^KF4(Fuv
zCIJr*feC@LDBB?E?s^sa+Ad?IM`2GEJiol<otc^0;kU3AWCxbjO_r5|`xAEgvO0WD
zaNGpJv?MGi$9Y9i`I=&48+il^|DVS_;W5(kf{vs3$2EOF*{c2HFSmbC2k!_;hH$cf
zleMOuI$2(KiG%L#9jB;Ck|I7OP&C4T2&(is>Bi>}0Jw82?PL$fPE5ULDu3j-nUjsT
z3_J_Xs#UAtMcnfmoPk0W!3sHj*Z+5(8XdiaJXHtlX3d}F$DM7t1u$13n9H9HpLKh`
z!oYB+e|(r+4}5Tdf+Pf>Reaoj_Ab{+@24l^PQ0e9=9a8YRl7tAB3O#QNR4onUZqYO
zK?G0+Oq$e6R;>zrv~yr8KgtMp7r36tO+X?dN)4J4R6t`7eB2n>O0-uE@JsGF`M#2u
zC>j@OSEC4LM4Pw}o*%BMy+HVRMjLkvx_hq3&oAY?9HtWr+aHcJCN8cyGd>rl30|?k
zX^M5tny}4=nFAyVBhlhRwyYNut1FAhGS(BnAfN<ZqLectOy>|#IwXcLB$hN>r}BV5
zh`{Vb)V=)$h$fOv$a#R>+p%*eX@}9w#3&}#Rrt&j7M4VuaZYQhTJ7h4RGDli;`u@Z
z4Ztwxe)jC>R{2~`g-SbiJUB^E*0}Sa3JjP$Hc3j}z{eaobSP9cRb~FmJ3N;H_lYis
z6CW(J>%yMcL45}UMmzuYgF}aQESz{c5f2}3NB9xUq;pGwiR_1#Rzznpqt@xuvT$-S
zIy+B_cZG*jRaX9ewB&+<E!VDHBX-a7M~?<^G6ouHPQgsl32P-JBuJVfNf>3+_s3K?
zs>ff;^2>E(o0^(3wB%)F13R*I-o6l4ns@wpTyKd3i{Ng*FvvSYcsTM=7x7V&N=iyZ
z1ch`;D%RBJ&tR;hArIxG9~eQ<>^S@7Xl%>Z=0-}H6P2-~n)=+uq0H^l(kxZy=9^JO
z#k|(%(jiej{a5}ao%bNh^nQ}4JVp=Y*p9!2k82ujiOs8I<tNXcQBTh=9NV~YW7MS1
zEjqtH%F|O+-+Ft~^76X!b3S*ft3%SQyoe(p#0mKlkhOr3f5mY)%(VZNp55Ku0}(VJ
z(NBG!FwRLIf6<YpD+BWbQBuB7_Go}Dnnt0j0~fgQ(Cya?6Z<#nQX6Jcwj7~$t6_SP
zV(Gcln%QBiz@MM50`#DknjJZkw`^l$!^**7Xk$*wT*{wCjfrx*_2#7~E?^-5rt$rA
zQb!(K9<|>11iHWkt9s}L#0jufV@kksIKb-=f_48vQKh3k!_7>UaBzxcKK*nWJpr8T
zun3UFL`O%jQ92`e`+51%#4LxlpJ%94Hqd<hmgX^CJwwOJPR%I5Am^Gt>7ZtyrK_uf
zV@drotPZ2jt37|z!OYLyxU5qL-sAP-vu^m%buu!=jlcWg_l`z&$v^Utd3kx6J#&4|
z4864eW|9LZIyaLB;X}Y5We6gHH*elNEkr2bLBPQ_!}?t3zS7N3o!R;}i3U09d-qaN
zROl~&6(TW=uaPQ=R-KvuvziLc{?fyRCvGM7ftOYmok~BHOvf7tL9n*1jqSA08==kU
zK=5cM&m5TiH_<PzI^otf2WGe}g|9y)RV-&|B`@!G;ML=h!ZZLS5fW|0Q@wTvD|mWx
z-*RSogjQLU*heK_H;G2CfeT^qWfvMo?DEb;1sC%}N$pt@vHM=SSqm3An?7y6d07{r
z!tS^W<HGNWOX?L?wzfeiOT_I9()i}?^NRMZw^!X66Z$&(Dy0QrWBs86218frM3}_H
z#H{OsSCzXDQa4}b4I&0`NFQ{RyLa#20j<ueTKo3xT4LjVPsm`7y!lq8uU4RFG6a9>
zZQ?x9mCFU%VYP?H>}e;;k%w_rj3LypLco2}z28RG2*H0TDk^G%|KjLoPKLLYJ{<7S
zwR_0Qia`_$y>*M<|IgAqdj}T-E#mGJ4Ev}5SnqF+geP6@<9`d<(~62%ZJRG&82ZCv
zEgq{yeCR#*j$PkT?CXMKEJ@fRDcP8|FY=konG4^pyiL4C12g&HpfUY{F4Z79rq|G;
zZYCsDxu>Y62IAXq9czCCk4DYwDe8S+9SG5afEUx;7YbL5J}rKd)IIbtYAL(VZ?$Qp
zpPTGq$;rvd6+Hsz={+~bM(nUzzh#ZcMfu+Id}L$<j<b+UmoD}39cSv7+eTyIyDBVt
z#GTF%W-+6QAM1lvK-aAZe{zGET>t#)sd-juiI%+N)$R^k5!v;~z~ZS;{2*7++SuBn
z&ZqM1{s<&XVtmDl6?ck?^wMrjRQ>34&hV$*f(x}Y|H0WPJwMcRg9P71ZIEwf1Dqo;
z<Vx#+kZ4i*S}D3$M3xJEdo#MxXG_-R*4Dz&LVKbCG{<Fld5NjTxaidIe&Dh=UrQ(h
zKpk6M`s9?e+K)OqSmKOt?)2Dj(#1tSS2#X?xuGmW`$B6{-kPhqT*$#_^WxXg*#bGi
zr>P<TK1s75(A6D_?F5wX6qbJ_yKP$o{wJcAaTUKsnilb5W1`>>CP!*s_yFZp|Nf#R
zd91?d%2T->@X_o-tOz9Onnl^&zs5wPoOcBT_q<?SqlA$NgCm=B6jw1aO3oDQ#Z$oK
zBBrC>-1_1LBZ^W3n#398*ffB&+z&>(3gPyJBn1p}&0p=>NAEbgG%YmxWD?_0@~)h=
ztF&n32pQhgv~PY;JR1c<%-`xESaok7AIHUoS`~~oq;*_PxJARvtfIv*{dTkeQ+3dd
zsX!dL65kIWIWi2;D1Cc2LDMvOm(N9nCa>lB?_91Qs+qvmq%y<OJ_fi3B1lZ<z^`UZ
z$HWqw=56J3+_*jHwlboR_{FhOgMB3GdNRAyJzxe(MDyVv0GX;UrKacEEJCP@9?&Vc
z*pxUwgXF*pV=T00`1oy66p?BQp}4UE@PpH}{`u__f$WfNC$im;$NYbP-ahs2vFXs=
zySn6F)uA5;hN}mdF7o?%`ZDh}I=8CtaTDJfPCu$_)Vly4X_wEp6+kFzP+^UVwT<6S
zQ`cS-(-vP`7}8#~THjN?Y~*EGLUeArbweo5PY6`k8b#0S&Qnj+6K>rS1-3wT(J(UN
zT%NfSK-r{NFLC`TYwN67A}6d*?P(V;Qs2$Y90ISr1DAk#?T$b&2<xP!b>(#doIq7v
zmTkE?-j5J0wrOG@{ao(OpI-wW3vYhPEiAlI#;TT9VEy6|@Rx@s?CtEV{wyyk=D0-Q
z4v}_|Qrn)P{k-bqR%0E2k+!26e{$tQmIlC*_m-bZpNL4X^qZlzIQLmT+%QJ5ulEZ&
z*l{y#)AGVF%UY!~jIhTgcb@Vb2%zLs7~6hc^Zb6M48~?g@Gcw6!b>+p<8}BmJ?yU%
zs>R=b{^#h|b!mhP2nq_KA=n6AzOcABLv?j^ZeiEmxWvQ=JY4OE4>bl?EQ=@$qkE_%
zB;K9Zo0tr``DeK&89V{XVJgT`0}UCzU%oY4GnNKO(}fP8C4qmdi_1LDZ2g@tr>lP1
zIXJ95ckZ{=9{-`n_E!zsnFTm*1C4bCd1{xK@!!gWwc;Wird`F2C(ne3J$e0?Z^JOx
z-@ztgDF4e%iO4$P6aQ`6>;Kyi6#T@<3?@iC(eP>sgd{igWS*p@r6tZeq{Skux{Kmn
zL%s?p{mw7jJM}AL5$%cp*?E#WI$gH;2zW48XoRGPLPpRkD^F&+C1siN_U+Mvo=!2t
zesWGR-NZos^yz_#HAW283dc*z%9=oilo~`%z9}02_B{dh?S@vyA*acUIe+&}GY`Z=
zMzqTD0cS%698$sGg;aQ-N4YsmRFX+Aua`F-ocpY7DAHbmLm-1#5OtbAJ&!NOq070x
zT>~C*)6B@}nW258w<QA+4_)64jF0D3e1$#1tlDqq?&SsBw-sKi!59`57uUT`RnsTq
z3e|qRut|Whu0?OEU*o^T&igEniGpD1Ep_BLbbALC4^O<o75D5seSw)WDx`VB`A`CN
zHUvRq1`@dd9c~zE4FwRNZU9N9_kj!lhJ%U0@IV2n*0r`WgXg~bTz=1&(@-JGyof;t
z&JF-WQ7I{ARP83>R7FF0iA4zB1O}8W@9$lvGN@pk_l_Nj{c{-PE|MYe2VsFJM1O)4
z<`R6?=mNz{lb;_s<JG)G>3!%X1mz(VLIj!fB5(yo)EyR<u3ZoZTOj)qX_TV!q56Cq
zi2bMukH<aid)0kuRN!KFlyg&855~^0Q5s>$X}YsFs1Z-}&y-Y`AIAnAR%PEgR`e`f
zXkpkfyx@mL!e{^zOBCFw6eSGSc_`YF$n+tqsjT>Jc8nw0Jh>_?vURH-%5ymQo4-aL
z;b3@yE{t_#U7=@T;X(X!fLdoo8B|TxJ8@Kv1JA1(?`FzX@Tw>C2?6csHY6!w*w8M|
z41>+T=D$jlZv~uNk8!yzfLP)t1?FNXR-0g%h3(<X<F8JYk_RX9&5Bhq<)lu**dN;v
ztN^T(Mrb>Y*ot6K?`Zt9FtBqMbvuaIPB1mYsY(yYP>d1=$1S8PjKZYCJw<#K`o}SC
z7CP2joQ@vmKuC8<#c}6ck*`9rbxU_n|E`1oY#&@Tly$)5jkpC(*zzV^DOw6Ko(HoM
zXN;M-c?1Mf5Yh6vFF#wH`x|D;xnt4%+}s-|VQ|4^XFjT^U_g)4^%>ew#7>WqY*%0}
zi^`=R{ki=pT0d9Iqr=!p348KHv#XSMn>{^6L_$J)xDO;R2Wpd;>^UEwSnm!)Q#Fpz
z?rDa)o=h`8ze>tFY*0pG+^t&@umRnzQ1+g-vOa!X%zx>-sjzz6%Ix&?bjmu|KXYd8
z%jIlCn&?_t!>S5?;t+_v!7pE=pr72$<>KKnO_Mpd9Arr^yc-k<yU-%6L$3msOBz?!
zY?wU!?!Nr-6HJAyZ&{K9m|rQ}$3PU(E77Zv8Jlo09>uVQF?4ZV?5}$8;2i3VtQ8rr
zi>PLn=uTBzxmM9r(~H<fV1uLt!q%lme*vzIuB9eRq3=^~4CP<e)uGRyn#xb?1}#jf
zMdw7+%(61fU_=2FRJkkOLbD(MMv8AV@7_Wm=P3`5AoT3utkZr`y};gqE-g%KdG?F}
zXS&zQY8~Yc_!HuzfZyjLd>ycfnWi6iG-v^HPTA@?rbkf$fS&o63kBmy235PS2<_Pq
zYr$2BR3qQC_+4kF<PLUw9FISC%ze+MTaO;?1T6#(oB}cq#9eJq&%=7J@^{WFkyU9-
z`uMNJ#&Q9889hw8eY@yi)F6U72o3Z-%bI1?D2Em&I39*WDi9MCy`ed|PhHT@IWf1?
z23*MQl`O=ODP`%^)HC7+sYST?iqI9blfy7k5q}(nvS+8`-+^eRsQCK&hQwvuYn`2^
z**Cn6icI(c)91ea_ALFWZ+F*yCffO+Z)Hwh3{V%EFcB~f@i(s>i!~Jh{&SMT&m$i}
zs@f0kz1X^euBy`?NIEbr;Hh@(tJew7oH@X!5>L*a<pohVGcMzzWG7y~Pc%YEjo#3N
z#minqbZ%}vO=bY2H|+79qlbqw*8Bl3jOp`xhjfnb*%XCl4l}Q4DS|LJOQw6bn3ras
zKIS^l)zWc|%^38kKtrX^>{fUpfK?6Ut4{P@cXj3os9KmZ4Qh%AD9C&67utHm;uc(y
zw$QB2%AB~!5FreWHCta{bF|;SuL;<5+QXv_=$~Oro~#$Blhq-r7hnkxCa7i!L5trq
zPPme1nAN(Phl5oW_?<P-e)7@%=L+1Jr9u=v7yc&V_3GP>f`xI9X|1l2GnQT`)Qr6d
zcd9#$-uD5UK8}tOCkA$vnnL<glvxOtwdl96A_)TAL+NJ16{4d6!KoXkFG$q=KbR+6
zlBlWl>*BJ!b&VW1qT=o2vxS}MhoIq?pZT0P@dLaOnWrS)PA>E43dN>Lm|xH3`uvZ+
zi`?d2aX-vUPrhNt>?7FcORUt$yuH|_`}gmYi9e{xxyN4Nu$<zLC7s%{=^IR{RGXac
zAY(_uj$ngBDL?yF5Ck`Dib0S>=+6C^sslA=ctQSXaDNrI(N70A^}8}g13j7lXRHyq
z9iMLRK5q`EG6TG1R4VZKg##Hd2&+FT3t=V15pgEMLs(M6LF@(~;L|{64mGgs;e>?X
z<b0z3b!&TVPo^gf4O~#>y{&&`>!%0D#>RrDzJuY2;x)>?rS50WaAN|LE4T1oEA*0m
zNC>o)!T#mGuK(}?z%~ftHN$rbK5lhQ%`nP33UL@@`|tG!wIXw7%8W{-6p0-7;qdpH
zLp@Jm03p>0Gu!5!g_f3nb8Q+!pv?FRM!&@!W8?Dqlg;>LW$XtQ48e<lp-@MDXac7q
zNk;ke3ksUBO3)t{SXIOREROD>4n*rm*c@)~czn2(uSnD=Rs5>0IRE<rtdxfDszEhc
zI9pbD9=N?HzyewQ)Td{$>{(3h`**|VCsE&PdCLK9W<lp{WHg_=7Ob&4=rSX~7KH%^
z8s%0iOn{Qr0QlA!wpYtBia=0B0&CC<N3?D+@3ZYH7=0%B+XqF*BzW`GN~Hc+f0ySC
zpOdr1MAQD5E_#_4j~1%mo$N#WnJfi>?-Dm|FiU86PXE%f19@%?*gs>^IsFiXVez@V
zQy3${jm~v_i1gg~vy*&!|IM5A{#JMsK*#gR(~k@Jh{atb=~-*5l+1p;2}Sj;RS(a7
z9$<qsE-tLBECAGQr1Sw306-;2N0$Casox^?_$P!cNJ7+ChXk*%(t}5S?>+7ecqB#S
zXCq<}WaH=0JF|OR$+tbKB=}w5?zjvqr^IuEO`J}nqV3!+pH~_epr|Vmx48Z)*@XN~
ziDSm|$Jva7r5+x1o&VMH|HJDQw&l>UCM4pbYYL?Hwe=`Ma!Im?OJDsj9eYg~y2eeM
z3{bRRkbGtIYDq;!h0gVlkpiC6SK(hWHE??b8DJ2olRHcqqp{jZ$!I*R15Xt*n!QVy
zd?57v!ptxwchG-&L*Q0Q3Ptn!j@7)9q;C`z4z63qD<v8}e6aks_S4e17COL>uL@)X
z(BC(LX=Mf}N*F#I$2EuC7axQU2dM1Uh>n_9&`Qq8ufYn5{0bKi9c722A~EZJo!kWn
zl2-q(SP9eV4&6O<{r&t*e2bRn&I<AvznN!Q+_sG|&oMjDIcAf%lfCBV@(nln<5r&-
z?7NtmcGmo2!oxA!aL3KAv}gF=yrRvP+k3F?ZJScvG5t5{dMYb858K_EoWH~}s~N#q
z?`}0R>tE&F_2TxU5s%wD9jZM>lzJQ)czAe%Lqo~%Gm!j~moH-%KbUva<X@B2c_c%7
z|DbH;<g}FQvBzRQnGQ>EAL}fw)lAArUFw?iVksIB6LE4!;T{|wrU8o#GGG|aCBpMF
zGV+bY!+&|kd$j3nk(|rs6{YianH%6JCr&qjX>|>abfhKdK(*S^RVyqlE%APX0|GNS
zU}^qCHv|Vkx*3Sr);2cOFfe1xLlh)0{<wR1G_01cT<2vM6+9B0HXGZzux(y<#o?9K
z4v&*gWSkW9%6B*oS~@0%4Q-CF7iZOa4%N<ARjb*$a=z)lW-1QQO@YVK8c>^9_c5F`
z4Y&TJ`qwXSE|wJ4I9+kS<<96Vt242K@6q|+3V{8j83V~%`|>5z`}gl*ri-*o(9+gE
zwY=y<%$l%ipxX%nL+V^uMTdNSd5sJ;NZ6~-`A25%9T9st5IYodZ2pY3YR4+G4tu6V
zKDk)MxaPG20>qzd4TOsU2qmUjE?ta4wzszreEC9Vu7{vwHG%~t-5y7i6lHGf=L@L^
zLx%#||7c8X;&TS+2~`%3e@1LiV`JkTKvDD_EU={NnYLVi*3XGa_O|@P+c0`!_|6~x
zHfHQI#&-DNvcnJR21%YV))Ss~U8iU{emOFPhwY6p`;q*Ia@jUK-bT~=_w73%pe8L}
zX{yW?Bgm#ooWekfgt#I0z~JEETHNW06J@UYk^6SU1~9A?T6;rLtBj1rCnqPD<HZa3
zgRB+}Qf?)#PcYn%xG>J&XHbSaT^LY*$q@tnkU>LuGekCT&QZOk3fqd)_nFDQkowi`
zl_Oz?H<xTp|H*jt{^Q_>7T($uX0MvSb@NDDG$Pb9qSMayVqllj+pZ^c4DU0*HxqLd
zF;v58%?I`ifH!yVsh~R-87?WVy{4!Y1p|Qqzrx8{cr)l>orIah_I;&~_jpEpuCmGD
z!@R&tS;+?nkzZL=v->$AtrM>>3d@%Ka8Cx~5VM$K>gOHt=~<4tE2`IqNj0~ZMRtZ>
zzRXy9qK%rkx$z=|jOcVz?_j+3A$c=~4bV_S8(M`4*?-s=%hAYVl!g0E@jDg=wnl#v
zxQ<m3y&iri;?#p)R|LtX6tCHUh|0HgVths+1SLNmw~U^iUIl$>5g0<e4j}jM^1SZ#
zDkq-hRKEA4c0ws{=HR58d*_c6LDFZaYS9kh%`PXMuTsdwG(~W*KstKu&Wk8Lq>yOG
zN!EcC_55YGQnzE?*HZ4<_WJ4{zwbsHI7dWAA}9i^@J<u*>Id-RGML7#gwGEhJDFF(
zw*80djJ}4jjPD*l{I$vT=ZzB;Qx<QOy+3?w&U}1pYvoh-lP4duxiIx>3FMg^gnxN_
zeEjdwy8ax)Y6}tS+KZ-6DDEV4kYPnQJ!}yXC;Ckuz8{nS?(ijL4c{KlfV02&nKPLU
zT5`NFJ788~&-4HzVTOi=faB}r<+(C4GKf*MqV_W;=oy)qKx#UudZ@}s9OwQhyVIo9
z;=+N08xIH^knmp^TuELnfeBIHd216bCXe?07(r$Sdx@sA_VsJl)YQ~9+w3oQ#;ljW
zJHxu;rs6<eX<^ZG`}odNP6vPLS3%!dyUUyP`0?X<IYycX4~DQ(lablN!os^9i3d_s
zb>&TtL1QPWMGEiU>9II>aAJpVkTMD``vFbOz{tqe1n2_5lR+$B%c;2EX-z!6=L5D&
zaavqeeCB~!brrl*0vyO|$khbKB33m}rAA(*TtWKW<vmRSI5w;CS0>|BPfP6$+G2B>
zbMwF;5gGEg_v9LEY?`8pfx$%#W0Aa2?lH;?uN;`$*H*2kjd|Yt%m{dH8xJjPuR%zZ
z5w?i903#!{Lk#B?q0oYf!l97yC8(u1eW*sA9f{CNa}H(7=dpM?<`z#}d;W>|g;&2<
zY`%!A&YT2g9b5__YX?4mj>4*kk-}~$npBAfF<O=`P}htsMm%Sc))vSz07#wKc?xwr
z3Qn3KNV*~DF46l&;*~)t!=NmMV4dBnpm5Jq^3hRdBvoYEL2MYP-c8jDL#s<&-|=~j
zyr=`=KI#6lG+ob_{)3$5j-y_I)C{*j`*HghELpAJc1_Vd>9nWk$IYRWYXVC5t5X)z
z<y{^J{Mo&{^fka1*b{Rdd&r&m%O2)g814-*Ev#fXXLxS$<WfoJDax)y*)Q<eVXd!j
z?*#ut#t1kNl|atTDEb!84z<r;yl6ypk|vUvKf#8ET)Dz>`}S?8<BJuZa$h&?a`P1_
zSkI(=M5;+L^03~Mxmk@P01d&gbP&<4!pk{lc#KvMps)Qt<{vyi_i>>f6G!JXy7S`R
zuE4dYU%1H#9r1V|a&Ob2`)faW3{_NAn4B;ydiYQSiW??EPN4~9aA7DT1~}9xbUiHq
zHN+(jGYOH<$Uv@^mX_1!;?hqDPo2j*nP~2@sH{w9F7N!wK4R9|)^Ec@!H^m)4Gqoh
zT|NdAn|y<4$xu0YD6rynP;r9r&W0fn+&AS8Y|k=p=VtL>DcyGRQY^2;<dh5;&Mp@Q
z6z&jAym4cqmV<+XcrbVFd{m!vZZd7Y57C9U{t)*sOti3cPh%ip=u^BUf=3!Pp)`5W
zhg9_8PthbCF6IZZ+H+)j%ueiP!Do9;c1NAB^a{h8^tTx17-49eSh(B=>uKT4Co{(<
z-d8^AY~x!SqI2O@{%$or6V_{y*UP@GL2^J%&`)`H4{s6k2Ez_*nTCbsFW-)P2)o&Z
ztgKOwB*AipH~sy#_#|M$(5$SDWa1S$$qKw!x!)uIIi(Nck3!nQ(`BR_*S>dMRFFDb
zonM0GNdo}=nCTY&L2~Sm>FzjqNQp6naG>ZditV1Vui+i<VL9z|Bz)7}OV0%c`Wm?=
z`|jn%%3zPW&X=QVXyCOr=<&05k2j!x5IV%%N7Ci_^XCodR*NtXLEiKN?y(*y3lwUY
zxl7^eU<UC;?|BWsGn3oQc+G?dGALCE=Tj??8l#b($@uD4dHGny=+<HM4r%6kcsC5d
z){Y%JNS=eB7zEli@}2pe8=GZbISKl|_{H{6+jURR8f2FV1zHT;_V)H#p*B8w`m_$G
z9&_V{QR>0eRL;i~aUJI~00C?G`L$&W5_ap#|JVgMqmP7x1eKFMM$3(KCtr$@OwaLl
zT4>>Bk4}rB@=${Ugcae&m0Ra8XGUb2op&hP|2#f677nfb${W87yD)%tF5J5D%k7;W
zRxU0D@3^vQ3=TRs=QJn3!k7&qVTuY;ql+>wgi(qz27U0dmjUqqEl}z%;=L2(y+{L`
zYUZWlwp@N4eleZ$tGMa7b>A*~-%?dqUqS5brWQW55ThqdXdz*(x~8qs-;pTrQYpf5
zd)DBYr!G|~PE$VUfS5sUU|#mRV#Gh2=fti`t{54lK=&33AA*Ra<mIM&ap!GjR_T1o
zoUR#WA#<FjDHT0n#!+-{DxH<WWcR@Ea1izcoji*5TtP2w>Gpy*!-i31+nrwFsSJOv
z+T=Z@|Jw{#$5;)c>VO+F2nz`HGz8;-&d-R>;JF+k(;OGwl-p^%X6tDRh~WslzltBm
zR9KNe8tT5&zS22TYJb}){|D(UHf~%=<Y=tjwhph>xNz+nJ9=_zm?_loIwj(nhP4tT
z)&YToSs7Vb<i^8fLx}h4uk*@Uf^h?1j+`Z+7eblHKX?)OE)<oqGiI`CG^zWtVN_Q&
zhI{Tng`65Lppk^6lj}a!D?=u#P==aN0$2qEm@uA=)4mJt$m^CWY$t7agBqC`M)ke9
zCt!El6?I$dOCg%)B$&RQUl25ZaB{QPN4h*r4nV)&2R9CR-wH<E{`G1lOifM{zWMJU
z>z$mvhT%PT2Dfe9s)Dcap^i|jB_)X+j7|ft(pK0W37o>{$_3OjPKF<wyEEaQ%5yOE
zUhJ$wxz=?tT!?R~(G$^{UV8U6Bl;dkvV1|pytvz(Vo#)Dlw(YKx+n@=?P0z!dWwVN
zLKZmR7m4z#p%SBT4s5>QtPc$-T2MIx1UB);Ao~*W1yTcfqa9F<=}(&4pAScDwSRE0
zKXV{B=l1m<Jr7o!O4oYCZd`u+y+&^n*P1<!yQ-7_lznta=-3C!=>jIE@n$0{>zR*J
zqDD#tP>#vq^*y8pg7LjX?m)b>rG4=cHE6lIrJF19`nqyV>=Nto*w`3D)|W232a!H;
zv(tB`s1)A4Gg|3<$1-|EplL_fjz-@mn>Pu^7gD5`S8ALU+6PpL5sPR39p)orWAtDt
zFbQ!$U|&`-Iz4JVb8`lC8D#vGSfSBHlULcvxjft5G#UfjS8c5d#IgTckxt>%y>u@)
zXOz}!yxdbCahc0I+c!I-?6tJ+EvJ*#qakJkC3m{B<kSDyqD=tLR9vw4`Cm9c_c&t^
ziEcu0Kd5GyYy-1a+{^sORksGWzW4E2X;L~XeLRR?1xNw*Y0ZKFfn@V*5kKPl8fpsm
zWXh+Q<k?Q%dVO2(g;oOXBb{ij0@Ga{Kgbv(%91U_UK~&$(mdW=^cpZ&Q78Wh5z|P2
z@OQ*7qIC#)v;7EC0cH>`Kw{N7c<>a=j4(ak@9k1?bleP41oH=xFmsvg@G6863dzAF
z_26FqsuY(SE?YC+aQY=J%SY@xRdA({CD|djm;{gbD9p$}N9pA^3vFEn@g3B60yMnw
zA~f>mJ_7I1pplm!<}H;~RQuq9{^$DO^7UW!LGh($$-;&{OV=y4@zgRIZ*C}_p9<BX
z<?^rmExP;hciw!)eAa1$T8~%N#8`;BYz?ZLObJ0L35FHcwlR$DZKj+jUgSq?_Nb-x
z@FH#YTAS1em%v?9Oxnsgwy(#z{-;j#>GqK<n8#FM7y=_t_uthBbcQRwqu^Kc(gwye
zTGszQ_@Tvg+%nbZMsDFN$(N1w{i{zs7&w{akoy&gH*i?K#r&4j)Zvaquq#A;<?fiw
z#Jm{kF5k7>!YdG<mkj3>R@Vc3tUr{+k1)_=A<kW5Wq<bUS!WLlJe5jF_#zBEcoR_`
zM+qmrFqz660IAXxDP0|gm(&Qt;2sAkYG7g#3QZvA$`D+5nBJ{~iDe?a9KGZvgPa?R
zPP8-oX2v`-IQ)y3X=(VFYI83e_uhQO#?quV<$Zi*bHbJDiZ)ex+2W13&1T8fYn2b4
z6Pf;`;wkebEw-fs<3$Q5djm0<&xZG=fHQ4Id%L6Huoy8Mf?K;07dM1lfEc~+`)4iQ
zfdhL?IER8C66l=X&mM<FP~Lp7xs=@=`w6br*heG`>9h`50=zVwQQM}Wg*3c)ksr<8
z?RMqiW+0ydlzE~?;H8{tQm0=M#rbf?S&sKjkLo=%B@SXjLM&~CRsy?a*)N8aBj=^O
zb`^+Q_BiZB$JpU|sNwUqezU&Qo9z$3xTu=B#F34z(gw63(a+%_iGnQ^@0?tT3~3%P
z^=k@<=zrCA=3zPKZU4W>Qem`+<jzuLNeS6fmI{eb%38J<$zZ}|k{U5Zq!JQQsie&^
zm@HXKG_s^rj0P>1ku_Y_)bDjR^ShttkLP**`TZWpao@*rcei}6@AvyTm-qR8zt2x_
zeZ-Z3r(#SGiO%H8l`GIN5B~asqN6ZlYM)Kps<jxurmgw-3gxGUV&b>$SLY}4zg+VC
z4=_%#LkE~tRE8VHWfWw=q!r(9sym}ru3Tw&AR``yC7&t?R&-myvbKsK6uN>_b0%87
zde|ea$5`KFUyaA@lP#=0duXYt;w#$QRVZ(ds}7l=8dut9Ug?eLQ4iuK#~530w+Ze&
z+GKQYgw~+YrKNMT>L<IpcEvQyp7}vj1x)ZhKZA=pySw)u+c9my{P`hFx2;!na<7Zt
zw#`WC9!m;wm>BT&<;k3j7xxv#9&|}-!z&kC+}BjELkCl00g#Wb=zA+a0Lg|Bzci>*
zE1o{>4mi}!q35}xA{Ww^83+@#{&5RfT+2_pBgqpoJA>OUn>TOXA3lA_(cq&w##iln
z-0TtbcFQr_gqd~SLnAjuYQVrwAJF%6-<&sF61HCc-QIOpvF^!@^Eawrd$%+3a&X(q
zrw8M^Iec2loPQnyGp=$3-J(lx<lmxmsQQ|B<;rGqubIig4%~LnsHR7-Jtzy9y2Nvw
zT4Wpux>zf=h;mdFmo4bm9!9vO)pIuww1{E?3S%h2B~2)NOm=c87)jcM!EBDb1&&}s
z`zNMESuKx+AeqP6gmoVNkBWrZr&!rh%T!cX&q$bhrpt_gg#(@(xOr&#X>${gAJg6?
zTRFI%Jmz@(md%Nw{T%yu4jQqh>cgp9pBuMau;G;3@)omt&bq5y*3QH-HgLd_Qu6z^
zq3iMy>>(rx5f=`4RkRX9osTvzcxBp=jK~a3JV3_&+++I8AT52&kDyphf2Ls#aWtVW
zplL}<x?-DB+=ie9*f_t>f4rPxsS+l+PPdG`T8-&(M*8}m`r^d2e$Lfx-SB6)3>RkZ
zoH>D@aPx30(@8lrgP?yNm#e`;hh8cytT}x}EHzzScVx92gvh|e#DuB3%f|}7<+;@$
z-ODjU7h4Qcnm6gSw!`8?uk~y0zb={E>m^-g7azMH2fwy(%&lLlHtFKYs6M@0^<KOn
z{b-hJ?4lbCOC=lPPbSnHIPgJ4IQy%MUooTkizXqt=@F8VSOHs2NKWc%oi*NGr-iyY
zl(pKl*-1^CHFGq4orb9AOY!w)&6?%#8p0;q1eqnx)B6!WUr7*Yr^p3d%}_q%FQk>V
z*|KFz6Gb7jXMuU--NY|4b4S+CpNG9$(<L<0CNnvqw$Fv$v#WPy=0DvTF=?-}_0hOy
zeVZj-J#A9?@MOJN$MlYlhR5#_kLC2j{0~C-mq;@_kE~<FAoARGZf3Acn(IlA%q3-0
zX!0F)Wj$%zXIAevnwNFITAp?{7Y2aRI#w*4{d!ZxqLt`jW_IYub5OfSj~;z)sGAp6
zJhGv;#RFR=yJ)OgvxY}m^3`~^t-8FA#I0W(lWoV2)ewf&UYnVp_a3Fe!>ynPgKHsF
z<5hc!4q2^P%Rsts1bnmllufL*y?9^Yzx>^H$B6f{GS6%dzY=r9(dFScpZeBY9Q9Sj
zO1JzSzW3iN_rCDe=~w)E$-Z6__msY1D7As7Ko&`zH6hJ45x(T$m49hvt$NvOZspUd
z?(SR6J+d&kNy~AK+^}JTSjb@I)pM@Z{cPe&K`V@eN$XQzdy%z72?egSX}jA>AuN-U
zfRD2Ke+|d-VGH#8;w7iN@m|ftAuy0kHFa8R>N5~W&@Y8ToIP=lQSK3FcHg8PZ=UQ~
zCO#Gm%i6?EEx$iv{DmRk(t^v^eP~`@ba&Kx*UaaBh03!XzgmH1PO5lT7U*s=W<;67
zzqmHZYC?(_A^<X8<XQN<eqhrEW<(FoMi(z$R2(>P0NifGFE-=;0K9?+r?8&2t!I?u
z!5NvFB1N~@{c2A3xcyHBnWMW_g-=03h?G}Ujk#CrCc^53qNdfU)9gh@I=o5s@~;SP
zr^T=D^Yx1@iZkHGq<#C!H>uzxO1H?HBslZGhX4F81P=BzG<_j=K!np5N&$*-`)AKD
zIQ=we?!6m}^fu3G+D6@fh*l@duOo8i4~S05zx4d*k*d)(t&|U!F1Y^a`%|Tl4Q7}s
zpN6c3v&o#%hxRw+!Qn|g=6HF92uJykKTe0=E(zym3$Ix3Lo2i#L+O#a&FytRdvXii
zXQu_m<}Iwg#?$;!sf@C49%YMpqLm(`jDp^)u0@l3Y4_dg{Vtf_zp%Cw)FRPP6PDt$
zI>MS3hWi@;F`+Wk5rZ*Y!6j1IqLuuIB0e}`&)O}4(T#ig$1dwU=;DpaJvs-{hcsHZ
zX{>If>t5FdM_VX9r~B8Ms%9&<9u%#LasU3ui^{I$=TCBV4T23>flu6#OY<|Iib6@o
zjsA<1mi52xr7MANp||&D+BF>LY%!sOX|Ag7J)dc14jRl4_VGSLf^8cC{U`#D9_>q<
zX^t&a8$G>}1=k%>+Kb>FoTQP$^+WOu&KHv!%J6k4>U{nD-dY|ksHl3<$@oq32xZ|+
zll-kNdyRb3V?LWyb$D^}ewd5%)q$NSrxwJN+7~uC?Brfpc5u*U`)_xfd0!G2=F_!*
zbPMI!5?NvF$snx6`PjYNc#<4M>Yt;8QXSbN?A6?HYTAri+yh((LEaF3?chRVF2a`t
zZTdb78aBn1t$HKM3h4<jA6kbS$*cgCo3h4K281x~n&1J_3V}~@KwkYyNNc#^E*cX1
zYR%b{pGG7#8a#iEL$yOex3?p^J_ujip;ygaE<3SJ#EErM#$IgKEOB<y&p%qOu<u?q
zLFYh8IE@6OmE$u$MqZtdX#)c4=$$T$3T7@lC&dxYzWY*BQ}b=(EibK)?ivLbGc~mb
zBg%8fj`fqVxRz}>2>ZcyqaZGuAfSkhk8fRG*kKya<kq2?gFqw`X~9%W+iqHpIEp`T
z<hO^v_TxpuH$s1y|6l-@%vCG9ckOO(T2(q|t<9$v>wZ!z{Bz%s(vu#2lzY9;%=Byb
zZrP17=c@|`c8<PUIOUJF9^<q29d};1((+Y^$>yw~HMTP^o%m}>=}c44c<a?VJ$p_Z
zu5)4W9doHFY4Ik0dosGq$-4voFBc$T(g^ctJM~6Uj&2o~lK1SH`OB_|uRa7=fjVaS
zRx}mym`mQGt#K#n=Vy=GriqRdZrT3uk*=?sG;fX)*TdBF+RY!oZ=QbsXrJ&=US}p9
z+VT?DG^lWHrk>mDR^O|e{N>VM`_!{XBCBsQ0_(i}D%!K*X-VZ2%?utO1GtPyI|`lw
z8`Zdb_out-L1N5#8=e0y>lfKfePf#a<DfeeMtVH_;pV+9KgFE=<@;u;cOBNHtWNb)
zSKHmF<$P7ul6z)_PR}ERz7Vvb^yiUgL4jI{BQu-v2F+vQ@u~J*L2`Iex$8hT&u@3P
zg*Yr{Tp4AxJ_diDVr_3oJrF7MAU#{2Z~e6NkVh*Y$9|rtU;UuwLZRDg>7ITT78j}e
z_lGA;7F<VRnVQte-aae*>!W*@*E-2qtZ0!yCRoljwAXwgRv>i#tu)}Q{=a{N**Kz$
z3{RIVF;liVbQ<{o{O2bfgl)~qxd>sgL-8lKvE#;F0@vf4!oZ`8pLtGJJ-ak;Osm$j
zql@Md$`=r22P4X(d126|M;{Dn#8RfX9DIF!eHn;ezI<8bSDTn<6c+8|=x7V=n!<7M
z`(3VsRtD@z+OX-!xJ4icMaL)*|K`rlIL;f!G&c#Dhvz9mzB;Vr^bHJ3pVzblHjZ*k
zE<DEglnQcoSjUL!@Wabi6kF6Ndzt|%Dse}1G#t5D<Q|@$o~i}>7gZcTDvJ&vjU~+Z
z*w?}$wb0Y$e$nG4;RfdC&66+L3?49;Glbnpj^LPEx5@{AwKy+-M^C*~CIoOn6a*d=
zwI4-`+rw2PVi8_b@{R=A!6sl17D(cAfjGqoc>;uYO*=a~NdO2g^!iyZ{z1{BXU}ZL
zhk_BH2`Nt-qmyBf)C=_>P~xQvb*es+%>W*R5oa=Culi~C!?R5AD}}EuELT|!F>&~}
zHnfhTMvW33m<;P+IL?J^%B(c)jefxl6G&DtYOla26MsNzVl{<`yTk5`EUSvHKO<UR
zP$>qXqUjX<IJ_<4nQ?v?ovXch@f01CoBmf+Ym5n+E09H<I&((ciXnF~co1hzz#t3E
zXqn|YR9#nTpFEv4eWTELX<-1IM$<q^GDgItNd0`G_O~So302Q^MA}BSYh-<I3|u@-
za^>I^Pg*kHX{L~P#gt<4Z*eicrRR^NKXtsV!u!T2b48klkw(+z%|oltwq(VE5R~9+
zzJ6eap{$u0EGR+%?bD*)y?v`AR3_o+HvjHlTJUt?M8RamRq_mU^)N+eiipdMYJ$<d
z@Lq}0SS38wc??DXQ|)+NLr9TszrQ(n6b2Pd^E^z4#3hEI_$C;r(|H52lS`=4cbg8q
z1XXpU*B{4{#TOd6#vTj&Iu*H?ZlE};B|aeUY5TW<jnIb`{Ne8QbLS?|b_DWDZv-;e
zruNb8bN0=8@d7E1a4zYG=23es?t%>8BZQYLn$ldbf`l3?uQgDupN7IA+Kj1{NQ(=x
zhQbAo2PZ~B{3$5K6pZa`UBB%WFQCSOFgEm{_3qfw73NL@iHhEt%LBmj0JYgnr5Fm-
zv56<3Nj8t%zCFojX_p{HXZ%Ytl=0qSoz%4V9{S_cpHJ*#4_-pwXAgr~_DDck+^lcu
zwbkp+;aKVHYfiZTabl98r|y-_M>RY%0)~g>bs9Qli($s+PyQamw(Z<$!i2D?f*F*L
zN+o)z4g<$SS3h^<dD^ChFTYx@K5RL?wUgW5Ss{ICwrOrs?4Ur+%LQ2a1Y_O!WjQnd
zTx2)J(Jx=Tu*dwUI6Aub^tZVTVy|;=QHmAc3BVStai9oGuXq;Vklerr>=&WmH!)|z
z;q<<i0<R#RHmQ49|8*_=ZO{udzmKoX6se)7;6<lD?8;+jfN0CipE_wa2RAdvmxG~4
z*JgxCntB(@83(M#aKkgAJj$vd$An_tw2A3y+7YyK;a}>n#jhneoj;uZ^ij?Fk0884
z0<XkRQLp121UxU;3lOlHdpPlIqs58C^v>G;xF;i2DK3GW#_DVMNQvtm5<n02n%c7E
zZe{BaRL{&dZ8AIhF(IGzAQ%X!lpK9|c{7j2#iV*aOu4!u`Kq5+;<0bt*Z8JoGi6g#
z$o&oQpdvP{eX9iT{`6@3M1^8){kK6uT0Ae;hqLMiT&o&xUyblbu@8%y>gRCwnkd+C
zvU9YjczDSo7gD_G{GTDao4OzF6>lQ8IH!C(;q@vMAn}_JA*iv1$4D{ao6>7_hY{Qt
z=V{YIz`eP+q#WOP|F;^n9yQc!bmRN-7kJy2RhdRPo_zyvgXt)IbcOYo$a}}VzU&oc
zD`p}>UZCzSxs#HdEE8r}nKJfo+k09dLQ|Pi(oLrpesKsZ+q>$HnB&ojirZnyHCljw
zK^(whsZHS1hdbjJ&h+Z>Da$|ik3ZUj5*>bXCP;4U{dBk6s)L(Nz7BJWGj%FWGMV<q
z<(J71;=c?Am?62R_Zd25h=}zu{5GP?7E@X4FXdBxw&Z>77lvo4{z-q6K+21-E5p{-
zSu-zXRZXa7RqgTm<LUk}U!I;4d%wUPJ9eZz_)eVN3HYlk$4oP{TB%UTu}f;W3u+fB
zpxMB(k${s8Sq0>zz<t*3r}IC&_(LR)_<RIEXH|<r!Jse?Q_!O*p+qz)AYZy$hWFWF
zE|mjQ?kfq!O#{38ox_C8)T7ggcP~@3%53ej?>w&G%~A~+L}99+P>8)TE!mxXzbT`(
z2gI8cM43Ey*6r;(w)+jJ4lq=&2xx=AGG*!Ad-`W;_cwv`x$RMZOUocgQmpbwc8u<6
zYO40;uN!ZD7^pW_P<(p-Hr~l=jemKe_A%Jwwa;hQ+p-S?9!r{e@3_&opFaBf`ob7$
zc2LR5bIr@&4ic%6wNIyj!PA4!FI^GiS6UU_>0H1)4L8a&Q^U!(<>J?y7e?QC%y3g)
zwS%)SbQH%!W?YAyT1}6-_vgn&D{Q8{*>;n<Bb#Fd9DZH%_H>(qlqc2U9(Bq|pQmn&
zFs)ZEUGiiN&VIqy<O%fZmD6F_S?6Z~SRFJnHa14zpO~Y1yzC(9qQq-=%&iY3iU!e(
zjLpzlig&UwM+@FhI(&9c>dh7NsDFU6#7l!c11_^49`zgpn-CoBlx>&2P@9R$rCtHm
zLj&$Ti5ue<u?!^?TpmXCs>M3Kmi_HOE|aUgj~On?GW*LYz|UhK6ez)dK}(OqS+|M}
zP8#?z;GTQywr7jWsa(}Y1P@CAmEk?2MNp@)R&<R0b>DBlwV|KE%+i{<A)WC?mP8@-
zYvk*yA7xsv`82+2*m-w1&po?#VJY2(d@yFxBrV0hQ>XN0maEmcXV0G16Eh0(Q_{#2
zRZtSDuR;9mF0yoFZLZT&4j$~S>IbO8r?kaY0g!$F`JV|Rlm&M6b>9ZeIN%P=1W)86
zKHod_FP`ro<rqwhnlW5U)ZDz}fjHMHQtLbD>i$CDOuMl3_WJ?-DFkZ2KCB;&=0N<T
z#9B9xTj_)1S@!DCP1|vuxXPsuh?)_QyX2Mjl0$aR&f20S;`NmxQdR{CZca{>Qb(_2
zN|uC2Te~ZF7L4MxBC~dZAQ1Oy=j+vXChNw$_?qr_*wQ1yAgy%b!w~Q3%7u&WW-gBY
z^~8y`id^P>_VeZ?N){Ge91~Xdtb~G=h~gj5!eD9)u_Ba&gAY`@_<#?)g^&cYmZ9i;
zU}C&E3De}mtNW(F73hTK<}~qvWFSh3O^_+@u>%UMp8aU};vc#Mfy%U#x<nB~9%C50
zAGfCUQagy<7wn)z%Ue46zsYW%!|+O+;=~`8mU%P3D1^sLg2zNCL)15l&UC-I6agh4
z%}h;oN!+s0Ca0`yCQAgSQ=x^@!-hp)&CeGPpID22{al83q9!XHd|_Z-0i^(GED=J@
zJTy{L){x)aKyO-bjpN6+0g=rC0Ia3tunk!_{0I({IX7=kP-t;?+fe_CDL~^upqF`g
z2pJ6=IEA7y6i7=$(Kt{Fwcy$2i#1hVJDV+C5#hDQP<*oiN0NVjDH<4NSuMJFpD>%6
zY37xF{sz4B$G2OdSghs7>mAN`=I32Gosb-#kzQ&yIqK!`>2J2}k6SpSpVNJWN;wm3
zh7LV5Aul{4q8;y|F7*S$@?m{%jB|Pyz?~B#xx~~|2NbJQ_&=tsFbg6}Xd#nMoAlSP
zf_?)M!fM<aIBD&m`x`+dV1ib@X#otjQPARppL;(auzmM#8P&z<&z`JGD?jv|$HOPy
zj@$DtkPk)Zp4p-(|AevAiKb4)C)b~}djCv`O6rE@ZL`wMm{(u(OXl13DqEXh_i?n*
zScY43xE;#UaVg_a3faj=EQuMm!9dnSl$X!pIW1mT+rM9v3py=-TJdtr#H>8Cn!@5q
z2`_Ck``6V^u+cL}kF6QI)|FzYbsuLnH%J+WhMAIQ0~SnD8QHr|1%|gk!n4qPGHi;<
z%6Z~%PBT93H!p3XxcI#lJLl&w+SW-Ssuar3(la$<>)#9*rE(7N^(fugGILYYKF&5p
zSfT<>q~t$t*|KHa;F^&ElVWRCKK@0+{hgcxTszH*R&}vk;;r9v)BB+DCK>NEY8G!l
z@ov!jwVl$}eu_!_k~-|H`E$Ds_i{V_|L8~m$Bs5;U2!9X1zo$^SF0B*(p-D}{<!wf
zI_n8f;&)gz9?&_hTbftW4@vj)9Gni_{@D1{@b@iR%t~+902VhfI(O{JO@o8k25Fv?
z)cSq6d~}9kJ%oGzKAjIm(j63c2?<FzMI<ah7yBcc)3Koz_eAYNK_6*lpj~1|TPs{T
z>5rxV$7@E+wkU`2K2iokHyYq2{KPY>%&?4UMTWV)XHaW0(>g%|3PVym_AoaeO@tFN
z?w&oN=x&6_O;6}DCdn$7wti&}0HYW@Gj<l=GJ;;it`{}6RZF#>B~fmOwv!%EHqady
zzk7YRaXa@B3UQUv_;u!<kd?yIXRNdlS9eds<3Cb(8HE5PrB!J=8#=>>YSk(@Hr7z|
z>X?XQNTCf^y7wxTZ~=>YlHW$*mxqNAbHepd9rrPh<42laxMfu2VXLl87>Wdm%1(_>
zC3PCjOLF)a9U+idUB$qGUnZpstSt))ZCO499fK=OoYA&@dqLu5O$*yXdZ8rMX0S=a
zk(T6a1J)g~GKTA7?)D>mX))sv`#k7zKVl!0mZa_63VcyCj2C6D4H=I3-Hh<mur$%2
zq(4B3Lm$Xrt@s+$vGu@{5B#2+$!BB(LA95?s}nQ0JX#46x{*}MNj~)3c5pv%)l#*Q
z&u=OYCY&Pb$#o>hOUUiS#`vdKd*umgGW%W+FH>Bkcp73l%r`==i?%W-S<!F-L-5UK
ziaeV61BqzPvSqD*`spXiTI{w$>M9sxLZBH#Tg?6~mZP+;L`DF95no)Gx(-m>-tPv5
zD$G?uJMgB9U^F1CG9vR1D}%@fXO=(K6tXt{8yt5}uDqdl=VNjY)qoE}-p3)##Vdll
zD_w%DXCgz0lN5yU{o!Nw|Aa-2I3}pJw4jF{jH=Gv!-LP?N<M#LeYfaqCBd)JUdoh?
z&c%~34=FUfxF%%fESNTgmS*_hiv(TjMX_Id@`RXXqO|tD=<7Y#DA5rH{2YAxL~{&{
zTc`@+eg>nLV-z`ItdrAaN;Yl${b<+b;k+h}g?I;ZimcWPX-|*t6uopa%CV7{O+ecc
z%SpzlTxIdT1>b)!G|<b8r-atgU;<tKOY1pQ6*$`U6}ZfD2b;KL6bD2{`pXgB9P1N>
zgk~6)I+Dzo!v}oBwzurNcXyTPEHadj?wtif2!_%W{)#|86*<Ayy+-X{_jcJh)<u0s
zH&`(2YfA<g8>l*q3mx)=@ZvXtVoDt41~PzwfX57cW57oHiD0x5Xfr+8O2uUAvJB)e
zx0}5W@h3;2;bl+VAVPAQr>f!gpIg&*1yMnHQZdxJ4S9Bsc;2LG)44U3m#IK6F%UyO
z@k_he{?6D5E*xyup3{J(gE7l9K*I2aHWZqVxRLelJ)T#68~ui8JGn_41t`L(Mf7b#
z<MY_oL_Y0|d2Fcw+(HZ0QS2+&GoS%GQ3R;`yfTp*bLNR3Mp7_}gDI%0F<lnZ2z+a|
zS#^BYS9p7wv?5DE`$q!iZ3zpjDV@1YHrp{~=0g@~QWVkz3a?KrszIT}6-0G5%ig7d
zZPx9dqviu0i~#~K<9|A8-ms;K8#3<~8)%`6!O6-N-?@C#BMycU4q*+PQ`NKE%@Meb
z`Y(>#U#E!|Is8^JZi6a;+)IzFypJl3%kyh2cfqO>&K2-Hw_Q!lAkeg0KxLtPa5M~a
z@FZT0q2484G=>C-bb{Q|1cd@;(K%TV+mbH3$2WtTk$dIJJOZIGR=Hz+IbQpdlgAoH
zq`J^~fRJd3GaV_V7ET1<?Ffu5c->vQ$z9Z;n5xeC3DTWZ>S8UyyDCbvYF~Trdg#ZD
z#x;2~+@lIQAbva$7YD+Tr9i<Bwt#@321JNlKns*!Es3bOQIvz-y5t$#tU~fmu_4}V
zr+C4E%)+2fik}>JA$_1+fl4GWh#0jyb&B9!&>@72-~#VSijK}o>mp-f{VvNzI$|@9
zaThxk^bx)VWn^S<8F3vZnvJ)2ZM|UOLQhJ{Y3KX~FeJ8E_Q0CgwmF;+{*IyQQ_o|l
z3)P{O#u@Ed*-UP}-wS|L(W%1TlSNg4FO|Qv9u~N6-8zof7%Y9nE&VR0g2FyvmzqD-
zcmuL;oIXI#xd9D!8<{F{!D8(bs*`>cbGCA(T2&i+7PV;jo~?NoR=p}t(`A5s{YY%u
zjQk4&Bt^#zSiC{bGbQOpnkhcVZyO}|sIBBRfwxsj6(7vlR=n=&QbXdNkrZ&|FqWA<
z0AM8+IF~4!5FHsQEsYLU^=@Tv-wHkSf1AIYZMM_sL$fbWj-HhRJNy?pm_WX86TUw>
zu7N}CtVSm|h?lf8HVE7~-{*%;N*ab28ultaIy!Grfx#{|qlWE-`yYr*M%JjH`ksqX
zH4586=+P`}IQV#ny^7KYxq`S(i3kBqMmj0_6!~=|1e<q1%g#zfl#|@u-N%egGig|H
z$2_c8RC<xr#8Nd#{3i@z->JFu!C6@#arRSXsB+iN)H+F4`|U)pjHrRCtub6+65&P$
znbZz((*`n@hww<YZ7!PS9Z^wZx9+mcM6M!0I6-TKgTwE-C!=@lXrRmTfMD0bE8Gl*
zK;X}&2U(wMuWH`#F&_QLDV3l6k5if+J)H?K^caPLjXGCwk&RpZXzPAmYz}4ZS+dJW
zM$4oaVle%vYcEl;!Mc@r#K#sNI(*oW<JeX{vRA1sWaCwlPp>FVF_03liPvlV?tV?N
z)@*RBpEivNzIk-j_KCx?0dBgDA6LH;k9XPs#B`%N=?!qY3~%RPFhp6_1$PVK!-Lz%
z789R&iC;EK(2DEv`ep5x=SLvjH|^2{?5J07<LGNO@Z0CXCecaWp9fO)9m8uoX-jtM
z5@HarVCYpz^vI)xC_EpqMq|59$ufDt-;bBoDaz{ob&j?1m>NSOvRGSJYc*kX#2Ho_
z%M;?xrhJ%s?eTKiX-oO2>=d55Zf<M2l<9ca^e?z>5)_COp>gBJ)fRd-=E&!;+G$Jj
zHvQH&L8~z)s0VNDw6&j@CjlpdwnlCExA(Vo54EiC8gx-4R16pHIQQ!n&FoT4Kmg67
zItDlIXfuX_7#oqvh6jwjcL`y0{}`DHQDlz1?Qww>k95%DRAtuPSn>PszXw8|rL9S?
zo#b+u@)%op6ViU>-tg24VOSd32v&yjTg>Z$-~-MvU`t4-nEcu}=ptC->ikM8j$jz?
z+>{wFpzY|lZ$Hw!{=&(n+&|HQ5R8@Wqf)blN@-+M^XaN4<N?v7<Gzg>pV(qzHi5~e
zfhf=3)%0qk$%~tP>(<u(jl7(}7Q_HIH`iXdR{6Q?cv>2ot={79DuOQb`hg7l;1D@i
zw8<vnZkylTTV0aGvrP-XO$sv@c0N|uE!>Pa;n$S_YO7xN?AC1TwTk<gCi+G2V({;o
zrNm>Fqu!Pu)YguiAgPjnR+8@i7rr5Kg3t^8JpeRHUnqZ=uaZtk`jPCXQMUQ_SCNEw
zrXXifYp>FhlUdYWw#}&%{o6YjvaY>4h2txQ3%lzGNStwX3`tm5xI;5|0i<=nl2H1I
z3aR4BWhE!?|NSoMB4XMKPhj7^&eNt(uQ>Pd!6l(%LJ*Y`3#W_$H^QIQ!7U36Iw&|Y
zqI;v!Zz}{H$7H{GHt7a-;%{jW-&Fi%cmu_Air&rn;VeiG@b1cib0tf_p^>IT^-{)d
znwqj$jX1XvjknN1As)%%U?v|3o<ot!&t>!nPUD=}U#GCYIv=@(LUtI8zSgX1Qw==D
z8jZj#bEe4HS^3S*Aw2(F`aEU-C@J4mmJ{I|)fty7wtMT^sy7U5=(rn=;Dc?ZDc6Jn
zL-Cu->wB3&b}s#Z45lm)QDRV=v9)5lCu^)Jm2xqpK_GYpQmxML`q~DTAZMVeuO*SL
zRUvFOG1-lTktlnPDEztux(9375;x>bhETSIaCC&NUz5|KPYBymoYcGIilm-*mi0W`
zd$8fwjYa^B{*R-f^S~F`Cs!mctZHj(zrTC><TsjKa+yluI|EN4^+*Tpg>#!I6wQW?
zwIAt<i2x0n+w7%9`!~sIn=i*$VuBhh`pa9m|8e>R$o7o`5gmno$)fgfQD|YpcgOby
zoo5Jr_(|I*$TeJh_-62Y>BMqw-<}*W1a=3$<TU`xpjN(k@?EBtH~n)VWn2!&lgdH8
zOAyOX6SB(nY{hDnTC^JhUwmmwKEmyKsN1Pmdd~XjRPP^%`H3kY&;My!QuDCBZfd}4
zkgmV}Jl$VWPCvInt1-;^>auGWVqVj(eaWJ9QBx3Hlg=OJ-Bbip3CqSWx9|YAwuo~w
zwX$Bn3-I6>*v4P^oUTK0h3k=6oCVwmS!Q{WD8a=Ol3}Urt_l7c@b+r=PjK#JRw7%h
zt3Pa%L9*f&Kr;oUj6yKkj`{M?60JUy5HUcw(jFI7Nd$kTJ$l=H?VTS;K0wB*1(b{o
znskMpBxZ=<Z?b4?;ucZp-GPi@)Aw*fLYKfXHm75@1kiB{$V}GKyRy30$MWjbC^j}L
zd=pbj#Rf(T9xn~AzP_vf(3iIo)D-39zlk@RT34Gj3#1230tYi#$YS1xI_UkMqCjLm
z;7J<@&Y3egbo!Dl-|$6Mu=pWn%DbpeyCFW+>z<^UOFVe6<;A3&k3Nr^LbqCy>>1te
zGnn1#$`fx@#)<a?7YK<P$3#K)TS8pR;kp9OY1uyaj6ONS`oZ~0hy4$1xc584zkjfH
zrB4g%7^TRU8Myu<?sl&D8M>MxkXh3dt5sA(9JycFt|F|pQ?I!#18zFlyEVkxiBYk!
ztrU8Gvv@e7Q-ZOuD<-C~f>035eZ`4gR+BKc9Z5W+mxeRXXdFRLg!6+FZwh=m%|4ud
z)M@;9i}}B63H6Gpy@DER3q==eSg9yf+QkbWXE*{hNvq+0>c>V9R6n5+l%7{IwB3f5
z>yX%xwYTor5d_H~?e$5h5O~TatgWxpD;<L0(YY<iAc3I6dHLk4;<*L&puD0Y;gzae
z;tZpT)=xr^S|%7Aw|sFga~w~<g`xqnB_eW6>|_;C<3J&>&?RkP%#}zRB5Os2n<Jad
zuul!8KproK|0oUJ6k3$Z4Zte_T#xZq<3NG1!Ftxq#oOaK2?h>PM*1vH<&(RNoa5rm
zB@8OmfeiXCf8-z-xy-BkvL}e#GUKs+j0|dJAqs{e>P*>i)m)@{iD*{vH5BT$pm3q^
z#ajTQsD4}UUx45@3JTgW7IujsmsDvUALzCIxTThxlZm~V|3@;X3P41b(E*z*R{nYY
z`gkc}tG7F|jmz4nvBr|{SXdswV;Dp?LR3A1l_16aZHm7?Pndu=r@Ly^s);s9GfDo3
zXPw(j0GaBQa4Q#}&WGiXxG0~_(J-?F$Xd_oFL5;0h&)^=y@0DRpuC>>ohe5%p52Xz
zFMPqO#?^=WS{A0enf}JErJ^$4kz3TxPYup+c=o2PW2vw#VU%2tp=gN1YiWS0i__f9
z2d#cPfBfFTN53O84;0kF=o|;@AJn#PwXKGc_pVdjbhRTNKrqYn+t$)3{o%iV*Z)r4
zAf-wFtyT(c?UGm~SdND4yxYf5PE5c0x6iKq{s4#<)$034`48=${x8v#KhtPSU328W
my{?vuAV*oS4%caB-EePZ?_-&LAEYSwH+Iwn`@^<#*8d-0QY#4n

literal 0
HcmV?d00001


From 205f047efe055f1b7187a149581179fee42d0f12 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Zemanovi=C4=8D?= <tomas@heliax.dev>
Date: Fri, 25 Nov 2022 19:24:39 +0100
Subject: [PATCH 5/9] changelog: add #733

---
 .changelog/unreleased/improvements/733-core-crate-split.md | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 .changelog/unreleased/improvements/733-core-crate-split.md

diff --git a/.changelog/unreleased/improvements/733-core-crate-split.md b/.changelog/unreleased/improvements/733-core-crate-split.md
new file mode 100644
index 0000000000..6ad3737cae
--- /dev/null
+++ b/.changelog/unreleased/improvements/733-core-crate-split.md
@@ -0,0 +1,4 @@
+- Public parts of shared `namada` crate have been split up into a
+  `namada_core` crate. The `namada_proof_of_stake`, `namada_vp_prelude`
+  and `namada_tx_prelude` crates now depend on this `namada_core` crate.
+  ([#733](https://github.com/anoma/namada/pull/733))

From 7129ae0f5030f6265a21686b4d28ba7041e239bc Mon Sep 17 00:00:00 2001
From: Murisi Tarusenga <murisit@gmail.com>
Date: Wed, 30 Nov 2022 08:38:53 +0200
Subject: [PATCH 6/9] Reaggregated transaction creation code.

---
 apps/src/lib/cli.rs          |  38 ----------
 apps/src/lib/client/tx.rs    | 140 +++++++++++++++++------------------
 apps/src/lib/client/types.rs |  79 --------------------
 3 files changed, 67 insertions(+), 190 deletions(-)

diff --git a/apps/src/lib/cli.rs b/apps/src/lib/cli.rs
index 4d0ebcad23..5fc81f4dc4 100644
--- a/apps/src/lib/cli.rs
+++ b/apps/src/lib/cli.rs
@@ -1526,7 +1526,6 @@ pub mod args {
     use super::context::*;
     use super::utils::*;
     use super::{ArgGroup, ArgMatches};
-    use crate::client::types::{ParsedTxArgs, ParsedTxTransferArgs};
     use crate::config;
     use crate::config::TendermintMode;
     use crate::facade::tendermint::Timeout;
@@ -1766,21 +1765,6 @@ pub mod args {
         pub amount: token::Amount,
     }
 
-    impl TxTransfer {
-        pub fn parse_from_context(
-            &self,
-            ctx: &mut Context,
-        ) -> ParsedTxTransferArgs {
-            ParsedTxTransferArgs {
-                tx: self.tx.parse_from_context(ctx),
-                source: ctx.get_cached(&self.source),
-                target: ctx.get(&self.target),
-                token: ctx.get(&self.token),
-                amount: self.amount,
-            }
-        }
-    }
-
     impl Args for TxTransfer {
         fn parse(matches: &ArgMatches) -> Self {
             let tx = Tx::parse(matches);
@@ -2731,28 +2715,6 @@ pub mod args {
         pub signer: Option<WalletAddress>,
     }
 
-    impl Tx {
-        pub fn parse_from_context(&self, ctx: &mut Context) -> ParsedTxArgs {
-            ParsedTxArgs {
-                dry_run: self.dry_run,
-                force: self.force,
-                broadcast_only: self.broadcast_only,
-                ledger_address: self.ledger_address.clone(),
-                initialized_account_alias: self
-                    .initialized_account_alias
-                    .clone(),
-                fee_amount: self.fee_amount,
-                fee_token: ctx.get(&self.fee_token),
-                gas_limit: self.gas_limit.clone(),
-                signing_key: self
-                    .signing_key
-                    .as_ref()
-                    .map(|sk| ctx.get_cached(sk)),
-                signer: self.signer.as_ref().map(|signer| ctx.get(signer)),
-            }
-        }
-    }
-
     impl Args for Tx {
         fn def(app: App) -> App {
             app.arg(
diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs
index bb667cabec..332ae75d8e 100644
--- a/apps/src/lib/client/tx.rs
+++ b/apps/src/lib/client/tx.rs
@@ -65,13 +65,11 @@ use sha2::Digest;
 use tokio::time::{Duration, Instant};
 
 use super::rpc;
-use super::types::ShieldedTransferContext;
 use crate::cli::context::WalletAddress;
 use crate::cli::{args, safe_exit, Context};
 use crate::client::rpc::{query_conversion, query_storage_value};
 use crate::client::signing::{find_keypair, sign_tx, tx_signer, TxSigningKey};
 use crate::client::tendermint_rpc_types::{TxBroadcastData, TxResponse};
-use crate::client::types::ParsedTxTransferArgs;
 use crate::facade::tendermint_config::net::Address as TendermintAddress;
 use crate::facade::tendermint_rpc::endpoint::broadcast::tx_sync::Response;
 use crate::facade::tendermint_rpc::error::Error as RpcError;
@@ -1293,18 +1291,32 @@ fn convert_amount(
 /// transactions balanced, but it is understood that transparent account changes
 /// are effected only by the amounts and signatures specified by the containing
 /// Transfer object.
-async fn gen_shielded_transfer<C>(
-    ctx: &mut C,
-    args: &ParsedTxTransferArgs,
+async fn gen_shielded_transfer(
+    ctx: &mut Context,
+    args: &args::TxTransfer,
     shielded_gas: bool,
-) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error>
-where
-    C: ShieldedTransferContext,
-{
-    let spending_key = args.source.spending_key().map(|x| x.into());
-    let payment_address = args.target.payment_address();
+) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error> {
+    // No shielded components are needed when neither source nor destination
+    // are shielded
+    let spending_key = ctx.get_cached(&args.source).spending_key();
+    let payment_address = ctx.get(&args.target).payment_address();
+    if spending_key.is_none() && payment_address.is_none() {
+        return Ok(None);
+    }
+    // We want to fund our transaction solely from supplied spending key
+    let spending_key = spending_key.map(|x| x.into());
+    let spending_keys: Vec<_> = spending_key.into_iter().collect();
+    // Load the current shielded context given the spending key we possess
+    let _ = ctx.shielded.load();
+    ctx.shielded
+        .fetch(&args.tx.ledger_address, &spending_keys, &[])
+        .await;
+    // Save the update state so that future fetches can be short-circuited
+    let _ = ctx.shielded.save();
     // Determine epoch in which to submit potential shielded transaction
-    let epoch = ctx.query_epoch(args.tx.ledger_address.clone()).await;
+    let epoch = rpc::query_epoch(args::Query {
+        ledger_address: args.tx.ledger_address.clone()
+    }).await;
     // Context required for storing which notes are in the source's possesion
     let consensus_branch_id = BranchId::Sapling;
     let amt: u64 = args.amount.into();
@@ -1313,7 +1325,7 @@ where
     // Now we build up the transaction within this object
     let mut builder = Builder::<TestNetwork, OsRng>::new(0u32);
     // Convert transaction amount into MASP types
-    let (asset_type, amount) = convert_amount(epoch, &args.token, args.amount);
+    let (asset_type, amount) = convert_amount(epoch, &ctx.get(&args.token), args.amount);
 
     // Transactions with transparent input and shielded output
     // may be affected if constructed close to epoch boundary
@@ -1323,13 +1335,14 @@ where
         // Transaction fees need to match the amount in the wrapper Transfer
         // when MASP source is used
         let (_, fee) =
-            convert_amount(epoch, &args.tx.fee_token, args.tx.fee_amount);
+            convert_amount(epoch, &ctx.get(&args.tx.fee_token), args.tx.fee_amount);
         builder.set_fee(fee.clone())?;
         // If the gas is coming from the shielded pool, then our shielded inputs
         // must also cover the gas fee
         let required_amt = if shielded_gas { amount + fee } else { amount };
         // Locate unspent notes that can help us meet the transaction amount
         let (_, unspent_notes, used_convs) = ctx
+            .shielded
             .collect_unspent_notes(
                 args.tx.ledger_address.clone(),
                 &to_viewing_key(&sk).vk,
@@ -1392,8 +1405,8 @@ where
         epoch_sensitive = false;
         // Embed the transparent target address into the shielded transaction so
         // that it can be signed
-        let target_enc = args
-            .target
+        let target = ctx.get(&args.target);
+        let target_enc = target
             .address()
             .expect("target address should be transparent")
             .try_to_vec()
@@ -1422,7 +1435,9 @@ where
     let mut tx = builder.build(consensus_branch_id, &prover);
 
     if epoch_sensitive {
-        let new_epoch = ctx.query_epoch(args.tx.ledger_address.clone()).await;
+        let new_epoch = rpc::query_epoch(args::Query {
+            ledger_address: args.tx.ledger_address.clone()
+        }).await;
 
         // If epoch has changed, recalculate shielded outputs to match new epoch
         if new_epoch != epoch {
@@ -1431,7 +1446,7 @@ where
             replay_builder.set_fee(Amount::zero())?;
             let ovk_opt = spending_key.map(|x| x.expsk.ovk);
             let (new_asset_type, _) =
-                convert_amount(new_epoch, &args.token, args.amount);
+                convert_amount(new_epoch, &ctx.get(&args.token), args.amount);
             replay_builder.add_sapling_output(
                 ovk_opt,
                 payment_address.unwrap().into(),
@@ -1476,9 +1491,8 @@ where
 }
 
 pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
-    let parsed_args = args.parse_from_context(&mut ctx);
-    let source = parsed_args.source.effective_address();
-    let target = parsed_args.target.effective_address();
+    let source = ctx.get_cached(&args.source).effective_address();
+    let target = ctx.get(&args.target).effective_address();
     // Check that the source address exists on chain
     let source_exists =
         rpc::known_address(&source, args.tx.ledger_address.clone()).await;
@@ -1497,25 +1511,26 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
             safe_exit(1)
         }
     }
+    let token = ctx.get(&args.token);
     // Check that the token address exists on chain
     let token_exists =
-        rpc::known_address(&parsed_args.token, args.tx.ledger_address.clone())
+        rpc::known_address(&token, args.tx.ledger_address.clone())
             .await;
     if !token_exists {
         eprintln!(
             "The token address {} doesn't exist on chain.",
-            parsed_args.token
+            token
         );
         if !args.tx.force {
             safe_exit(1)
         }
     }
     // Check source balance
-    let (sub_prefix, balance_key) = match args.sub_prefix {
+    let (sub_prefix, balance_key) = match &args.sub_prefix {
         Some(sub_prefix) => {
             let sub_prefix = storage::Key::parse(sub_prefix).unwrap();
             let prefix = token::multitoken_balance_prefix(
-                &parsed_args.token,
+                &token,
                 &sub_prefix,
             );
             (
@@ -1523,7 +1538,7 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
                 token::multitoken_balance_key(&prefix, &source),
             )
         }
-        None => (None, token::balance_key(&parsed_args.token, &source)),
+        None => (None, token::balance_key(&token, &source)),
     };
     let client = HttpClient::new(args.tx.ledger_address.clone()).unwrap();
     match rpc::query_storage_value::<token::Amount>(&client, &balance_key).await
@@ -1534,7 +1549,7 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
                     "The balance of the source {} of token {} is lower than \
                      the amount to be transferred. Amount to transfer is {} \
                      and the balance is {}.",
-                    source, parsed_args.token, args.amount, balance
+                    source, token, args.amount, balance
                 );
                 if !args.tx.force {
                     safe_exit(1)
@@ -1544,7 +1559,7 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
         None => {
             eprintln!(
                 "No balance found for the source {} of token {}",
-                source, parsed_args.token
+                source, token
             );
             if !args.tx.force {
                 safe_exit(1)
@@ -1570,13 +1585,13 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
             (
                 TxSigningKey::SecretKey(masp_tx_key()),
                 args.amount,
-                parsed_args.token.clone(),
+                token.clone(),
             )
         } else {
             (
                 TxSigningKey::WalletAddress(args.source.to_address()),
                 args.amount,
-                parsed_args.token.clone(),
+                token.clone(),
             )
         };
     // If our chosen signer is the MASP sentinel key, then our shielded inputs
@@ -1591,6 +1606,27 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
         _ => None,
     };
 
+    let stx_result =
+        gen_shielded_transfer(&mut ctx, &args, shielded_gas)
+        .await;
+    let shielded = match stx_result {
+        Ok(stx) => stx.map(|x| x.0),
+        Err(builder::Error::ChangeIsNegative(_)) => {
+            eprintln!(
+                "The balance of the source {} is lower than the \
+                 amount to be transferred and fees. Amount to \
+                 transfer is {} {} and fees are {} {}.",
+                source,
+                args.amount,
+                token,
+                args.tx.fee_amount,
+                ctx.get(&args.tx.fee_token),
+            );
+            safe_exit(1)
+        }
+        Err(err) => panic!("{}", err),
+    };
+
     let transfer = token::Transfer {
         source,
         target,
@@ -1598,49 +1634,7 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
         sub_prefix,
         amount,
         key,
-        shielded: {
-            let spending_key = parsed_args.source.spending_key();
-            let payment_address = parsed_args.target.payment_address();
-            // No shielded components are needed when neither source nor
-            // destination are shielded
-            if spending_key.is_none() && payment_address.is_none() {
-                None
-            } else {
-                // We want to fund our transaction solely from supplied spending
-                // key
-                let spending_key = spending_key.map(|x| x.into());
-                let spending_keys: Vec<_> = spending_key.into_iter().collect();
-                // Load the current shielded context given the spending key we
-                // possess
-                let _ = ctx.shielded.load();
-                ctx.shielded
-                    .fetch(&args.tx.ledger_address, &spending_keys, &[])
-                    .await;
-                // Save the update state so that future fetches can be
-                // short-circuited
-                let _ = ctx.shielded.save();
-                let stx_result =
-                    gen_shielded_transfer(&mut ctx, &parsed_args, shielded_gas)
-                        .await;
-                match stx_result {
-                    Ok(stx) => stx.map(|x| x.0),
-                    Err(builder::Error::ChangeIsNegative(_)) => {
-                        eprintln!(
-                            "The balance of the source {} is lower than the \
-                             amount to be transferred and fees. Amount to \
-                             transfer is {} {} and fees are {} {}.",
-                            parsed_args.source,
-                            args.amount,
-                            parsed_args.token,
-                            args.tx.fee_amount,
-                            parsed_args.tx.fee_token,
-                        );
-                        safe_exit(1)
-                    }
-                    Err(err) => panic!("{}", err),
-                }
-            }
-        },
+        shielded,
     };
     tracing::debug!("Transfer data {:?}", transfer);
     let data = transfer
diff --git a/apps/src/lib/client/types.rs b/apps/src/lib/client/types.rs
index 5a26244474..7246e436b8 100644
--- a/apps/src/lib/client/types.rs
+++ b/apps/src/lib/client/types.rs
@@ -13,82 +13,3 @@ use super::rpc;
 use crate::cli::{args, Context};
 use crate::client::tx::Conversions;
 use crate::facade::tendermint_config::net::Address as TendermintAddress;
-
-#[derive(Clone, Debug)]
-pub struct ParsedTxArgs {
-    /// Simulate applying the transaction
-    pub dry_run: bool,
-    /// Submit the transaction even if it doesn't pass client checks
-    pub force: bool,
-    /// Do not wait for the transaction to be added to the blockchain
-    pub broadcast_only: bool,
-    /// The address of the ledger node as host:port
-    pub ledger_address: TendermintAddress,
-    /// If any new account is initialized by the tx, use the given alias to
-    /// save it in the wallet.
-    pub initialized_account_alias: Option<String>,
-    /// The amount being payed to include the transaction
-    pub fee_amount: token::Amount,
-    /// The token in which the fee is being paid
-    pub fee_token: Address,
-    /// The max amount of gas used to process tx
-    pub gas_limit: GasLimit,
-    /// Sign the tx with the key for the given alias from your wallet
-    pub signing_key: Option<key::common::SecretKey>,
-    /// Sign the tx with the keypair of the public key of the given address
-    pub signer: Option<Address>,
-}
-
-#[derive(Clone, Debug)]
-pub struct ParsedTxTransferArgs {
-    /// Common tx arguments
-    pub tx: ParsedTxArgs,
-    /// Transfer source address
-    pub source: TransferSource,
-    /// Transfer target address
-    pub target: TransferTarget,
-    /// Transferred token address
-    pub token: Address,
-    /// Transferred token amount
-    pub amount: token::Amount,
-}
-
-#[async_trait]
-pub trait ShieldedTransferContext {
-    async fn collect_unspent_notes(
-        &mut self,
-        ledger_address: TendermintAddress,
-        vk: &ViewingKey,
-        target: Amount,
-        target_epoch: Epoch,
-    ) -> (
-        Amount,
-        Vec<(Diversifier, Note, MerklePath<Node>)>,
-        Conversions,
-    );
-
-    async fn query_epoch(&self, ledger_address: TendermintAddress) -> Epoch;
-}
-
-#[async_trait]
-impl ShieldedTransferContext for Context {
-    async fn collect_unspent_notes(
-        &mut self,
-        ledger_address: TendermintAddress,
-        vk: &ViewingKey,
-        target: Amount,
-        target_epoch: Epoch,
-    ) -> (
-        Amount,
-        Vec<(Diversifier, Note, MerklePath<Node>)>,
-        Conversions,
-    ) {
-        self.shielded
-            .collect_unspent_notes(ledger_address, vk, target, target_epoch)
-            .await
-    }
-
-    async fn query_epoch(&self, ledger_address: TendermintAddress) -> Epoch {
-        rpc::query_epoch(args::Query { ledger_address }).await
-    }
-}

From 5a6ff557876104ce401978ddfe039d774582b769 Mon Sep 17 00:00:00 2001
From: Murisi Tarusenga <murisit@gmail.com>
Date: Wed, 30 Nov 2022 09:20:35 +0200
Subject: [PATCH 7/9] Moved gen_shielded_transfer into ShieldedContext.

---
 apps/src/lib/client/tx.rs | 434 ++++++++++++++++++++------------------
 1 file changed, 224 insertions(+), 210 deletions(-)

diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs
index 332ae75d8e..b27fa1f52e 100644
--- a/apps/src/lib/client/tx.rs
+++ b/apps/src/lib/client/tx.rs
@@ -45,7 +45,7 @@ use namada::types::governance::{
     OfflineProposal, OfflineVote, Proposal, ProposalVote,
 };
 use namada::types::key::*;
-use namada::types::masp::{PaymentAddress, TransferTarget};
+use namada::types::masp::{PaymentAddress, TransferSource, TransferTarget};
 use namada::types::storage::{
     BlockHeight, Epoch, Key, KeySeg, TxIndex, RESERVED_ADDRESS_PREFIX,
 };
@@ -1259,6 +1259,215 @@ impl ShieldedContext {
         }
         res
     }
+
+    /// Make shielded components to embed within a Transfer object. If neither
+    /// a shielded payment address nor a spending key is specified, then no
+    /// shielded components are produced. Otherwise a transaction containing
+    /// nullifiers and/or note commitments is produced. Dummy transparent UTXOs are sometimes used to make
+    /// transactions balanced, but it is understood that transparent account changes
+    /// are effected only by the amounts and signatures specified by the containing
+    /// Transfer object.
+    async fn gen_shielded_transfer(
+        &mut self,
+        ledger_address: &TendermintAddress,
+        source: TransferSource,
+        target: TransferTarget,
+        args_amount: token::Amount,
+        token: Address,
+        fee_amount: token::Amount,
+        fee_token: Address,
+        shielded_gas: bool,
+    ) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error> {
+        // No shielded components are needed when neither source nor destination
+        // are shielded
+        let spending_key = source.spending_key();
+        let payment_address = target.payment_address();
+        if spending_key.is_none() && payment_address.is_none() {
+            return Ok(None);
+        }
+        // We want to fund our transaction solely from supplied spending key
+        let spending_key = spending_key.map(|x| x.into());
+        let spending_keys: Vec<_> = spending_key.into_iter().collect();
+        // Load the current shielded context given the spending key we possess
+        let _ = self.load();
+        self.fetch(&ledger_address, &spending_keys, &[])
+            .await;
+        // Save the updated state so that future fetches can be short-circuited
+        let _ = self.save();
+        // Determine epoch in which to submit potential shielded transaction
+        let epoch = rpc::query_epoch(args::Query {
+            ledger_address: ledger_address.clone()
+        }).await;
+        // Context required for storing which notes are in the source's possession
+        let consensus_branch_id = BranchId::Sapling;
+        let amt: u64 = args_amount.into();
+        let memo: Option<Memo> = None;
+
+        // Now we build up the transaction within this object
+        let mut builder = Builder::<TestNetwork, OsRng>::new(0u32);
+        // Convert transaction amount into MASP types
+        let (asset_type, amount) = convert_amount(epoch, &token, args_amount);
+
+        // Transactions with transparent input and shielded output
+        // may be affected if constructed close to epoch boundary
+        let mut epoch_sensitive: bool = false;
+        // If there are shielded inputs
+        if let Some(sk) = spending_key {
+            // Transaction fees need to match the amount in the wrapper Transfer
+            // when MASP source is used
+            let (_, fee) =
+                convert_amount(epoch, &fee_token, fee_amount);
+            builder.set_fee(fee.clone())?;
+            // If the gas is coming from the shielded pool, then our shielded inputs
+            // must also cover the gas fee
+            let required_amt = if shielded_gas { amount + fee } else { amount };
+            // Locate unspent notes that can help us meet the transaction amount
+            let (_, unspent_notes, used_convs) = self
+                .collect_unspent_notes(
+                    ledger_address.clone(),
+                    &to_viewing_key(&sk).vk,
+                    required_amt,
+                    epoch,
+                )
+                .await;
+            // Commit the notes found to our transaction
+            for (diversifier, note, merkle_path) in unspent_notes {
+                builder.add_sapling_spend(sk, diversifier, note, merkle_path)?;
+            }
+            // Commit the conversion notes used during summation
+            for (conv, wit, value) in used_convs.values() {
+                if *value > 0 {
+                    builder.add_convert(
+                        conv.clone(),
+                        *value as u64,
+                        wit.clone(),
+                    )?;
+                }
+            }
+        } else {
+            // No transfer fees come from the shielded transaction for non-MASP
+            // sources
+            builder.set_fee(Amount::zero())?;
+            // We add a dummy UTXO to our transaction, but only the source of the
+            // parent Transfer object is used to validate fund availability
+            let secp_sk =
+                secp256k1::SecretKey::from_slice(&[0xcd; 32]).expect("secret key");
+            let secp_ctx = secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
+            let secp_pk =
+                secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
+                .serialize();
+            let hash =
+                ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
+            let script = TransparentAddress::PublicKey(hash.into()).script();
+            epoch_sensitive = true;
+            builder.add_transparent_input(
+                secp_sk,
+                OutPoint::new([0u8; 32], 0),
+                TxOut {
+                    asset_type,
+                    value: amt,
+                    script_pubkey: script,
+                },
+            )?;
+        }
+        // Now handle the outputs of this transaction
+        // If there is a shielded output
+        if let Some(pa) = payment_address {
+            let ovk_opt = spending_key.map(|x| x.expsk.ovk);
+            builder.add_sapling_output(
+                ovk_opt,
+                pa.into(),
+                asset_type,
+                amt,
+                memo.clone(),
+            )?;
+        } else {
+            epoch_sensitive = false;
+            // Embed the transparent target address into the shielded transaction so
+            // that it can be signed
+            let target_enc = target
+                .address()
+                .expect("target address should be transparent")
+                .try_to_vec()
+                .expect("target address encoding");
+            let hash = ripemd160::Ripemd160::digest(&sha2::Sha256::digest(
+                target_enc.as_ref(),
+            ));
+            builder.add_transparent_output(
+                &TransparentAddress::PublicKey(hash.into()),
+                asset_type,
+                amt,
+            )?;
+        }
+        let prover = if let Ok(params_dir) = env::var(masp::ENV_VAR_MASP_PARAMS_DIR)
+        {
+            let params_dir = PathBuf::from(params_dir);
+            let spend_path = params_dir.join(masp::SPEND_NAME);
+            let convert_path = params_dir.join(masp::CONVERT_NAME);
+            let output_path = params_dir.join(masp::OUTPUT_NAME);
+            LocalTxProver::new(&spend_path, &output_path, &convert_path)
+        } else {
+            LocalTxProver::with_default_location()
+                .expect("unable to load MASP Parameters")
+        };
+        // Build and return the constructed transaction
+        let mut tx = builder.build(consensus_branch_id, &prover);
+
+        if epoch_sensitive {
+            let new_epoch = rpc::query_epoch(args::Query {
+                ledger_address: ledger_address.clone()
+            }).await;
+
+            // If epoch has changed, recalculate shielded outputs to match new epoch
+            if new_epoch != epoch {
+                // Hack: build new shielded transfer with updated outputs
+                let mut replay_builder = Builder::<TestNetwork, OsRng>::new(0u32);
+                replay_builder.set_fee(Amount::zero())?;
+                let ovk_opt = spending_key.map(|x| x.expsk.ovk);
+                let (new_asset_type, _) =
+                    convert_amount(new_epoch, &token, args_amount);
+                replay_builder.add_sapling_output(
+                    ovk_opt,
+                    payment_address.unwrap().into(),
+                    new_asset_type,
+                    amt,
+                    memo,
+                )?;
+
+                let secp_sk = secp256k1::SecretKey::from_slice(&[0xcd; 32])
+                    .expect("secret key");
+                let secp_ctx =
+                    secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
+                let secp_pk =
+                    secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
+                    .serialize();
+                let hash =
+                    ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
+                let script = TransparentAddress::PublicKey(hash.into()).script();
+                replay_builder.add_transparent_input(
+                    secp_sk,
+                    OutPoint::new([0u8; 32], 0),
+                    TxOut {
+                        asset_type: new_asset_type,
+                        value: amt,
+                        script_pubkey: script,
+                    },
+                )?;
+
+                let (replay_tx, _) =
+                    replay_builder.build(consensus_branch_id, &prover)?;
+                tx = tx.map(|(t, tm)| {
+                    let mut temp = t.deref().clone();
+                    temp.shielded_outputs = replay_tx.shielded_outputs.clone();
+                    temp.value_balance = temp.value_balance.reject(asset_type)
+                        - Amount::from_pair(new_asset_type, amt).unwrap();
+                    (temp.freeze().unwrap(), tm)
+                });
+            }
+        }
+
+        tx.map(Some)
+    }
 }
 
 /// Make asset type corresponding to given address and epoch
@@ -1284,215 +1493,11 @@ fn convert_amount(
     (asset_type, amount)
 }
 
-/// Make shielded components to embed within a Transfer object. If no shielded
-/// payment address nor spending key is specified, then no shielded components
-/// are produced. Otherwise a transaction containing nullifiers and/or note
-/// commitments are produced. Dummy transparent UTXOs are sometimes used to make
-/// transactions balanced, but it is understood that transparent account changes
-/// are effected only by the amounts and signatures specified by the containing
-/// Transfer object.
-async fn gen_shielded_transfer(
-    ctx: &mut Context,
-    args: &args::TxTransfer,
-    shielded_gas: bool,
-) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error> {
-    // No shielded components are needed when neither source nor destination
-    // are shielded
-    let spending_key = ctx.get_cached(&args.source).spending_key();
-    let payment_address = ctx.get(&args.target).payment_address();
-    if spending_key.is_none() && payment_address.is_none() {
-        return Ok(None);
-    }
-    // We want to fund our transaction solely from supplied spending key
-    let spending_key = spending_key.map(|x| x.into());
-    let spending_keys: Vec<_> = spending_key.into_iter().collect();
-    // Load the current shielded context given the spending key we possess
-    let _ = ctx.shielded.load();
-    ctx.shielded
-        .fetch(&args.tx.ledger_address, &spending_keys, &[])
-        .await;
-    // Save the update state so that future fetches can be short-circuited
-    let _ = ctx.shielded.save();
-    // Determine epoch in which to submit potential shielded transaction
-    let epoch = rpc::query_epoch(args::Query {
-        ledger_address: args.tx.ledger_address.clone()
-    }).await;
-    // Context required for storing which notes are in the source's possesion
-    let consensus_branch_id = BranchId::Sapling;
-    let amt: u64 = args.amount.into();
-    let memo: Option<Memo> = None;
-
-    // Now we build up the transaction within this object
-    let mut builder = Builder::<TestNetwork, OsRng>::new(0u32);
-    // Convert transaction amount into MASP types
-    let (asset_type, amount) = convert_amount(epoch, &ctx.get(&args.token), args.amount);
-
-    // Transactions with transparent input and shielded output
-    // may be affected if constructed close to epoch boundary
-    let mut epoch_sensitive: bool = false;
-    // If there are shielded inputs
-    if let Some(sk) = spending_key {
-        // Transaction fees need to match the amount in the wrapper Transfer
-        // when MASP source is used
-        let (_, fee) =
-            convert_amount(epoch, &ctx.get(&args.tx.fee_token), args.tx.fee_amount);
-        builder.set_fee(fee.clone())?;
-        // If the gas is coming from the shielded pool, then our shielded inputs
-        // must also cover the gas fee
-        let required_amt = if shielded_gas { amount + fee } else { amount };
-        // Locate unspent notes that can help us meet the transaction amount
-        let (_, unspent_notes, used_convs) = ctx
-            .shielded
-            .collect_unspent_notes(
-                args.tx.ledger_address.clone(),
-                &to_viewing_key(&sk).vk,
-                required_amt,
-                epoch,
-            )
-            .await;
-        // Commit the notes found to our transaction
-        for (diversifier, note, merkle_path) in unspent_notes {
-            builder.add_sapling_spend(sk, diversifier, note, merkle_path)?;
-        }
-        // Commit the conversion notes used during summation
-        for (conv, wit, value) in used_convs.values() {
-            if *value > 0 {
-                builder.add_convert(
-                    conv.clone(),
-                    *value as u64,
-                    wit.clone(),
-                )?;
-            }
-        }
-    } else {
-        // No transfer fees come from the shielded transaction for non-MASP
-        // sources
-        builder.set_fee(Amount::zero())?;
-        // We add a dummy UTXO to our transaction, but only the source of the
-        // parent Transfer object is used to validate fund availability
-        let secp_sk =
-            secp256k1::SecretKey::from_slice(&[0xcd; 32]).expect("secret key");
-        let secp_ctx = secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
-        let secp_pk =
-            secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
-                .serialize();
-        let hash =
-            ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
-        let script = TransparentAddress::PublicKey(hash.into()).script();
-        epoch_sensitive = true;
-        builder.add_transparent_input(
-            secp_sk,
-            OutPoint::new([0u8; 32], 0),
-            TxOut {
-                asset_type,
-                value: amt,
-                script_pubkey: script,
-            },
-        )?;
-    }
-    // Now handle the outputs of this transaction
-    // If there is a shielded output
-    if let Some(pa) = payment_address {
-        let ovk_opt = spending_key.map(|x| x.expsk.ovk);
-        builder.add_sapling_output(
-            ovk_opt,
-            pa.into(),
-            asset_type,
-            amt,
-            memo.clone(),
-        )?;
-    } else {
-        epoch_sensitive = false;
-        // Embed the transparent target address into the shielded transaction so
-        // that it can be signed
-        let target = ctx.get(&args.target);
-        let target_enc = target
-            .address()
-            .expect("target address should be transparent")
-            .try_to_vec()
-            .expect("target address encoding");
-        let hash = ripemd160::Ripemd160::digest(&sha2::Sha256::digest(
-            target_enc.as_ref(),
-        ));
-        builder.add_transparent_output(
-            &TransparentAddress::PublicKey(hash.into()),
-            asset_type,
-            amt,
-        )?;
-    }
-    let prover = if let Ok(params_dir) = env::var(masp::ENV_VAR_MASP_PARAMS_DIR)
-    {
-        let params_dir = PathBuf::from(params_dir);
-        let spend_path = params_dir.join(masp::SPEND_NAME);
-        let convert_path = params_dir.join(masp::CONVERT_NAME);
-        let output_path = params_dir.join(masp::OUTPUT_NAME);
-        LocalTxProver::new(&spend_path, &output_path, &convert_path)
-    } else {
-        LocalTxProver::with_default_location()
-            .expect("unable to load MASP Parameters")
-    };
-    // Build and return the constructed transaction
-    let mut tx = builder.build(consensus_branch_id, &prover);
-
-    if epoch_sensitive {
-        let new_epoch = rpc::query_epoch(args::Query {
-            ledger_address: args.tx.ledger_address.clone()
-        }).await;
-
-        // If epoch has changed, recalculate shielded outputs to match new epoch
-        if new_epoch != epoch {
-            // Hack: build new shielded transfer with updated outputs
-            let mut replay_builder = Builder::<TestNetwork, OsRng>::new(0u32);
-            replay_builder.set_fee(Amount::zero())?;
-            let ovk_opt = spending_key.map(|x| x.expsk.ovk);
-            let (new_asset_type, _) =
-                convert_amount(new_epoch, &ctx.get(&args.token), args.amount);
-            replay_builder.add_sapling_output(
-                ovk_opt,
-                payment_address.unwrap().into(),
-                new_asset_type,
-                amt,
-                memo,
-            )?;
-
-            let secp_sk = secp256k1::SecretKey::from_slice(&[0xcd; 32])
-                .expect("secret key");
-            let secp_ctx =
-                secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
-            let secp_pk =
-                secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
-                    .serialize();
-            let hash =
-                ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
-            let script = TransparentAddress::PublicKey(hash.into()).script();
-            replay_builder.add_transparent_input(
-                secp_sk,
-                OutPoint::new([0u8; 32], 0),
-                TxOut {
-                    asset_type: new_asset_type,
-                    value: amt,
-                    script_pubkey: script,
-                },
-            )?;
-
-            let (replay_tx, _) =
-                replay_builder.build(consensus_branch_id, &prover)?;
-            tx = tx.map(|(t, tm)| {
-                let mut temp = t.deref().clone();
-                temp.shielded_outputs = replay_tx.shielded_outputs.clone();
-                temp.value_balance = temp.value_balance.reject(asset_type)
-                    - Amount::from_pair(new_asset_type, amt).unwrap();
-                (temp.freeze().unwrap(), tm)
-            });
-        }
-    }
-
-    tx.map(Some)
-}
-
 pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
-    let source = ctx.get_cached(&args.source).effective_address();
-    let target = ctx.get(&args.target).effective_address();
+    let transfer_source = ctx.get_cached(&args.source);
+    let source = transfer_source.effective_address();
+    let transfer_target = ctx.get(&args.target);
+    let target = transfer_target.effective_address();
     // Check that the source address exists on chain
     let source_exists =
         rpc::known_address(&source, args.tx.ledger_address.clone()).await;
@@ -1607,7 +1612,16 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
     };
 
     let stx_result =
-        gen_shielded_transfer(&mut ctx, &args, shielded_gas)
+        ctx.shielded.gen_shielded_transfer(
+            &args.tx.ledger_address,
+            transfer_source,
+            transfer_target,
+            args.amount,
+            ctx.get(&args.token),
+            args.tx.fee_amount,
+            ctx.get(&args.tx.fee_token),
+            shielded_gas,
+        )
         .await;
     let shielded = match stx_result {
         Ok(stx) => stx.map(|x| x.0),

From 4fff7b57ac4f08f5dd00ac2b6793150a6d7bf8e5 Mon Sep 17 00:00:00 2001
From: Murisi Tarusenga <murisit@gmail.com>
Date: Wed, 30 Nov 2022 13:32:44 +0200
Subject: [PATCH 8/9] Started factoring platform dependent code out of
 ShieldedContext.

---
 apps/src/lib/cli/context.rs |   6 +-
 apps/src/lib/client/rpc.rs  |  32 ++--
 apps/src/lib/client/tx.rs   | 326 ++++++++++++++++++++++--------------
 3 files changed, 212 insertions(+), 152 deletions(-)

diff --git a/apps/src/lib/cli/context.rs b/apps/src/lib/cli/context.rs
index 04bd91e6ce..4461cd4319 100644
--- a/apps/src/lib/cli/context.rs
+++ b/apps/src/lib/cli/context.rs
@@ -12,7 +12,7 @@ use namada::types::key::*;
 use namada::types::masp::*;
 
 use super::args;
-use crate::client::tx::ShieldedContext;
+use crate::client::tx::{ShieldedContext, CLIShieldedUtils};
 use crate::config::genesis::genesis_config;
 use crate::config::global::GlobalConfig;
 use crate::config::{self, Config};
@@ -72,7 +72,7 @@ pub struct Context {
     /// The ledger configuration for a specific chain ID
     pub config: Config,
     /// The context fr shielded operations
-    pub shielded: ShieldedContext,
+    pub shielded: ShieldedContext<CLIShieldedUtils>,
     /// Native token's address
     pub native_token: Address,
 }
@@ -118,7 +118,7 @@ impl Context {
             wallet,
             global_config,
             config,
-            shielded: ShieldedContext::new(chain_dir),
+            shielded: CLIShieldedUtils::new(chain_dir),
             native_token,
         })
     }
diff --git a/apps/src/lib/client/rpc.rs b/apps/src/lib/client/rpc.rs
index 3fb4d8eea8..667370413b 100644
--- a/apps/src/lib/client/rpc.rs
+++ b/apps/src/lib/client/rpc.rs
@@ -162,13 +162,14 @@ pub async fn query_tx_deltas(
     // Connect to the Tendermint server holding the transactions
     let client = HttpClient::new(ledger_address.clone()).unwrap();
     // Build up the context that will be queried for transactions
+    ctx.shielded.utils.ledger_address = Some(ledger_address.clone());
     let _ = ctx.shielded.load();
     let vks = ctx.wallet.get_viewing_keys();
     let fvks: Vec<_> = vks
         .values()
         .map(|fvk| ExtendedFullViewingKey::from(*fvk).fvk.vk)
         .collect();
-    ctx.shielded.fetch(&ledger_address, &[], &fvks).await;
+    ctx.shielded.fetch(&[], &fvks).await;
     // Save the update state so that future fetches can be short-circuited
     let _ = ctx.shielded.save();
     // Required for filtering out rejected transactions from Tendermint
@@ -282,8 +283,6 @@ pub async fn query_transfers(mut ctx: Context, args: args::QueryTransfers) {
         .values()
         .map(|fvk| (ExtendedFullViewingKey::from(*fvk).fvk.vk, fvk))
         .collect();
-    // Connect to the Tendermint server holding the transactions
-    let client = HttpClient::new(args.query.ledger_address.clone()).unwrap();
     // Now display historical shielded and transparent transactions
     for ((height, idx), (epoch, tfer_delta, tx_delta)) in transfers {
         // Check if this transfer pertains to the supplied owner
@@ -304,7 +303,6 @@ pub async fn query_transfers(mut ctx: Context, args: args::QueryTransfers) {
             let amt = ctx
                 .shielded
                 .compute_exchanged_amount(
-                    client.clone(),
                     amt,
                     epoch,
                     Conversions::new(),
@@ -312,7 +310,7 @@ pub async fn query_transfers(mut ctx: Context, args: args::QueryTransfers) {
                 .await
                 .0;
             let dec =
-                ctx.shielded.decode_amount(client.clone(), amt, epoch).await;
+                ctx.shielded.decode_amount(amt, epoch).await;
             shielded_accounts.insert(acc, dec);
         }
         // Check if this transfer pertains to the supplied token
@@ -554,9 +552,8 @@ pub async fn query_pinned_balance(ctx: &mut Context, args: args::QueryBalance) {
         .map(|fvk| ExtendedFullViewingKey::from(*fvk).fvk.vk)
         .collect();
     // Build up the context that will be queried for asset decodings
+    ctx.shielded.utils.ledger_address = Some(args.query.ledger_address.clone());
     let _ = ctx.shielded.load();
-    // Establish connection with which to do exchange rate queries
-    let client = HttpClient::new(args.query.ledger_address.clone()).unwrap();
     // Print the token balances by payment address
     for owner in owners {
         let mut balance = Err(PinnedBalanceError::InvalidViewingKey);
@@ -566,7 +563,6 @@ pub async fn query_pinned_balance(ctx: &mut Context, args: args::QueryBalance) {
             balance = ctx
                 .shielded
                 .compute_exchanged_pinned_balance(
-                    &args.query.ledger_address,
                     owner,
                     vk,
                 )
@@ -593,7 +589,6 @@ pub async fn query_pinned_balance(ctx: &mut Context, args: args::QueryBalance) {
             balance = ctx
                 .shielded
                 .compute_exchanged_pinned_balance(
-                    &args.query.ledger_address,
                     owner,
                     &vk,
                 )
@@ -637,7 +632,7 @@ pub async fn query_pinned_balance(ctx: &mut Context, args: args::QueryBalance) {
                 // Print balances by human-readable token names
                 let balance = ctx
                     .shielded
-                    .decode_amount(client.clone(), balance, epoch)
+                    .decode_amount(balance, epoch)
                     .await;
                 for (addr, value) in balance.components() {
                     let asset_value = token::Amount::from(*value as u64);
@@ -878,20 +873,17 @@ pub async fn query_shielded_balance(
         None => ctx.wallet.get_viewing_keys().values().copied().collect(),
     };
     // Build up the context that will be queried for balances
+    ctx.shielded.utils.ledger_address = Some(args.query.ledger_address.clone());
     let _ = ctx.shielded.load();
     let fvks: Vec<_> = viewing_keys
         .iter()
         .map(|fvk| ExtendedFullViewingKey::from(*fvk).fvk.vk)
         .collect();
-    ctx.shielded
-        .fetch(&args.query.ledger_address, &[], &fvks)
-        .await;
+    ctx.shielded.fetch(&[], &fvks).await;
     // Save the update state so that future fetches can be short-circuited
     let _ = ctx.shielded.save();
     // The epoch is required to identify timestamped tokens
     let epoch = query_epoch(args.query.clone()).await;
-    // Establish connection with which to do exchange rate queries
-    let client = HttpClient::new(args.query.ledger_address.clone()).unwrap();
     // Map addresses to token names
     let tokens = address::tokens();
     match (args.token, owner.is_some()) {
@@ -907,7 +899,6 @@ pub async fn query_shielded_balance(
             } else {
                 ctx.shielded
                     .compute_exchanged_balance(
-                        client.clone(),
                         &viewing_key,
                         epoch,
                     )
@@ -952,7 +943,6 @@ pub async fn query_shielded_balance(
                 } else {
                     ctx.shielded
                         .compute_exchanged_balance(
-                            client.clone(),
                             &viewing_key,
                             epoch,
                         )
@@ -974,7 +964,7 @@ pub async fn query_shielded_balance(
                 // Decode the asset type
                 let decoded = ctx
                     .shielded
-                    .decode_asset_type(client.clone(), asset_type)
+                    .decode_asset_type(asset_type)
                     .await;
                 match decoded {
                     Some((addr, asset_epoch)) if asset_epoch == epoch => {
@@ -1044,7 +1034,6 @@ pub async fn query_shielded_balance(
                 } else {
                     ctx.shielded
                         .compute_exchanged_balance(
-                            client.clone(),
                             &viewing_key,
                             epoch,
                         )
@@ -1079,14 +1068,13 @@ pub async fn query_shielded_balance(
                 // Print balances by human-readable token names
                 let decoded_balance = ctx
                     .shielded
-                    .decode_all_amounts(client.clone(), balance)
+                    .decode_all_amounts(balance)
                     .await;
                 print_decoded_balance_with_epoch(decoded_balance);
             } else {
                 balance = ctx
                     .shielded
                     .compute_exchanged_balance(
-                        client.clone(),
                         &viewing_key,
                         epoch,
                     )
@@ -1095,7 +1083,7 @@ pub async fn query_shielded_balance(
                 // Print balances by human-readable token names
                 let decoded_balance = ctx
                     .shielded
-                    .decode_amount(client.clone(), balance, epoch)
+                    .decode_amount(balance, epoch)
                     .await;
                 print_decoded_balance(decoded_balance);
             }
diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs
index b27fa1f52e..0f5b469d23 100644
--- a/apps/src/lib/client/tx.rs
+++ b/apps/src/lib/client/tx.rs
@@ -63,6 +63,7 @@ use rand_core::{CryptoRng, OsRng, RngCore};
 use rust_decimal::Decimal;
 use sha2::Digest;
 use tokio::time::{Duration, Instant};
+use async_trait::async_trait;
 
 use super::rpc;
 use crate::cli::context::WalletAddress;
@@ -387,6 +388,170 @@ pub async fn submit_init_validator(
     }
 }
 
+#[async_trait]
+pub trait ShieldedUtils : Sized + BorshDeserialize + BorshSerialize + Default + Clone {
+    async fn query_storage_value<T: Send>(
+        &self,
+        key: &storage::Key,
+    ) -> Option<T>
+    where
+        T: BorshDeserialize;
+
+    async fn query_epoch(&self) -> Epoch;
+
+    //fn download_parameters() -> Result<(), Error>;
+
+    fn local_tx_prover(&self) -> LocalTxProver;
+
+    fn load(self) -> std::io::Result<ShieldedContext<Self>>;
+
+    fn save(&self, ctx: &ShieldedContext<Self>) -> std::io::Result<()>;
+
+    async fn query_conversion(
+        &self,
+        asset_type: AssetType,
+    ) -> Option<(
+        Address,
+        Epoch,
+        masp_primitives::transaction::components::Amount,
+        MerklePath<Node>,
+    )>;
+}
+
+/// Shielded context file name
+const FILE_NAME: &str = "shielded.dat";
+const TMP_FILE_NAME: &str = "shielded.tmp";
+
+#[derive(Debug, BorshSerialize, BorshDeserialize, Clone)]
+pub struct CLIShieldedUtils {
+    #[borsh_skip]
+    context_dir: PathBuf,
+    #[borsh_skip]
+    pub ledger_address: Option<TendermintAddress>,
+}
+
+impl CLIShieldedUtils {
+    /// Initialize a shielded transaction context that identifies notes
+    /// decryptable by any viewing key in the given set
+    pub fn new(
+        context_dir: PathBuf,
+    ) -> ShieldedContext<Self> {
+        // Make sure that MASP parameters are downloaded to enable MASP
+        // transaction building and verification later on
+        let params_dir = masp::get_params_dir();
+        let spend_path = params_dir.join(masp::SPEND_NAME);
+        let convert_path = params_dir.join(masp::CONVERT_NAME);
+        let output_path = params_dir.join(masp::OUTPUT_NAME);
+        if !(spend_path.exists()
+            && convert_path.exists()
+            && output_path.exists())
+        {
+            println!("MASP parameters not present, downloading...");
+            masp_proofs::download_parameters()
+                .expect("MASP parameters not present or downloadable");
+            println!("MASP parameter download complete, resuming execution...");
+        }
+        // Finally initialize a shielded context with the supplied directory
+        let utils = Self { context_dir, ledger_address: None };
+        ShieldedContext { utils, ..Default::default() }
+    }
+}
+
+impl Default for CLIShieldedUtils {
+    fn default() -> Self {
+        Self { context_dir: PathBuf::from(FILE_NAME), ledger_address: None }
+    }
+}
+
+#[async_trait]
+impl ShieldedUtils for CLIShieldedUtils {
+    async fn query_storage_value<T: Send>(
+        &self,
+        key: &storage::Key,
+    ) -> Option<T>
+    where T: BorshDeserialize {
+        let client = HttpClient::new(self.ledger_address.clone().unwrap()).unwrap();
+        query_storage_value::<T>(&client, &key).await
+    }
+
+    async fn query_epoch(&self) -> Epoch {
+        rpc::query_epoch(args::Query {
+            ledger_address: self.ledger_address.clone().unwrap()
+        }).await
+    }
+
+    fn local_tx_prover(&self) -> LocalTxProver {
+        if let Ok(params_dir) = env::var(masp::ENV_VAR_MASP_PARAMS_DIR)
+        {
+            let params_dir = PathBuf::from(params_dir);
+            let spend_path = params_dir.join(masp::SPEND_NAME);
+            let convert_path = params_dir.join(masp::CONVERT_NAME);
+            let output_path = params_dir.join(masp::OUTPUT_NAME);
+            LocalTxProver::new(&spend_path, &output_path, &convert_path)
+        } else {
+            LocalTxProver::with_default_location()
+                .expect("unable to load MASP Parameters")
+        }
+    }
+
+    /// Try to load the last saved shielded context from the given context
+    /// directory. If this fails, then leave the current context unchanged.
+    fn load(self) -> std::io::Result<ShieldedContext<Self>> {
+        // Try to load shielded context from file
+        let mut ctx_file = File::open(self.context_dir.join(FILE_NAME))?;
+        let mut bytes = Vec::new();
+        ctx_file.read_to_end(&mut bytes)?;
+        let mut new_ctx = ShieldedContext::deserialize(&mut &bytes[..])?;
+        // Associate the originating context directory with the
+        // shielded context under construction
+        new_ctx.utils = self;
+        Ok(new_ctx)
+    }
+
+    /// Save this shielded context into its associated context directory
+    fn save(&self, ctx: &ShieldedContext<Self>) -> std::io::Result<()> {
+        // TODO: use mktemp crate?
+        let tmp_path = self.context_dir.join(TMP_FILE_NAME);
+        {
+            // First serialize the shielded context into a temporary file.
+            // Inability to create this file implies a simultaneuous write is in
+            // progress. In this case, immediately fail. This is unproblematic
+            // because the data intended to be stored can always be re-fetched
+            // from the blockchain.
+            let mut ctx_file = OpenOptions::new()
+                .write(true)
+                .create_new(true)
+                .open(tmp_path.clone())?;
+            let mut bytes = Vec::new();
+            ctx.serialize(&mut bytes)
+                .expect("cannot serialize shielded context");
+            ctx_file.write_all(&bytes[..])?;
+        }
+        // Atomically update the old shielded context file with new data.
+        // Atomicity is required to prevent other client instances from reading
+        // corrupt data.
+        std::fs::rename(tmp_path.clone(), self.context_dir.join(FILE_NAME))?;
+        // Finally, remove our temporary file to allow future saving of shielded
+        // contexts.
+        std::fs::remove_file(tmp_path)?;
+        Ok(())
+    }
+
+    /// Query a conversion.
+    async fn query_conversion(
+        &self,
+        asset_type: AssetType,
+    ) -> Option<(
+        Address,
+        Epoch,
+        masp_primitives::transaction::components::Amount,
+        MerklePath<Node>,
+    )> {
+        let client = HttpClient::new(self.ledger_address.clone().unwrap()).unwrap();
+        query_conversion(client, asset_type).await
+    }
+}
+
 /// Make a ViewingKey that can view notes encrypted by given ExtendedSpendingKey
 pub fn to_viewing_key(esk: &ExtendedSpendingKey) -> FullViewingKey {
     ExtendedFullViewingKey::from(esk).fvk
@@ -453,10 +618,10 @@ pub type TransactionDelta = HashMap<ViewingKey, Amount>;
 /// Represents the current state of the shielded pool from the perspective of
 /// the chosen viewing keys.
 #[derive(BorshSerialize, BorshDeserialize, Debug)]
-pub struct ShieldedContext {
+pub struct ShieldedContext<U: ShieldedUtils> {
     /// Location where this shielded context is saved
     #[borsh_skip]
-    context_dir: PathBuf,
+    pub utils: U,
     /// The last transaction index to be processed in this context
     last_txidx: u64,
     /// The commitment tree produced by scanning all transactions up to tx_pos
@@ -486,16 +651,12 @@ pub struct ShieldedContext {
     vk_map: HashMap<usize, ViewingKey>,
 }
 
-/// Shielded context file name
-const FILE_NAME: &str = "shielded.dat";
-const TMP_FILE_NAME: &str = "shielded.tmp";
-
 /// Default implementation to ease construction of TxContexts. Derive cannot be
 /// used here due to CommitmentTree not implementing Default.
-impl Default for ShieldedContext {
-    fn default() -> ShieldedContext {
-        ShieldedContext {
-            context_dir: PathBuf::from(FILE_NAME),
+impl<U: ShieldedUtils + Default> Default for ShieldedContext<U> {
+    fn default() -> ShieldedContext<U> {
+        ShieldedContext::<U> {
+            utils: U::default(),
             last_txidx: u64::default(),
             tree: CommitmentTree::empty(),
             pos_map: HashMap::default(),
@@ -512,55 +673,24 @@ impl Default for ShieldedContext {
     }
 }
 
-impl ShieldedContext {
+impl<U: ShieldedUtils> ShieldedContext<U> {
     /// Try to load the last saved shielded context from the given context
     /// directory. If this fails, then leave the current context unchanged.
     pub fn load(&mut self) -> std::io::Result<()> {
-        // Try to load shielded context from file
-        let mut ctx_file = File::open(self.context_dir.join(FILE_NAME))?;
-        let mut bytes = Vec::new();
-        ctx_file.read_to_end(&mut bytes)?;
-        let mut new_ctx = Self::deserialize(&mut &bytes[..])?;
-        // Associate the originating context directory with the
-        // shielded context under construction
-        new_ctx.context_dir = self.context_dir.clone();
+        let new_ctx = self.utils.clone().load()?;
         *self = new_ctx;
         Ok(())
     }
 
     /// Save this shielded context into its associated context directory
     pub fn save(&self) -> std::io::Result<()> {
-        // TODO: use mktemp crate?
-        let tmp_path = self.context_dir.join(TMP_FILE_NAME);
-        {
-            // First serialize the shielded context into a temporary file.
-            // Inability to create this file implies a simultaneuous write is in
-            // progress. In this case, immediately fail. This is unproblematic
-            // because the data intended to be stored can always be re-fetched
-            // from the blockchain.
-            let mut ctx_file = OpenOptions::new()
-                .write(true)
-                .create_new(true)
-                .open(tmp_path.clone())?;
-            let mut bytes = Vec::new();
-            self.serialize(&mut bytes)
-                .expect("cannot serialize shielded context");
-            ctx_file.write_all(&bytes[..])?;
-        }
-        // Atomically update the old shielded context file with new data.
-        // Atomicity is required to prevent other client instances from reading
-        // corrupt data.
-        std::fs::rename(tmp_path.clone(), self.context_dir.join(FILE_NAME))?;
-        // Finally, remove our temporary file to allow future saving of shielded
-        // contexts.
-        std::fs::remove_file(tmp_path)?;
-        Ok(())
+        self.utils.save(self)
     }
 
     /// Merge data from the given shielded context into the current shielded
     /// context. It must be the case that the two shielded contexts share the
     /// same last transaction ID and share identical commitment trees.
-    pub fn merge(&mut self, new_ctx: ShieldedContext) {
+    pub fn merge(&mut self, new_ctx: ShieldedContext<U>) {
         debug_assert_eq!(self.last_txidx, new_ctx.last_txidx);
         // Merge by simply extending maps. Identical keys should contain
         // identical values, so overwriting should not be problematic.
@@ -590,7 +720,6 @@ impl ShieldedContext {
     /// ShieldedContext
     pub async fn fetch(
         &mut self,
-        ledger_address: &TendermintAddress,
         sks: &[ExtendedSpendingKey],
         fvks: &[ViewingKey],
     ) {
@@ -615,10 +744,10 @@ impl ShieldedContext {
         let (txs, mut tx_iter);
         if !unknown_keys.is_empty() {
             // Load all transactions accepted until this point
-            txs = Self::fetch_shielded_transfers(ledger_address, 0).await;
+            txs = Self::fetch_shielded_transfers(&self.utils, 0).await;
             tx_iter = txs.iter();
             // Do this by constructing a shielding context only for unknown keys
-            let mut tx_ctx = ShieldedContext::new(self.context_dir.clone());
+            let mut tx_ctx = Self { utils: self.utils.clone(), ..Default::default() };
             for vk in unknown_keys {
                 tx_ctx.pos_map.entry(vk).or_insert_with(HashSet::new);
             }
@@ -636,7 +765,7 @@ impl ShieldedContext {
         } else {
             // Load only transactions accepted from last_txid until this point
             txs =
-                Self::fetch_shielded_transfers(ledger_address, self.last_txidx)
+                Self::fetch_shielded_transfers(&self.utils, self.last_txidx)
                     .await;
             tx_iter = txs.iter();
         }
@@ -647,41 +776,15 @@ impl ShieldedContext {
         }
     }
 
-    /// Initialize a shielded transaction context that identifies notes
-    /// decryptable by any viewing key in the given set
-    pub fn new(context_dir: PathBuf) -> ShieldedContext {
-        // Make sure that MASP parameters are downloaded to enable MASP
-        // transaction building and verification later on
-        let params_dir = masp::get_params_dir();
-        let spend_path = params_dir.join(masp::SPEND_NAME);
-        let convert_path = params_dir.join(masp::CONVERT_NAME);
-        let output_path = params_dir.join(masp::OUTPUT_NAME);
-        if !(spend_path.exists()
-            && convert_path.exists()
-            && output_path.exists())
-        {
-            println!("MASP parameters not present, downloading...");
-            masp_proofs::download_parameters()
-                .expect("MASP parameters not present or downloadable");
-            println!("MASP parameter download complete, resuming execution...");
-        }
-        // Finally initialize a shielded context with the supplied directory
-        Self {
-            context_dir,
-            ..Default::default()
-        }
-    }
-
     /// Obtain a chronologically-ordered list of all accepted shielded
     /// transactions from the ledger. The ledger conceptually stores
     /// transactions as a vector. More concretely, the HEAD_TX_KEY location
     /// stores the index of the last accepted transaction and each transaction
     /// is stored at a key derived from its index.
     pub async fn fetch_shielded_transfers(
-        ledger_address: &TendermintAddress,
+        utils: &U,
         last_txidx: u64,
     ) -> BTreeMap<(BlockHeight, TxIndex), (Epoch, Transfer)> {
-        let client = HttpClient::new(ledger_address.clone()).unwrap();
         // The address of the MASP account
         let masp_addr = masp();
         // Construct the key where last transaction pointer is stored
@@ -689,7 +792,7 @@ impl ShieldedContext {
             .push(&HEAD_TX_KEY.to_owned())
             .expect("Cannot obtain a storage key");
         // Query for the index of the last accepted transaction
-        let head_txidx = query_storage_value::<u64>(&client, &head_tx_key)
+        let head_txidx = utils.query_storage_value::<u64>(&head_tx_key)
             .await
             .unwrap_or(0);
         let mut shielded_txs = BTreeMap::new();
@@ -701,8 +804,7 @@ impl ShieldedContext {
                 .expect("Cannot obtain a storage key");
             // Obtain the current transaction
             let (tx_epoch, tx_height, tx_index, current_tx) =
-                query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
-                    &client,
+                utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
                     &current_tx_key,
                 )
                 .await
@@ -863,7 +965,6 @@ impl ShieldedContext {
     /// if it is found.
     pub async fn decode_asset_type(
         &mut self,
-        client: HttpClient,
         asset_type: AssetType,
     ) -> Option<(Address, Epoch)> {
         // Try to find the decoding in the cache
@@ -872,7 +973,7 @@ impl ShieldedContext {
         }
         // Query for the ID of the last accepted transaction
         let (addr, ep, _conv, _path): (Address, _, Amount, MerklePath<Node>) =
-            query_conversion(client, asset_type).await?;
+            self.utils.query_conversion(asset_type).await?;
         self.asset_types.insert(asset_type, (addr.clone(), ep));
         Some((addr, ep))
     }
@@ -881,7 +982,6 @@ impl ShieldedContext {
     /// type and cache it.
     async fn query_allowed_conversion<'a>(
         &'a mut self,
-        client: HttpClient,
         asset_type: AssetType,
         conversions: &'a mut Conversions,
     ) -> Option<&'a mut (AllowedConversion, MerklePath<Node>, i64)> {
@@ -890,7 +990,7 @@ impl ShieldedContext {
             Entry::Vacant(conv_entry) => {
                 // Query for the ID of the last accepted transaction
                 let (addr, ep, conv, path): (Address, _, _, _) =
-                    query_conversion(client, asset_type).await?;
+                    self.utils.query_conversion(asset_type).await?;
                 self.asset_types.insert(asset_type, (addr, ep));
                 // If the conversion is 0, then we just have a pure decoding
                 if conv == Amount::zero() {
@@ -908,7 +1008,6 @@ impl ShieldedContext {
     /// balance and hence we return None.
     pub async fn compute_exchanged_balance(
         &mut self,
-        client: HttpClient,
         vk: &ViewingKey,
         target_epoch: Epoch,
     ) -> Option<Amount> {
@@ -917,7 +1016,6 @@ impl ShieldedContext {
             // And then exchange balance into current asset types
             Some(
                 self.compute_exchanged_amount(
-                    client,
                     balance,
                     target_epoch,
                     HashMap::new(),
@@ -973,7 +1071,6 @@ impl ShieldedContext {
     /// terms of the latest asset types.
     pub async fn compute_exchanged_amount(
         &mut self,
-        client: HttpClient,
         mut input: Amount,
         target_epoch: Epoch,
         mut conversions: Conversions,
@@ -985,14 +1082,13 @@ impl ShieldedContext {
             input.components().next().map(cloned_pair)
         {
             let target_asset_type = self
-                .decode_asset_type(client.clone(), asset_type)
+                .decode_asset_type(asset_type)
                 .await
                 .map(|(addr, _epoch)| make_asset_type(target_epoch, &addr))
                 .unwrap_or(asset_type);
             let at_target_asset_type = asset_type == target_asset_type;
             if let (Some((conv, _wit, usage)), false) = (
                 self.query_allowed_conversion(
-                    client.clone(),
                     asset_type,
                     &mut conversions,
                 )
@@ -1015,7 +1111,6 @@ impl ShieldedContext {
                 );
             } else if let (Some((conv, _wit, usage)), false) = (
                 self.query_allowed_conversion(
-                    client.clone(),
                     target_asset_type,
                     &mut conversions,
                 )
@@ -1053,7 +1148,6 @@ impl ShieldedContext {
     /// achieve the total value.
     pub async fn collect_unspent_notes(
         &mut self,
-        ledger_address: TendermintAddress,
         vk: &ViewingKey,
         target: Amount,
         target_epoch: Epoch,
@@ -1063,7 +1157,6 @@ impl ShieldedContext {
         Conversions,
     ) {
         // Establish connection with which to do exchange rate queries
-        let client = HttpClient::new(ledger_address.clone()).unwrap();
         let mut conversions = HashMap::new();
         let mut val_acc = Amount::zero();
         let mut notes = Vec::new();
@@ -1087,7 +1180,6 @@ impl ShieldedContext {
                     .expect("received note has invalid value or asset type");
                 let (contr, proposed_convs) = self
                     .compute_exchanged_amount(
-                        client.clone(),
                         pre_contr,
                         target_epoch,
                         conversions.clone(),
@@ -1122,7 +1214,7 @@ impl ShieldedContext {
     /// the given payment address fails with
     /// `PinnedBalanceError::NoTransactionPinned`.
     pub async fn compute_pinned_balance(
-        ledger_address: &TendermintAddress,
+        utils: &U,
         owner: PaymentAddress,
         viewing_key: &ViewingKey,
     ) -> Result<(Amount, Epoch), PinnedBalanceError> {
@@ -1137,7 +1229,6 @@ impl ShieldedContext {
             Some(counter_owner) if counter_owner == owner.into() => {}
             _ => return Err(PinnedBalanceError::InvalidViewingKey),
         }
-        let client = HttpClient::new(ledger_address.clone()).unwrap();
         // The address of the MASP account
         let masp_addr = masp();
         // Construct the key for where the transaction ID would be stored
@@ -1145,7 +1236,7 @@ impl ShieldedContext {
             .push(&(PIN_KEY_PREFIX.to_owned() + &owner.hash()))
             .expect("Cannot obtain a storage key");
         // Obtain the transaction pointer at the key
-        let txidx = query_storage_value::<u64>(&client, &pin_key)
+        let txidx = utils.query_storage_value::<u64>(&pin_key)
             .await
             .ok_or(PinnedBalanceError::NoTransactionPinned)?;
         // Construct the key for where the pinned transaction is stored
@@ -1154,8 +1245,8 @@ impl ShieldedContext {
             .expect("Cannot obtain a storage key");
         // Obtain the pointed to transaction
         let (tx_epoch, _tx_height, _tx_index, tx) =
-            query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
-                &client, &tx_key,
+            utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
+                &tx_key,
             )
             .await
             .expect("Ill-formed epoch, transaction pair");
@@ -1196,19 +1287,16 @@ impl ShieldedContext {
     /// would have been displayed in the epoch of the transaction.
     pub async fn compute_exchanged_pinned_balance(
         &mut self,
-        ledger_address: &TendermintAddress,
         owner: PaymentAddress,
         viewing_key: &ViewingKey,
     ) -> Result<(Amount, Epoch), PinnedBalanceError> {
         // Obtain the balance that will be exchanged
         let (amt, ep) =
-            Self::compute_pinned_balance(ledger_address, owner, viewing_key)
+            Self::compute_pinned_balance(&self.utils, owner, viewing_key)
                 .await?;
-        // Establish connection with which to do exchange rate queries
-        let client = HttpClient::new(ledger_address.clone()).unwrap();
         // Finally, exchange the balance to the transaction's epoch
         Ok((
-            self.compute_exchanged_amount(client, amt, ep, HashMap::new())
+            self.compute_exchanged_amount(amt, ep, HashMap::new())
                 .await
                 .0,
             ep,
@@ -1220,7 +1308,6 @@ impl ShieldedContext {
     /// the given epoch are ignored.
     pub async fn decode_amount(
         &mut self,
-        client: HttpClient,
         amt: Amount,
         target_epoch: Epoch,
     ) -> Amount<Address> {
@@ -1228,7 +1315,7 @@ impl ShieldedContext {
         for (asset_type, val) in amt.components() {
             // Decode the asset type
             let decoded =
-                self.decode_asset_type(client.clone(), *asset_type).await;
+                self.decode_asset_type(*asset_type).await;
             // Only assets with the target timestamp count
             match decoded {
                 Some((addr, epoch)) if epoch == target_epoch => {
@@ -1244,14 +1331,13 @@ impl ShieldedContext {
     /// Addresses that they decode to.
     pub async fn decode_all_amounts(
         &mut self,
-        client: HttpClient,
         amt: Amount,
     ) -> Amount<(Address, Epoch)> {
         let mut res = Amount::zero();
         for (asset_type, val) in amt.components() {
             // Decode the asset type
             let decoded =
-                self.decode_asset_type(client.clone(), *asset_type).await;
+                self.decode_asset_type(*asset_type).await;
             // Only assets with the target timestamp count
             if let Some((addr, epoch)) = decoded {
                 res += &Amount::from_pair((addr, epoch), *val).unwrap()
@@ -1269,7 +1355,6 @@ impl ShieldedContext {
     /// Transfer object.
     async fn gen_shielded_transfer(
         &mut self,
-        ledger_address: &TendermintAddress,
         source: TransferSource,
         target: TransferTarget,
         args_amount: token::Amount,
@@ -1290,14 +1375,12 @@ impl ShieldedContext {
         let spending_keys: Vec<_> = spending_key.into_iter().collect();
         // Load the current shielded context given the spending key we possess
         let _ = self.load();
-        self.fetch(&ledger_address, &spending_keys, &[])
+        self.fetch(&spending_keys, &[])
             .await;
         // Save the update state so that future fetches can be short-circuited
         let _ = self.save();
         // Determine epoch in which to submit potential shielded transaction
-        let epoch = rpc::query_epoch(args::Query {
-            ledger_address: ledger_address.clone()
-        }).await;
+        let epoch = self.utils.query_epoch().await;
         // Context required for storing which notes are in the source's possesion
         let consensus_branch_id = BranchId::Sapling;
         let amt: u64 = args_amount.into();
@@ -1324,7 +1407,6 @@ impl ShieldedContext {
             // Locate unspent notes that can help us meet the transaction amount
             let (_, unspent_notes, used_convs) = self
                 .collect_unspent_notes(
-                    ledger_address.clone(),
                     &to_viewing_key(&sk).vk,
                     required_amt,
                     epoch,
@@ -1399,24 +1481,12 @@ impl ShieldedContext {
                 amt,
             )?;
         }
-        let prover = if let Ok(params_dir) = env::var(masp::ENV_VAR_MASP_PARAMS_DIR)
-        {
-            let params_dir = PathBuf::from(params_dir);
-            let spend_path = params_dir.join(masp::SPEND_NAME);
-            let convert_path = params_dir.join(masp::CONVERT_NAME);
-            let output_path = params_dir.join(masp::OUTPUT_NAME);
-            LocalTxProver::new(&spend_path, &output_path, &convert_path)
-        } else {
-            LocalTxProver::with_default_location()
-                .expect("unable to load MASP Parameters")
-        };
+        let prover = self.utils.local_tx_prover();
         // Build and return the constructed transaction
         let mut tx = builder.build(consensus_branch_id, &prover);
 
         if epoch_sensitive {
-            let new_epoch = rpc::query_epoch(args::Query {
-                ledger_address: ledger_address.clone()
-            }).await;
+            let new_epoch = self.utils.query_epoch().await;
 
             // If epoch has changed, recalculate shielded outputs to match new epoch
             if new_epoch != epoch {
@@ -1611,9 +1681,11 @@ pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
         _ => None,
     };
 
+    // Update the context with the current ledger address
+    ctx.shielded.utils.ledger_address = Some(args.tx.ledger_address.clone());
+
     let stx_result =
         ctx.shielded.gen_shielded_transfer(
-            &args.tx.ledger_address,
             transfer_source,
             transfer_target,
             args.amount,

From dcbc058747a6ae9875658f4fbb9465001096dc47 Mon Sep 17 00:00:00 2001
From: Murisi Tarusenga <murisit@gmail.com>
Date: Wed, 30 Nov 2022 15:42:36 +0200
Subject: [PATCH 9/9] Moved ShieldedContext into shared crate.

---
 Cargo.lock                       |    2 +
 apps/Cargo.toml                  |    2 +-
 apps/src/bin/anoma-wallet/cli.rs |    2 +-
 apps/src/lib/cli/context.rs      |    3 +-
 apps/src/lib/client/rpc.rs       |    2 +-
 apps/src/lib/client/tx.rs        | 1081 +----------------------------
 apps/src/lib/client/types.rs     |    1 -
 shared/Cargo.toml                |    7 +
 shared/src/ledger/masp.rs        | 1087 ++++++++++++++++++++++++++++++
 tests/src/e2e/ledger_tests.rs    |    8 +-
 10 files changed, 1116 insertions(+), 1079 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 79a4079e7b..9a2cd54a75 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3655,6 +3655,8 @@ dependencies = [
  "proptest",
  "prost",
  "pwasm-utils",
+ "rand 0.8.5",
+ "rand_core 0.6.4",
  "rayon",
  "rust_decimal",
  "serde_json",
diff --git a/apps/Cargo.toml b/apps/Cargo.toml
index 896168c63f..edd67efb1d 100644
--- a/apps/Cargo.toml
+++ b/apps/Cargo.toml
@@ -67,7 +67,7 @@ abciplus = [
 ]
 
 [dependencies]
-namada = {path = "../shared", default-features = false, features = ["wasm-runtime", "ferveo-tpke"]}
+namada = {path = "../shared", default-features = false, features = ["wasm-runtime", "ferveo-tpke", "masp-tx-gen"]}
 ark-serialize = "0.3.0"
 ark-std = "0.3.0"
 # branch = "bat/arse-merkle-tree"
diff --git a/apps/src/bin/anoma-wallet/cli.rs b/apps/src/bin/anoma-wallet/cli.rs
index 62857e76f3..b7d4d78a02 100644
--- a/apps/src/bin/anoma-wallet/cli.rs
+++ b/apps/src/bin/anoma-wallet/cli.rs
@@ -11,7 +11,7 @@ use namada::types::key::*;
 use namada::types::masp::{MaspValue, PaymentAddress};
 use namada_apps::cli;
 use namada_apps::cli::{args, cmds, Context};
-use namada_apps::client::tx::find_valid_diversifier;
+use namada::ledger::masp::find_valid_diversifier;
 use namada_apps::wallet::{DecryptionError, FindKeyError};
 use rand_core::OsRng;
 
diff --git a/apps/src/lib/cli/context.rs b/apps/src/lib/cli/context.rs
index 4461cd4319..2a05e6847c 100644
--- a/apps/src/lib/cli/context.rs
+++ b/apps/src/lib/cli/context.rs
@@ -10,9 +10,10 @@ use namada::types::address::Address;
 use namada::types::chain::ChainId;
 use namada::types::key::*;
 use namada::types::masp::*;
+use namada::ledger::masp::ShieldedContext;
 
 use super::args;
-use crate::client::tx::{ShieldedContext, CLIShieldedUtils};
+use crate::client::tx::CLIShieldedUtils;
 use crate::config::genesis::genesis_config;
 use crate::config::global::GlobalConfig;
 use crate::config::{self, Config};
diff --git a/apps/src/lib/client/rpc.rs b/apps/src/lib/client/rpc.rs
index 667370413b..7ed6760890 100644
--- a/apps/src/lib/client/rpc.rs
+++ b/apps/src/lib/client/rpc.rs
@@ -56,7 +56,7 @@ use tokio::time::{Duration, Instant};
 
 use crate::cli::{self, args, Context};
 use crate::client::tendermint_rpc_types::TxResponse;
-use crate::client::tx::{
+use namada::ledger::masp::{
     Conversions, PinnedBalanceError, TransactionDelta, TransferDelta,
 };
 use crate::facade::tendermint::merkle::proof::Proof;
diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs
index 0f5b469d23..94ffe42311 100644
--- a/apps/src/lib/client/tx.rs
+++ b/apps/src/lib/client/tx.rs
@@ -1,11 +1,9 @@
 use std::borrow::Cow;
-use std::collections::hash_map::Entry;
-use std::collections::{BTreeMap, HashMap, HashSet};
+use std::collections::HashSet;
 use std::env;
 use std::fmt::Debug;
 use std::fs::{File, OpenOptions};
 use std::io::{Read, Write};
-use std::ops::Deref;
 use std::path::PathBuf;
 
 use async_std::io::prelude::WriteExt;
@@ -13,22 +11,9 @@ use async_std::io::{self};
 use borsh::{BorshDeserialize, BorshSerialize};
 use itertools::Either::*;
 use masp_primitives::asset_type::AssetType;
-use masp_primitives::consensus::{BranchId, TestNetwork};
-use masp_primitives::convert::AllowedConversion;
-use masp_primitives::ff::PrimeField;
-use masp_primitives::group::cofactor::CofactorGroup;
-use masp_primitives::keys::FullViewingKey;
-use masp_primitives::legacy::TransparentAddress;
-use masp_primitives::merkle_tree::{
-    CommitmentTree, IncrementalWitness, MerklePath,
-};
-use masp_primitives::note_encryption::*;
-use masp_primitives::primitives::{Diversifier, Note, ViewingKey};
+use masp_primitives::merkle_tree::MerklePath;
 use masp_primitives::sapling::Node;
-use masp_primitives::transaction::builder::{self, secp256k1, *};
-use masp_primitives::transaction::components::{Amount, OutPoint, TxOut};
-use masp_primitives::transaction::Transaction;
-use masp_primitives::zip32::{ExtendedFullViewingKey, ExtendedSpendingKey};
+use masp_primitives::transaction::builder;
 use masp_proofs::prover::LocalTxProver;
 use namada::ibc::applications::ics20_fungible_token_transfer::msgs::transfer::MsgTransfer;
 use namada::ibc::signer::Signer;
@@ -45,23 +30,18 @@ use namada::types::governance::{
     OfflineProposal, OfflineVote, Proposal, ProposalVote,
 };
 use namada::types::key::*;
-use namada::types::masp::{PaymentAddress, TransferSource, TransferTarget};
+use namada::types::masp::TransferTarget;
 use namada::types::storage::{
-    BlockHeight, Epoch, Key, KeySeg, TxIndex, RESERVED_ADDRESS_PREFIX,
+    Epoch, RESERVED_ADDRESS_PREFIX,
 };
 use namada::types::time::DateTimeUtc;
-use namada::types::token::{
-    Transfer, HEAD_TX_KEY, PIN_KEY_PREFIX, TX_KEY_PREFIX,
-};
 use namada::types::transaction::governance::{
     InitProposalData, VoteProposalData,
 };
 use namada::types::transaction::{pos, InitAccount, InitValidator, UpdateVp};
 use namada::types::{storage, token};
 use namada::{ledger, vm};
-use rand_core::{CryptoRng, OsRng, RngCore};
 use rust_decimal::Decimal;
-use sha2::Digest;
 use tokio::time::{Duration, Instant};
 use async_trait::async_trait;
 
@@ -388,36 +368,6 @@ pub async fn submit_init_validator(
     }
 }
 
-#[async_trait]
-pub trait ShieldedUtils : Sized + BorshDeserialize + BorshSerialize + Default + Clone {
-    async fn query_storage_value<T: Send>(
-        &self,
-        key: &storage::Key,
-    ) -> Option<T>
-    where
-        T: BorshDeserialize;
-
-    async fn query_epoch(&self) -> Epoch;
-
-    //fn download_parameters() -> Result<(), Error>;
-
-    fn local_tx_prover(&self) -> LocalTxProver;
-
-    fn load(self) -> std::io::Result<ShieldedContext<Self>>;
-
-    fn save(&self, ctx: &ShieldedContext<Self>) -> std::io::Result<()>;
-
-    async fn query_conversion(
-        &self,
-        asset_type: AssetType,
-    ) -> Option<(
-        Address,
-        Epoch,
-        masp_primitives::transaction::components::Amount,
-        MerklePath<Node>,
-    )>;
-}
-
 /// Shielded context file name
 const FILE_NAME: &str = "shielded.dat";
 const TMP_FILE_NAME: &str = "shielded.tmp";
@@ -435,7 +385,7 @@ impl CLIShieldedUtils {
     /// decryptable by any viewing key in the given set
     pub fn new(
         context_dir: PathBuf,
-    ) -> ShieldedContext<Self> {
+    ) -> masp::ShieldedContext<Self> {
         // Make sure that MASP parameters are downloaded to enable MASP
         // transaction building and verification later on
         let params_dir = masp::get_params_dir();
@@ -453,7 +403,7 @@ impl CLIShieldedUtils {
         }
         // Finally initialize a shielded context with the supplied directory
         let utils = Self { context_dir, ledger_address: None };
-        ShieldedContext { utils, ..Default::default() }
+        masp::ShieldedContext { utils, ..Default::default() }
     }
 }
 
@@ -464,7 +414,7 @@ impl Default for CLIShieldedUtils {
 }
 
 #[async_trait]
-impl ShieldedUtils for CLIShieldedUtils {
+impl masp::ShieldedUtils for CLIShieldedUtils {
     async fn query_storage_value<T: Send>(
         &self,
         key: &storage::Key,
@@ -496,12 +446,12 @@ impl ShieldedUtils for CLIShieldedUtils {
 
     /// Try to load the last saved shielded context from the given context
     /// directory. If this fails, then leave the current context unchanged.
-    fn load(self) -> std::io::Result<ShieldedContext<Self>> {
+    fn load(self) -> std::io::Result<masp::ShieldedContext<Self>> {
         // Try to load shielded context from file
         let mut ctx_file = File::open(self.context_dir.join(FILE_NAME))?;
         let mut bytes = Vec::new();
         ctx_file.read_to_end(&mut bytes)?;
-        let mut new_ctx = ShieldedContext::deserialize(&mut &bytes[..])?;
+        let mut new_ctx = masp::ShieldedContext::deserialize(&mut &bytes[..])?;
         // Associate the originating context directory with the
         // shielded context under construction
         new_ctx.utils = self;
@@ -509,7 +459,7 @@ impl ShieldedUtils for CLIShieldedUtils {
     }
 
     /// Save this shielded context into its associated context directory
-    fn save(&self, ctx: &ShieldedContext<Self>) -> std::io::Result<()> {
+    fn save(&self, ctx: &masp::ShieldedContext<Self>) -> std::io::Result<()> {
         // TODO: use mktemp crate?
         let tmp_path = self.context_dir.join(TMP_FILE_NAME);
         {
@@ -552,1016 +502,7 @@ impl ShieldedUtils for CLIShieldedUtils {
     }
 }
 
-/// Make a ViewingKey that can view notes encrypted by given ExtendedSpendingKey
-pub fn to_viewing_key(esk: &ExtendedSpendingKey) -> FullViewingKey {
-    ExtendedFullViewingKey::from(esk).fvk
-}
-
-/// Generate a valid diversifier, i.e. one that has a diversified base. Return
-/// also this diversified base.
-pub fn find_valid_diversifier<R: RngCore + CryptoRng>(
-    rng: &mut R,
-) -> (Diversifier, masp_primitives::jubjub::SubgroupPoint) {
-    let mut diversifier;
-    let g_d;
-    // Keep generating random diversifiers until one has a diversified base
-    loop {
-        let mut d = [0; 11];
-        rng.fill_bytes(&mut d);
-        diversifier = Diversifier(d);
-        if let Some(val) = diversifier.g_d() {
-            g_d = val;
-            break;
-        }
-    }
-    (diversifier, g_d)
-}
-
-/// Determine if using the current note would actually bring us closer to our
-/// target
-pub fn is_amount_required(src: Amount, dest: Amount, delta: Amount) -> bool {
-    if delta > Amount::zero() {
-        let gap = dest - src;
-        for (asset_type, value) in gap.components() {
-            if *value > 0 && delta[asset_type] > 0 {
-                return true;
-            }
-        }
-    }
-    false
-}
-
-/// An extension of Option's cloned method for pair types
-fn cloned_pair<T: Clone, U: Clone>((a, b): (&T, &U)) -> (T, U) {
-    (a.clone(), b.clone())
-}
-
-/// Errors that can occur when trying to retrieve pinned transaction
-#[derive(PartialEq, Eq)]
-pub enum PinnedBalanceError {
-    /// No transaction has yet been pinned to the given payment address
-    NoTransactionPinned,
-    /// The supplied viewing key does not recognize payments to given address
-    InvalidViewingKey,
-}
-
-/// Represents the amount used of different conversions
-pub type Conversions =
-    HashMap<AssetType, (AllowedConversion, MerklePath<Node>, i64)>;
-
-/// Represents the changes that were made to a list of transparent accounts
-pub type TransferDelta = HashMap<Address, Amount<Address>>;
-
-/// Represents the changes that were made to a list of shielded accounts
-pub type TransactionDelta = HashMap<ViewingKey, Amount>;
-
-/// Represents the current state of the shielded pool from the perspective of
-/// the chosen viewing keys.
-#[derive(BorshSerialize, BorshDeserialize, Debug)]
-pub struct ShieldedContext<U: ShieldedUtils> {
-    /// Location where this shielded context is saved
-    #[borsh_skip]
-    pub utils: U,
-    /// The last transaction index to be processed in this context
-    last_txidx: u64,
-    /// The commitment tree produced by scanning all transactions up to tx_pos
-    tree: CommitmentTree<Node>,
-    /// Maps viewing keys to applicable note positions
-    pos_map: HashMap<ViewingKey, HashSet<usize>>,
-    /// Maps a nullifier to the note position to which it applies
-    nf_map: HashMap<[u8; 32], usize>,
-    /// Maps note positions to their corresponding notes
-    note_map: HashMap<usize, Note>,
-    /// Maps note positions to their corresponding memos
-    memo_map: HashMap<usize, Memo>,
-    /// Maps note positions to the diversifier of their payment address
-    div_map: HashMap<usize, Diversifier>,
-    /// Maps note positions to their witness (used to make merkle paths)
-    witness_map: HashMap<usize, IncrementalWitness<Node>>,
-    /// Tracks what each transaction does to various account balances
-    delta_map: BTreeMap<
-        (BlockHeight, TxIndex),
-        (Epoch, TransferDelta, TransactionDelta),
-    >,
-    /// The set of note positions that have been spent
-    spents: HashSet<usize>,
-    /// Maps asset types to their decodings
-    asset_types: HashMap<AssetType, (Address, Epoch)>,
-    /// Maps note positions to their corresponding viewing keys
-    vk_map: HashMap<usize, ViewingKey>,
-}
-
-/// Default implementation to ease construction of TxContexts. Derive cannot be
-/// used here due to CommitmentTree not implementing Default.
-impl<U: ShieldedUtils + Default> Default for ShieldedContext<U> {
-    fn default() -> ShieldedContext<U> {
-        ShieldedContext::<U> {
-            utils: U::default(),
-            last_txidx: u64::default(),
-            tree: CommitmentTree::empty(),
-            pos_map: HashMap::default(),
-            nf_map: HashMap::default(),
-            note_map: HashMap::default(),
-            memo_map: HashMap::default(),
-            div_map: HashMap::default(),
-            witness_map: HashMap::default(),
-            spents: HashSet::default(),
-            delta_map: BTreeMap::default(),
-            asset_types: HashMap::default(),
-            vk_map: HashMap::default(),
-        }
-    }
-}
-
-impl<U: ShieldedUtils> ShieldedContext<U> {
-    /// Try to load the last saved shielded context from the given context
-    /// directory. If this fails, then leave the current context unchanged.
-    pub fn load(&mut self) -> std::io::Result<()> {
-        let new_ctx = self.utils.clone().load()?;
-        *self = new_ctx;
-        Ok(())
-    }
-
-    /// Save this shielded context into its associated context directory
-    pub fn save(&self) -> std::io::Result<()> {
-        self.utils.save(self)
-    }
-
-    /// Merge data from the given shielded context into the current shielded
-    /// context. It must be the case that the two shielded contexts share the
-    /// same last transaction ID and share identical commitment trees.
-    pub fn merge(&mut self, new_ctx: ShieldedContext<U>) {
-        debug_assert_eq!(self.last_txidx, new_ctx.last_txidx);
-        // Merge by simply extending maps. Identical keys should contain
-        // identical values, so overwriting should not be problematic.
-        self.pos_map.extend(new_ctx.pos_map);
-        self.nf_map.extend(new_ctx.nf_map);
-        self.note_map.extend(new_ctx.note_map);
-        self.memo_map.extend(new_ctx.memo_map);
-        self.div_map.extend(new_ctx.div_map);
-        self.witness_map.extend(new_ctx.witness_map);
-        self.spents.extend(new_ctx.spents);
-        self.asset_types.extend(new_ctx.asset_types);
-        self.vk_map.extend(new_ctx.vk_map);
-        // The deltas are the exception because different keys can reveal
-        // different parts of the same transaction. Hence each delta needs to be
-        // merged separately.
-        for ((height, idx), (ep, ntfer_delta, ntx_delta)) in new_ctx.delta_map {
-            let (_ep, tfer_delta, tx_delta) = self
-                .delta_map
-                .entry((height, idx))
-                .or_insert((ep, TransferDelta::new(), TransactionDelta::new()));
-            tfer_delta.extend(ntfer_delta);
-            tx_delta.extend(ntx_delta);
-        }
-    }
-
-    /// Fetch the current state of the multi-asset shielded pool into a
-    /// ShieldedContext
-    pub async fn fetch(
-        &mut self,
-        sks: &[ExtendedSpendingKey],
-        fvks: &[ViewingKey],
-    ) {
-        // First determine which of the keys requested to be fetched are new.
-        // Necessary because old transactions will need to be scanned for new
-        // keys.
-        let mut unknown_keys = Vec::new();
-        for esk in sks {
-            let vk = to_viewing_key(esk).vk;
-            if !self.pos_map.contains_key(&vk) {
-                unknown_keys.push(vk);
-            }
-        }
-        for vk in fvks {
-            if !self.pos_map.contains_key(vk) {
-                unknown_keys.push(*vk);
-            }
-        }
-
-        // If unknown keys are being used, we need to scan older transactions
-        // for any unspent notes
-        let (txs, mut tx_iter);
-        if !unknown_keys.is_empty() {
-            // Load all transactions accepted until this point
-            txs = Self::fetch_shielded_transfers(&self.utils, 0).await;
-            tx_iter = txs.iter();
-            // Do this by constructing a shielding context only for unknown keys
-            let mut tx_ctx = Self { utils: self.utils.clone(), ..Default::default() };
-            for vk in unknown_keys {
-                tx_ctx.pos_map.entry(vk).or_insert_with(HashSet::new);
-            }
-            // Update this unknown shielded context until it is level with self
-            while tx_ctx.last_txidx != self.last_txidx {
-                if let Some(((height, idx), (epoch, tx))) = tx_iter.next() {
-                    tx_ctx.scan_tx(*height, *idx, *epoch, tx);
-                } else {
-                    break;
-                }
-            }
-            // Merge the context data originating from the unknown keys into the
-            // current context
-            self.merge(tx_ctx);
-        } else {
-            // Load only transactions accepted from last_txid until this point
-            txs =
-                Self::fetch_shielded_transfers(&self.utils, self.last_txidx)
-                    .await;
-            tx_iter = txs.iter();
-        }
-        // Now that we possess the unspent notes corresponding to both old and
-        // new keys up until tx_pos, proceed to scan the new transactions.
-        for ((height, idx), (epoch, tx)) in &mut tx_iter {
-            self.scan_tx(*height, *idx, *epoch, tx);
-        }
-    }
-
-    /// Obtain a chronologically-ordered list of all accepted shielded
-    /// transactions from the ledger. The ledger conceptually stores
-    /// transactions as a vector. More concretely, the HEAD_TX_KEY location
-    /// stores the index of the last accepted transaction and each transaction
-    /// is stored at a key derived from its index.
-    pub async fn fetch_shielded_transfers(
-        utils: &U,
-        last_txidx: u64,
-    ) -> BTreeMap<(BlockHeight, TxIndex), (Epoch, Transfer)> {
-        // The address of the MASP account
-        let masp_addr = masp();
-        // Construct the key where last transaction pointer is stored
-        let head_tx_key = Key::from(masp_addr.to_db_key())
-            .push(&HEAD_TX_KEY.to_owned())
-            .expect("Cannot obtain a storage key");
-        // Query for the index of the last accepted transaction
-        let head_txidx = utils.query_storage_value::<u64>(&head_tx_key)
-            .await
-            .unwrap_or(0);
-        let mut shielded_txs = BTreeMap::new();
-        // Fetch all the transactions we do not have yet
-        for i in last_txidx..head_txidx {
-            // Construct the key for where the current transaction is stored
-            let current_tx_key = Key::from(masp_addr.to_db_key())
-                .push(&(TX_KEY_PREFIX.to_owned() + &i.to_string()))
-                .expect("Cannot obtain a storage key");
-            // Obtain the current transaction
-            let (tx_epoch, tx_height, tx_index, current_tx) =
-                utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
-                    &current_tx_key,
-                )
-                .await
-                .unwrap();
-            // Collect the current transaction
-            shielded_txs.insert((tx_height, tx_index), (tx_epoch, current_tx));
-        }
-        shielded_txs
-    }
-
-    /// Applies the given transaction to the supplied context. More precisely,
-    /// the shielded transaction's outputs are added to the commitment tree.
-    /// Newly discovered notes are associated to the supplied viewing keys. Note
-    /// nullifiers are mapped to their originating notes. Note positions are
-    /// associated to notes, memos, and diversifiers. And the set of notes that
-    /// we have spent are updated. The witness map is maintained to make it
-    /// easier to construct note merkle paths in other code. See
-    /// https://zips.z.cash/protocol/protocol.pdf#scan
-    pub fn scan_tx(
-        &mut self,
-        height: BlockHeight,
-        index: TxIndex,
-        epoch: Epoch,
-        tx: &Transfer,
-    ) {
-        // Ignore purely transparent transactions
-        let shielded = if let Some(shielded) = &tx.shielded {
-            shielded
-        } else {
-            return;
-        };
-        // For tracking the account changes caused by this Transaction
-        let mut transaction_delta = TransactionDelta::new();
-        // Listen for notes sent to our viewing keys
-        for so in &shielded.shielded_outputs {
-            // Create merkle tree leaf node from note commitment
-            let node = Node::new(so.cmu.to_repr());
-            // Update each merkle tree in the witness map with the latest
-            // addition
-            for (_, witness) in self.witness_map.iter_mut() {
-                witness.append(node).expect("note commitment tree is full");
-            }
-            let note_pos = self.tree.size();
-            self.tree
-                .append(node)
-                .expect("note commitment tree is full");
-            // Finally, make it easier to construct merkle paths to this new
-            // note
-            let witness = IncrementalWitness::<Node>::from_tree(&self.tree);
-            self.witness_map.insert(note_pos, witness);
-            // Let's try to see if any of our viewing keys can decrypt latest
-            // note
-            for (vk, notes) in self.pos_map.iter_mut() {
-                let decres = try_sapling_note_decryption::<TestNetwork>(
-                    0,
-                    &vk.ivk().0,
-                    &so.ephemeral_key.into_subgroup().unwrap(),
-                    &so.cmu,
-                    &so.enc_ciphertext,
-                );
-                // So this current viewing key does decrypt this current note...
-                if let Some((note, pa, memo)) = decres {
-                    // Add this note to list of notes decrypted by this viewing
-                    // key
-                    notes.insert(note_pos);
-                    // Compute the nullifier now to quickly recognize when spent
-                    let nf = note.nf(vk, note_pos.try_into().unwrap());
-                    self.note_map.insert(note_pos, note);
-                    self.memo_map.insert(note_pos, memo);
-                    // The payment address' diversifier is required to spend
-                    // note
-                    self.div_map.insert(note_pos, *pa.diversifier());
-                    self.nf_map.insert(nf.0, note_pos);
-                    // Note the account changes
-                    let balance = transaction_delta
-                        .entry(*vk)
-                        .or_insert_with(Amount::zero);
-                    *balance +=
-                        Amount::from_nonnegative(note.asset_type, note.value)
-                            .expect(
-                                "found note with invalid value or asset type",
-                            );
-                    self.vk_map.insert(note_pos, *vk);
-                    break;
-                }
-            }
-        }
-        // Cancel out those of our notes that have been spent
-        for ss in &shielded.shielded_spends {
-            // If the shielded spend's nullifier is in our map, then target note
-            // is rendered unusable
-            if let Some(note_pos) = self.nf_map.get(&ss.nullifier) {
-                self.spents.insert(*note_pos);
-                // Note the account changes
-                let balance = transaction_delta
-                    .entry(self.vk_map[note_pos])
-                    .or_insert_with(Amount::zero);
-                let note = self.note_map[note_pos];
-                *balance -=
-                    Amount::from_nonnegative(note.asset_type, note.value)
-                        .expect("found note with invalid value or asset type");
-            }
-        }
-        // Record the changes to the transparent accounts
-        let transparent_delta =
-            Amount::from_nonnegative(tx.token.clone(), u64::from(tx.amount))
-                .expect("invalid value for amount");
-        let mut transfer_delta = TransferDelta::new();
-        transfer_delta
-            .insert(tx.source.clone(), Amount::zero() - &transparent_delta);
-        transfer_delta.insert(tx.target.clone(), transparent_delta);
-        self.delta_map.insert(
-            (height, index),
-            (epoch, transfer_delta, transaction_delta),
-        );
-        self.last_txidx += 1;
-    }
-
-    /// Summarize the effects on shielded and transparent accounts of each
-    /// Transfer in this context
-    pub fn get_tx_deltas(
-        &self,
-    ) -> &BTreeMap<
-        (BlockHeight, TxIndex),
-        (Epoch, TransferDelta, TransactionDelta),
-    > {
-        &self.delta_map
-    }
-
-    /// Compute the total unspent notes associated with the viewing key in the
-    /// context. If the key is not in the context, then we do not know the
-    /// balance and hence we return None.
-    pub fn compute_shielded_balance(&self, vk: &ViewingKey) -> Option<Amount> {
-        // Cannot query the balance of a key that's not in the map
-        if !self.pos_map.contains_key(vk) {
-            return None;
-        }
-        let mut val_acc = Amount::zero();
-        // Retrieve the notes that can be spent by this key
-        if let Some(avail_notes) = self.pos_map.get(vk) {
-            for note_idx in avail_notes {
-                // Spent notes cannot contribute a new transaction's pool
-                if self.spents.contains(note_idx) {
-                    continue;
-                }
-                // Get note associated with this ID
-                let note = self.note_map.get(note_idx).unwrap();
-                // Finally add value to multi-asset accumulator
-                val_acc +=
-                    Amount::from_nonnegative(note.asset_type, note.value)
-                        .expect("found note with invalid value or asset type");
-            }
-        }
-        Some(val_acc)
-    }
-
-    /// Query the ledger for the decoding of the given asset type and cache it
-    /// if it is found.
-    pub async fn decode_asset_type(
-        &mut self,
-        asset_type: AssetType,
-    ) -> Option<(Address, Epoch)> {
-        // Try to find the decoding in the cache
-        if let decoded @ Some(_) = self.asset_types.get(&asset_type) {
-            return decoded.cloned();
-        }
-        // Query for the ID of the last accepted transaction
-        let (addr, ep, _conv, _path): (Address, _, Amount, MerklePath<Node>) =
-            self.utils.query_conversion(asset_type).await?;
-        self.asset_types.insert(asset_type, (addr.clone(), ep));
-        Some((addr, ep))
-    }
-
-    /// Query the ledger for the conversion that is allowed for the given asset
-    /// type and cache it.
-    async fn query_allowed_conversion<'a>(
-        &'a mut self,
-        asset_type: AssetType,
-        conversions: &'a mut Conversions,
-    ) -> Option<&'a mut (AllowedConversion, MerklePath<Node>, i64)> {
-        match conversions.entry(asset_type) {
-            Entry::Occupied(conv_entry) => Some(conv_entry.into_mut()),
-            Entry::Vacant(conv_entry) => {
-                // Query for the ID of the last accepted transaction
-                let (addr, ep, conv, path): (Address, _, _, _) =
-                    self.utils.query_conversion(asset_type).await?;
-                self.asset_types.insert(asset_type, (addr, ep));
-                // If the conversion is 0, then we just have a pure decoding
-                if conv == Amount::zero() {
-                    None
-                } else {
-                    Some(conv_entry.insert((Amount::into(conv), path, 0)))
-                }
-            }
-        }
-    }
-
-    /// Compute the total unspent notes associated with the viewing key in the
-    /// context and express that value in terms of the currently timestamped
-    /// asset types. If the key is not in the context, then we do not know the
-    /// balance and hence we return None.
-    pub async fn compute_exchanged_balance(
-        &mut self,
-        vk: &ViewingKey,
-        target_epoch: Epoch,
-    ) -> Option<Amount> {
-        // First get the unexchanged balance
-        if let Some(balance) = self.compute_shielded_balance(vk) {
-            // And then exchange balance into current asset types
-            Some(
-                self.compute_exchanged_amount(
-                    balance,
-                    target_epoch,
-                    HashMap::new(),
-                )
-                .await
-                .0,
-            )
-        } else {
-            None
-        }
-    }
-
-    /// Try to convert as much of the given asset type-value pair using the
-    /// given allowed conversion. usage is incremented by the amount of the
-    /// conversion used, the conversions are applied to the given input, and
-    /// the trace amount that could not be converted is moved from input to
-    /// output.
-    fn apply_conversion(
-        conv: AllowedConversion,
-        asset_type: AssetType,
-        value: i64,
-        usage: &mut i64,
-        input: &mut Amount,
-        output: &mut Amount,
-    ) {
-        // If conversion if possible, accumulate the exchanged amount
-        let conv: Amount = conv.into();
-        // The amount required of current asset to qualify for conversion
-        let threshold = -conv[&asset_type];
-        if threshold == 0 {
-            eprintln!(
-                "Asset threshold of selected conversion for asset type {} is \
-                 0, this is a bug, please report it.",
-                asset_type
-            );
-        }
-        // We should use an amount of the AllowedConversion that almost
-        // cancels the original amount
-        let required = value / threshold;
-        // Forget about the trace amount left over because we cannot
-        // realize its value
-        let trace = Amount::from_pair(asset_type, value % threshold).unwrap();
-        // Record how much more of the given conversion has been used
-        *usage += required;
-        // Apply the conversions to input and move the trace amount to output
-        *input += conv * required - &trace;
-        *output += trace;
-    }
-
-    /// Convert the given amount into the latest asset types whilst making a
-    /// note of the conversions that were used. Note that this function does
-    /// not assume that allowed conversions from the ledger are expressed in
-    /// terms of the latest asset types.
-    pub async fn compute_exchanged_amount(
-        &mut self,
-        mut input: Amount,
-        target_epoch: Epoch,
-        mut conversions: Conversions,
-    ) -> (Amount, Conversions) {
-        // Where we will store our exchanged value
-        let mut output = Amount::zero();
-        // Repeatedly exchange assets until it is no longer possible
-        while let Some((asset_type, value)) =
-            input.components().next().map(cloned_pair)
-        {
-            let target_asset_type = self
-                .decode_asset_type(asset_type)
-                .await
-                .map(|(addr, _epoch)| make_asset_type(target_epoch, &addr))
-                .unwrap_or(asset_type);
-            let at_target_asset_type = asset_type == target_asset_type;
-            if let (Some((conv, _wit, usage)), false) = (
-                self.query_allowed_conversion(
-                    asset_type,
-                    &mut conversions,
-                )
-                .await,
-                at_target_asset_type,
-            ) {
-                println!(
-                    "converting current asset type to latest asset type..."
-                );
-                // Not at the target asset type, not at the latest asset type.
-                // Apply conversion to get from current asset type to the latest
-                // asset type.
-                Self::apply_conversion(
-                    conv.clone(),
-                    asset_type,
-                    value,
-                    usage,
-                    &mut input,
-                    &mut output,
-                );
-            } else if let (Some((conv, _wit, usage)), false) = (
-                self.query_allowed_conversion(
-                    target_asset_type,
-                    &mut conversions,
-                )
-                .await,
-                at_target_asset_type,
-            ) {
-                println!(
-                    "converting latest asset type to target asset type..."
-                );
-                // Not at the target asset type, yes at the latest asset type.
-                // Apply inverse conversion to get from latest asset type to
-                // the target asset type.
-                Self::apply_conversion(
-                    conv.clone(),
-                    asset_type,
-                    value,
-                    usage,
-                    &mut input,
-                    &mut output,
-                );
-            } else {
-                // At the target asset type. Then move component over to output.
-                let comp = input.project(asset_type);
-                output += &comp;
-                // Strike from input to avoid repeating computation
-                input -= comp;
-            }
-        }
-        (output, conversions)
-    }
-
-    /// Collect enough unspent notes in this context to exceed the given amount
-    /// of the specified asset type. Return the total value accumulated plus
-    /// notes and the corresponding diversifiers/merkle paths that were used to
-    /// achieve the total value.
-    pub async fn collect_unspent_notes(
-        &mut self,
-        vk: &ViewingKey,
-        target: Amount,
-        target_epoch: Epoch,
-    ) -> (
-        Amount,
-        Vec<(Diversifier, Note, MerklePath<Node>)>,
-        Conversions,
-    ) {
-        // Establish connection with which to do exchange rate queries
-        let mut conversions = HashMap::new();
-        let mut val_acc = Amount::zero();
-        let mut notes = Vec::new();
-        // Retrieve the notes that can be spent by this key
-        if let Some(avail_notes) = self.pos_map.get(vk).cloned() {
-            for note_idx in &avail_notes {
-                // No more transaction inputs are required once we have met
-                // the target amount
-                if val_acc >= target {
-                    break;
-                }
-                // Spent notes cannot contribute a new transaction's pool
-                if self.spents.contains(note_idx) {
-                    continue;
-                }
-                // Get note, merkle path, diversifier associated with this ID
-                let note = *self.note_map.get(note_idx).unwrap();
-
-                // The amount contributed by this note before conversion
-                let pre_contr = Amount::from_pair(note.asset_type, note.value)
-                    .expect("received note has invalid value or asset type");
-                let (contr, proposed_convs) = self
-                    .compute_exchanged_amount(
-                        pre_contr,
-                        target_epoch,
-                        conversions.clone(),
-                    )
-                    .await;
-
-                // Use this note only if it brings us closer to our target
-                if is_amount_required(
-                    val_acc.clone(),
-                    target.clone(),
-                    contr.clone(),
-                ) {
-                    // Be sure to record the conversions used in computing
-                    // accumulated value
-                    val_acc += contr;
-                    // Commit the conversions that were used to exchange
-                    conversions = proposed_convs;
-                    let merkle_path =
-                        self.witness_map.get(note_idx).unwrap().path().unwrap();
-                    let diversifier = self.div_map.get(note_idx).unwrap();
-                    // Commit this note to our transaction
-                    notes.push((*diversifier, note, merkle_path));
-                }
-            }
-        }
-        (val_acc, notes, conversions)
-    }
-
-    /// Compute the combined value of the output notes of the transaction pinned
-    /// at the given payment address. This computation uses the supplied viewing
-    /// keys to try to decrypt the output notes. If no transaction is pinned at
-    /// the given payment address fails with
-    /// `PinnedBalanceError::NoTransactionPinned`.
-    pub async fn compute_pinned_balance(
-        utils: &U,
-        owner: PaymentAddress,
-        viewing_key: &ViewingKey,
-    ) -> Result<(Amount, Epoch), PinnedBalanceError> {
-        // Check that the supplied viewing key corresponds to given payment
-        // address
-        let counter_owner = viewing_key.to_payment_address(
-            *masp_primitives::primitives::PaymentAddress::diversifier(
-                &owner.into(),
-            ),
-        );
-        match counter_owner {
-            Some(counter_owner) if counter_owner == owner.into() => {}
-            _ => return Err(PinnedBalanceError::InvalidViewingKey),
-        }
-        // The address of the MASP account
-        let masp_addr = masp();
-        // Construct the key for where the transaction ID would be stored
-        let pin_key = Key::from(masp_addr.to_db_key())
-            .push(&(PIN_KEY_PREFIX.to_owned() + &owner.hash()))
-            .expect("Cannot obtain a storage key");
-        // Obtain the transaction pointer at the key
-        let txidx = utils.query_storage_value::<u64>(&pin_key)
-            .await
-            .ok_or(PinnedBalanceError::NoTransactionPinned)?;
-        // Construct the key for where the pinned transaction is stored
-        let tx_key = Key::from(masp_addr.to_db_key())
-            .push(&(TX_KEY_PREFIX.to_owned() + &txidx.to_string()))
-            .expect("Cannot obtain a storage key");
-        // Obtain the pointed to transaction
-        let (tx_epoch, _tx_height, _tx_index, tx) =
-            utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
-                &tx_key,
-            )
-            .await
-            .expect("Ill-formed epoch, transaction pair");
-        // Accumulate the combined output note value into this Amount
-        let mut val_acc = Amount::zero();
-        let tx = tx
-            .shielded
-            .expect("Pinned Transfers should have shielded part");
-        for so in &tx.shielded_outputs {
-            // Let's try to see if our viewing key can decrypt current note
-            let decres = try_sapling_note_decryption::<TestNetwork>(
-                0,
-                &viewing_key.ivk().0,
-                &so.ephemeral_key.into_subgroup().unwrap(),
-                &so.cmu,
-                &so.enc_ciphertext,
-            );
-            match decres {
-                // So the given viewing key does decrypt this current note...
-                Some((note, pa, _memo)) if pa == owner.into() => {
-                    val_acc +=
-                        Amount::from_nonnegative(note.asset_type, note.value)
-                            .expect(
-                                "found note with invalid value or asset type",
-                            );
-                    break;
-                }
-                _ => {}
-            }
-        }
-        Ok((val_acc, tx_epoch))
-    }
-
-    /// Compute the combined value of the output notes of the pinned transaction
-    /// at the given payment address if there's any. The asset types may be from
-    /// the epoch of the transaction or even before, so exchange all these
-    /// amounts to the epoch of the transaction in order to get the value that
-    /// would have been displayed in the epoch of the transaction.
-    pub async fn compute_exchanged_pinned_balance(
-        &mut self,
-        owner: PaymentAddress,
-        viewing_key: &ViewingKey,
-    ) -> Result<(Amount, Epoch), PinnedBalanceError> {
-        // Obtain the balance that will be exchanged
-        let (amt, ep) =
-            Self::compute_pinned_balance(&self.utils, owner, viewing_key)
-                .await?;
-        // Finally, exchange the balance to the transaction's epoch
-        Ok((
-            self.compute_exchanged_amount(amt, ep, HashMap::new())
-                .await
-                .0,
-            ep,
-        ))
-    }
-
-    /// Convert an amount whose units are AssetTypes to one whose units are
-    /// Addresses that they decode to. All asset types not corresponding to
-    /// the given epoch are ignored.
-    pub async fn decode_amount(
-        &mut self,
-        amt: Amount,
-        target_epoch: Epoch,
-    ) -> Amount<Address> {
-        let mut res = Amount::zero();
-        for (asset_type, val) in amt.components() {
-            // Decode the asset type
-            let decoded =
-                self.decode_asset_type(*asset_type).await;
-            // Only assets with the target timestamp count
-            match decoded {
-                Some((addr, epoch)) if epoch == target_epoch => {
-                    res += &Amount::from_pair(addr, *val).unwrap()
-                }
-                _ => {}
-            }
-        }
-        res
-    }
-
-    /// Convert an amount whose units are AssetTypes to one whose units are
-    /// Addresses that they decode to.
-    pub async fn decode_all_amounts(
-        &mut self,
-        amt: Amount,
-    ) -> Amount<(Address, Epoch)> {
-        let mut res = Amount::zero();
-        for (asset_type, val) in amt.components() {
-            // Decode the asset type
-            let decoded =
-                self.decode_asset_type(*asset_type).await;
-            // Only assets with the target timestamp count
-            if let Some((addr, epoch)) = decoded {
-                res += &Amount::from_pair((addr, epoch), *val).unwrap()
-            }
-        }
-        res
-    }
-
-    /// Make shielded components to embed within a Transfer object. If no shielded
-    /// payment address nor spending key is specified, then no shielded components
-    /// are produced. Otherwise a transaction containing nullifiers and/or note
-    /// commitments are produced. Dummy transparent UTXOs are sometimes used to make
-    /// transactions balanced, but it is understood that transparent account changes
-    /// are effected only by the amounts and signatures specified by the containing
-    /// Transfer object.
-    async fn gen_shielded_transfer(
-        &mut self,
-        source: TransferSource,
-        target: TransferTarget,
-        args_amount: token::Amount,
-        token: Address,
-        fee_amount: token::Amount,
-        fee_token: Address,
-        shielded_gas: bool,
-    ) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error> {
-        // No shielded components are needed when neither source nor destination
-        // are shielded
-        let spending_key = source.spending_key();
-        let payment_address = target.payment_address();
-        if spending_key.is_none() && payment_address.is_none() {
-            return Ok(None);
-        }
-        // We want to fund our transaction solely from supplied spending key
-        let spending_key = spending_key.map(|x| x.into());
-        let spending_keys: Vec<_> = spending_key.into_iter().collect();
-        // Load the current shielded context given the spending key we possess
-        let _ = self.load();
-        self.fetch(&spending_keys, &[])
-            .await;
-        // Save the update state so that future fetches can be short-circuited
-        let _ = self.save();
-        // Determine epoch in which to submit potential shielded transaction
-        let epoch = self.utils.query_epoch().await;
-        // Context required for storing which notes are in the source's possesion
-        let consensus_branch_id = BranchId::Sapling;
-        let amt: u64 = args_amount.into();
-        let memo: Option<Memo> = None;
-
-        // Now we build up the transaction within this object
-        let mut builder = Builder::<TestNetwork, OsRng>::new(0u32);
-        // Convert transaction amount into MASP types
-        let (asset_type, amount) = convert_amount(epoch, &token, args_amount);
-
-        // Transactions with transparent input and shielded output
-        // may be affected if constructed close to epoch boundary
-        let mut epoch_sensitive: bool = false;
-        // If there are shielded inputs
-        if let Some(sk) = spending_key {
-            // Transaction fees need to match the amount in the wrapper Transfer
-            // when MASP source is used
-            let (_, fee) =
-                convert_amount(epoch, &fee_token, fee_amount);
-            builder.set_fee(fee.clone())?;
-            // If the gas is coming from the shielded pool, then our shielded inputs
-            // must also cover the gas fee
-            let required_amt = if shielded_gas { amount + fee } else { amount };
-            // Locate unspent notes that can help us meet the transaction amount
-            let (_, unspent_notes, used_convs) = self
-                .collect_unspent_notes(
-                    &to_viewing_key(&sk).vk,
-                    required_amt,
-                    epoch,
-                )
-                .await;
-            // Commit the notes found to our transaction
-            for (diversifier, note, merkle_path) in unspent_notes {
-                builder.add_sapling_spend(sk, diversifier, note, merkle_path)?;
-            }
-            // Commit the conversion notes used during summation
-            for (conv, wit, value) in used_convs.values() {
-                if *value > 0 {
-                    builder.add_convert(
-                        conv.clone(),
-                        *value as u64,
-                        wit.clone(),
-                    )?;
-                }
-            }
-        } else {
-            // No transfer fees come from the shielded transaction for non-MASP
-            // sources
-            builder.set_fee(Amount::zero())?;
-            // We add a dummy UTXO to our transaction, but only the source of the
-            // parent Transfer object is used to validate fund availability
-            let secp_sk =
-                secp256k1::SecretKey::from_slice(&[0xcd; 32]).expect("secret key");
-            let secp_ctx = secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
-            let secp_pk =
-                secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
-                .serialize();
-            let hash =
-                ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
-            let script = TransparentAddress::PublicKey(hash.into()).script();
-            epoch_sensitive = true;
-            builder.add_transparent_input(
-                secp_sk,
-                OutPoint::new([0u8; 32], 0),
-                TxOut {
-                    asset_type,
-                    value: amt,
-                    script_pubkey: script,
-                },
-            )?;
-        }
-        // Now handle the outputs of this transaction
-        // If there is a shielded output
-        if let Some(pa) = payment_address {
-            let ovk_opt = spending_key.map(|x| x.expsk.ovk);
-            builder.add_sapling_output(
-                ovk_opt,
-                pa.into(),
-                asset_type,
-                amt,
-                memo.clone(),
-            )?;
-        } else {
-            epoch_sensitive = false;
-            // Embed the transparent target address into the shielded transaction so
-            // that it can be signed
-            let target_enc = target
-                .address()
-                .expect("target address should be transparent")
-                .try_to_vec()
-                .expect("target address encoding");
-            let hash = ripemd160::Ripemd160::digest(&sha2::Sha256::digest(
-                target_enc.as_ref(),
-            ));
-            builder.add_transparent_output(
-                &TransparentAddress::PublicKey(hash.into()),
-                asset_type,
-                amt,
-            )?;
-        }
-        let prover = self.utils.local_tx_prover();
-        // Build and return the constructed transaction
-        let mut tx = builder.build(consensus_branch_id, &prover);
-
-        if epoch_sensitive {
-            let new_epoch = self.utils.query_epoch().await;
-
-            // If epoch has changed, recalculate shielded outputs to match new epoch
-            if new_epoch != epoch {
-                // Hack: build new shielded transfer with updated outputs
-                let mut replay_builder = Builder::<TestNetwork, OsRng>::new(0u32);
-                replay_builder.set_fee(Amount::zero())?;
-                let ovk_opt = spending_key.map(|x| x.expsk.ovk);
-                let (new_asset_type, _) =
-                    convert_amount(new_epoch, &token, args_amount);
-                replay_builder.add_sapling_output(
-                    ovk_opt,
-                    payment_address.unwrap().into(),
-                    new_asset_type,
-                    amt,
-                    memo,
-                )?;
-
-                let secp_sk = secp256k1::SecretKey::from_slice(&[0xcd; 32])
-                    .expect("secret key");
-                let secp_ctx =
-                    secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
-                let secp_pk =
-                    secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
-                    .serialize();
-                let hash =
-                    ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
-                let script = TransparentAddress::PublicKey(hash.into()).script();
-                replay_builder.add_transparent_input(
-                    secp_sk,
-                    OutPoint::new([0u8; 32], 0),
-                    TxOut {
-                        asset_type: new_asset_type,
-                        value: amt,
-                        script_pubkey: script,
-                    },
-                )?;
-
-                let (replay_tx, _) =
-                    replay_builder.build(consensus_branch_id, &prover)?;
-                tx = tx.map(|(t, tm)| {
-                    let mut temp = t.deref().clone();
-                    temp.shielded_outputs = replay_tx.shielded_outputs.clone();
-                    temp.value_balance = temp.value_balance.reject(asset_type)
-                        - Amount::from_pair(new_asset_type, amt).unwrap();
-                    (temp.freeze().unwrap(), tm)
-                });
-            }
-        }
-
-        tx.map(Some)
-    }
-}
-
-/// Make asset type corresponding to given address and epoch
-fn make_asset_type(epoch: Epoch, token: &Address) -> AssetType {
-    // Typestamp the chosen token with the current epoch
-    let token_bytes = (token, epoch.0)
-        .try_to_vec()
-        .expect("token should serialize");
-    // Generate the unique asset identifier from the unique token address
-    AssetType::new(token_bytes.as_ref()).expect("unable to create asset type")
-}
 
-/// Convert Anoma amount and token type to MASP equivalents
-fn convert_amount(
-    epoch: Epoch,
-    token: &Address,
-    val: token::Amount,
-) -> (AssetType, Amount) {
-    let asset_type = make_asset_type(epoch, token);
-    // Combine the value and unit into one amount
-    let amount = Amount::from_nonnegative(asset_type, u64::from(val))
-        .expect("invalid value for amount");
-    (asset_type, amount)
-}
 
 pub async fn submit_transfer(mut ctx: Context, args: args::TxTransfer) {
     let transfer_source = ctx.get_cached(&args.source);
diff --git a/apps/src/lib/client/types.rs b/apps/src/lib/client/types.rs
index 7246e436b8..10ae25182a 100644
--- a/apps/src/lib/client/types.rs
+++ b/apps/src/lib/client/types.rs
@@ -11,5 +11,4 @@ use namada::types::{key, token};
 
 use super::rpc;
 use crate::cli::{args, Context};
-use crate::client::tx::Conversions;
 use crate::facade::tendermint_config::net::Address as TendermintAddress;
diff --git a/shared/Cargo.toml b/shared/Cargo.toml
index 47bac417e3..3112c314e6 100644
--- a/shared/Cargo.toml
+++ b/shared/Cargo.toml
@@ -66,6 +66,11 @@ ibc-mocks-abcipp = [
   "namada_core/ibc-mocks-abcipp",
 ]
 
+masp-tx-gen = [
+  "rand",
+  "rand_core",
+]
+
 # for integration tests and test utilies
 testing = [
   "namada_core/testing",
@@ -124,6 +129,8 @@ wasmparser = "0.83.0"
 #libmasp = { git = "https://github.com/anoma/masp", branch = "murisi/masp-incentive" }
 masp_primitives = { git = "https://github.com/anoma/masp", rev = "bee40fc465f6afbd10558d12fe96eb1742eee45c" }
 masp_proofs = { git = "https://github.com/anoma/masp", rev = "bee40fc465f6afbd10558d12fe96eb1742eee45c" }
+rand = {version = "0.8", default-features = false, optional = true}
+rand_core = {version = "0.6", default-features = false, optional = true}
 zeroize = "1.5.5"
 
 [dev-dependencies]
diff --git a/shared/src/ledger/masp.rs b/shared/src/ledger/masp.rs
index 03c289eefd..2c470323ef 100644
--- a/shared/src/ledger/masp.rs
+++ b/shared/src/ledger/masp.rs
@@ -18,6 +18,52 @@ use masp_primitives::transaction::{
 };
 use masp_proofs::sapling::SaplingVerificationContext;
 
+use std::collections::hash_map::Entry;
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt::Debug;
+#[cfg(feature = "masp-tx-gen")]
+use std::io::Read;
+
+//use async_std::io::prelude::WriteExt;
+//use async_std::io::{self};
+use borsh::{BorshDeserialize, BorshSerialize};
+use masp_primitives::consensus::{BranchId, TestNetwork};
+use masp_primitives::convert::AllowedConversion;
+use masp_primitives::ff::PrimeField;
+use masp_primitives::group::cofactor::CofactorGroup;
+use masp_primitives::keys::FullViewingKey;
+#[cfg(feature = "masp-tx-gen")]
+use masp_primitives::legacy::TransparentAddress;
+use masp_primitives::merkle_tree::{
+    CommitmentTree, IncrementalWitness, MerklePath,
+};
+use masp_primitives::note_encryption::*;
+use masp_primitives::primitives::{Diversifier, Note, ViewingKey};
+use masp_primitives::sapling::Node;
+#[cfg(feature = "masp-tx-gen")]
+use masp_primitives::transaction::builder::{self, secp256k1, *};
+use masp_primitives::transaction::components::Amount;
+#[cfg(feature = "masp-tx-gen")]
+use masp_primitives::transaction::components::{OutPoint, TxOut};
+use masp_primitives::zip32::{ExtendedFullViewingKey, ExtendedSpendingKey};
+use masp_proofs::prover::LocalTxProver;
+use crate::types::address::{masp, Address};
+use crate::types::masp::PaymentAddress;
+#[cfg(feature = "masp-tx-gen")]
+use crate::types::masp::{TransferSource, TransferTarget};
+use crate::types::storage::{
+    BlockHeight, Epoch, Key, KeySeg, TxIndex,
+};
+use crate::types::token::{
+    Transfer, HEAD_TX_KEY, PIN_KEY_PREFIX, TX_KEY_PREFIX,
+};
+use crate::types::{storage, token};
+#[cfg(feature = "masp-tx-gen")]
+use rand_core::{CryptoRng, OsRng, RngCore};
+#[cfg(feature = "masp-tx-gen")]
+use sha2::Digest;
+use async_trait::async_trait;
+
 /// Env var to point to a dir with MASP parameters. When not specified,
 /// the default OS specific path is used.
 pub const ENV_VAR_MASP_PARAMS_DIR: &str = "ANOMA_MASP_PARAMS_DIR";
@@ -200,3 +246,1044 @@ pub fn get_params_dir() -> PathBuf {
         masp_proofs::default_params_folder().unwrap()
     }
 }
+
+#[async_trait]
+pub trait ShieldedUtils : Sized + BorshDeserialize + BorshSerialize + Default + Clone {
+    async fn query_storage_value<T: Send>(
+        &self,
+        key: &storage::Key,
+    ) -> Option<T>
+    where
+        T: BorshDeserialize;
+
+    async fn query_epoch(&self) -> Epoch;
+
+    fn local_tx_prover(&self) -> LocalTxProver;
+
+    fn load(self) -> std::io::Result<ShieldedContext<Self>>;
+
+    fn save(&self, ctx: &ShieldedContext<Self>) -> std::io::Result<()>;
+
+    async fn query_conversion(
+        &self,
+        asset_type: AssetType,
+    ) -> Option<(
+        Address,
+        Epoch,
+        masp_primitives::transaction::components::Amount,
+        MerklePath<Node>,
+    )>;
+}
+
+/// Make a FullViewingKey that can view notes encrypted by the given
+pub fn to_viewing_key(esk: &ExtendedSpendingKey) -> FullViewingKey {
+    ExtendedFullViewingKey::from(esk).fvk
+}
+
+/// Generate a valid diversifier, i.e. one that has a diversified base. Return
+/// also this diversified base.
+#[cfg(feature = "masp-tx-gen")]
+pub fn find_valid_diversifier<R: RngCore + CryptoRng>(
+    rng: &mut R,
+) -> (Diversifier, masp_primitives::jubjub::SubgroupPoint) {
+    let mut diversifier;
+    let g_d;
+    // Keep generating random diversifiers until one has a diversified base
+    loop {
+        let mut d = [0; 11];
+        rng.fill_bytes(&mut d);
+        diversifier = Diversifier(d);
+        if let Some(val) = diversifier.g_d() {
+            g_d = val;
+            break;
+        }
+    }
+    (diversifier, g_d)
+}
+
+/// Determine if using the current note would actually bring us closer to our
+/// target
+pub fn is_amount_required(src: Amount, dest: Amount, delta: Amount) -> bool {
+    if delta > Amount::zero() {
+        let gap = dest - src;
+        for (asset_type, value) in gap.components() {
+            if *value > 0 && delta[asset_type] > 0 {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+/// An extension of Option's cloned method for pair types
+fn cloned_pair<T: Clone, U: Clone>((a, b): (&T, &U)) -> (T, U) {
+    (a.clone(), b.clone())
+}
+
+/// Errors that can occur when trying to retrieve pinned transaction
+#[derive(PartialEq, Eq)]
+pub enum PinnedBalanceError {
+    /// No transaction has yet been pinned to the given payment address
+    NoTransactionPinned,
+    /// The supplied viewing key does not recognize payments to given address
+    InvalidViewingKey,
+}
+
+/// Represents the amount used of different conversions
+pub type Conversions =
+    HashMap<AssetType, (AllowedConversion, MerklePath<Node>, i64)>;
+
+/// Represents the changes that were made to a list of transparent accounts
+pub type TransferDelta = HashMap<Address, Amount<Address>>;
+
+/// Represents the changes that were made to a list of shielded accounts
+pub type TransactionDelta = HashMap<ViewingKey, Amount>;
+
+/// Represents the current state of the shielded pool from the perspective of
+/// the chosen viewing keys.
+#[derive(BorshSerialize, BorshDeserialize, Debug)]
+pub struct ShieldedContext<U: ShieldedUtils> {
+    /// Utilities for querying the chain and for loading/saving this context
+    #[borsh_skip]
+    pub utils: U,
+    /// The last transaction index to be processed in this context
+    pub last_txidx: u64,
+    /// The commitment tree produced by scanning all transactions up to last_txidx
+    pub tree: CommitmentTree<Node>,
+    /// Maps viewing keys to applicable note positions
+    pub pos_map: HashMap<ViewingKey, HashSet<usize>>,
+    /// Maps a nullifier to the note position to which it applies
+    pub nf_map: HashMap<[u8; 32], usize>,
+    /// Maps note positions to their corresponding notes
+    pub note_map: HashMap<usize, Note>,
+    /// Maps note positions to their corresponding memos
+    pub memo_map: HashMap<usize, Memo>,
+    /// Maps note positions to the diversifier of their payment address
+    pub div_map: HashMap<usize, Diversifier>,
+    /// Maps note positions to their witness (used to make merkle paths)
+    pub witness_map: HashMap<usize, IncrementalWitness<Node>>,
+    /// Tracks what each transaction does to various account balances
+    pub delta_map: BTreeMap<
+        (BlockHeight, TxIndex),
+        (Epoch, TransferDelta, TransactionDelta),
+    >,
+    /// The set of note positions that have been spent
+    pub spents: HashSet<usize>,
+    /// Maps asset types to their decodings
+    pub asset_types: HashMap<AssetType, (Address, Epoch)>,
+    /// Maps note positions to their corresponding viewing keys
+    pub vk_map: HashMap<usize, ViewingKey>,
+}
+
+/// Default implementation to ease construction of ShieldedContexts. Derive cannot be
+/// used here due to CommitmentTree not implementing Default.
+impl<U: ShieldedUtils + Default> Default for ShieldedContext<U> {
+    fn default() -> ShieldedContext<U> {
+        ShieldedContext::<U> {
+            utils: U::default(),
+            last_txidx: u64::default(),
+            tree: CommitmentTree::empty(),
+            pos_map: HashMap::default(),
+            nf_map: HashMap::default(),
+            note_map: HashMap::default(),
+            memo_map: HashMap::default(),
+            div_map: HashMap::default(),
+            witness_map: HashMap::default(),
+            spents: HashSet::default(),
+            delta_map: BTreeMap::default(),
+            asset_types: HashMap::default(),
+            vk_map: HashMap::default(),
+        }
+    }
+}
+
+impl<U: ShieldedUtils> ShieldedContext<U> {
+    /// Try to load the last saved shielded context from the given context
+    /// directory. If this fails, then leave the current context unchanged.
+    pub fn load(&mut self) -> std::io::Result<()> {
+        let new_ctx = self.utils.clone().load()?;
+        *self = new_ctx;
+        Ok(())
+    }
+
+    /// Save this shielded context into its associated context directory
+    pub fn save(&self) -> std::io::Result<()> {
+        self.utils.save(self)
+    }
+
+    /// Merge data from the given shielded context into the current shielded
+    /// context. It must be the case that the two shielded contexts share the
+    /// same last transaction ID and share identical commitment trees.
+    pub fn merge(&mut self, new_ctx: ShieldedContext<U>) {
+        debug_assert_eq!(self.last_txidx, new_ctx.last_txidx);
+        // The simple maps can be merged by plain extension: identical keys
+        // are expected to carry identical values, so overwrites are benign.
+        self.pos_map.extend(new_ctx.pos_map);
+        self.nf_map.extend(new_ctx.nf_map);
+        self.note_map.extend(new_ctx.note_map);
+        self.memo_map.extend(new_ctx.memo_map);
+        self.div_map.extend(new_ctx.div_map);
+        self.witness_map.extend(new_ctx.witness_map);
+        self.spents.extend(new_ctx.spents);
+        self.asset_types.extend(new_ctx.asset_types);
+        self.vk_map.extend(new_ctx.vk_map);
+        // Deltas need an entry-wise merge instead: different viewing keys can
+        // each reveal a different part of the same transaction.
+        for ((height, idx), (ep, other_transfers, other_txs)) in
+            new_ctx.delta_map
+        {
+            let entry = self.delta_map.entry((height, idx)).or_insert((
+                ep,
+                TransferDelta::new(),
+                TransactionDelta::new(),
+            ));
+            entry.1.extend(other_transfers);
+            entry.2.extend(other_txs);
+        }
+    }
+
+    /// Fetch the current state of the multi-asset shielded pool into a
+    /// ShieldedContext
+    ///
+    /// `sks` are extended spending keys (their viewing keys are derived via
+    /// `to_viewing_key`) and `fvks` are viewing keys; both sets are scanned
+    /// for.
+    pub async fn fetch(
+        &mut self,
+        sks: &[ExtendedSpendingKey],
+        fvks: &[ViewingKey],
+    ) {
+        // First determine which of the keys requested to be fetched are new.
+        // Necessary because old transactions will need to be scanned for new
+        // keys.
+        let mut unknown_keys = Vec::new();
+        for esk in sks {
+            let vk = to_viewing_key(esk).vk;
+            if !self.pos_map.contains_key(&vk) {
+                unknown_keys.push(vk);
+            }
+        }
+        for vk in fvks {
+            if !self.pos_map.contains_key(vk) {
+                unknown_keys.push(*vk);
+            }
+        }
+
+        // If unknown keys are being used, we need to scan older transactions
+        // for any unspent notes
+        let (txs, mut tx_iter);
+        if !unknown_keys.is_empty() {
+            // Load all transactions accepted until this point
+            txs = Self::fetch_shielded_transfers(&self.utils, 0).await;
+            tx_iter = txs.iter();
+            // Do this by constructing a shielding context only for unknown keys
+            let mut tx_ctx = Self { utils: self.utils.clone(), ..Default::default() };
+            for vk in unknown_keys {
+                tx_ctx.pos_map.entry(vk).or_insert_with(HashSet::new);
+            }
+            // Update this unknown shielded context until it is level with self
+            // (the catch-up loop advances `tx_iter` past already-seen
+            // transactions, so the trailing scan below only sees new ones)
+            while tx_ctx.last_txidx != self.last_txidx {
+                if let Some(((height, idx), (epoch, tx))) = tx_iter.next() {
+                    tx_ctx.scan_tx(*height, *idx, *epoch, tx);
+                } else {
+                    break;
+                }
+            }
+            // Merge the context data originating from the unknown keys into the
+            // current context
+            self.merge(tx_ctx);
+        } else {
+            // Load only transactions accepted from last_txid until this point
+            txs =
+                Self::fetch_shielded_transfers(&self.utils, self.last_txidx)
+                    .await;
+            tx_iter = txs.iter();
+        }
+        // Now that we possess the unspent notes corresponding to both old and
+        // new keys up until tx_pos, proceed to scan the new transactions.
+        for ((height, idx), (epoch, tx)) in &mut tx_iter {
+            self.scan_tx(*height, *idx, *epoch, tx);
+        }
+    }
+
+    /// Obtain a chronologically-ordered list of all accepted shielded
+    /// transactions from the ledger. The ledger conceptually stores
+    /// transactions as a vector. More concretely, the HEAD_TX_KEY location
+    /// stores the index of the last accepted transaction and each transaction
+    /// is stored at a key derived from its index.
+    ///
+    /// Only transactions with index in `last_txidx..head` are fetched, so
+    /// passing the context's own `last_txidx` yields just the new ones.
+    pub async fn fetch_shielded_transfers(
+        utils: &U,
+        last_txidx: u64,
+    ) -> BTreeMap<(BlockHeight, TxIndex), (Epoch, Transfer)> {
+        // The address of the MASP account
+        let masp_addr = masp();
+        // Construct the key where last transaction pointer is stored
+        let head_tx_key = Key::from(masp_addr.to_db_key())
+            .push(&HEAD_TX_KEY.to_owned())
+            .expect("Cannot obtain a storage key");
+        // Query for the index of the last accepted transaction; a missing
+        // value means no shielded transaction has been accepted yet
+        let head_txidx = utils.query_storage_value::<u64>(&head_tx_key)
+            .await
+            .unwrap_or(0);
+        let mut shielded_txs = BTreeMap::new();
+        // Fetch all the transactions we do not have yet
+        for i in last_txidx..head_txidx {
+            // Construct the key for where the current transaction is stored
+            let current_tx_key = Key::from(masp_addr.to_db_key())
+                .push(&(TX_KEY_PREFIX.to_owned() + &i.to_string()))
+                .expect("Cannot obtain a storage key");
+            // Obtain the current transaction. Every index below the head is a
+            // ledger invariant, so a failed or ill-typed read here is a
+            // corrupt-storage bug rather than a recoverable error.
+            let (tx_epoch, tx_height, tx_index, current_tx) =
+                utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
+                    &current_tx_key,
+                )
+                .await
+                .expect("Ill-formed epoch, transaction pair");
+            // Collect the current transaction
+            shielded_txs.insert((tx_height, tx_index), (tx_epoch, current_tx));
+        }
+        shielded_txs
+    }
+
+    /// Applies the given transaction to the supplied context. More precisely,
+    /// the shielded transaction's outputs are added to the commitment tree.
+    /// Newly discovered notes are associated to the supplied viewing keys. Note
+    /// nullifiers are mapped to their originating notes. Note positions are
+    /// associated to notes, memos, and diversifiers. And the set of notes that
+    /// we have spent are updated. The witness map is maintained to make it
+    /// easier to construct note merkle paths in other code. See
+    /// https://zips.z.cash/protocol/protocol.pdf#scan
+    pub fn scan_tx(
+        &mut self,
+        height: BlockHeight,
+        index: TxIndex,
+        epoch: Epoch,
+        tx: &Transfer,
+    ) {
+        // Ignore purely transparent transactions
+        // NOTE(review): this early return also skips recording the
+        // transparent delta and incrementing `last_txidx`, while `fetch`'s
+        // catch-up loop advances by comparing `last_txidx` — confirm that
+        // transfers without a shielded part never appear in the fetched
+        // stream.
+        let shielded = if let Some(shielded) = &tx.shielded {
+            shielded
+        } else {
+            return;
+        };
+        // For tracking the account changes caused by this Transaction
+        let mut transaction_delta = TransactionDelta::new();
+        // Listen for notes sent to our viewing keys
+        for so in &shielded.shielded_outputs {
+            // Create merkle tree leaf node from note commitment
+            let node = Node::new(so.cmu.to_repr());
+            // Update each merkle tree in the witness map with the latest
+            // addition
+            for (_, witness) in self.witness_map.iter_mut() {
+                witness.append(node).expect("note commitment tree is full");
+            }
+            // The new note's position is the tree size before appending it
+            let note_pos = self.tree.size();
+            self.tree
+                .append(node)
+                .expect("note commitment tree is full");
+            // Finally, make it easier to construct merkle paths to this new
+            // note
+            let witness = IncrementalWitness::<Node>::from_tree(&self.tree);
+            self.witness_map.insert(note_pos, witness);
+            // Let's try to see if any of our viewing keys can decrypt latest
+            // note
+            for (vk, notes) in self.pos_map.iter_mut() {
+                // NOTE(review): the first argument (block height) is
+                // hard-coded to 0 — presumably to select pre-ZIP 212
+                // plaintext handling; confirm against the masp_primitives
+                // version in use
+                let decres = try_sapling_note_decryption::<TestNetwork>(
+                    0,
+                    &vk.ivk().0,
+                    &so.ephemeral_key.into_subgroup().unwrap(),
+                    &so.cmu,
+                    &so.enc_ciphertext,
+                );
+                // So this current viewing key does decrypt this current note...
+                if let Some((note, pa, memo)) = decres {
+                    // Add this note to list of notes decrypted by this viewing
+                    // key
+                    notes.insert(note_pos);
+                    // Compute the nullifier now to quickly recognize when spent
+                    let nf = note.nf(vk, note_pos.try_into().unwrap());
+                    self.note_map.insert(note_pos, note);
+                    self.memo_map.insert(note_pos, memo);
+                    // The payment address' diversifier is required to spend
+                    // note
+                    self.div_map.insert(note_pos, *pa.diversifier());
+                    self.nf_map.insert(nf.0, note_pos);
+                    // Note the account changes
+                    let balance = transaction_delta
+                        .entry(*vk)
+                        .or_insert_with(Amount::zero);
+                    *balance +=
+                        Amount::from_nonnegative(note.asset_type, note.value)
+                            .expect(
+                                "found note with invalid value or asset type",
+                            );
+                    self.vk_map.insert(note_pos, *vk);
+                    // A note decrypts under at most one of our keys, so stop
+                    // trying the remaining viewing keys
+                    break;
+                }
+            }
+        }
+        // Cancel out those of our notes that have been spent
+        for ss in &shielded.shielded_spends {
+            // If the shielded spend's nullifier is in our map, then target note
+            // is rendered unusable
+            if let Some(note_pos) = self.nf_map.get(&ss.nullifier) {
+                self.spents.insert(*note_pos);
+                // Note the account changes
+                let balance = transaction_delta
+                    .entry(self.vk_map[note_pos])
+                    .or_insert_with(Amount::zero);
+                let note = self.note_map[note_pos];
+                // Spending a note subtracts its value from the owning key
+                *balance -=
+                    Amount::from_nonnegative(note.asset_type, note.value)
+                        .expect("found note with invalid value or asset type");
+            }
+        }
+        // Record the changes to the transparent accounts
+        let transparent_delta =
+            Amount::from_nonnegative(tx.token.clone(), u64::from(tx.amount))
+                .expect("invalid value for amount");
+        let mut transfer_delta = TransferDelta::new();
+        transfer_delta
+            .insert(tx.source.clone(), Amount::zero() - &transparent_delta);
+        transfer_delta.insert(tx.target.clone(), transparent_delta);
+        self.delta_map.insert(
+            (height, index),
+            (epoch, transfer_delta, transaction_delta),
+        );
+        self.last_txidx += 1;
+    }
+
+    /// Summarize the effects on shielded and transparent accounts of each
+    /// Transfer in this context
+    ///
+    /// Entries are keyed by (block height, tx index), so iterating the
+    /// returned map visits transfers in chronological order.
+    pub fn get_tx_deltas(
+        &self,
+    ) -> &BTreeMap<
+        (BlockHeight, TxIndex),
+        (Epoch, TransferDelta, TransactionDelta),
+    > {
+        &self.delta_map
+    }
+
+    /// Compute the total unspent notes associated with the viewing key in the
+    /// context. If the key is not in the context, then we do not know the
+    /// balance and hence we return None.
+    pub fn compute_shielded_balance(&self, vk: &ViewingKey) -> Option<Amount> {
+        // Cannot query the balance of a key that's not in the map. Using `?`
+        // here both returns None for unknown keys and avoids the second map
+        // lookup that a contains_key + get pair would perform.
+        let avail_notes = self.pos_map.get(vk)?;
+        let mut val_acc = Amount::zero();
+        // Accumulate the value of every note this key can still spend
+        for note_idx in avail_notes {
+            // Spent notes cannot contribute to a new transaction's pool
+            if self.spents.contains(note_idx) {
+                continue;
+            }
+            // Get note associated with this ID
+            let note = self.note_map.get(note_idx).unwrap();
+            // Finally add value to multi-asset accumulator
+            val_acc += Amount::from_nonnegative(note.asset_type, note.value)
+                .expect("found note with invalid value or asset type");
+        }
+        Some(val_acc)
+    }
+
+    /// Query the ledger for the decoding of the given asset type and cache it
+    /// if it is found.
+    pub async fn decode_asset_type(
+        &mut self,
+        asset_type: AssetType,
+    ) -> Option<(Address, Epoch)> {
+        // Fast path: the decoding may already be cached
+        if let Some(cached) = self.asset_types.get(&asset_type) {
+            return Some(cached.clone());
+        }
+        // Otherwise ask the ledger; a failed conversion query means this
+        // asset type cannot be decoded
+        let (addr, ep, _conv, _path): (Address, _, Amount, MerklePath<Node>) =
+            self.utils.query_conversion(asset_type).await?;
+        // Remember the decoding for future calls before returning it
+        self.asset_types.insert(asset_type, (addr.clone(), ep));
+        Some((addr, ep))
+    }
+
+    /// Query the ledger for the conversion that is allowed for the given asset
+    /// type and cache it.
+    ///
+    /// Returns a mutable reference into `conversions` so the caller can bump
+    /// the usage counter. Returns None when no conversion exists for the
+    /// asset type or when the ledger's conversion is zero (a pure decoding).
+    async fn query_allowed_conversion<'a>(
+        &'a mut self,
+        asset_type: AssetType,
+        conversions: &'a mut Conversions,
+    ) -> Option<&'a mut (AllowedConversion, MerklePath<Node>, i64)> {
+        match conversions.entry(asset_type) {
+            // Conversion already cached from a previous query
+            Entry::Occupied(conv_entry) => Some(conv_entry.into_mut()),
+            Entry::Vacant(conv_entry) => {
+                // Query for the ID of the last accepted transaction
+                let (addr, ep, conv, path): (Address, _, _, _) =
+                    self.utils.query_conversion(asset_type).await?;
+                // The query also yields the asset type's decoding, so cache
+                // that as a side effect
+                self.asset_types.insert(asset_type, (addr, ep));
+                // If the conversion is 0, then we just have a pure decoding
+                // (nothing is inserted into `conversions` in that case)
+                if conv == Amount::zero() {
+                    None
+                } else {
+                    // Cache the conversion with a zero usage counter
+                    Some(conv_entry.insert((Amount::into(conv), path, 0)))
+                }
+            }
+        }
+    }
+
+    /// Compute the total unspent notes associated with the viewing key in the
+    /// context and express that value in terms of the currently timestamped
+    /// asset types. If the key is not in the context, then we do not know the
+    /// balance and hence we return None.
+    pub async fn compute_exchanged_balance(
+        &mut self,
+        vk: &ViewingKey,
+        target_epoch: Epoch,
+    ) -> Option<Amount> {
+        // Unknown keys have no computable balance
+        let unexchanged = self.compute_shielded_balance(vk)?;
+        // Exchange the balance into asset types current at the target epoch;
+        // the conversions that were used are not needed here
+        let (exchanged, _conversions) = self
+            .compute_exchanged_amount(unexchanged, target_epoch, HashMap::new())
+            .await;
+        Some(exchanged)
+    }
+
+    /// Try to convert as much of the given asset type-value pair using the
+    /// given allowed conversion. usage is incremented by the amount of the
+    /// conversion used, the conversions are applied to the given input, and
+    /// the trace amount that could not be converted is moved from input to
+    /// output.
+    fn apply_conversion(
+        conv: AllowedConversion,
+        asset_type: AssetType,
+        value: i64,
+        usage: &mut i64,
+        input: &mut Amount,
+        output: &mut Amount,
+    ) {
+        // If conversion if possible, accumulate the exchanged amount
+        let conv: Amount = conv.into();
+        // The amount required of current asset to qualify for conversion
+        // (conversions carry the consumed asset with a negative coefficient,
+        // hence the negation)
+        let threshold = -conv[&asset_type];
+        if threshold == 0 {
+            eprintln!(
+                "Asset threshold of selected conversion for asset type {} is \
+                 0, this is a bug, please report it.",
+                asset_type
+            );
+        }
+        // NOTE(review): execution continues past the diagnostic above, so a
+        // zero threshold makes the integer divisions below panic with a
+        // divide-by-zero — consider returning early instead.
+        // We should use an amount of the AllowedConversion that almost
+        // cancels the original amount (integer division truncates toward
+        // zero)
+        let required = value / threshold;
+        // Forget about the trace amount left over because we cannot
+        // realize its value
+        let trace = Amount::from_pair(asset_type, value % threshold).unwrap();
+        // Record how much more of the given conversion has been used
+        *usage += required;
+        // Apply the conversions to input and move the trace amount to output
+        *input += conv * required - &trace;
+        *output += trace;
+    }
+
+    /// Convert the given amount into the latest asset types whilst making a
+    /// note of the conversions that were used. Note that this function does
+    /// not assume that allowed conversions from the ledger are expressed in
+    /// terms of the latest asset types.
+    ///
+    /// Returns the exchanged amount together with the (possibly extended)
+    /// conversion cache so callers can commit or discard the usages.
+    pub async fn compute_exchanged_amount(
+        &mut self,
+        mut input: Amount,
+        target_epoch: Epoch,
+        mut conversions: Conversions,
+    ) -> (Amount, Conversions) {
+        // Where we will store our exchanged value
+        let mut output = Amount::zero();
+        // Repeatedly exchange assets until it is no longer possible.
+        // Each iteration either applies a conversion to the current component
+        // or strikes it from `input`; termination relies on conversion chains
+        // eventually reaching the target asset type — TODO confirm the ledger
+        // guarantees this.
+        while let Some((asset_type, value)) =
+            input.components().next().map(cloned_pair)
+        {
+            // The asset type this component should end up as: the same
+            // address re-timestamped at the target epoch (or itself when the
+            // decoding is unknown)
+            let target_asset_type = self
+                .decode_asset_type(asset_type)
+                .await
+                .map(|(addr, _epoch)| make_asset_type(target_epoch, &addr))
+                .unwrap_or(asset_type);
+            let at_target_asset_type = asset_type == target_asset_type;
+            if let (Some((conv, _wit, usage)), false) = (
+                self.query_allowed_conversion(
+                    asset_type,
+                    &mut conversions,
+                )
+                .await,
+                at_target_asset_type,
+            ) {
+                println!(
+                    "converting current asset type to latest asset type..."
+                );
+                // Not at the target asset type, not at the latest asset type.
+                // Apply conversion to get from current asset type to the latest
+                // asset type.
+                Self::apply_conversion(
+                    conv.clone(),
+                    asset_type,
+                    value,
+                    usage,
+                    &mut input,
+                    &mut output,
+                );
+            } else if let (Some((conv, _wit, usage)), false) = (
+                self.query_allowed_conversion(
+                    target_asset_type,
+                    &mut conversions,
+                )
+                .await,
+                at_target_asset_type,
+            ) {
+                println!(
+                    "converting latest asset type to target asset type..."
+                );
+                // Not at the target asset type, yes at the latest asset type.
+                // Apply inverse conversion to get from latest asset type to
+                // the target asset type.
+                // NOTE(review): the conversion is looked up under
+                // `target_asset_type` but applied at `asset_type` — confirm
+                // the AllowedConversion for the target carries a negative
+                // coefficient for the current asset type as apply_conversion
+                // expects.
+                Self::apply_conversion(
+                    conv.clone(),
+                    asset_type,
+                    value,
+                    usage,
+                    &mut input,
+                    &mut output,
+                );
+            } else {
+                // At the target asset type. Then move component over to output.
+                let comp = input.project(asset_type);
+                output += &comp;
+                // Strike from input to avoid repeating computation
+                input -= comp;
+            }
+        }
+        (output, conversions)
+    }
+
+    /// Collect enough unspent notes in this context to exceed the given amount
+    /// of the specified asset type. Return the total value accumulated plus
+    /// notes and the corresponding diversifiers/merkle paths that were used to
+    /// achieve the total value.
+    pub async fn collect_unspent_notes(
+        &mut self,
+        vk: &ViewingKey,
+        target: Amount,
+        target_epoch: Epoch,
+    ) -> (
+        Amount,
+        Vec<(Diversifier, Note, MerklePath<Node>)>,
+        Conversions,
+    ) {
+        // Establish connection with which to do exchange rate queries
+        let mut conversions = HashMap::new();
+        let mut val_acc = Amount::zero();
+        let mut notes = Vec::new();
+        // Retrieve the notes that can be spent by this key.
+        // NOTE(review): iteration order over this set of note positions is
+        // unspecified, so which notes get selected may vary between runs.
+        if let Some(avail_notes) = self.pos_map.get(vk).cloned() {
+            for note_idx in &avail_notes {
+                // No more transaction inputs are required once we have met
+                // the target amount
+                if val_acc >= target {
+                    break;
+                }
+                // Spent notes cannot contribute a new transaction's pool
+                if self.spents.contains(note_idx) {
+                    continue;
+                }
+                // Get note, merkle path, diversifier associated with this ID
+                let note = *self.note_map.get(note_idx).unwrap();
+
+                // The amount contributed by this note before conversion
+                let pre_contr = Amount::from_pair(note.asset_type, note.value)
+                    .expect("received note has invalid value or asset type");
+                // Exchange speculatively on a clone of `conversions` so that
+                // a rejected note leaves the committed usages untouched
+                let (contr, proposed_convs) = self
+                    .compute_exchanged_amount(
+                        pre_contr,
+                        target_epoch,
+                        conversions.clone(),
+                    )
+                    .await;
+
+                // Use this note only if it brings us closer to our target
+                if is_amount_required(
+                    val_acc.clone(),
+                    target.clone(),
+                    contr.clone(),
+                ) {
+                    // Be sure to record the conversions used in computing
+                    // accumulated value
+                    val_acc += contr;
+                    // Commit the conversions that were used to exchange
+                    conversions = proposed_convs;
+                    let merkle_path =
+                        self.witness_map.get(note_idx).unwrap().path().unwrap();
+                    let diversifier = self.div_map.get(note_idx).unwrap();
+                    // Commit this note to our transaction
+                    notes.push((*diversifier, note, merkle_path));
+                }
+            }
+        }
+        (val_acc, notes, conversions)
+    }
+
+    /// Compute the combined value of the output notes of the transaction pinned
+    /// at the given payment address. This computation uses the supplied viewing
+    /// keys to try to decrypt the output notes. If no transaction is pinned at
+    /// the given payment address fails with
+    /// `PinnedBalanceError::NoTransactionPinned`.
+    pub async fn compute_pinned_balance(
+        utils: &U,
+        owner: PaymentAddress,
+        viewing_key: &ViewingKey,
+    ) -> Result<(Amount, Epoch), PinnedBalanceError> {
+        // Check that the supplied viewing key corresponds to given payment
+        // address
+        let counter_owner = viewing_key.to_payment_address(
+            *masp_primitives::primitives::PaymentAddress::diversifier(
+                &owner.into(),
+            ),
+        );
+        match counter_owner {
+            Some(counter_owner) if counter_owner == owner.into() => {}
+            _ => return Err(PinnedBalanceError::InvalidViewingKey),
+        }
+        // The address of the MASP account
+        let masp_addr = masp();
+        // Construct the key for where the transaction ID would be stored
+        let pin_key = Key::from(masp_addr.to_db_key())
+            .push(&(PIN_KEY_PREFIX.to_owned() + &owner.hash()))
+            .expect("Cannot obtain a storage key");
+        // Obtain the transaction pointer at the key
+        let txidx = utils.query_storage_value::<u64>(&pin_key)
+            .await
+            .ok_or(PinnedBalanceError::NoTransactionPinned)?;
+        // Construct the key for where the pinned transaction is stored
+        let tx_key = Key::from(masp_addr.to_db_key())
+            .push(&(TX_KEY_PREFIX.to_owned() + &txidx.to_string()))
+            .expect("Cannot obtain a storage key");
+        // Obtain the pointed to transaction
+        let (tx_epoch, _tx_height, _tx_index, tx) =
+            utils.query_storage_value::<(Epoch, BlockHeight, TxIndex, Transfer)>(
+                &tx_key,
+            )
+            .await
+            .expect("Ill-formed epoch, transaction pair");
+        // Accumulate the combined output note value into this Amount
+        let mut val_acc = Amount::zero();
+        let tx = tx
+            .shielded
+            .expect("Pinned Transfers should have shielded part");
+        for so in &tx.shielded_outputs {
+            // Let's try to see if our viewing key can decrypt current note
+            // (the height argument is hard-coded to 0, as elsewhere in this
+            // file — presumably selecting pre-ZIP 212 handling; confirm)
+            let decres = try_sapling_note_decryption::<TestNetwork>(
+                0,
+                &viewing_key.ivk().0,
+                &so.ephemeral_key.into_subgroup().unwrap(),
+                &so.cmu,
+                &so.enc_ciphertext,
+            );
+            match decres {
+                // So the given viewing key does decrypt this current note...
+                Some((note, pa, _memo)) if pa == owner.into() => {
+                    val_acc +=
+                        Amount::from_nonnegative(note.asset_type, note.value)
+                            .expect(
+                                "found note with invalid value or asset type",
+                            );
+                    // NOTE(review): this `break` means only the first output
+                    // decryptable to `owner` is counted, although the doc
+                    // comment says "combined value" — confirm pinned
+                    // transactions never carry multiple outputs to the same
+                    // address.
+                    break;
+                }
+                _ => {}
+            }
+        }
+        Ok((val_acc, tx_epoch))
+    }
+
+    /// Compute the combined value of the output notes of the pinned transaction
+    /// at the given payment address if there's any. The asset types may be from
+    /// the epoch of the transaction or even before, so exchange all these
+    /// amounts to the epoch of the transaction in order to get the value that
+    /// would have been displayed in the epoch of the transaction.
+    pub async fn compute_exchanged_pinned_balance(
+        &mut self,
+        owner: PaymentAddress,
+        viewing_key: &ViewingKey,
+    ) -> Result<(Amount, Epoch), PinnedBalanceError> {
+        // Obtain the balance that will be exchanged
+        let (amt, ep) =
+            Self::compute_pinned_balance(&self.utils, owner, viewing_key)
+                .await?;
+        // Exchange the balance into asset types of the transaction's epoch;
+        // the conversions used along the way are not needed by the caller
+        let (exchanged, _conversions) =
+            self.compute_exchanged_amount(amt, ep, HashMap::new()).await;
+        Ok((exchanged, ep))
+    }
+
+    /// Convert an amount whose units are AssetTypes to one whose units are
+    /// Addresses that they decode to. All asset types not corresponding to
+    /// the given epoch are ignored.
+    pub async fn decode_amount(
+        &mut self,
+        amt: Amount,
+        target_epoch: Epoch,
+    ) -> Amount<Address> {
+        let mut res = Amount::zero();
+        for (asset_type, val) in amt.components() {
+            // Look up (and cache) the decoding of this asset type
+            let decoded = self.decode_asset_type(*asset_type).await;
+            // Skip components that do not decode or whose timestamp differs
+            // from the target epoch
+            if let Some((addr, epoch)) = decoded {
+                if epoch == target_epoch {
+                    res += &Amount::from_pair(addr, *val).unwrap();
+                }
+            }
+        }
+        res
+    }
+
+    /// Convert an amount whose units are AssetTypes to one whose units are
+    /// Addresses that they decode to.
+    pub async fn decode_all_amounts(
+        &mut self,
+        amt: Amount,
+    ) -> Amount<(Address, Epoch)> {
+        let mut res = Amount::zero();
+        for (asset_type, val) in amt.components() {
+            // Every decodable component counts here, regardless of its epoch;
+            // undecodable ones are silently dropped
+            if let Some(unit) = self.decode_asset_type(*asset_type).await {
+                res += &Amount::from_pair(unit, *val).unwrap();
+            }
+        }
+        res
+    }
+
+    /// Make shielded components to embed within a Transfer object. If no shielded
+    /// payment address nor spending key is specified, then no shielded components
+    /// are produced. Otherwise a transaction containing nullifiers and/or note
+    /// commitments are produced. Dummy transparent UTXOs are sometimes used to make
+    /// transactions balanced, but it is understood that transparent account changes
+    /// are effected only by the amounts and signatures specified by the containing
+    /// Transfer object.
+    #[cfg(feature = "masp-tx-gen")]
+    pub async fn gen_shielded_transfer(
+        &mut self,
+        source: TransferSource,
+        target: TransferTarget,
+        args_amount: token::Amount,
+        token: Address,
+        fee_amount: token::Amount,
+        fee_token: Address,
+        shielded_gas: bool,
+    ) -> Result<Option<(Transaction, TransactionMetadata)>, builder::Error> {
+        // No shielded components are needed when neither source nor destination
+        // are shielded
+        let spending_key = source.spending_key();
+        let payment_address = target.payment_address();
+        if spending_key.is_none() && payment_address.is_none() {
+            return Ok(None);
+        }
+        // We want to fund our transaction solely from supplied spending key
+        let spending_key = spending_key.map(|x| x.into());
+        let spending_keys: Vec<_> = spending_key.into_iter().collect();
+        // Load the current shielded context given the spending key we possess
+        let _ = self.load();
+        self.fetch(&spending_keys, &[])
+            .await;
+        // Save the update state so that future fetches can be short-circuited
+        let _ = self.save();
+        // Determine epoch in which to submit potential shielded transaction
+        let epoch = self.utils.query_epoch().await;
+        // Context required for storing which notes are in the source's possesion
+        let consensus_branch_id = BranchId::Sapling;
+        let amt: u64 = args_amount.into();
+        let memo: Option<Memo> = None;
+
+        // Now we build up the transaction within this object
+        let mut builder = Builder::<TestNetwork, OsRng>::new(0u32);
+        // Convert transaction amount into MASP types
+        let (asset_type, amount) = convert_amount(epoch, &token, args_amount);
+
+        // Transactions with transparent input and shielded output
+        // may be affected if constructed close to epoch boundary
+        let mut epoch_sensitive: bool = false;
+        // If there are shielded inputs
+        if let Some(sk) = spending_key {
+            // Transaction fees need to match the amount in the wrapper Transfer
+            // when MASP source is used
+            let (_, fee) =
+                convert_amount(epoch, &fee_token, fee_amount);
+            builder.set_fee(fee.clone())?;
+            // If the gas is coming from the shielded pool, then our shielded inputs
+            // must also cover the gas fee
+            let required_amt = if shielded_gas { amount + fee } else { amount };
+            // Locate unspent notes that can help us meet the transaction amount
+            let (_, unspent_notes, used_convs) = self
+                .collect_unspent_notes(
+                    &to_viewing_key(&sk).vk,
+                    required_amt,
+                    epoch,
+                )
+                .await;
+            // Commit the notes found to our transaction
+            for (diversifier, note, merkle_path) in unspent_notes {
+                builder.add_sapling_spend(sk, diversifier, note, merkle_path)?;
+            }
+            // Commit the conversion notes used during summation
+            for (conv, wit, value) in used_convs.values() {
+                if *value > 0 {
+                    builder.add_convert(
+                        conv.clone(),
+                        *value as u64,
+                        wit.clone(),
+                    )?;
+                }
+            }
+        } else {
+            // No transfer fees come from the shielded transaction for non-MASP
+            // sources
+            builder.set_fee(Amount::zero())?;
+            // We add a dummy UTXO to our transaction, but only the source of the
+            // parent Transfer object is used to validate fund availability
+            let secp_sk =
+                secp256k1::SecretKey::from_slice(&[0xcd; 32]).expect("secret key");
+            let secp_ctx = secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
+            let secp_pk =
+                secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
+                .serialize();
+            let hash =
+                ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
+            let script = TransparentAddress::PublicKey(hash.into()).script();
+            epoch_sensitive = true;
+            builder.add_transparent_input(
+                secp_sk,
+                OutPoint::new([0u8; 32], 0),
+                TxOut {
+                    asset_type,
+                    value: amt,
+                    script_pubkey: script,
+                },
+            )?;
+        }
+        // Now handle the outputs of this transaction
+        // If there is a shielded output
+        if let Some(pa) = payment_address {
+            let ovk_opt = spending_key.map(|x| x.expsk.ovk);
+            builder.add_sapling_output(
+                ovk_opt,
+                pa.into(),
+                asset_type,
+                amt,
+                memo.clone(),
+            )?;
+        } else {
+            epoch_sensitive = false;
+            // Embed the transparent target address into the shielded transaction so
+            // that it can be signed
+            let target_enc = target
+                .address()
+                .expect("target address should be transparent")
+                .try_to_vec()
+                .expect("target address encoding");
+            let hash = ripemd160::Ripemd160::digest(&sha2::Sha256::digest(
+                target_enc.as_ref(),
+            ));
+            builder.add_transparent_output(
+                &TransparentAddress::PublicKey(hash.into()),
+                asset_type,
+                amt,
+            )?;
+        }
+        let prover = self.utils.local_tx_prover();
+        // Build and return the constructed transaction
+        let mut tx = builder.build(consensus_branch_id, &prover);
+
+        if epoch_sensitive {
+            let new_epoch = self.utils.query_epoch().await;
+
+            // If epoch has changed, recalculate shielded outputs to match new epoch
+            if new_epoch != epoch {
+                // Hack: build new shielded transfer with updated outputs
+                let mut replay_builder = Builder::<TestNetwork, OsRng>::new(0u32);
+                replay_builder.set_fee(Amount::zero())?;
+                let ovk_opt = spending_key.map(|x| x.expsk.ovk);
+                let (new_asset_type, _) =
+                    convert_amount(new_epoch, &token, args_amount);
+                replay_builder.add_sapling_output(
+                    ovk_opt,
+                    payment_address.unwrap().into(),
+                    new_asset_type,
+                    amt,
+                    memo,
+                )?;
+
+                let secp_sk = secp256k1::SecretKey::from_slice(&[0xcd; 32])
+                    .expect("secret key");
+                let secp_ctx =
+                    secp256k1::Secp256k1::<secp256k1::SignOnly>::gen_new();
+                let secp_pk =
+                    secp256k1::PublicKey::from_secret_key(&secp_ctx, &secp_sk)
+                    .serialize();
+                let hash =
+                    ripemd160::Ripemd160::digest(&sha2::Sha256::digest(&secp_pk));
+                let script = TransparentAddress::PublicKey(hash.into()).script();
+                replay_builder.add_transparent_input(
+                    secp_sk,
+                    OutPoint::new([0u8; 32], 0),
+                    TxOut {
+                        asset_type: new_asset_type,
+                        value: amt,
+                        script_pubkey: script,
+                    },
+                )?;
+
+                let (replay_tx, _) =
+                    replay_builder.build(consensus_branch_id, &prover)?;
+                tx = tx.map(|(t, tm)| {
+                    let mut temp = t.deref().clone();
+                    temp.shielded_outputs = replay_tx.shielded_outputs.clone();
+                    temp.value_balance = temp.value_balance.reject(asset_type)
+                        - Amount::from_pair(new_asset_type, amt).unwrap();
+                    (temp.freeze().unwrap(), tm)
+                });
+            }
+        }
+
+        tx.map(Some)
+    }
+}
+
+/// Make the MASP asset type corresponding to the given token address and
+/// epoch. The epoch is encoded into the asset type alongside the token, so
+/// the same token yields a distinct asset type in each epoch.
+///
+/// Panics if the (token, epoch) pair fails to serialize or if the derived
+/// identifier is not a valid asset type.
+fn make_asset_type(epoch: Epoch, token: &Address) -> AssetType {
+    // Timestamp the chosen token with the current epoch
+    let token_bytes = (token, epoch.0)
+        .try_to_vec()
+        .expect("token should serialize");
+    // Generate the unique asset identifier from the unique token address
+    AssetType::new(token_bytes.as_ref()).expect("unable to create asset type")
+}
+
+/// Convert a Namada token amount and token address to their MASP
+/// equivalents: the epoch-specific `AssetType` and a MASP `Amount`
+/// denominated in that asset type.
+///
+/// Panics if the value cannot be represented as a nonnegative MASP amount
+/// for the derived asset type.
+fn convert_amount(
+    epoch: Epoch,
+    token: &Address,
+    val: token::Amount,
+) -> (AssetType, Amount) {
+    let asset_type = make_asset_type(epoch, token);
+    // Combine the value and unit into one amount
+    let amount = Amount::from_nonnegative(asset_type, u64::from(val))
+        .expect("invalid value for amount");
+    (asset_type, amount)
+}
diff --git a/tests/src/e2e/ledger_tests.rs b/tests/src/e2e/ledger_tests.rs
index 71ea121c4e..f16fe1abcb 100644
--- a/tests/src/e2e/ledger_tests.rs
+++ b/tests/src/e2e/ledger_tests.rs
@@ -21,7 +21,7 @@ use color_eyre::eyre::Result;
 use data_encoding::HEXLOWER;
 use namada::types::address::{btc, eth, masp_rewards, Address};
 use namada::types::token;
-use namada_apps::client::tx::ShieldedContext;
+use namada_apps::client::tx::CLIShieldedUtils;
 use namada_apps::config::genesis::genesis_config::{
     GenesisConfig, ParametersConfig, PosParamsConfig,
 };
@@ -477,7 +477,7 @@ fn ledger_txs_and_queries() -> Result<()> {
 #[test]
 fn masp_txs_and_queries() -> Result<()> {
     // Download the shielded pool parameters before starting node
-    let _ = ShieldedContext::new(PathBuf::new());
+    let _ = CLIShieldedUtils::new(PathBuf::new());
     // Lengthen epoch to ensure that a transaction can be constructed and
     // submitted within the same block. Necessary to ensure that conversion is
     // not invalidated.
@@ -746,7 +746,7 @@ fn masp_txs_and_queries() -> Result<()> {
 #[test]
 fn masp_pinned_txs() -> Result<()> {
     // Download the shielded pool parameters before starting node
-    let _ = ShieldedContext::new(PathBuf::new());
+    let _ = CLIShieldedUtils::new(PathBuf::new());
     // Lengthen epoch to ensure that a transaction can be constructed and
     // submitted within the same block. Necessary to ensure that conversion is
     // not invalidated.
@@ -906,7 +906,7 @@ fn masp_pinned_txs() -> Result<()> {
 #[test]
 fn masp_incentives() -> Result<()> {
     // Download the shielded pool parameters before starting node
-    let _ = ShieldedContext::new(PathBuf::new());
+    let _ = CLIShieldedUtils::new(PathBuf::new());
     // Lengthen epoch to ensure that a transaction can be constructed and
     // submitted within the same block. Necessary to ensure that conversion is
     // not invalidated.