Skip to content

Commit

Permalink
Merge branch 'bat/fix/migration-e2e' (#3481)
Browse files Browse the repository at this point in the history
Fixed broken migration e2e test
  • Loading branch information
brentstone authored Jul 5, 2024
2 parents 8479d38 + f74c745 commit 22a4839
Show file tree
Hide file tree
Showing 5 changed files with 11 additions and 132 deletions.
2 changes: 2 additions & 0 deletions .changelog/unreleased/ci/3481-fix-migration-e2e.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
* Fixed a broken e2e test for db migrations. Added it to the list of e2e tests to be run by the CI.
([\#3481](https://github.com/anoma/namada/pull/3481))
1 change: 1 addition & 0 deletions .github/workflows/scripts/e2e.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
"e2e::ledger_tests::run_ledger": 5,
"e2e::ledger_tests::run_ledger_load_state_and_reset": 23,
"e2e::ledger_tests::test_namada_shuts_down_if_tendermint_dies": 2,
"e2e::ledger_tests::test_namada_db_migration": 30,
"e2e::ledger_tests::test_genesis_validators": 14,
"e2e::ledger_tests::test_node_connectivity_and_consensus": 28,
"e2e::ledger_tests::test_epoch_sleep": 12,
Expand Down
6 changes: 5 additions & 1 deletion crates/tests/src/e2e/ledger_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -378,6 +378,8 @@ fn test_db_migration() -> Result<()> {
"update-db",
"--path",
migrations_json_path.to_string_lossy().as_ref(),
"--block-height",
"2",
],
Some(30),
)?;
Expand All @@ -394,7 +396,9 @@ fn test_db_migration() -> Result<()> {
&[
"balance",
"--owner",
"tnam1q9rhgyv3ydq0zu3whnftvllqnvhvhm270qxay5tn"
"tnam1q9rhgyv3ydq0zu3whnftvllqnvhvhm270qxay5tn",
"--token",
"nam"
],
Some(20),
)?;
Expand Down
132 changes: 2 additions & 130 deletions examples/make-db-migration.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,13 @@
use std::collections::BTreeMap;

use borsh::{BorshDeserialize, BorshSerialize};
use data_encoding::{HEXLOWER, HEXUPPER};
use namada_apps_lib::wasm_loader::read_wasm;
use namada_macros::BorshDeserializer;
use namada_parameters::storage;
use namada_sdk::address::Address;
use namada_sdk::hash::Hash as CodeHash;
use namada_sdk::masp_primitives::asset_type::AssetType;
use namada_sdk::masp_primitives::merkle_tree::FrozenCommitmentTree;
use namada_sdk::masp_primitives::sapling;
use namada_sdk::migrations;
use namada_sdk::storage::{DbColFam, Key};
use namada_shielded_token::storage_key::masp_token_map_key;
use namada_sdk::storage::DbColFam;
use namada_shielded_token::{ConversionLeaf, ConversionState};
use namada_trans_token::storage_key::{balance_key, minted_balance_key};
use namada_trans_token::Amount;
Expand Down Expand Up @@ -78,128 +73,5 @@ fn example() {
}

fn main() {
// Generate the SE migration file (`se_migration` writes `migrations.json`
// and then invokes `example()` as its final step).
se_migration()
}

// The vp_user code hash currently deployed on the SE network that this
// migration replaces (uppercase hex of the old wasm hash).
const REMOVED_HASH: &str =
"129EE7BEE68B02BFAE638DA2A634B8ECBFFA2CB3F46CFA8E172BAF009627EC78";
// Builds a `migrations.json` file describing a DB migration for the SE:
// swaps the deployed `vp_user.wasm` (code, name/hash/len keys, allowlist,
// account VP references) for a freshly hashed local build, and rewrites the
// MASP conversion state read from `conversion_state.txt`.
//
// NOTE(review): panics (via `expect`/`unwrap`) on any missing file or
// malformed input — acceptable for a one-off example binary, but every
// failure here aborts without context.
fn se_migration() {
// Load the new vp_user wasm from the local `wasm` directory and hash it;
// this hash is written everywhere the old hash appeared.
let wasm_path = "wasm";
let bytes = read_wasm(wasm_path, "vp_user.wasm").expect("bingbong");
let vp_hash = CodeHash::sha256(&bytes);

// Point every established account's VP key at the new hash. The pattern
// matches subspace keys of the form `#tnam.../?` (account VP storage).
let account_vp_str = "#tnam[a-z,0-9]*\\/\\?".to_string();
let accounts_update = migrations::DbUpdateType::RepeatAdd {
pattern: account_vp_str,
cf: DbColFam::SUBSPACE,
value: migrations::UpdateValue::raw(vp_hash),
force: false,
};

// Re-register the wasm under `wasm/name/vp_user.wasm` and
// `wasm/hash/vp_user.wasm`, both mapping to the new code hash.
let wasm_name_key = Key::wasm_code_name("vp_user.wasm".to_string());
let wasm_hash_key = Key::wasm_hash("vp_user.wasm");
let wasm_name_update = migrations::DbUpdateType::Add {
key: wasm_name_key,
cf: DbColFam::SUBSPACE,
value: migrations::UpdateValue::raw(vp_hash),
force: false,
};
let wasm_hash_update = migrations::DbUpdateType::Add {
key: wasm_hash_key,
cf: DbColFam::SUBSPACE,
value: migrations::UpdateValue::raw(vp_hash),
force: false,
};

// Store the wasm bytes themselves under `wasm/code/<new hash>`.
let code_key = Key::wasm_code(&vp_hash);
let code_update = migrations::DbUpdateType::Add {
key: code_key,
cf: DbColFam::SUBSPACE,
value: migrations::UpdateValue::raw(bytes.clone()),
force: false,
};

// Store the code length under `wasm/len/<new hash>`.
let len_key = Key::wasm_code_len(&vp_hash);
let code_len_update = migrations::DbUpdateType::Add {
key: len_key,
cf: DbColFam::SUBSPACE,
value: (bytes.len() as u64).into(),
force: false,
};

// Replace the VP allowlist parameter: keep one pre-existing hash
// (presumably another still-valid VP — TODO confirm which) and add the
// new vp_user hash, lowercase-hex encoded to match allowlist format.
let vp_allowlist_key = storage::get_vp_allowlist_storage_key();
let new_hash_str = HEXLOWER.encode(vp_hash.as_ref());
let new_vp_allowlist = vec![
"8781c170ad1e3d2bbddc308b77b7a2edda3fff3bc5d746232feec968ee4fe3cd"
.to_string(),
new_hash_str,
];
let allowlist_update = migrations::DbUpdateType::Add {
key: vp_allowlist_key,
cf: DbColFam::SUBSPACE,
value: new_vp_allowlist.into(),
force: false,
};

// Delete all `wasm/<section>/<old hash>` keys belonging to the wasm
// being replaced.
let remove_old_wasm = migrations::DbUpdateType::RepeatDelete(
format!("/wasm/[a-z]+/{}", REMOVED_HASH),
DbColFam::SUBSPACE,
);

// Reset the conversion-state token map to empty (no tokens registered).
let conversion_token_map: BTreeMap<String, Address> = BTreeMap::new();
let conversion_token_map_key = masp_token_map_key();
let conversion_state_token_map_update = migrations::DbUpdateType::Add {
key: conversion_token_map_key,
cf: DbColFam::SUBSPACE,
value: migrations::UpdateValue::wrapped(conversion_token_map),
force: false,
};

// Read the previously queried conversion state from a local dump file.
// The expected file format has the hex payload on the third line,
// prefixed with "The value in bytes is " — parsing panics otherwise.
let query_result = std::fs::read_to_string("conversion_state.txt").unwrap();
let hex_bytes = query_result.split('\n').nth(2).unwrap();
let bytes = HEXUPPER
.decode(
hex_bytes
.strip_prefix("The value in bytes is ")
.unwrap()
.trim()
.as_bytes(),
)
.unwrap();
// Borsh-decode the old state, convert it to the new layout, and force
// the write (`force: true`) since the STATE key already exists.
let old_conversion_state = ConversionState::try_from_slice(&bytes).unwrap();
let new_conversion_state: NewConversionState = old_conversion_state.into();
let conversion_state_update = migrations::DbUpdateType::Add {
key: Key::parse("conversion_state").unwrap(),
cf: DbColFam::STATE,
value: migrations::UpdateValue::force_borsh(new_conversion_state),
force: true,
};

// Collect all updates. NOTE(review): ordering here is preserved in the
// output file — presumably applied in order; confirm before reordering.
let updates = [
accounts_update,
wasm_name_update,
wasm_hash_update,
code_update,
allowlist_update,
code_len_update,
remove_old_wasm,
conversion_state_token_map_update,
conversion_state_update,
];

// Serialize the change set to `migrations.json` in the working directory.
let changes = migrations::DbChanges {
changes: updates.into_iter().collect(),
};
std::fs::write("migrations.json", serde_json::to_string(&changes).unwrap())
.unwrap();
// Also emit the separate example migration (defined above in this file).
example()
}
2 changes: 1 addition & 1 deletion examples/migration_example.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"changes":[{"Add":{"key":{"segments":[{"AddressSeg":"tnam1pyqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqej6juv"},{"AddressSeg":"tnam1qxgfw7myv4dh0qna4hq0xdg6lx77fzl7dcem8h7e"},{"StringSeg":"balance"},{"AddressSeg":"tnam1q9rhgyv3ydq0zu3whnftvllqnvhvhm270qxay5tn"}]},"value":"E47D97A2C7D7834F0907AEEEBD30CAA28CA88EBDB854398482DB2E1A49D7811A012000000080E7414914AF682C000000000000000000000000000000000000000000000000","force":false}},{"Add":{"key":{"segments":[{"AddressSeg":"tnam1pyqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqej6juv"},{"AddressSeg":"tnam1qxgfw7myv4dh0qna4hq0xdg6lx77fzl7dcem8h7e"},{"StringSeg":"balance"},{"StringSeg":"minted"}]},"value":"E47D97A2C7D7834F0907AEEEBD30CAA28CA88EBDB854398482DB2E1A49D7811A0120000000797241E2F46A0000000000000000000000000000000000000000000000000000","force":false}},{"RepeatDelete":"tnam1qyvfwdkz8zgs9n3qn9xhp8scyf8crrxwuq26r6gy"}]}
{"changes":[{"Add":{"key":{"segments":[{"AddressSeg":"tnam1pyqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqej6juv"},{"AddressSeg":"tnam1qxgfw7myv4dh0qna4hq0xdg6lx77fzl7dcem8h7e"},{"StringSeg":"balance"},{"AddressSeg":"tnam1q9rhgyv3ydq0zu3whnftvllqnvhvhm270qxay5tn"}]},"cf":"SUBSPACE","value":"E47D97A2C7D7834F0907AEEEBD30CAA28CA88EBDB854398482DB2E1A49D7811A012000000080E7414914AF682C000000000000000000000000000000000000000000000000","force":false}},{"Add":{"key":{"segments":[{"AddressSeg":"tnam1pyqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqej6juv"},{"AddressSeg":"tnam1qxgfw7myv4dh0qna4hq0xdg6lx77fzl7dcem8h7e"},{"StringSeg":"balance"},{"StringSeg":"minted"}]},"cf":"SUBSPACE","value":"E47D97A2C7D7834F0907AEEEBD30CAA28CA88EBDB854398482DB2E1A49D7811A0120000000797241E2F46A0000000000000000000000000000000000000000000000000000","force":false}},{"RepeatDelete":["tnam1qyvfwdkz8zgs9n3qn9xhp8scyf8crrxwuq26r6gy","SUBSPACE"]}]}

0 comments on commit 22a4839

Please sign in to comment.