Skip to content
This repository has been archived by the owner on Jul 27, 2022. It is now read-only.

Commit

Permalink
Merge #1513
Browse files Browse the repository at this point in the history
1513: Problem: (fix #1217) chain-abci restore_from_storage not unit tested r=tomtau a=linfeng-crypto

solution:
Because the function `restore_from_storage` is private, I added a unit test for the public function `new_with_storage`, which covers it.

Co-authored-by: linfeng <[email protected]>
  • Loading branch information
bors[bot] and linfeng-crypto authored May 1, 2020
2 parents daecdaa + 2ea188a commit e55aa53
Showing 1 changed file with 46 additions and 18 deletions.
64 changes: 46 additions & 18 deletions chain-abci/tests/abci_app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ use chain_core::tx::{
use chain_storage::buffer::Get;
use chain_storage::jellyfish::SparseMerkleProof;
use chain_storage::{
LookupItem, Storage, COL_NODE_INFO, GENESIS_APP_HASH_KEY, LAST_STATE_KEY, NUM_COLUMNS,
LookupItem, Storage, CHAIN_ID_KEY, COL_EXTRA, COL_NODE_INFO, GENESIS_APP_HASH_KEY,
LAST_STATE_KEY, NUM_COLUMNS,
};
use chain_tx_filter::BlockFilter;
use hex::decode;
Expand All @@ -58,6 +59,9 @@ use std::str::FromStr;
use std::sync::Arc;
use test_common::chain_env::ChainEnv;

/// Chain id shared by all tests in this file.
const TEST_CHAIN_ID: &str = "test-00";
/// Hex-encoded 32-byte value used as the example genesis app hash fixture.
const EXAMPLE_HASH: &str = "F5E8DFBF717082D6E9508E1A5A5C9B8EAC04A39F69C40262CB733C920DA10962";

/// Test helper: returns a fresh `MockClient` standing in for the real
/// enclave bridge (constructed with argument `0` — presumably a chain
/// hex id; confirm against `MockClient::new`).
pub fn get_enclave_bridge_mock() -> MockClient {
    MockClient::new(0)
}
Expand All @@ -75,7 +79,23 @@ fn create_db() -> Arc<dyn KeyValueDB> {
Arc::new(create(NUM_COLUMNS))
}

const TEST_CHAIN_ID: &str = "test-00";
/// Builds an in-memory test DB that already looks like a node which has run
/// before: it holds the example genesis app hash, an encoded dummy last
/// application state, and the test chain id.
fn create_db_with_state_history() -> Arc<dyn KeyValueDB> {
    let db = create_db();

    // Decode the hex fixture into a fixed-size hash buffer.
    let hash_bytes = decode(EXAMPLE_HASH).unwrap();
    let mut genesis_app_hash = [0u8; HASH_SIZE_256];
    genesis_app_hash.copy_from_slice(&hash_bytes);

    // Seed genesis hash, serialized dummy state, and chain id in one batch.
    let mut batch = db.transaction();
    batch.put(COL_NODE_INFO, GENESIS_APP_HASH_KEY, &genesis_app_hash);
    let encoded_state = get_dummy_app_state(genesis_app_hash).encode();
    batch.put(COL_NODE_INFO, LAST_STATE_KEY, &encoded_state);
    batch.put(COL_EXTRA, CHAIN_ID_KEY, TEST_CHAIN_ID.as_bytes());
    db.write(batch).unwrap();

    db
}

#[test]
fn proper_hash_and_chainid_should_be_stored() {
Expand All @@ -96,6 +116,26 @@ fn proper_hash_and_chainid_should_be_stored() {
assert_eq!(chain_id, TEST_CHAIN_ID.as_bytes());
}

/// `ChainNodeApp::new_with_storage` should pick up the previously persisted
/// state when the DB already contains one (this exercises the private
/// `restore_from_storage` path).
#[test]
fn proper_last_state_should_be_restored() {
    let db = create_db_with_state_history();
    let storage = Storage::new_db(db.clone());
    // Sanity check: the fixture really persisted a last app state.
    assert!(storage.get_last_app_state().is_some());

    let app = ChainNodeApp::new_with_storage(
        get_enclave_bridge_mock(),
        EXAMPLE_HASH,
        TEST_CHAIN_ID,
        storage,
        None,
        None,
    );

    // The restored genesis app hash must match the seeded fixture…
    let expected_hash = decode(EXAMPLE_HASH).unwrap();
    assert_eq!(expected_hash, app.storage.get_genesis_app_hash());
    // …and so must the stored chain id.
    assert_eq!(app.storage.get_stored_chain_id(), TEST_CHAIN_ID.as_bytes());
}

#[test]
#[should_panic]
fn too_long_hash_should_panic() {
Expand Down Expand Up @@ -189,25 +229,13 @@ fn get_dummy_app_state(app_hash: H256) -> ChainNodeState {
#[test]
#[should_panic]
fn previously_stored_hash_should_match() {
let db = create_db();
let example_hash = "F5E8DFBF717082D6E9508E1A5A5C9B8EAC04A39F69C40262CB733C920DA10962";
let decoded_gah = decode(example_hash).unwrap();
let mut genesis_app_hash = [0u8; HASH_SIZE_256];
genesis_app_hash.copy_from_slice(&decoded_gah[..]);
let mut inittx = db.transaction();
inittx.put(COL_NODE_INFO, GENESIS_APP_HASH_KEY, &genesis_app_hash);
inittx.put(
COL_NODE_INFO,
LAST_STATE_KEY,
&get_dummy_app_state(genesis_app_hash).encode(),
);
db.write(inittx).unwrap();
let example_hash2 = "F5E8DFBF717082D6E9508E1A5A5C9B8EAC04A39F69C40262CB733C920DA10963";
let db = create_db_with_state_history();
let error_hash = "F5E8DFBF717082D6E9508E1A5A5C9B8EAC04A39F69C40262CB733C920DA10963";
let _app = ChainNodeApp::new_with_storage(
get_enclave_bridge_mock(),
example_hash2,
error_hash,
TEST_CHAIN_ID,
Storage::new_db(db.clone()),
Storage::new_db(db),
None,
None,
);
Expand Down

0 comments on commit e55aa53

Please sign in to comment.