From 68ef326462f0b8e95a7afbc147d78d05306e3b88 Mon Sep 17 00:00:00 2001 From: 0o-de-lally <1364012+0o-de-lally@users.noreply.github.com> Date: Fri, 22 Oct 2021 14:43:18 -0400 Subject: [PATCH] Proof Zero Preimage (#778) * complete move fixtures * add tower link to genesis preimage, make preimage a fixed 1024 size * add wizard to add a tower link * patching e2e tests and tools tests * more sane fixtures managment for e2e tests * patch merge * patch fixtures helpers locations * remove duplicate fixtures helper * makefile recipe to backup 0L folder * bring back info prints * delay difficulty for stage was off. * patch globals for vdf difficulty * rename block_0.json throughout * skip add_tower when in test mode for onboarding * patch build --- Cargo.lock | 1 + Makefile | 81 +++--- config/global-constants/src/lib.rs | 9 +- .../management/genesis/src/ol_node_files.rs | 12 +- .../diem-framework/modules/0L/Globals.move | 8 +- .../modules/0L/TestFixtures.move | 3 +- .../diem-framework/modules/doc/DiemAccount.md | 75 +----- .../diem-framework/modules/doc/Globals.md | 2 +- .../diem-framework/modules/doc/Receipts.md | 43 ++- .../current/docs/modules/DiemAccount.md | 75 +----- .../artifacts/current/docs/modules/Globals.md | 2 +- .../current/docs/modules/Receipts.md | 43 ++- .../artifacts/current/modules/026_Globals.mv | Bin 1040 -> 1040 bytes .../artifacts/current/modules/036_Receipts.mv | Bin 738 -> 889 bytes .../current/modules/044_DiemAccount.mv | Bin 13435 -> 13262 bytes language/diem-framework/staged/stdlib.mv | Bin 107268 -> 107986 bytes .../diem-vm/src/diem_transaction_executor.rs | 6 +- .../src/tests/ol_minerstate_commit.rs | 2 +- .../src/tests/ol_minerstate_onboarding.todo | 2 +- .../e2e-testsuite/src/tests/ol_reconfig.rs | 2 +- .../src/tests/ol_upgrade_oracle.rs | 2 +- .../onboard_and_operator_mines.move | 2 +- .../0L/miner_state/chained_from_genesis.move | 2 +- .../0L/miner_state/chained_proof_fail.move | 4 +- .../chained_proof_success_easy.move | 4 +- .../chained_proof_success_hard.move | 4 +- .../0L/miner_state/state_not_initialized.move | 2 +- .../update_metrics_above_thresh.move | 4 +- .../update_metrics_below_thresh.move | 2 +- ol/cli/src/commands/init_cmd.rs | 6 +- ol/cli/src/node/node.rs | 2 +- .../create_account_on_chain.md | 2 +- .../devs/archived/genesis_v4_registration.md | 2 +- ol/documentation/devs/provision_debug_net.md | 6 +- .../genesis/genesis_registration.md | 85 ++++-- .../genesis/mining_after_genesis.md | 2 +- ol/documentation/ops/mining_VDF_proofs.md | 6 +- ol/fixtures/Makefile | 6 +- .../prod/alice/proof_0.json} | 0 .../prod/alice/proof_1.json} | 0 .../stage/alice/proof_0.json} | 0 .../test/alice/proof_0.json} | 0 .../test/alice/proof_1.json} | 0 .../test/bob/proof_0.json} | 0 .../test/carol/proof_0.json} | 0 .../test/dave/proof_0.json} | 0 .../test/eve/block_1.json | 0 .../test/eve/proof_0.json} | 0 ol/keys/.gitignore | 4 +- ol/onboard/.gitignore | 4 +- ol/onboard/src/commands/wizard_fork_cmd.rs | 8 +- ol/onboard/src/commands/wizard_user_cmd.rs | 8 +- ol/onboard/src/commands/wizard_val_cmd.rs | 20 +- ol/onboard/src/{home.rs => home.depr} | 0 ol/onboard/src/lib.rs | 2 +- ol/onboard/src/manifest.rs | 4 +- ol/tower/.gitignore | 4 +- ol/tower/src/backlog.rs | 8 +- ol/tower/src/commands/start_cmd.rs | 18 +- ol/tower/src/commands/zero_cmd.rs | 2 +- ol/tower/src/commit_proof.rs | 4 +- ol/tower/src/lib.rs | 5 +- ol/tower/src/preimage.rs | 106 ++++++++ ol/tower/src/{block.rs => proof.rs} | 255 ++++++------------ ol/tower/tests/config.rs | 9 +- 
ol/tower/tests/integration-simple.rs | 13 +- ol/tower/tests/integration-submit-tx.rs | 10 +- ol/types/Cargo.toml | 3 +- ol/types/src/account.rs | 24 +- ol/types/src/block.rs | 12 +- ol/types/src/config.rs | 16 +- ol/types/src/dialogue.rs | 90 ++++++- ol/types/src/fixtures.rs | 11 +- ol/types/src/genesis_proof.rs | 2 +- ol/verifiable_delay/.gitignore | 2 +- testsuite/cli/src/client_proxy.rs | 4 +- 76 files changed, 598 insertions(+), 559 deletions(-) rename ol/fixtures/{blocks/prod/alice/block_0.json => vdf_proofs/prod/alice/proof_0.json} (100%) rename ol/fixtures/{blocks/prod/alice/block_1.json => vdf_proofs/prod/alice/proof_1.json} (100%) rename ol/fixtures/{blocks/stage/alice/block_0.json => vdf_proofs/stage/alice/proof_0.json} (100%) rename ol/fixtures/{blocks/test/alice/block_0.json => vdf_proofs/test/alice/proof_0.json} (100%) rename ol/fixtures/{blocks/test/alice/block_1.json => vdf_proofs/test/alice/proof_1.json} (100%) rename ol/fixtures/{blocks/test/bob/block_0.json => vdf_proofs/test/bob/proof_0.json} (100%) rename ol/fixtures/{blocks/test/carol/block_0.json => vdf_proofs/test/carol/proof_0.json} (100%) rename ol/fixtures/{blocks/test/dave/block_0.json => vdf_proofs/test/dave/proof_0.json} (100%) rename ol/fixtures/{blocks => vdf_proofs}/test/eve/block_1.json (100%) rename ol/fixtures/{blocks/test/eve/block_0.json => vdf_proofs/test/eve/proof_0.json} (100%) rename ol/onboard/src/{home.rs => home.depr} (100%) create mode 100644 ol/tower/src/preimage.rs rename ol/tower/src/{block.rs => proof.rs} (62%) diff --git a/Cargo.lock b/Cargo.lock index 546d8c778c..b03a48b0b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5750,6 +5750,7 @@ dependencies = [ "diem-global-constants", "diem-types", "dirs 2.0.2", + "glob", "hex 0.4.3", "machine-ip", "move-core-types", diff --git a/Makefile b/Makefile index 418bb000d0..9d89b02610 100644 --- a/Makefile +++ b/Makefile @@ -133,15 +133,42 @@ mv-bin: reset: onboard val --skip-mining --upstream-peer http://167.172.248.37/ --source-path ~/libra -reset-safety: - jq -r '.["${ACC}-oper/safety_data"].value = { "epoch": 0, "last_voted_round": 0, "preferred_round": 0, "last_vote": null }' ${DATA_PATH}/key_store.json > ${DATA_PATH}/temp_key_store && mv ${DATA_PATH}/temp_key_store ${DATA_PATH}/key_store.json - backup: - cd ~ && rsync -av --exclude db/ --exclude logs/ ~/.0L ~/0L_backup_$(shell date +"%m-%d-%y") + cd ~ && rsync -av --exclude db/ --exclude logs/ ~/.0L ~/0L_backup_$(shell date +"%m-%d-%y-%T") + +confirm: + @read -p "Continue (y/n)?" 
CONT; \ + if [ "$$CONT" = "y" ]; then \ + echo "deleting...."; \ + else \ + exit 1; \ + fi \ + +danger-delete-all: + @echo THIS WILL WIPE ALL YOUR FILES in ${HOME}/.0L + @echo it will also make a backup at ${HOME}/backup_0L/ + @echo the files github_token.txt, autopay_batch.json, set_layout, ./vdf_proofs/ and ./blocks/ will be returned to ${HOME}/.0L/ + make confirm + make backup + rm -rf ${HOME}/.0L | true + mkdir ${HOME}/.0L/ + make danger-restore + +danger-restore: + cp ${HOME}/backup_0L/github_token.txt ${HOME}/.0L/ | true + cp ${HOME}/backup_0L/autopay_batch.json ${HOME}/.0L/ | true + rsync -rtv ${HOME}/backup_0L/blocks/ ${HOME}/.0L/blocks | true + rsync -rtv ${HOME}/backup_0L/vdf_proofs/ ${HOME}/.0L/vdf_proofs | true + rsync -rtv ${HOME}/backup_0L/set_layout.toml ${HOME}/.0L/ | true + + + + clear-prod-db: @echo WIPING DB + make confirm rm -rf ${DATA_PATH}/db | true reset-safety: @@ -185,22 +212,14 @@ gen-make-pull: --shared-backend ${GENESIS_REMOTE} \ --pull-request-user ${GITHUB_USER} -genesis-miner: - cargo run -p tower -- zero - gen-onboard: - cargo run -p onboard ${CARGO_ARGS} -- val --genesis-ceremony + cargo run -p onboard ${CARGO_ARGS} -- val --genesis-ceremony -ceremony: gen-fork-repo gen-onboard +gen-reset: + cargo run -p onboard ${CARGO_ARGS} -- val --genesis-ceremony --skip-mining -# cargo run -p tower ${CARGO_ARGS} -- zero - -register: -# export ACC=$(shell toml get ${DATA_PATH}/0L.toml profile.account) -# @echo Initializing from ${DATA_PATH}/0L.toml with account: -# @echo ${ACC} -# make init +gen-register: @echo the OPER initializes local accounts and submit pubkeys to github ACC=${ACC}-oper make oper-key @@ -214,10 +233,8 @@ register: @echo OPER send signed transaction with configurations for *OWNER* account ACC=${ACC}-oper OWNER=${ACC} IP=${IP} make reg -ifeq (${TEST}, y) @echo Making pull request to genesis coordination repo make gen-make-pull -endif init-test: echo ${MNEM} | head -c -1 | cargo run -p diem-genesis-tool -- init --path=${DATA_PATH} --namespace=${ACC} @@ -227,15 +244,15 @@ init: # OWNER does this # Submits proofs to shared storage add-proofs: - cargo run -p diem-genesis-tool ${CARGO_ARGS} -- mining \ - --path-to-genesis-pow ${DATA_PATH}/blocks/block_0.json \ + cargo run -p diem-genesis-tool ${CARGO_ARGS} -- mining \ + --path-to-genesis-pow ${DATA_PATH}/vdf_proofs/proof_0.json \ --path-to-account-json ${DATA_PATH}/account.json \ --shared-backend ${REMOTE} # OPER does this # Submits operator key to github, and creates local OPERATOR_ACCOUNT oper-key: - cargo run -p diem-genesis-tool ${CARGO_ARGS} -- operator-key \ + cargo run -p diem-genesis-tool ${CARGO_ARGS} -- operator-key \ --validator-backend ${LOCAL} \ --shared-backend ${REMOTE} @@ -291,7 +308,7 @@ genesis: #### NODE MANAGEMENT #### start: # run in foreground. Only for testing, use a daemon for net. - cargo run -p diem-node -- --config ${DATA_PATH}/validator.node.yaml + NODE_ENV=error cargo run -p diem-node -- --config ${DATA_PATH}/validator.node.yaml # Start a fullnode instead of a validator node start-full: @@ -327,8 +344,8 @@ ifeq (${TEST}, y) @if test -d ${DATA_PATH}; then \ cd ${DATA_PATH} && rm -rf libradb *.yaml *.blob *.json db *.toml; \ fi - @if test -d ${DATA_PATH}/blocks; then \ - rm -f ${DATA_PATH}/blocks/*.json; \ + @if test -d ${DATA_PATH}/vdf_proofs; then \ + rm -f ${DATA_PATH}/vdf_proofs/*.json; \ fi endif @@ -359,11 +376,11 @@ ifdef TEST @if test ! 
-d ${DATA_PATH}; then \ echo Creating Directories \ mkdir ${DATA_PATH}; \ - mkdir -p ${DATA_PATH}/blocks/; \ + mkdir -p ${DATA_PATH}/vdf_proofs/; \ fi - @if test -f ${DATA_PATH}/blocks/block_0.json; then \ - rm ${DATA_PATH}/blocks/block_0.json; \ + @if test -f ${DATA_PATH}/vdf_proofs/proof_0.json; then \ + rm ${DATA_PATH}/vdf_proofs/proof_0.json; \ fi @if test -f ${DATA_PATH}/0L.toml; then \ @@ -373,7 +390,7 @@ ifdef TEST # skip miner configuration with fixtures cp ./ol/fixtures/configs/${NS}.toml ${DATA_PATH}/0L.toml # skip mining proof zero with fixtures - cp ./ol/fixtures/blocks/${NODE_ENV}/${NS}/block_0.json ${DATA_PATH}/blocks/block_0.json + cp ./ol/fixtures/vdf_proofs/${NODE_ENV}/${NS}/proof_0.json ${DATA_PATH}/vdf_proofs/proof_0.json # place a mock autopay.json in root cp ./ol/fixtures/autopay/${NS}.autopay_batch.json ${DATA_PATH}/autopay_batch.json # place a mock account.json in root, used as template for onboarding @@ -486,14 +503,14 @@ clean-tags: git tag -d ${TAG} nuke-testnet: - @echo WIPING EVERYTHING but keeping: github_token.txt, autopay_batch.json, set_layout.toml, /blocks/block_0.json + @echo WIPING EVERYTHING but keeping: github_token.txt, autopay_batch.json, set_layout.toml, /vdf_proofs/proof_0.json @if test -d ${DATA_PATH}; then \ - cd ${DATA_PATH} && cp github_token.txt autopay_batch.json set_layout.toml blocks/block_0.json ~/; \ + cd ${DATA_PATH} && cp github_token.txt autopay_batch.json set_layout.toml vdf_proofs/proof_0.json ~/; \ cd ${DATA_PATH} && rm -rf *; \ cd ~ && cp github_token.txt autopay_batch.json set_layout.toml ${DATA_PATH}; \ - cd ${DATA_PATH} && mkdir blocks;\ - cd ~ && cp block_0.json ${DATA_PATH}/blocks/; \ + cd ${DATA_PATH} && mkdir vdf_proofs;\ + cd ~ && cp proof_0.json ${DATA_PATH}/vdf_proofs/; \ fi diff --git a/config/global-constants/src/lib.rs b/config/global-constants/src/lib.rs index e511b6c612..dfd7a04346 100644 --- a/config/global-constants/src/lib.rs +++ b/config/global-constants/src/lib.rs @@ -40,7 +40,6 @@ pub const VDF_SECURITY_PARAM: u16 = 512; /// Filename for 0L configs pub const CONFIG_FILE: &str = "0L.toml"; - // TODO: make this lazy static. /// Switch settings between production and testing pub fn delay_difficulty() -> u64 { @@ -48,9 +47,9 @@ pub fn delay_difficulty() -> u64 { Ok(val) => val, _ => "prod".to_string() // default to "prod" if not set }; - // must explicitly set env to prod to use production difficulty. - if node_env == "prod" { - return 120_000_000 + // test settings need to be set explicitly + if node_env == "test" { + return 100 // difficulty for test suites and on local for debugging purposes. } - return 100 // difficulty for test suites and on local for debugging purposes. + return 120_000_000 } diff --git a/config/management/genesis/src/ol_node_files.rs b/config/management/genesis/src/ol_node_files.rs index 6fbe4e1376..e8a8131a00 100644 --- a/config/management/genesis/src/ol_node_files.rs +++ b/config/management/genesis/src/ol_node_files.rs @@ -131,11 +131,11 @@ pub fn write_node_config_files( disk_storage.path = output_dir.clone().join("key_store.json"); disk_storage.namespace = Some(namespace.to_owned()); - // Write the genesis waypoint without a namespaced storage. - let mut safety_rules_storage = OnDiskStorageConfig::default(); - safety_rules_storage.set_data_dir(output_dir.clone()); - safety_rules_storage.path = output_dir.clone().join("safety_rules.json"); - safety_rules_storage.namespace = Some(namespace.to_owned()); + // // Write the genesis waypoint without a namespaced storage. 
+    // let mut safety_rules_storage = OnDiskStorageConfig::default();
+    // safety_rules_storage.set_data_dir(output_dir.clone());
+    // safety_rules_storage.path = output_dir.clone().join("safety_rules.json");
+    // safety_rules_storage.namespace = Some(namespace.to_owned());
 
     // Get node configs template
     let mut config = if *fullnode_only {
@@ -181,7 +181,7 @@ pub fn write_node_config_files(
         c.execution.genesis_file_location = genesis_path.clone();
         c.consensus.safety_rules.service = SafetyRulesService::Thread;
-        c.consensus.safety_rules.backend = SecureBackend::OnDiskStorage(safety_rules_storage);
+        c.consensus.safety_rules.backend = SecureBackend::OnDiskStorage(disk_storage.clone());
         c
     };
 
diff --git a/language/diem-framework/modules/0L/Globals.move b/language/diem-framework/modules/0L/Globals.move
index 1d87a7ca66..61199dc414 100644
--- a/language/diem-framework/modules/0L/Globals.move
+++ b/language/diem-framework/modules/0L/Globals.move
@@ -98,17 +98,17 @@ module Globals {
     if (StagingNet::is_staging_net()) {
       return GlobalConstants {
-        epoch_length: 60 * 20, // 20 mins, enough for a hard miner proof.
+        epoch_length: 60 * 40, // 40 mins, enough for a hard miner proof.
         max_validators_per_set: 100,
         subsidy_ceiling_gas: 8640000 * coin_scale,
-        vdf_difficulty: 5000000,
+        vdf_difficulty: 120000000,
         epoch_mining_thres_lower: 1,
         epoch_mining_thres_upper: 72, // upper bound enforced at 20 mins per proof.
         epoch_slow_wallet_unlock: 10000000,
       }
     } else {
       return GlobalConstants {
-        epoch_length: 60 * 60 * 24, // approx 24 hours at 1.4 blocks/sec
+        epoch_length: 60 * 60 * 24, // approx 24 hours at 1.4 vdf_proofs/sec
         max_validators_per_set: 100, // max expected for BFT limits.
         // See DiemVMConfig for gas constants:
         // Target max gas units per transaction 100000000
@@ -116,7 +116,7 @@ module Globals {
         // target transaction per sec max gas: 20
         // uses "scaled representation", since there are no decimals.
         subsidy_ceiling_gas: 8640000 * coin_scale, // subsidy amount assumes 24 hour epoch lengths. Also needs to be adjusted for coin_scale the onchain representation of human readable value.
-        vdf_difficulty: 5000000, // FYI approx 10 mins per proof on 2020 macbook pro 2.5 ghz quadcore
+        vdf_difficulty: 120000000, // FYI approx 30 mins per proof on 2020 macbook pro 2.5 ghz quadcore
         epoch_mining_thres_lower: 7, // NOTE: bootstrapping, allowance for operator error.
         epoch_mining_thres_upper: 72, // upper bound enforced at 20 mins per proof.
         epoch_slow_wallet_unlock: 1000 * coin_scale, // approx 10 years for largest accounts in genesis.
diff --git a/language/diem-framework/modules/0L/TestFixtures.move b/language/diem-framework/modules/0L/TestFixtures.move index 4ee7f4a1da..fd9a7cafcf 100644 --- a/language/diem-framework/modules/0L/TestFixtures.move +++ b/language/diem-framework/modules/0L/TestFixtures.move @@ -31,7 +31,6 @@ module TestFixtures{ x"0051dfa4c3341c18197b72f5e5eecc693eb56d408206c206d90f5ec7a75f833b2affb0ea7280d4513ab8351f39362d362203ff3e41882309e7900f470f0a27eeeb7b" } - //FROM: diem/fixtures/block_0.json.stage.alice public fun alice_0_easy_chal(): vector { assert(Testnet::is_testnet(), 130102014010); x"87515d94a244235a1433d7117bc0cb154c613c2f4b1e67ca8d98a542ee3f59f5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006578706572696d656e74616c6400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000074657374" @@ -42,7 +41,7 @@ module TestFixtures{ x"00639237baac2348608e1e6dbcee015a2a628780d97f865f17c6e9f99d325ecb120052b0be3b0578af20cf0c4304ab14cbe7635a4247ed4ff3fcbd488bc66eb88cb7" } - //FROM: diem/fixtures/block_1.json.stage.alice + //FROM: ol/fixtures/proof_1.json.stage.alice public fun alice_1_easy_chal(): vector { assert(Testnet::is_testnet(), 130102014010); diff --git a/language/diem-framework/modules/doc/DiemAccount.md b/language/diem-framework/modules/doc/DiemAccount.md index eeae67123c..acdc143959 100644 --- a/language/diem-framework/modules/doc/DiemAccount.md +++ b/language/diem-framework/modules/doc/DiemAccount.md @@ -45,7 +45,6 @@ before and after every transaction. 
- [Function `preburn`](#0x1_DiemAccount_preburn) - [Function `extract_withdraw_capability`](#0x1_DiemAccount_extract_withdraw_capability) - [Function `restore_withdraw_capability`](#0x1_DiemAccount_restore_withdraw_capability) -- [Function `vm_make_payment`](#0x1_DiemAccount_vm_make_payment) - [Function `process_community_wallets`](#0x1_DiemAccount_process_community_wallets) - [Function `vm_make_payment_no_limit`](#0x1_DiemAccount_vm_make_payment_no_limit) - [Function `vm_burn_from_balance`](#0x1_DiemAccount_vm_burn_from_balance) @@ -2694,79 +2693,6 @@ Return the withdraw capability to the account it originally came from - - - - -## Function `vm_make_payment` - - - -
public fun vm_make_payment<Token: store>(payer: address, payee: address, amount: u64, metadata: vector<u8>, metadata_signature: vector<u8>, vm: &signer)
-
- - - -
-Implementation - - -
public fun vm_make_payment<Token: store>(
-    payer : address,
-    payee: address,
-    amount: u64,
-    metadata: vector<u8>,
-    metadata_signature: vector<u8>,
-    vm: &signer
-) acquires DiemAccount , Balance, AccountOperationsCapability, AutopayEscrow, CumulativeDeposits, SlowWallet { //////// 0L ////////
-    if (Signer::address_of(vm) != CoreAddresses::DIEM_ROOT_ADDRESS()) return;
-    if (amount == 0) return;
-
-    // Check payee can receive funds in this currency.
-    if (!exists<Balance<Token>>(payee)) return;
-    // assert(exists<Balance<Token>>(payee), Errors::not_published(EROLE_CANT_STORE_BALANCE));
-
-    // Check there is a payer
-    if (!exists_at(payer)) return;
-    // assert(exists_at(payer), Errors::not_published(EACCOUNT));
-
-    // Check the payer is in possession of withdraw token.
-    if (delegated_withdraw_capability(payer)) return;
-
-    let (max_withdraw, withdrawal_allowed) = AccountLimits::max_withdrawal<Token>(payer);
-    if (!withdrawal_allowed) return;
-
-    // VM can extract the withdraw token.
-    let account = borrow_global_mut<DiemAccount>(payer);
-    let cap = Option::extract(&mut account.withdraw_capability);
-
-    let transfer_now =
-        if (max_withdraw >= amount) {
-            amount
-        } else {
-            max_withdraw
-        };
-    let transfer_later = amount - transfer_now;
-    if (transfer_now > 0) {
-        pay_from<Token>(
-            &cap,
-            payee,
-            transfer_now,
-            metadata,
-            metadata_signature
-        );
-    };
-
-    if (transfer_later > 0) {
-        new_escrow<Token>(vm, payer, payee, transfer_later);
-    };
-
-    restore_withdraw_capability(cap);
-}
-
- - -
@@ -3583,6 +3509,7 @@ Creating an account at address 0x0 will abort as it is a reserved address for th } ); + Receipts::init(&new_account); //////// 0L //////// // NOTE: if all accounts are to be slow set this // set_slow(&new_account); diff --git a/language/diem-framework/modules/doc/Globals.md b/language/diem-framework/modules/doc/Globals.md index d675d88989..5363b0c82c 100644 --- a/language/diem-framework/modules/doc/Globals.md +++ b/language/diem-framework/modules/doc/Globals.md @@ -338,7 +338,7 @@ Get the constants for the current network if (StagingNet::is_staging_net()) { return GlobalConstants { - epoch_length: 60 * 20, // 20 mins, enough for a hard miner proof. + epoch_length: 60 * 40, // 20 mins, enough for a hard miner proof. max_validators_per_set: 100, subsidy_ceiling_gas: 8640000 * coin_scale, vdf_difficulty: 5000000, diff --git a/language/diem-framework/modules/doc/Receipts.md b/language/diem-framework/modules/doc/Receipts.md index 9889f5569a..84f2a156e2 100644 --- a/language/diem-framework/modules/doc/Receipts.md +++ b/language/diem-framework/modules/doc/Receipts.md @@ -105,7 +105,7 @@ -
public fun write_receipt(vm: &signer, payer: address, destination: address, value: u64): (u64, u64, u64)
+
public fun write_receipt(sender: &signer, payer: address, destination: address, value: u64): (u64, u64, u64)
 
@@ -114,25 +114,35 @@ Implementation -
public fun write_receipt(vm: &signer, payer: address, destination: address, value: u64):(u64, u64, u64) acquires UserReceipts {
-    CoreAddresses::assert_vm(vm);
-    // let addr = Signer::address_of(account);
-    let r = borrow_global_mut<UserReceipts>(payer);
-    let (_, i) = Vector::index_of(&r.destination, &destination);
+
public fun write_receipt(sender: &signer, payer: address, destination: address, value: u64):(u64, u64, u64) acquires UserReceipts {
+    // TODO: make a function for user to write own receipt.
+    CoreAddresses::assert_vm(sender);
+    if (!exists<UserReceipts>(payer)) {
+      return (0, 0, 0)
+    };
 
-    let timestamp = DiemTimestamp::now_seconds();
+    let r = borrow_global_mut<UserReceipts>(payer);
+    let (found_it, i) = Vector::index_of(&r.destination, &destination);
 
-    let cumu = *Vector::borrow<u64>(&r.cumulative, i);
+    let cumu = 0;
+    if (found_it) {
+      cumu = *Vector::borrow<u64>(&r.cumulative, i);
+    };
     cumu = cumu + value;
+    Vector::push_back(&mut r.cumulative, *&cumu);
 
+    let timestamp = DiemTimestamp::now_seconds();
     Vector::push_back(&mut r.last_payment_timestamp, *&timestamp);
-    Vector::swap_remove(&mut r.last_payment_timestamp, i);
-
     Vector::push_back(&mut r.last_payment_value, *&value);
-    Vector::swap_remove(&mut r.last_payment_value, i);
 
-    Vector::push_back(&mut r.cumulative, *&cumu);
-    Vector::swap_remove(&mut r.cumulative, i);
+    if (found_it) { // put in same index if the account was already there.
+      Vector::swap_remove(&mut r.last_payment_timestamp, i);
+      Vector::swap_remove(&mut r.last_payment_value, i);
+      Vector::swap_remove(&mut r.cumulative, i);
+    } else {
+      Vector::push_back(&mut r.destination, destination);
+    };
+
     (timestamp, value, cumu)
 }
 
@@ -157,8 +167,13 @@
public fun read_receipt(account: address, destination: address):(u64, u64, u64) acquires UserReceipts {
+  if (!exists<UserReceipts>(account)) {
+    return (0, 0, 0)
+  };
+
   let r = borrow_global<UserReceipts>(account);
-  let (_, i) = Vector::index_of(&r.destination, &destination);
+  let (found_it, i) = Vector::index_of(&r.destination, &destination);
+  if (!found_it) return (0, 0, 0);
 
   let time = Vector::borrow<u64>(&r.last_payment_timestamp, i);
   let value = Vector::borrow<u64>(&r.last_payment_value, i);
diff --git a/language/diem-framework/releases/artifacts/current/docs/modules/DiemAccount.md b/language/diem-framework/releases/artifacts/current/docs/modules/DiemAccount.md
index eeae67123c..acdc143959 100644
--- a/language/diem-framework/releases/artifacts/current/docs/modules/DiemAccount.md
+++ b/language/diem-framework/releases/artifacts/current/docs/modules/DiemAccount.md
@@ -45,7 +45,6 @@ before and after every transaction.
 -  [Function `preburn`](#0x1_DiemAccount_preburn)
 -  [Function `extract_withdraw_capability`](#0x1_DiemAccount_extract_withdraw_capability)
 -  [Function `restore_withdraw_capability`](#0x1_DiemAccount_restore_withdraw_capability)
--  [Function `vm_make_payment`](#0x1_DiemAccount_vm_make_payment)
 -  [Function `process_community_wallets`](#0x1_DiemAccount_process_community_wallets)
 -  [Function `vm_make_payment_no_limit`](#0x1_DiemAccount_vm_make_payment_no_limit)
 -  [Function `vm_burn_from_balance`](#0x1_DiemAccount_vm_burn_from_balance)
@@ -2694,79 +2693,6 @@ Return the withdraw capability to the account it originally came from
 
 
 
-
-
-
-
-## Function `vm_make_payment`
-
-
-
-
public fun vm_make_payment<Token: store>(payer: address, payee: address, amount: u64, metadata: vector<u8>, metadata_signature: vector<u8>, vm: &signer)
-
- - - -
-Implementation - - -
public fun vm_make_payment<Token: store>(
-    payer : address,
-    payee: address,
-    amount: u64,
-    metadata: vector<u8>,
-    metadata_signature: vector<u8>,
-    vm: &signer
-) acquires DiemAccount , Balance, AccountOperationsCapability, AutopayEscrow, CumulativeDeposits, SlowWallet { //////// 0L ////////
-    if (Signer::address_of(vm) != CoreAddresses::DIEM_ROOT_ADDRESS()) return;
-    if (amount == 0) return;
-
-    // Check payee can receive funds in this currency.
-    if (!exists<Balance<Token>>(payee)) return;
-    // assert(exists<Balance<Token>>(payee), Errors::not_published(EROLE_CANT_STORE_BALANCE));
-
-    // Check there is a payer
-    if (!exists_at(payer)) return;
-    // assert(exists_at(payer), Errors::not_published(EACCOUNT));
-
-    // Check the payer is in possession of withdraw token.
-    if (delegated_withdraw_capability(payer)) return;
-
-    let (max_withdraw, withdrawal_allowed) = AccountLimits::max_withdrawal<Token>(payer);
-    if (!withdrawal_allowed) return;
-
-    // VM can extract the withdraw token.
-    let account = borrow_global_mut<DiemAccount>(payer);
-    let cap = Option::extract(&mut account.withdraw_capability);
-
-    let transfer_now =
-        if (max_withdraw >= amount) {
-            amount
-        } else {
-            max_withdraw
-        };
-    let transfer_later = amount - transfer_now;
-    if (transfer_now > 0) {
-        pay_from<Token>(
-            &cap,
-            payee,
-            transfer_now,
-            metadata,
-            metadata_signature
-        );
-    };
-
-    if (transfer_later > 0) {
-        new_escrow<Token>(vm, payer, payee, transfer_later);
-    };
-
-    restore_withdraw_capability(cap);
-}
-
- - -
@@ -3583,6 +3509,7 @@ Creating an account at address 0x0 will abort as it is a reserved address for th } ); + Receipts::init(&new_account); //////// 0L //////// // NOTE: if all accounts are to be slow set this // set_slow(&new_account); diff --git a/language/diem-framework/releases/artifacts/current/docs/modules/Globals.md b/language/diem-framework/releases/artifacts/current/docs/modules/Globals.md index d675d88989..5363b0c82c 100644 --- a/language/diem-framework/releases/artifacts/current/docs/modules/Globals.md +++ b/language/diem-framework/releases/artifacts/current/docs/modules/Globals.md @@ -338,7 +338,7 @@ Get the constants for the current network if (StagingNet::is_staging_net()) { return GlobalConstants { - epoch_length: 60 * 20, // 20 mins, enough for a hard miner proof. + epoch_length: 60 * 40, // 20 mins, enough for a hard miner proof. max_validators_per_set: 100, subsidy_ceiling_gas: 8640000 * coin_scale, vdf_difficulty: 5000000, diff --git a/language/diem-framework/releases/artifacts/current/docs/modules/Receipts.md b/language/diem-framework/releases/artifacts/current/docs/modules/Receipts.md index 9889f5569a..84f2a156e2 100644 --- a/language/diem-framework/releases/artifacts/current/docs/modules/Receipts.md +++ b/language/diem-framework/releases/artifacts/current/docs/modules/Receipts.md @@ -105,7 +105,7 @@ -
public fun write_receipt(vm: &signer, payer: address, destination: address, value: u64): (u64, u64, u64)
+
public fun write_receipt(sender: &signer, payer: address, destination: address, value: u64): (u64, u64, u64)
 
@@ -114,25 +114,35 @@ Implementation -
public fun write_receipt(vm: &signer, payer: address, destination: address, value: u64):(u64, u64, u64) acquires UserReceipts {
-    CoreAddresses::assert_vm(vm);
-    // let addr = Signer::address_of(account);
-    let r = borrow_global_mut<UserReceipts>(payer);
-    let (_, i) = Vector::index_of(&r.destination, &destination);
+
public fun write_receipt(sender: &signer, payer: address, destination: address, value: u64):(u64, u64, u64) acquires UserReceipts {
+    // TODO: make a function for user to write own receipt.
+    CoreAddresses::assert_vm(sender);
+    if (!exists<UserReceipts>(payer)) {
+      return (0, 0, 0)
+    };
 
-    let timestamp = DiemTimestamp::now_seconds();
+    let r = borrow_global_mut<UserReceipts>(payer);
+    let (found_it, i) = Vector::index_of(&r.destination, &destination);
 
-    let cumu = *Vector::borrow<u64>(&r.cumulative, i);
+    let cumu = 0;
+    if (found_it) {
+      cumu = *Vector::borrow<u64>(&r.cumulative, i);
+    };
     cumu = cumu + value;
+    Vector::push_back(&mut r.cumulative, *&cumu);
 
+    let timestamp = DiemTimestamp::now_seconds();
     Vector::push_back(&mut r.last_payment_timestamp, *&timestamp);
-    Vector::swap_remove(&mut r.last_payment_timestamp, i);
-
     Vector::push_back(&mut r.last_payment_value, *&value);
-    Vector::swap_remove(&mut r.last_payment_value, i);
 
-    Vector::push_back(&mut r.cumulative, *&cumu);
-    Vector::swap_remove(&mut r.cumulative, i);
+    if (found_it) { // put in same index if the account was already there.
+      Vector::swap_remove(&mut r.last_payment_timestamp, i);
+      Vector::swap_remove(&mut r.last_payment_value, i);
+      Vector::swap_remove(&mut r.cumulative, i);
+    } else {
+      Vector::push_back(&mut r.destination, destination);
+    };
+
     (timestamp, value, cumu)
 }
 
@@ -157,8 +167,13 @@
public fun read_receipt(account: address, destination: address):(u64, u64, u64) acquires UserReceipts {
+  if (!exists<UserReceipts>(account)) {
+    return (0, 0, 0)
+  };
+
   let r = borrow_global<UserReceipts>(account);
-  let (_, i) = Vector::index_of(&r.destination, &destination);
+  let (found_it, i) = Vector::index_of(&r.destination, &destination);
+  if (!found_it) return (0, 0, 0);
 
   let time = Vector::borrow<u64>(&r.last_payment_timestamp, i);
   let value = Vector::borrow<u64>(&r.last_payment_value, i);
diff --git a/language/diem-framework/releases/artifacts/current/modules/026_Globals.mv b/language/diem-framework/releases/artifacts/current/modules/026_Globals.mv
index 55b59ba13b8295ddd01b798d23e26685163690db..21a56e28f987997fae074bc660bcde5483c4a5a2 100644
GIT binary patch
delta 19
bcmbQhF@a;lS0?5JPKL?0%qo**n7;r3Jah%&

delta 19
bcmbQhF@a;lS0?5SEDV!vnN=psFn<96Ke7fa

diff --git a/language/diem-framework/releases/artifacts/current/modules/036_Receipts.mv b/language/diem-framework/releases/artifacts/current/modules/036_Receipts.mv
index bc834aad734c0c4bba5e669ea143a8262b8c1a01..3b8d3757c5acd58dff78cace402702d812ae9548 100644
GIT binary patch
delta 440
zcmZXQu}T9$5QcYVc6K+j>*b5Q4oYg2x6JW2Qc^wmX_iJ
z#L}nG2e7x&xkJ+2HuKFt^DqDI7yr1&qL=aMnSXlvE%uN4cD)T?
za07y|Mcx5poek_<2th({i-&FN9Kqsci&)FYXSa2GH@hBPoR4qfWO_fEU5%%c%h}Ts
zCi4R^J7z%4Eoc5u6Hv^aNi3Y33`$HkknjD-0b)x^P6=!Bk#njEqbugo
z7ZD3YFkrHiXgXe?3gkJiu4w`a?3(1oYarA>F7aw9Sz+pNUxy^Fq(TKN)YanKl-Z)R
tomL9kKo*ADDPcwxFdg!+8mNz<(rbw7tAURdjbP~{h&}@K$Iy^Q{{y{aCrkhU

delta 288
zcmXAiyGjH>5Jl_JUDI3L&O@68gT=^1F)|tb5F=sHRS<+(VGu(zV+Vi0+(7XM%uM|T
zjm6x|ayF?t_Y~avl`nsvH`j+#5yYkeQR9F_d
g9l=f>bzdqfd8KHjq~g+)xlypsu+{cVq3Br#XKY@$J)1d<^+oab{21^oaIsq$$8M3@EoOkUM3Md~PLnzt
zkiaxpt&A+4rsz3Jc&?scY)DTswk(BH%NdkbD6*2*Ft#ehxw;mTVddz&5bOCN9Z|>E
zD6+P0I%A{tkdLK7$_>i;x<*CUXB534JE>k5<`mtKS9GJYc~NL+Q$aB{C&1W}1ligI
zGTyA1+Z2-REg(B|FfT5uwCz+!FUcbKiB_d_sZ!ddJnvS~@0p>Pm$fN+xjMDCol`KO
z<)|h&q7{4RL`tOB+)es)#_u8DAeOtA=5V~eL~~7I?w83lS?WGIgR}JgipC!x*hvL{
z(V8YxkE(s`F-5h(#NhRQU9(5Z$i{SC#k;c^48({#LP~Zxc6}^E*NpRsEhe<3!{g+6p}Rca_(CsFnXC
zncGM+i
z>h9>)yZ0sbC9n2+y=%z3mb~j|(mO!QYrN~pJ4oIQl(>o54V1i<*zM$fn!Gz`banYK
zm5)&QRw^H$zB?&-CnYv4->_kpx0o#T6gH*1$ob=HlM8{4~Ue9xx!yDExPH%&J+Iz6}}
z-ALPlW9fr57|d?yat0l;X)eoJT%Hq5G&IvtaA!jf^ofRLH7opxA9Xd~HnS)>_2eue
zhl9q(tT<0FSC{8wjRXUY4U;1_a}Bx1HEd?rHgUjmEx8?Q#I>S+Tv)-infj(3wyDJlPU8Kw&~_XhiP7c7
zDp9T;rekODY^J$-(2=&3mzXWDtA1uoP6U6;w9-V7&DP+G{H(W0Who_U+?2f3jpKM+
z7;={zhcj$RXg0TX35QT~U03eL8gX4A3>#Dyb^!WEV*
zBK|9aSF-J`pZYI&SGu|iM-Q*88eM75jzh85A~_V~b9ofUqFko5AJJWf;xHV;HZZ9x
zPm^WJVaIYzVR9*~B9v;a7!DrE6=RdK+|H_WKV?~n_J!OPC_hUD(PBkVR!j34gugvs
z*E{6I9ap%#OT>$|)n+;oJ5psPtVB=2%E$>vI#r^I&leFA2ttW$%2`?1cqYFXHuOSA
zcF@smO;%?uTOJ}ucevo(6?zfON+`<<`L02S%Nt2uz03W^tW^c$mC}+ZEqOD>ani`5
zJjyvG3+^dQOTt>ziHHbq7tta_6>C2%G}A4?2Zi0a+lcE%#AbfZAw%GucSw+yRo5($
z$id)1Q=x4PwYCn~v9&x>99_FaNy28-vOHoe6wEL5YV$N%4qk1V>i1nGR)rc+-qMld
z3QSjZ07I2p2!hGakZL0I(9p>{Xv7`JavO`skY_HB1}`?Z8Bn~7g1Sg(Cllg2feGUt!CIsIth{BqasQo3C+38s>7AP(Uz_ocIup3QOox#SzS>+
zO14q3@{q>bYts`>gOe6%euR{`0Yzy?Q8O)84hntk)8SyOy%BZ2uf0?=baMJQYWlFM
zctTFIssZQO_AY4rxxE$Ibseop-TaPraCUagM54afkq3RgqmOP2^v)7!cV`z0Vq@nD
zS{FRtdEY1fUi?qL?BIr^0+lePH21so0n+
zf(zZZwE3{aRqNjpL}%6G`xYbcdK$q&n8|a~hzf(yWcawMJcVFj)~p)!G0aQGtWd~&
zRCs}(Zp;Z*dL5JM*MZQ1#|aN6P`9=a)gMPYRk|UZ!qV3kDc4*e{n}#e7&i~mY`I@C
z5+U?3M<;6>qqk@qs0J5hUun%c)tdNS3t5$dE1=+l54@VmCANVebzAfMwI%A8;0E#}
z3R_;Kz(xEdj~Z!o3zMhj)tMOLdN>;@gDovIfG-N|G^W6me8c7~Rur?N;=P2>0YHH5
z*K7SM6w|tqx?P5GCDWhh?DGR1!2!lJqMDd%LJr8;V7a+JC3=yIuD0``%9Ur?4KY(l(I=Khaw#Wz##J}L)vL*x`3F3e2iE)4Qx%j
z9L-)BL-r!Br;CNA=b4O<^P}k!A_=)5hQF7_(Vt~5&Xy-|)~#nN2)eXVlb$4lDTDMX
z^l4RzR}ep#z+ye)<_f5UxvE|2nwy!WIZb<xADCcXV4
zO@=G02-#eX>sxA&Y^~MWN9r^k&1gF2P3hORtft#@n(oj#cSedXuGfrRDP-(UBN@*l
z+0&qzd$lK*G$NTWkh!m*)3#r4J>X&R2b;9erCMlGTfR(3e`tKu^TSBd
zk2Fm`t2fkzkFv~E^-u9|2z?IvF@C3K05js80cL`c_!+EcU^&mj224fO3sA&0_eE_n
z^%4yFK(apvECBBpunB;~ehHWz^JQ(odj;4qkm^^plhwaM3I_dk^h)B=R?OmSFdE?N
zqSs+eZ~Ki7AATEo_dCGDO1%O8cGUWNU>1>o0PLXTo3I;Yoj<}J^d$RdZ7=&5y(Rk=
zSeKCOUtwY&$^K0n&4v#dYoY42SvghxE?fnq;yo>HA3TP!nD1+A)&~$Et5Vl!&A;F}
zj3ND@PK@*K$UyoUibiACQZzt)+Ofv(_f3DDzur&z8_344!7lK(_&b;GTn-(r9Y)7h
zsjJdcewTkO_}77dJ^06AUAce58t`uf|0JYt267V8w*mPi_@4s*cGxmFavDZL7`Y8b
zj>FPBAbkg>(3z+T{wRSRbbpP1??GwAkF5Q0R{#^$SPhVJa
z%nDzr7%+{!2lgJG&R1@Q=~Gn}^%ih?6Nof8$(y5>xCKNhIP%u$72g{^Tyq2Tg$rxj
zWuF8Ni->14wKhCb+W-UMUA0-HPt^`AyD)Hqm`ek0d01xp$=X5H!V
zAhLvmlG5PBHPj}d=|wn(!~VM3sbS=rYO`x{!i_-U+X#ef3L===DojCDfpRP$vcy*-
zY83T=3u+9*`+99N<$cIjxZyM
zzzzRiS6hCuwyt)eAH!!Y<9v5mm#OR>!Ig2fM>5XPHg%~WYOhPtGDT^sE)i4!`se~N
z2s5n}84Pv;?O5Ex4Yq5meK>~0sZ1`vpIbBmgpx#W#Yb{-Kxe}>qL>ec&t@7*`lRAY
zU1~MHqI;MSH5q=8X{wx*X_*o!hN%TXKmeX#LE8(@^ZY5DoH9}FmZ{6!1S(7jQyp>>
zu^@p{5R@}+D0BhET-Q~HaSXez5bB8QMnbrB)OC4WI3gyvx*WA!AreBWf^dbc3Ydy3
z@tER4QV?zhNghX4NjIqi9;YgN$5Zi-eVidUnr$FOEhX>t=`
zd*;d87S@d|EnzVwY#mUA9ajV49od3=4XDu;k~D%c+r}(Bm%ReZ($=9g+Kw6u2XYyh
z3=ifi(Zo%;+UiNEWKtyAGU0$gmz|`Q;*hmHlzk?*5@ntGRL$=n9Z3wcXgc;tWO)GAnFX}at^^x
z8JNv>9r~(9n@J*$31#Xt;foE;;fD?Va3WmW*a<`7^^J3pKHS)V^!3I7#>3LW{OUek
z+>42Bwgp6+Ee57d@^%alXh%Wk`0IsLxvM!^#yI*owd;e9U_Nc1?UB99bvWpIs_AYv
zu12RHY3c(Q4WYShs!!(BeZZ}(U5;T0$pvRvqXnMB?4^+vm&j!mBCc8A0M9PdX--I2
zxU60zklSL(7~;k^fya_0m9?vI*Gy3?5V5EzSSL3+E9_K*U5RU@vP6`y9=%o?-rrnX
zJ~Z2ATQmzBEVqbg!SK!I8qC9o&E-`?vRYP&Ds~*SNHvyhr=6;|Gnjz(mMRol-BRTZ
z$*lB*#~uKYEl|8w!8%&zlB7({7Q*qCV)?)4Kn%ADgbJ*Bk>_7CsaKd_T29Ai#yZKT>
zE7wu>bIdYh!USyba~P9li0BbdUudk%
z47bh8P6bj*hf9ymH9Mk3(rjse5-i9d$T*xfvL)s+PG!vO<_?xsObSl9D3_dzbIS8<
zz#MdK4qQZj_i+A^waw->p>H?fJm@udpdG~04%!W5IJz-zfNlipS-)-V^lbV_J(Fgf
zMprN^F-!3tR23epFe~+}l+~JbI0^7yMLo-#4bhy_de&kV!#n4-?$%aTVAUlMf@a_(
zy1-OGSL(Dff#pVfa7LJQBXp{(H6s-rOH>oHAd%33W?~Dth#NC-R;l3(6RBBY;O7>v
z;wGM&frz%6_UhaE1o#R8KpoS8J3*So%o@ZPqR!#@dH6wKXxt3X_?bqcv=tIcDv}NsTY~mTftH8fThS%(s;paR2{{YLd0YLx&

diff --git a/language/diem-framework/staged/stdlib.mv b/language/diem-framework/staged/stdlib.mv
index 041a63a7ef8d5c6a66d3f7b07eb02e83c1460d38..ff6737c256a58d1d39c647d9957f9aa1c6bfa979 100644
GIT binary patch
delta 4239
zcmZ`+33OD|8NUC!Z<}}DyqOy&lRYz;5W;4F7r+O&D-Z`|Q6f;PRcC(8|kEkeuve@sw5Uc03Gw1&AfB*ge
z`~UY}?tkCTUz&e-)}$2=klU@Ti;X6_hZYWhXx>rzrqCE;4lze_bncy|=RKokt@SV+
zAp0(!bROaf_Zj}N7Ju5b!AMMFoaxNKi-|X8X`C@z=Zwce&^@w<=?O;wCfztOlHk&d
z5(LQ!WHUXLlpvV8ZOk)G+qNCsHpz6EW*8hl-E@JNhGRHf{Y{q)a$=0QX>tIYncNKb
ztV!1xc(rWCH4D=*7Ckt{n0--UeW9y=XfRwivugO_u8qBg0rQf=`k}r7w|THIFg~q!
z^>z)p1BLd^wt+E;`1XOWp~Co^=qwBlb@jFnb@lZ+>$mo7-2!x5A-$!2aHy@neMe8B
zcc^Wss|Q@|J^e+WA#ZEnvbEr}cXkdB6b1*|`ZgGap8la7c2{p_;R?K(9eo1>ecNO0
zgD_~QZCg*Qf9v3;wvP7o-J-W|d)r`PeP3_qpcvfV-Vb|v`nDC|{7?VjPCW)EIb1m0
zb-0R!QROhnU2MVF*)b!Vp~OMeG!2R0>8KqC+g^J4^{0
zMru7L6xU#m?K9IxN8DK1_85#k>nAk2{R?E
zl+aR|rWAO~1e4a7b(SF_CBvFSQ<-Zy7L;gEg7}-`8+CQZ;AKVzliV=_o&rI4cp48Y
z!96_{=rZQocp=Zm3lPoY3NiSp#$rW>81e`72oVsA5Mv2Y)-=E$qt|WyPUk5yUK8K5t=*}=X6-j;C(Wa?zsj93_$7&#<~iF_~YM~4zr0UmpW`~l8Z3a+W?dCHbqqw##D@HI*8NN%qco!
z4Hz@PF%x4JW_^rCo!K_Hv|*hkt|gt!WYQ#-N}=M4igcFy2PK%47OSm=^lV0?vpE_E
z9G>VCN-(
zEmYVdm2`_0Xi6%<^Cis97b$ef1O+cpa~JBvDaMxS#f&XWLuxtB3ARFki+BlR7e|J!
zEX7Q-QglgV>#7L0DEUhjxU8(4vDForUsDOtTBVj>TdiPQM!|Ji1>0*B?8qs&UM<+32wTiMK1h5za>X>DG!LrX
z4^2_Tty2};2C49Y=O$4}_~LW*^XA(Pq?g=IdO72F5OR;z+)48}9^a+~2$y#knI=o$
zO{Z~I`5grl-z6-O4ud~V98TY>@Y4Gf)b1z!ES7kH^mE8eKNty&D(E~E3H(q&<3|Le
z(ho=UM--*%$FyiMOFydc^2aESr5{(b)%VdEpga*3;!h$Yej3q#MsR!jNyT56iC(Io
zqAUcSraWiaXQG9Eu3-8XN+5IV{!F6s*9Z#Jo~5O5m)#UL>@ORx_zg9{Bi?V-;;G+J
zvrjDddqR+F{y?j6#KfK>IKud&TB~M1@k@zSKCkSld;ySj?ZxPk#MBx%>?K+o;PEGF
zQ&KOhjnXTT9j_ANk@_?F?Zk@zqDZm7(p+vb_iv~Rmi#+yfQbDXZGt_ye=4oHh%NUo
zGB>l_zo~l*%e}5t=fV&6UpQQG2t1^V571#)>M(7GgnNW`z_btPDq>|HsWq(rYY<%H
zsItU7Mgd6WDn+Y~({L{&GL%
zU&Jn+w{jl&m-)jTXLp=Sjnf`y|Q&Lwcbjtw@~Z#boOmjd>f@YmUnc_@>jBz#$IB3$@m(rUgUqB
z{FU8%sr%f2Fiz59+=_gRli!mvKE&YgbyF6qov_12WkLM99UMeBCvxP+Ja+dk#6c&
zV+q+DcD|hzmk8$Raur5PxaaMv;TDH^hP>1>9ER@W;;9G1GXxV{u13StWIY-h!gZ!E
z*T`1sy5x~ui_wO$4x>f3qW@W%yujtICNy3mc)h2%+R^{ws$GJ^%32e^VYs@``4nM9
z9Dx^3d#9>oz0xi_VMoijX5fXe?VYlwHJI5TH>w^A-I}MN8E*14Xx6~3$u6{If{h-o
zM~oZh!i+<5GxV5_2?5KqWH&}j_~bjeiCY{~OQ_3OqNx;YE;^#}MvwA_r$>A44b6A!
zit?^>r0g>%?N{cVO*Kpj6Ts$qo*cwz@jM~qkmp5AnA+-jPF&a`CLFm9t85nu;mUg9
z2}{<)6;~ik@gOM}`f<{UW2vNa(Vccq&YNq&y7Nx(uoX{H>iv9jRh?=J#x{{89MysJ5yEXrDr+=cs;
z%LV72(Ca}gwFsJTdq1}-?{aw)sStZSsLonR*?%%!B+^CRjMKH)$RZ6|YofWa4P?~@
zxSWu+q*@#I8e#KkBBqFAYbOrXIlB=nb#2iq!*z$M!!?I4pf85^9Lk`Yes!p}cny+j
zE!^N}Ih<~>%NE1a`H((HQEQm~V6-Xl@E)aLNI6TF%_j}>R30D_7=CtIhoL|k3U<-SlJo@#(`gu(@fm(s5A=EHNSIehZ)Ou%;!*8!Fs31~yO{K)Kz
zyh`EIa19mOT&9#a1ct>XS~0}sTSt^@|8?a2+*Jsfh|C-yADdYh|#hEyv0iG`ktMT2$i_pr$jJ7BQq$lu&Msn-y7p
zFNvCZ4PBDVZE24s-PFlIKXt4DS^3VfVmQ`3J_GpN9oCs&-Cxo>pCLts8CmH~EoR~qo!+TDwpta%A6L)`d7}WpwFnI9e$?*Tn
zA5SXUOl~}?!vtWIaQi0EWz@W{mvYp)Z!a}09r#}*Z+pXW8PrQie_1WI;q8%!u*l1
za+*G@ItPkNK1TTAm}=C{Lu?W##Id4^PHh=z3zc>*STdn40>-$xi{{9y6d@H^A9?R&
zPYCQ@g{AtYF9UsFHsxvYGs`r
z6!Vx-i6Ufj{D3=MeHE#LWaK8DGOPMeYH7e8p-A9EI;hZ^l*y)b6E%7$L6owk&X+_A
z>0Cs?`KW0)uEwhP$cKqGaOA=YaO8;DNG*NA~r&s5lGDe~nZBv0aMI$0>ZLT7|r8BC`T
zn~oSpiRvRTA~aFHPR~4sF_(x$gFIIZEA7079(Vi7AItG
z4(@6v$TUyEe3nbd)q$N0@^IxErKlsYwKKqlYWpGuUHJusEH1?LC1Vhl7Afi7#R_{$
z6!sy`JL$0_2LN`+h1u5DEexA|iaRX~p4ekk48Xw`t0^JCBo2)hqF
z9Z2FQa5=-*PhpY)#Q7N*28rJflNl-exx&aV042r!1Bc26;}5BM?g54LVbHE3kw-wA
z21fkRV8gIN{_9}FuN3N!0a_9NO+bHKQN}z0%`GJUq?+eH1z{BZv|24X09PXAw}4S~
zo(YWjT|j>pF#7S|EB^B0;78GOkiZSk!z@Nh{tyWIqr&(LYD2Mq!y6@og@4AwA@n7f
zi~d_J?HCbj2$&990fz!HkS4XcsYdI#2`CCT@czU2FAOY#FS){*3g(7&D}KT=AQ{^rATpum6b
zaQSDyp=01b2~_fz8}bb
z(0>3+THJfV?dabR{Z0Na-z?7aft0wz;64V%li*gnRj6r2tGi?P$eU-FG)>E}q6{CP
z!F2eYBLap`94#`Vk!>4#H!Sa8)qCCYn>MW4xFz$*m5@I?{=-#ri=ofo`sp3e=8t?j!EUpG!z7Y0nVjP{94m)*f9bI#;`U?hxmSCZ
z7jme_jRfYDdXlD)o&I~r5@MksjwTo3=<*|<6%BTA;^=a*qjSPF&&Idr2}c)1Fu7Fd
zg32`Im=a`)E4yWnv~6%e_TpHEV>ym4*)t9ep>mGRY$_3ki$%v
z*~p>Wn$Xxdp$8nE<3IRWQSP-$yIh5KbY0Yp^VR;*&+=Qlab=BMYctMJH+85Wvd^KY
znIbop>jV{mHab8w!gOOs2?yDadJJyh2GcR+dK_K;RmW3h8@NFuKq!f{<{_JHYv{cu
zH>zkjTHy00|IXv(S#7rDQeJ0N98^_Fh#c^rIX*Ujz>eE75o2jVK@boCBA!zxP+j0jzBb|N7!f>RKbGp
zB)Gf*rQ9eY!j@^_2vepp6gS~9g}taC-0-42jH04WRC+v2rGMaa8UFg0T=|Co%I5``
z5+8k@LKG)T^jpx@?f!%l@mQyA+J-Qg6sB?@&kW0Uf6pxH*0*wXn@*@>`%z)nQ(7!i>s+(F~E8A@bYUHsg8%PrfG^tX;4<
zp=@R{J7G$>6YP*}35!*UkRs-pfR5}vRnB+g@o?%z=@|d$sbc@FQ?p^OKlXG97EtTy
z(s(!KQ7?MVX$t#PXmJY$x6QxxbPRg@q0_^y4G#VevhKT(GBw#bYM7%^mte}7Xf}~U
z7_M;ylfQj5pv7pM?5QM?Fvg6@d;Gi4RQk`JnF~98XQUoF{iczLh+9X>5$_%GV2%I!
zNL^u@%Gb$6)lUOaYHK{*BxPm@&!F_I3gf!7GgG&4)QpC=acY)&wO$>aFq-y#)0O?Q
z$3J-XM{JGk8Q%6q8=x!CpR2BKvs3Z`;6~ESMK^@a1!tIcX`aGx#gP=Yk;?K!Sdkut
zgqeem03!C5i)UAvKApH9Hq8A&0F|
z)Lf)`Q!s~2ZdC*upFN*j&}kRi1)_lM0L4>?Nmy^jO3f1V_mlGl&^aVPVFeUJ`_K%i
z$!#C)F1G3}BpQjyY%@#l9J&i8AjeZM7P;R=+?lZfs-QjN0ZfFpOdmiBU%LUO!rsiQ
z05$mf0&v&vOf`l1(3=^gaQ~%}PhTqeOanuC0~x#+E3*2*4hQuXK@74ilVPwJIy1)@
zD(K0?HK>`p`KzkiICG(i5WCDN^Je^?VmC%Ea+N#f$c43^*{4AoZpu8V!2*Vdn~CT!
zw5iS3xsDYP)UPleP~TDbVAgSLGJ*-sGwC!G#v+&#yTL3Q7fCQvw*~PMS&{09?qKht
z)79-xv12+rU=<2P`6*`TA)y1-Dt@J1c#;`uz-5D;ZQB;N6KtYh8*q?HLm3cZUI`wp
z#c2hbqEF-04(UzYvO^+d3r@MnwK*4;q?NIDy~8m%a1j{V!uglh7VFD|x`%*suT@`x
zda&!&(k38XL2thUny{sg8fq#lM?*$RnXQLua3iuTJsWQ#~
z;w+?(!RA5arCxjmUR0XufSFDcN)`@`%C1on3Z*e-%qyfbm7#{Y3u+zgAlQRU)fll(
z9)c|J+OQd6t6^h}Zwqpu5OIXW@4#Gwy;C
f)e println!("create block dir failed: {:?}", why),
diff --git a/ol/cli/src/node/node.rs b/ol/cli/src/node/node.rs
index f35cb0680a..f74c20b228 100644
--- a/ol/cli/src/node/node.rs
+++ b/ol/cli/src/node/node.rs
@@ -186,7 +186,7 @@ impl Node {
         // check to see no files are present
         let home_path = self.app_conf.workspace.node_home.clone();
 
-        let c_exist = home_path.join("blocks/block_0.json").exists()
+        let c_exist = home_path.join("vdf_proofs/proof_0.json").exists()
             && home_path.join("validator.node.yaml").exists()
             && home_path.join("key_store.json").exists();
         c_exist
diff --git a/ol/documentation/create_accounts/create_account_on_chain.md b/ol/documentation/create_accounts/create_account_on_chain.md
index 7b693fbf5d..ed6682fa14 100644
--- a/ol/documentation/create_accounts/create_account_on_chain.md
+++ b/ol/documentation/create_accounts/create_account_on_chain.md
@@ -34,7 +34,7 @@ onboard user
 
 If Alice already has a 0th proof mined for whatever reason:
 ```
-onboard user --block-zero 
+onboard user --block-zero 
 ```
 
 If you want the account.json to go into a different directory
diff --git a/ol/documentation/devs/archived/genesis_v4_registration.md b/ol/documentation/devs/archived/genesis_v4_registration.md
index 05aaac13fb..9f85856f34 100644
--- a/ol/documentation/devs/archived/genesis_v4_registration.md
+++ b/ol/documentation/devs/archived/genesis_v4_registration.md
@@ -54,7 +54,7 @@ This step will:
 - generate keys
 - save your Github Token to ~/.0L/github_token.txt
 - generate miner configs `~/.0L/miner.toml`
-- mine the block_0.json
+- mine the proof_0.json
 
 ```
 make ceremony
diff --git a/ol/documentation/devs/provision_debug_net.md b/ol/documentation/devs/provision_debug_net.md
index 26087ebfa3..9849663a75 100644
--- a/ol/documentation/devs/provision_debug_net.md
+++ b/ol/documentation/devs/provision_debug_net.md
@@ -44,9 +44,9 @@ For the debug-net simulation, the github token (when saved to `github_token.txt`
 - Edit the my_configs/Makefile with REPO_OWNER as your username, REPO_NAME, the desired name for the repo.
 
 # Generate Keys
-(For Testing: skip this step, this file block_0.json will be set later)
+(For testing: skip this step; the proof_0.json file will be set later)
 
-The 0L tower creates your initial keys, and produces a first proof of your vdf tower. You need both the proof output (block_0.json) and the mnemonic to be able to participate in genesis.
+The 0L tower creates your initial keys and produces the first proof of your VDF tower. You need both the proof output (proof_0.json) and the mnemonic to participate in genesis.
 
 4. Create keys with `make keygen`. A mnemonic will be printed on the screen.
 
@@ -64,7 +64,7 @@ Check the data in `../.OL/OL.toml`. This includes a `statement` field which is a
 
 This is the proof-of-work which is submitted for inclusion in genesis. 
 
-This step can take 10-30 minutes to complete. The output is a block_0.json. This file should be copied to your my_config/ folder. Confirm it is there.
+This step can take 10-30 minutes to complete. The output is a proof_0.json. This file should be copied to your my_config/ folder. Confirm it is there.
 
 
 # Register to genesis
diff --git a/ol/documentation/genesis/genesis_registration.md b/ol/documentation/genesis/genesis_registration.md
index 08c1cfa2e3..16b9d7b61d 100644
--- a/ol/documentation/genesis/genesis_registration.md
+++ b/ol/documentation/genesis/genesis_registration.md
@@ -12,15 +12,30 @@ You will need a few files in place before starting with genesis registration.
 
 - .0L/github_token.txt: the Github authentication token (required). [Link](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token)
 
-- .0L/blocks/block_0.json of a prexisting Delay tower (optional - you can always use the tools to start a new tower)
+- .0L/vdf_proofs/proof_0.json of a preexisting Delay tower (optional - you can always use the tools to start a new tower)
 - .0L/autopay_batch.json: autopay instructions to include in registration profile (optional)
 
-If you don't already have a mnemonic and block_0.json, see instructions to generate are below.
+If you don't already have a mnemonic and proof_0.json, see the instructions below for generating them.
 
 Then using the makefile helpers you can register as such:
 
 ```
-GITHUB_USER= make ceremony register
+# fork the genesis registration repo
+GITHUB_USER= make gen-fork-repo
+
+# mine your first proof, and config your environment.
+GITHUB_USER= make gen-onboard
+
+# submit registration information to your branch
+GITHUB_USER= make gen-register
+
+# make a pull request to main branch.
+GITHUB_USER= make gen-make-pull
+```
+
+If you hit any errors in your config but don't want to mine a new proof, do this:
+```
+GITHUB_USER= make gen-reset
 ```
 
 ## infrastructure
@@ -35,7 +50,7 @@ Tools are provided to a) fork the GENESIS_REPO b) write registration info ro CAN
 
 The GENESIS_REPO coordinator then has the task of manually approving all PRs.
 
-# Warning - Don't lose your Tower
+# Warning - Don't lose your old Tower
 
 If you have a Delay Tower on a node: you should back up the proofs. You will want these for your identity on a new chain.
 
@@ -43,6 +58,20 @@ If you have a Delay Tower on a node: you should back up the proofs. You will wan
 tar -zcvf my-tower.tar.gz ~/.0L/blocks/
 ```
 
+# Start from a clean slate
+You'll want a fresh ~/.0L/ folder containing only your old proofs (.0L/blocks/), your github_token.txt, and your autopay_batch.json.
+
+Make sure all of your ~/.0L/ files are backed up first.
+
+In your ~/.0L/ folder you will want to see:
+- /blocks/ (your legacy proofs from another tower)
+- /github_token.txt
+- /autopay_batch.json
+
+There is a DANGEROUS helper that will do this for you: 1) back up the files, 2) wipe ~/.0L, and 3) sync your blocks and token back. You will be prompted for y/n at each step.
+```
+make danger-delete-all
+```
 # Registration
 
 Have these things ready:
@@ -50,13 +79,6 @@ Have these things ready:
 - A fun statement
 - The static IP address of your node.
 
-Assuming you have a github token, and the binaries installed, you should be able to complete registration with three steps:
-```
-onboard keygen // creates new keys, and initializes a miner.toml file
-make register // registers your data to a shared github repo
-
-```
-
 ## 0. Generate Github Token
 
 NOTE: Check if you already have one from previous testnet genesis in `~/node_data/github.txt`
@@ -107,17 +129,31 @@ IMMEDIATELY SAVE YOUR MNEMONIC TO A PASSWORD MANAGER
 
 
 ## 3. Initialize configs for node.
+
+There's a specific "onboarding" flow for genesis.
+
 This creates the files your validator needs to run 0L tools. By default files will be created in `$HOME/.0L/`.
 
 The following script does several steps:
+- Mines the first proof. Expect this to take about 30 minutes.
 - OL app configs: defaults to `$HOME/.0L/0L.toml` 
 - keys init: creating credentials and configs
 - fork: on github this forks the GENESIS_REPO into the CANDIDATE_REPO
 
 ```
-GITHUB_USER= make ceremony
+GITHUB_USER= make gen-onboard
+
+# the equivalent command is:
+cargo run -p onboard --release -- val --genesis-ceremony
+```
+
+If you hit any errors in your config but don't want to mine a new proof, do this:
 ```
+GITHUB_USER= make gen-reset
 
+# the equivalent command is:
+cargo run -p onboard --release -- val --genesis-ceremony --skip-mining
+```
 ## 4. Pause and check your work ##
 Check that all your configs are correct before registering: `make check`. 
 
@@ -136,18 +172,11 @@ test mode:
 
 If the data looks incorrect, you can double-check `$HOME/.0L/0L.toml` and optionally edit it.
 
-## 5. Mine your first proof (or bring first proof from elsewhere)
 
-NOTE: if you already have a puzzle tower, and you are porting it to this chain. Then skip this step, and simply copy the block_0.json into your data path (e.g. ~/.0L/blocks/block_0.json).
+### (Optional) Link your previous tower to the new genesis proof
 
-```
-make genesis-miner
-```
-This will mine the 0th proof of your tower, which is needed for genesis.
-
-### (Optional) bring previous tower proof 0
-
-If you are using a mnemonic and have previously generated a tower, then you can simply add the block_0.json to .0L/blocks/. You will eventually want to include all proofs of a previous tower.
+The onboard tool will scan `.0L/vdf_proofs/` for legacy files named in the `block_x.json` format.
+It will then take the highest block and hash its proof, and it will interactively ask whether you want to include that information in your genesis block.
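
For readers who want the mechanics, the sketch below illustrates the scan-and-hash idea in Rust. It is not the onboard implementation: the `glob`, `sha2`, and `hex` crates, the choice to hash the whole file (rather than only its proof field), and the helper names are assumptions.

```rust
// Sketch only: pick the highest-numbered legacy block_x.json and hash it.
use std::{fs, path::PathBuf};

use glob::glob;              // pattern-matching over the vdf_proofs directory
use sha2::{Digest, Sha256};  // SHA-256 of the chosen proof file

/// Return the legacy proof file with the largest numeric suffix, if any.
fn highest_legacy_proof(dir: &str) -> Option<PathBuf> {
    let mut best: Option<(u64, PathBuf)> = None;
    for path in glob(&format!("{}/block_*.json", dir)).ok()?.flatten() {
        // "block_42.json" -> 42; skip files whose names don't parse.
        let stem = match path.file_stem().and_then(|s| s.to_str()) {
            Some(s) => s.to_owned(),
            None => continue,
        };
        if let Ok(n) = stem.trim_start_matches("block_").parse::<u64>() {
            if best.as_ref().map_or(true, |(m, _)| n > *m) {
                best = Some((n, path));
            }
        }
    }
    best.map(|(_, p)| p)
}

/// Hash the highest legacy proof so it can be offered for inclusion in the genesis preimage.
fn legacy_tower_link(dir: &str) -> Option<String> {
    let bytes = fs::read(highest_legacy_proof(dir)?).ok()?;
    Some(hex::encode(Sha256::digest(&bytes)))
}
```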
 ## 6. Register for genesis
 
 The following script does several steps:
@@ -155,10 +184,18 @@ The following script does several steps:
 - pull: submitting a pull request from CANDIDATE_REPO to GENESIS_REPO
 
 ```
-GITHUB_USER= make register
+GITHUB_USER= make gen-register
+```
+
+## 7. Submit a pull request with your changes
+
+Until now, all changes were made on your fork of the genesis registration repo. To make a pull request to the main branch, run:
+
+```
+GITHUB_USER= make gen-make-pull
 ```
 
-After this step check your data at `http://github.com/0LSF/experimental-genesis`
+After this step check your data at `http://github.com/0LSF/genesis-registration`
 
 #### Troubleshooting:
 
diff --git a/ol/documentation/genesis/mining_after_genesis.md b/ol/documentation/genesis/mining_after_genesis.md
index 3cf2e6c885..05630a8c65 100644
--- a/ol/documentation/genesis/mining_after_genesis.md
+++ b/ol/documentation/genesis/mining_after_genesis.md
@@ -1,6 +1,6 @@
 # Resume building tower after genesis
 
-1. copy any existing `block_x.json` files into `~/.0L/blocks/``
+1. Copy any existing `block_x.json` files into `~/.0L/vdf_proofs/`
 2. build binaries
 ```
 cd libra/
diff --git a/ol/documentation/ops/mining_VDF_proofs.md b/ol/documentation/ops/mining_VDF_proofs.md
index ddeec91063..b572ef3157 100644
--- a/ol/documentation/ops/mining_VDF_proofs.md
+++ b/ol/documentation/ops/mining_VDF_proofs.md
@@ -15,12 +15,12 @@ note: you may want to use separated `screen` instances for different purposes li
 ## Confirm the files
 
 you should have 
-- ~/.0L/blocks/block_0.json
+- ~/.0L/vdf_proofs/proof_0.json
 - ~/.0L/key_store.json
 - ~/.0L/0L.toml
 
 ## Resume the tower app
-The tower app will resume mining. The first block to be created is block_1.json (which takes as the preimage input the sha256 hash of block_0.json)
+The tower app will resume mining. The first proof to be created is proof_1.json (which takes as its preimage input the SHA-256 hash of proof_0.json).
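
Schematically, each proof is chained to the previous one through its preimage. A minimal sketch of that rule (assuming the `sha2` crate and that the whole previous proof file is the input, which is a simplification):

```rust
use sha2::{Digest, Sha256};

/// Sketch of the chaining rule: the preimage of proof N+1 is the SHA-256 of proof N.
fn next_preimage(previous_proof_bytes: &[u8]) -> Vec<u8> {
    Sha256::digest(previous_proof_bytes).to_vec()
}
```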
 
 From your `screen` instance you can run:
 ```
@@ -28,7 +28,7 @@ From your `screen` instance you can run:
 export NODE_ENV=prod
 tower -o start
 ```
-WARNING: If you don't set node env as above, you will generate a "test" proof, which takes 1 second. If that happened, delete ~/.0L/blocks/block_1.json (and any other blocks made, but NOT block_0.json) and resume with `NODE_ENV=prod`.
+WARNING: If you don't set the node env as above, you will generate a "test" proof, which takes 1 second. If that happens, delete ~/.0L/vdf_proofs/proof_1.json (and any other proofs made, but NOT proof_0.json) and resume with `NODE_ENV=prod`.
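
The env matters because the difficulty switch in `config/global-constants` (changed earlier in this patch) only selects the quick test difficulty when `NODE_ENV` is explicitly `test`; anything else falls back to production. Roughly:

```rust
use std::env;

/// Rough equivalent of delay_difficulty() after this patch: only an explicit
/// NODE_ENV=test yields the 1-second test difficulty; everything else is prod.
pub fn delay_difficulty() -> u64 {
    let node_env = env::var("NODE_ENV").unwrap_or_else(|_| "prod".to_string());
    if node_env == "test" {
        return 100; // test suites and local debugging
    }
    120_000_000 // production difficulty
}
```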
 
 ## Check mining state
 
diff --git a/ol/fixtures/Makefile b/ol/fixtures/Makefile
index 570a68c379..09a13a47b9 100644
--- a/ol/fixtures/Makefile
+++ b/ol/fixtures/Makefile
@@ -23,10 +23,10 @@ echo:
 
 fixtures-test: 
 	cd ../onboard && NODE_ENV=test TEST=y cargo r -p onboard -- val --genesis-ceremony --autopay-file ${SOURCE}/ol/fixtures/autopay/${PERSONA}.autopay_batch.json
-	cp ${DATA_PATH}/blocks/block_0.json ${SOURCE}/ol/fixtures/blocks/test/${PERSONA}/
+	cp ${DATA_PATH}/vdf_proofs/proof_0.json ${SOURCE}/ol/fixtures/vdf_proofs/test/${PERSONA}/
 	cp ${DATA_PATH}/account.json ${SOURCE}/ol/fixtures/account/${PERSONA}.account.json
 
 fixtures-prod: 
 	cd ../onboard && NODE_ENV=prod TEST=y cargo r -p onboard -- val --genesis-ceremony --autopay-file ${SOURCE}/ol/fixtures/autopay/${PERSONA}.autopay_batch.json
-	cp ${DATA_PATH}/blocks/block_0.json ${SOURCE}/ol/fixtures/blocks/prod/${PERSONA}/
-	cp ${DATA_PATH}/blocks/block_0.json ${SOURCE}/ol/fixtures/blocks/stage/${PERSONA}/
\ No newline at end of file
+	cp ${DATA_PATH}/vdf_proofs/proof_0.json ${SOURCE}/ol/fixtures/vdf_proofs/prod/${PERSONA}/
+	cp ${DATA_PATH}/vdf_proofs/proof_0.json ${SOURCE}/ol/fixtures/vdf_proofs/stage/${PERSONA}/
\ No newline at end of file
diff --git a/ol/fixtures/blocks/prod/alice/block_0.json b/ol/fixtures/vdf_proofs/prod/alice/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/prod/alice/block_0.json
rename to ol/fixtures/vdf_proofs/prod/alice/proof_0.json
diff --git a/ol/fixtures/blocks/prod/alice/block_1.json b/ol/fixtures/vdf_proofs/prod/alice/proof_1.json
similarity index 100%
rename from ol/fixtures/blocks/prod/alice/block_1.json
rename to ol/fixtures/vdf_proofs/prod/alice/proof_1.json
diff --git a/ol/fixtures/blocks/stage/alice/block_0.json b/ol/fixtures/vdf_proofs/stage/alice/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/stage/alice/block_0.json
rename to ol/fixtures/vdf_proofs/stage/alice/proof_0.json
diff --git a/ol/fixtures/blocks/test/alice/block_0.json b/ol/fixtures/vdf_proofs/test/alice/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/test/alice/block_0.json
rename to ol/fixtures/vdf_proofs/test/alice/proof_0.json
diff --git a/ol/fixtures/blocks/test/alice/block_1.json b/ol/fixtures/vdf_proofs/test/alice/proof_1.json
similarity index 100%
rename from ol/fixtures/blocks/test/alice/block_1.json
rename to ol/fixtures/vdf_proofs/test/alice/proof_1.json
diff --git a/ol/fixtures/blocks/test/bob/block_0.json b/ol/fixtures/vdf_proofs/test/bob/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/test/bob/block_0.json
rename to ol/fixtures/vdf_proofs/test/bob/proof_0.json
diff --git a/ol/fixtures/blocks/test/carol/block_0.json b/ol/fixtures/vdf_proofs/test/carol/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/test/carol/block_0.json
rename to ol/fixtures/vdf_proofs/test/carol/proof_0.json
diff --git a/ol/fixtures/blocks/test/dave/block_0.json b/ol/fixtures/vdf_proofs/test/dave/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/test/dave/block_0.json
rename to ol/fixtures/vdf_proofs/test/dave/proof_0.json
diff --git a/ol/fixtures/blocks/test/eve/block_1.json b/ol/fixtures/vdf_proofs/test/eve/block_1.json
similarity index 100%
rename from ol/fixtures/blocks/test/eve/block_1.json
rename to ol/fixtures/vdf_proofs/test/eve/block_1.json
diff --git a/ol/fixtures/blocks/test/eve/block_0.json b/ol/fixtures/vdf_proofs/test/eve/proof_0.json
similarity index 100%
rename from ol/fixtures/blocks/test/eve/block_0.json
rename to ol/fixtures/vdf_proofs/test/eve/proof_0.json
diff --git a/ol/keys/.gitignore b/ol/keys/.gitignore
index 4b494951fa..2605805cbd 100644
--- a/ol/keys/.gitignore
+++ b/ol/keys/.gitignore
@@ -39,6 +39,6 @@ rls*.log
 target/
 
 #Miner
-/blocks/
-/test_blocks/
+/vdf_proofs/
+/test_vdf_proofs/
 
diff --git a/ol/onboard/.gitignore b/ol/onboard/.gitignore
index 4b494951fa..2605805cbd 100644
--- a/ol/onboard/.gitignore
+++ b/ol/onboard/.gitignore
@@ -39,6 +39,6 @@ rls*.log
 target/
 
 #Miner
-/blocks/
-/test_blocks/
+/vdf_proofs/
+/test_vdf_proofs/
 
diff --git a/ol/onboard/src/commands/wizard_fork_cmd.rs b/ol/onboard/src/commands/wizard_fork_cmd.rs
index f656e18805..f1c6727a9f 100644
--- a/ol/onboard/src/commands/wizard_fork_cmd.rs
+++ b/ol/onboard/src/commands/wizard_fork_cmd.rs
@@ -14,7 +14,7 @@ use diem_types::waypoint::Waypoint;
 use diem_wallet::WalletLibrary;
 use ol::{commands::init_cmd, config::AppCfg};
 use ol_keys::{scheme::KeyScheme, wallet};
-use ol_types::block::Block;
+use ol_types::block::VDFProof;
 use ol_types::config::IS_TEST;
 use ol_types::{account::ValConfigs, config::TxType, pay_instruction::PayInstruction};
 use reqwest::Url;
@@ -171,8 +171,8 @@ impl Runnable for ForkCmd {
         status_ok!("\nNode config written", "\n...........................\n");
 
         if !self.skip_mining {
-            // Mine Block
-            tower::block::write_genesis(&app_config);
+            // Mine Proof
+            tower::proof::write_genesis(&app_config);
             status_ok!(
                 "\nGenesis proof complete",
                 "\n...........................\n"
@@ -268,7 +268,7 @@ pub fn write_account_json(
     let cfg = wizard_config.unwrap_or(app_config().clone());
     let json_path = json_path.clone().unwrap_or(cfg.workspace.node_home.clone());
     let keys = KeyScheme::new(&wallet);
-    let block = Block::parse_block_file(cfg.get_block_dir().join("block_0.json").to_owned());
+    let block = VDFProof::parse_block_file(cfg.get_block_dir().join("proof_0.json").to_owned());
 
     ValConfigs::new(
         block,
diff --git a/ol/onboard/src/commands/wizard_user_cmd.rs b/ol/onboard/src/commands/wizard_user_cmd.rs
index b3f60a3288..c4714cf388 100644
--- a/ol/onboard/src/commands/wizard_user_cmd.rs
+++ b/ol/onboard/src/commands/wizard_user_cmd.rs
@@ -4,8 +4,8 @@
 
 use diem_global_constants::{VDF_SECURITY_PARAM, delay_difficulty};
 use ol_keys::wallet;
-use ol_types::block::Block;
-use tower::{block::write_genesis, delay};
+use ol_types::block::VDFProof;
+use tower::{proof::write_genesis, delay};
 use ol_types::config::AppCfg;
 use abscissa_core::{Command, Options, Runnable};
 use std::{path::PathBuf};
@@ -17,7 +17,7 @@ pub struct UserWizardCmd {
     output_dir: Option<PathBuf>,
     #[options(help = "File to check")]
     check_file: Option<PathBuf>,
-    #[options(help = "use an existing block_0.json file and skip mining")]
+    #[options(help = "use an existing proof_0.json file and skip mining")]
     block_zero: Option<PathBuf>,
 }
 
@@ -47,7 +47,7 @@ fn wizard(path: PathBuf, block_zero: &Option) {
     // Create block zero, if there isn't one.
     let block;
     if let Some(block_path) = block_zero {
-        block = Block::parse_block_file(block_path.to_owned());
+        block = VDFProof::parse_block_file(block_path.to_owned());
     } else {
         block = write_genesis(&app_cfg);
     }
diff --git a/ol/onboard/src/commands/wizard_val_cmd.rs b/ol/onboard/src/commands/wizard_val_cmd.rs
index 81a42566c3..2e1f7cf130 100644
--- a/ol/onboard/src/commands/wizard_val_cmd.rs
+++ b/ol/onboard/src/commands/wizard_val_cmd.rs
@@ -11,7 +11,7 @@ use diem_types::{transaction::SignedTransaction, waypoint::Waypoint};
 use diem_wallet::WalletLibrary;
 use ol::{commands::init_cmd, config::AppCfg};
 use ol_keys::{scheme::KeyScheme, wallet};
-use ol_types::block::Block;
+use ol_types::block::VDFProof;
 use ol_types::config::IS_TEST;
 use ol_types::fixtures;
 use ol_types::{account::ValConfigs, config::TxType, pay_instruction::PayInstruction};
@@ -185,8 +185,8 @@ impl Runnable for ValWizardCmd {
         }
 
         if !self.skip_mining {
-            // Mine Block
-            tower::block::write_genesis(&app_config);
+            // Mine Proof
+            tower::proof::write_genesis(&app_config);
             status_ok!(
                 "\nGenesis proof complete",
                 "\n...........................\n"
@@ -205,14 +205,18 @@ impl Runnable for ValWizardCmd {
             "\nAccount manifest written",
             "\n...........................\n"
         );
+      
 
         status_info!(
-            "Your validator node and miner app are now configured.", 
-            &format!(
+            "Success", "Your validator node and miner app are now configured.\n");
+
+
+        if !self.genesis_ceremony {
+            println!(
                 "\nStart your node with `ol start`, and then ask someone with GAS to do this transaction `txs create-validator -u http://{}`",
                 &app_config.profile.ip
-            )
-        );
+            );
+        }
     }
 }
 
@@ -292,7 +296,7 @@ pub fn write_account_json(
     let cfg = wizard_config.unwrap_or(app_config().clone());
     let json_path = json_path.clone().unwrap_or(cfg.workspace.node_home.clone());
     let keys = KeyScheme::new(&wallet);
-    let block = Block::parse_block_file(cfg.get_block_dir().join("block_0.json").to_owned());
+    let block = VDFProof::parse_block_file(cfg.get_block_dir().join("proof_0.json").to_owned());
 
     ValConfigs::new(
         block,
diff --git a/ol/onboard/src/home.rs b/ol/onboard/src/home.depr
similarity index 100%
rename from ol/onboard/src/home.rs
rename to ol/onboard/src/home.depr
diff --git a/ol/onboard/src/lib.rs b/ol/onboard/src/lib.rs
index 24d28ca140..1986298982 100644
--- a/ol/onboard/src/lib.rs
+++ b/ol/onboard/src/lib.rs
@@ -22,5 +22,5 @@ pub mod error;
 pub mod prelude;
 pub mod entrypoint;
 pub mod manifest;
-pub mod home;
+// pub mod home;
 pub mod read_genesis;
\ No newline at end of file
diff --git a/ol/onboard/src/manifest.rs b/ol/onboard/src/manifest.rs
index 52310fa536..ae2d215d0b 100644
--- a/ol/onboard/src/manifest.rs
+++ b/ol/onboard/src/manifest.rs
@@ -2,7 +2,7 @@
 
 #![allow(clippy::never_loop)]
 
-use ol_types::{block::Block, config::AppCfg};
+use ol_types::{block::VDFProof, config::AppCfg};
 use ol_keys::scheme::KeyScheme;
 
 use diem_types::transaction::SignedTransaction;
@@ -28,7 +28,7 @@ pub fn write_manifest(
     );
 
     let keys = KeyScheme::new(&wallet);
-    let block = Block::parse_block_file(cfg.get_block_dir().join("block_0.json").to_owned());
+    let block = VDFProof::parse_block_file(cfg.get_block_dir().join("proof_0.json").to_owned());
 
     ValConfigs::new(
         block,
diff --git a/ol/tower/.gitignore b/ol/tower/.gitignore
index 4b494951fa..2605805cbd 100644
--- a/ol/tower/.gitignore
+++ b/ol/tower/.gitignore
@@ -39,6 +39,6 @@ rls*.log
 target/
 
 #Miner
-/blocks/
-/test_blocks/
+/vdf_proofs/
+/test_vdf_proofs/
 
diff --git a/ol/tower/src/backlog.rs b/ol/tower/src/backlog.rs
index 7f0e6fb4bb..02a93e0585 100644
--- a/ol/tower/src/backlog.rs
+++ b/ol/tower/src/backlog.rs
@@ -2,13 +2,13 @@
 #![forbid(unsafe_code)]
 
 use cli::{diem_client::DiemClient};
-use ol_types::block::Block;
+use ol_types::block::VDFProof;
 use txs::submit_tx::{TxParams, eval_tx_status};
 use std::{fs::File, path::PathBuf, thread, time};
 use ol_types::config::AppCfg;
 use crate::commit_proof::commit_proof_tx;
 use std::io::BufReader;
-use crate::block::parse_block_height;
+use crate::proof::{parse_block_height, FILENAME};
 use anyhow::{bail, Result, Error};
 use diem_json_rpc_types::views::{TowerStateResourceView};
 
@@ -34,12 +34,12 @@ pub fn process_backlog(
             let mut i = remote_height + 1;
             while i <= current_block_number {
                 let path = PathBuf::from(
-                    format!("{}/block_{}.json", blocks_dir.display(), i)
+                    format!("{}/{}_{}.json", blocks_dir.display(), FILENAME, i)
                 );
                 println!("submitting proof {}", i);
                 let file = File::open(&path)?;
                 let reader = BufReader::new(file);
-                let block: Block = serde_json::from_reader(reader)?;
+                let block: VDFProof = serde_json::from_reader(reader)?;
                 let view = commit_proof_tx(
                     &tx_params, block, is_operator
                 )?;
diff --git a/ol/tower/src/commands/start_cmd.rs b/ol/tower/src/commands/start_cmd.rs
index a63c3765d7..aa7a2c402e 100644
--- a/ol/tower/src/commands/start_cmd.rs
+++ b/ol/tower/src/commands/start_cmd.rs
@@ -1,12 +1,12 @@
 //! `start`
 
-use ol_types::config::AppCfg;
-use crate::{backlog, block::*, entrypoint};
+use crate::{backlog, entrypoint, proof::*};
 use crate::{entrypoint::EntryPointTxsCmd, prelude::*};
 use abscissa_core::{config, Command, FrameworkError, Options, Runnable};
+use ol_types::config::AppCfg;
 use ol_types::config::TxType;
-use txs::submit_tx::tx_params;
 use std::process::exit;
+use txs::submit_tx::tx_params;
 
 /// `start` subcommand
 #[derive(Command, Default, Debug, Options)]
@@ -35,7 +35,7 @@ impl Runnable for StartCmd {
             use_upstream_url,
             ..
         } = entrypoint::get_args();
-        
+
         // config reading respects swarm setup
         // so also cfg.get_waypoint will return correct data
         let cfg = app_config().clone();
@@ -48,7 +48,9 @@ impl Runnable for StartCmd {
                     exit(-1);
                 }
             }
-        } else { waypoint };
+        } else {
+            waypoint
+        };
 
         let tx_params = tx_params(
             cfg.clone(),
@@ -60,7 +62,8 @@ impl Runnable for StartCmd {
             is_operator,
             use_upstream_url,
             None,
-        ).expect("could not get tx parameters");
+        )
+        .expect("could not get tx parameters");
 
         // Check for, and submit backlog proofs.
         if !self.skip_backlog {
@@ -83,7 +86,8 @@ impl Runnable for StartCmd {
                 Err(err) => {
                     println!("ERROR: miner failed, message: {:?}", err);
                     // exit on unrecoverable error.
-                    exit(1);                }
+                    exit(1);
+                }
             }
         }
     }
diff --git a/ol/tower/src/commands/zero_cmd.rs b/ol/tower/src/commands/zero_cmd.rs
index 70ca13e316..9eaf10b095 100644
--- a/ol/tower/src/commands/zero_cmd.rs
+++ b/ol/tower/src/commands/zero_cmd.rs
@@ -1,6 +1,6 @@
 //! `start` subcommand - example of how to write a subcommand
 
-use crate::{application::app_config, block::write_genesis};
+use crate::{application::app_config, proof::write_genesis};
 /// App-local prelude includes `app_reader()`/`app_writer()`/`app_config()`
 /// accessors along with logging macros. Customize as you see fit.
 use abscissa_core::{Command, Options, Runnable};
diff --git a/ol/tower/src/commit_proof.rs b/ol/tower/src/commit_proof.rs
index 9fcfd2be96..815bde86d4 100644
--- a/ol/tower/src/commit_proof.rs
+++ b/ol/tower/src/commit_proof.rs
@@ -2,7 +2,7 @@
 #![forbid(unsafe_code)]
 use anyhow::Error;
 use cli::{diem_client::DiemClient, AccountData, AccountStatus};
-use ol_types::block::Block;
+use ol_types::block::VDFProof;
 use txs::{sign_tx::sign_tx, submit_tx::{TxParams, submit_tx}};
 use diem_json_rpc_types::views::{TransactionView};
 use diem_transaction_builder::stdlib as transaction_builder;
@@ -10,7 +10,7 @@ use diem_transaction_builder::stdlib as transaction_builder;
 /// Submit a miner transaction to the network.
 pub fn commit_proof_tx(
     tx_params: &TxParams,
-    block: Block,
+    block: VDFProof,
     is_operator: bool,
 ) -> Result<TransactionView, Error> {
 
diff --git a/ol/tower/src/lib.rs b/ol/tower/src/lib.rs
index 8b206dd988..0660aba6df 100644
--- a/ol/tower/src/lib.rs
+++ b/ol/tower/src/lib.rs
@@ -21,7 +21,8 @@ pub mod commands;
 pub mod entrypoint;
 pub mod error;
 
-pub mod block;
+pub mod proof;
 pub mod delay;
 pub mod backlog;
-pub mod commit_proof;
\ No newline at end of file
+pub mod commit_proof;
+pub mod preimage;
\ No newline at end of file
diff --git a/ol/tower/src/preimage.rs b/ol/tower/src/preimage.rs
new file mode 100644
index 0000000000..271556e115
--- /dev/null
+++ b/ol/tower/src/preimage.rs
@@ -0,0 +1,106 @@
+//! genesis preimage formatting.
+
+use byteorder::{LittleEndian, WriteBytesExt};
+use diem_global_constants::{VDF_SECURITY_PARAM, delay_difficulty};
+use hex::decode;
+use ol::config::AppCfg;
+
+
+/// Format the config file data into a fixed byte structure for easy parsing in Move/other languages
+pub fn genesis_preimage(cfg: &AppCfg) -> Vec<u8> {
+    const AUTH_KEY_BYTES: usize = 32;
+    const CHAIN_ID_BYTES: usize = 16;
+    const DIFFICULTY_BYTES: usize = 8;
+    const SECURITY_BYTES: usize = 8;
+    const PIETRZAK: usize = 1; // PIETRZAK = 1, WESOLOWSKI = 2
+    const LINK_TO_TOWER: usize = 64; // optional, hash of the last proof of an existing tower.
+    const STATEMENT_BYTES: usize = 895; // remainder
+    
+
+    let mut preimage: Vec<u8> = vec![];
+
+    // AUTH_KEY_BYTES
+    let mut padded_key_bytes = match decode(cfg.profile.auth_key.clone().to_string()) {
+        Err(x) => panic!("Invalid 0L Auth Key: {}", x),
+        Ok(key_bytes) => padding(key_bytes, AUTH_KEY_BYTES)
+    };
+
+    preimage.append(&mut padded_key_bytes);
+
+    // CHAIN_ID_BYTES
+    let mut padded_chain_id_bytes = padding(cfg.chain_info.chain_id.as_bytes().to_vec(), CHAIN_ID_BYTES);
+
+    preimage.append(&mut padded_chain_id_bytes);
+
+    // DIFFICULTY_BYTES
+    preimage
+        .write_u64::<LittleEndian>(delay_difficulty())
+        .unwrap();
+    
+    // SECURITY_BYTES
+    preimage
+        .write_u64::<LittleEndian>(VDF_SECURITY_PARAM.into())
+        .unwrap();
+    
+    // PIETRZAK
+    preimage
+        .write_u8(1)
+        .unwrap();
+
+    // LINK_TO_TOWER
+    let mut padded_tower_link_bytes = padding(cfg.profile.tower_link.clone().unwrap_or("".to_string()).into_bytes(), LINK_TO_TOWER);
+    preimage.append(&mut padded_tower_link_bytes);
+
+    // STATEMENT
+    let mut padded_statements_bytes = padding(cfg.profile.statement.clone().into_bytes(), STATEMENT_BYTES);
+    preimage.append(&mut padded_statements_bytes);
+
+    assert_eq!(
+        preimage.len(),
+        (
+            AUTH_KEY_BYTES // 0L Auth_Key
+            + CHAIN_ID_BYTES // chain_id
+            + DIFFICULTY_BYTES // iterations/difficulty
+            + SECURITY_BYTES
+            + PIETRZAK
+            + LINK_TO_TOWER
+            + STATEMENT_BYTES
+            // = 1024
+        ),
+        "Preimage is the incorrect byte length"
+    );
+
+    assert_eq!(
+        preimage.len(),
+        1024,
+        "Preimage is the incorrect byte length"
+    );
+    
+    return preimage;
+}
+
+fn padding(mut statement_bytes: Vec<u8>, limit: usize) -> Vec<u8> {
+    match statement_bytes.len() {
+        d if d > limit => panic!(
+            "Message is longer than {} bytes. Got {} bytes",
+            limit,
+            statement_bytes.len()
+        ),
+        d if d < limit => {
+            let padding_length = limit - statement_bytes.len() as usize;
+            let mut padding_bytes: Vec<u8> = vec![0; padding_length];
+            padding_bytes.append(&mut statement_bytes);
+            padding_bytes
+        }
+        d if d == limit => statement_bytes,
+        _ => unreachable!(),
+    }
+}
+
+// #[test]
+
+// fn test() {
+//     let word = padding("hello".as_bytes().to_vec(), 100);
+//     asset!(word.len(), )
+//     dbg!(&word);
+// }
\ No newline at end of file
diff --git a/ol/tower/src/block.rs b/ol/tower/src/proof.rs
similarity index 62%
rename from ol/tower/src/block.rs
rename to ol/tower/src/proof.rs
index 751e053d25..2ca7d57844 100644
--- a/ol/tower/src/block.rs
+++ b/ol/tower/src/proof.rs
@@ -1,27 +1,24 @@
 //! Proof block datastructure
 
-use diem_global_constants::{VDF_SECURITY_PARAM, delay_difficulty};
-use ol_types::config::AppCfg;
-use crate::{
-    delay::*,
-    backlog,
-};
-use anyhow::{Error, bail};
-use byteorder::{LittleEndian, WriteBytesExt};
-use glob::glob;
-use hex::decode;
+use crate::{backlog, delay::*, preimage::genesis_preimage};
+use anyhow::{bail, Error};
 use diem_crypto::hash::HashValue;
-use ol_types::block::Block;
-use txs::submit_tx::TxParams;
+use diem_global_constants::{delay_difficulty, VDF_SECURITY_PARAM};
+use glob::glob;
+use ol_types::block::VDFProof;
+use ol_types::config::AppCfg;
 use std::{
     fs,
     io::{BufReader, Write},
     path::PathBuf,
     time::Instant,
 };
+use txs::submit_tx::TxParams;
+
+pub const FILENAME: &str = "proof";
 
 // writes a JSON file with the first vdf proof
-fn mine_genesis(config: &AppCfg, difficulty: u64, security: u16) -> Block {
+fn mine_genesis(config: &AppCfg, difficulty: u64, security: u16) -> VDFProof {
     println!("Mining Genesis Proof");
     let preimage = genesis_preimage(&config);
     let now = Instant::now();
@@ -29,7 +26,7 @@ fn mine_genesis(config: &AppCfg, difficulty: u64, security: u16) -> Block {
     let proof = do_delay(&preimage, difficulty, security).unwrap(); // Todo: make mine_genesis return a result.
     let elapsed_secs = now.elapsed().as_secs();
     println!("Delay: {:?} seconds", elapsed_secs);
-    let block = Block {
+    let block = VDFProof {
         height: 0u64,
         elapsed_secs,
         preimage,
@@ -42,57 +39,46 @@ fn mine_genesis(config: &AppCfg, difficulty: u64, security: u16) -> Block {
 }
 
 /// Mines genesis and writes the file
-pub fn write_genesis(config: &AppCfg) -> Block {
+pub fn write_genesis(config: &AppCfg) -> VDFProof {
     let difficulty = delay_difficulty();
     let security = VDF_SECURITY_PARAM;
     let block = mine_genesis(config, difficulty, security);
     //TODO: check for overwriting file...
     write_json(&block, &config.get_block_dir());
+    let genesis_proof_filename = &format!("{}_0.json", FILENAME);
     println!(
-        "block zero proof mined, file saved to: {:?}",
-        &config.get_block_dir().join("block_0.json")
+        "proof zero mined, file saved to: {:?}",
+        &config.get_block_dir().join(genesis_proof_filename)
     );
     block
 }
 /// Mine one block
-pub fn mine_once(config: &AppCfg) -> Result<Block, Error> {
-    let (_current_block_number, current_block_path) = parse_block_height(&config.get_block_dir());
+pub fn mine_once(config: &AppCfg) -> Result<VDFProof, Error> {
     // If there are files in path, continue mining.
-    if let Some(max_block_path) = current_block_path {
-        // current_block_path is Option type, check if destructures to Some.
-        let block_file =
-            fs::read_to_string(max_block_path).expect("Could not read latest block file in path");
-
-        let latest_block: Block =
-            serde_json::from_str(&block_file).expect("could not deserialize latest block");
-
-        let preimage = HashValue::sha3_256_of(&latest_block.proof).to_vec();
-        // Otherwise this is the first time the app is run, and it needs a genesis preimage, which comes from configs.
-        let height = latest_block.height + 1;
-        // TODO: cleanup this duplication with mine_genesis_once?
-        let difficulty = delay_difficulty();
-        let security = VDF_SECURITY_PARAM;
-
-        let now = Instant::now();
-        let data = do_delay(&preimage, difficulty, security)?;
-        let elapsed_secs = now.elapsed().as_secs();
-        println!("Delay: {:?} seconds", elapsed_secs);
-
-        let block = Block {
-            height,
-            elapsed_secs,
-            preimage,
-            proof: data.clone(),
-            difficulty: Some(difficulty),
-            security: Some(security),
-        };
-
-        write_json(&block, &config.get_block_dir());
-        Ok(block)
-    // Err(ErrorKind::Io.context(format!("submit_vdf_proof_tx_to_network {:?}", block_dir)).into())
-    } else {
-        bail!(format!("No files found in {:?}", &config.get_block_dir()));
-    }
+    let latest_block = get_latest_proof(config)?;
+    let preimage = HashValue::sha3_256_of(&latest_block.proof).to_vec();
+    // Otherwise this is the first time the app is run, and it needs a genesis preimage, which comes from configs.
+    let height = latest_block.height + 1;
+    // TODO: cleanup this duplication with mine_genesis_once?
+    let difficulty = delay_difficulty();
+    let security = VDF_SECURITY_PARAM;
+
+    let now = Instant::now();
+    let data = do_delay(&preimage, difficulty, security)?;
+    let elapsed_secs = now.elapsed().as_secs();
+    println!("Delay: {:?} seconds", elapsed_secs);
+
+    let block = VDFProof {
+        height,
+        elapsed_secs,
+        preimage,
+        proof: data.clone(),
+        difficulty: Some(difficulty),
+        security: Some(security),
+    };
+
+    write_json(&block, &config.get_block_dir());
+    Ok(block)
 }
 
 /// Write block to file
@@ -108,7 +94,7 @@ pub fn mine_and_submit(
 
     // If there are NO files in path, mine the genesis proof.
     if current_block_number.is_none() {
-        bail!("ERROR: Genesis block_0.json not found.");
+        bail!("ERROR: Genesis proof_0.json not found.");
     } else {
         // the max block that has been succesfully submitted to client
         let mut mining_height = current_block_number.unwrap() + 1;
@@ -116,10 +102,13 @@ pub fn mine_and_submit(
 
         // mine continuously from the last block in the file systems
         loop {
-            println!("Mining VDF Proof: Block {}", mining_height);
+            println!("Mining VDF Proof # {}", mining_height);
 
             let block = mine_once(&config)?;
-            println!("Proof mined: block_{}.json created.", block.height.to_string());
+            println!(
+                "Proof mined: proof_{}.json created.",
+                block.height.to_string()
+            );
 
             // submits backlog to client
             match backlog::process_backlog(&config, &tx_params, is_operator) {
@@ -135,7 +124,7 @@ pub fn mine_and_submit(
     }
 }
 
-fn write_json(block: &Block, blocks_dir: &PathBuf) {
+fn write_json(block: &VDFProof, blocks_dir: &PathBuf) {
     if !&blocks_dir.exists() {
         // first run, create the directory if there is none, or if the user changed the configs.
         // note: user may have blocks but they are in a different directory than what miner.toml says.
@@ -143,7 +132,8 @@ fn write_json(block: &Block, blocks_dir: &PathBuf) {
     };
     // Write the file.
     let mut latest_block_path = blocks_dir.clone();
-    latest_block_path.push(format!("block_{}.json", block.height));
+
+    latest_block_path.push(format!("{}_{}.json", FILENAME, block.height));
     //println!("{:?}", &latest_block_path);
     let mut file = fs::File::create(&latest_block_path).unwrap();
     file.write_all(serde_json::to_string(&block).unwrap().as_bytes())
@@ -156,13 +146,13 @@ pub fn parse_block_height(blocks_dir: &PathBuf) -> (Option, Option
     let mut max_block_path = None;
 
     // iterate through all json files in the directory.
-    for entry in glob(&format!("{}/block_*.json", blocks_dir.display()))
+    for entry in glob(&format!("{}/{}_*.json", blocks_dir.display(), FILENAME))
         .expect("Failed to read glob pattern")
     {
         if let Ok(entry) = entry {
             let file = fs::File::open(&entry).expect("Could not open block file");
             let reader = BufReader::new(file);
-            let block: Block = serde_json::from_reader(reader).unwrap();
+            let block: VDFProof = serde_json::from_reader(reader).unwrap();
             let blocknumber = block.height;
             if max_block.is_none() {
                 max_block = Some(blocknumber);
@@ -178,12 +168,28 @@ pub fn parse_block_height(blocks_dir: &PathBuf) -> (Option, Option
     (max_block, max_block_path)
 }
 
-/// Parse a block_x.json file and return a Block
-pub fn parse_block_file(path: PathBuf) -> Block {
-    let file =
-        fs::File::open(&path).expect(&format!("Could not open block file: {:?}", path.to_str()));
-    let reader = BufReader::new(file);
-    serde_json::from_reader(reader).unwrap()
+/// Parse a proof_x.json file and return a VDFProof
+pub fn parse_block_file(path: &PathBuf) -> Result<VDFProof, Error> {
+    let block_file = fs::read_to_string(path).expect("Could not read latest block file in path");
+
+    match serde_json::from_str(&block_file) {
+        Ok(v) => Ok(v),
+        Err(e) => bail!(e),
+    }
+}
+
+/// find the most recent proof on disk
+pub fn get_latest_proof(config: &AppCfg) -> Result<VDFProof, Error> {
+    let (_current_block_number, current_block_path) = parse_block_height(&config.get_block_dir());
+
+    match current_block_path {
+        // current_block_path is Option type, check if destructures to Some.
+        Some(p) => parse_block_file(&p),
+        None => bail!(format!(
+            "ERROR: cannot find a block in directory, path: {:?}",
+            &config.get_block_dir()
+        )),
+    }
 }
 
 /* ////////////// */
@@ -232,7 +238,7 @@ fn create_fixtures() {
 
         // create miner.toml
         //rename the path for actual fixtures
-        configs_fixture.workspace.block_dir = "blocks".to_string();
+        configs_fixture.workspace.block_dir = "vdf_proofs".to_string();
         let toml = toml::to_string(&configs_fixture).unwrap();
         let mut toml_path = PathBuf::from(save_to);
         toml_path.push("miner.toml");
@@ -245,6 +251,7 @@ fn create_fixtures() {
 
 #[test]
 fn test_mine_once() {
+    use hex::decode;
     // if no file is found, the block height is 0
     let mut configs_fixture = test_make_configs_fixture();
     configs_fixture.workspace.block_dir = "test_blocks_temp_2".to_owned();
@@ -254,9 +261,8 @@ fn test_mine_once() {
 
     let fixture_previous_proof = decode("0016f43606b957ab9d93046cdffa73a1e6be4f21f3848eb7b55b81756f7d31919affef388c0d92ca7d68232de4fea46884186c23ef1d6c86f63f5c586000048bce05").unwrap();
 
-    let fixture_block = Block {
-        /// Block Height
-        height: 0u64,
+    let fixture_block = VDFProof {
+        height: 0u64, // Tower height
         elapsed_secs: 0u64,
         preimage: Vec::new(),
         proof: fixture_previous_proof,
@@ -267,9 +273,9 @@ fn test_mine_once() {
     write_json(&fixture_block, &configs_fixture.get_block_dir());
     mine_once(&configs_fixture).unwrap();
     // confirm this file was written to disk.
-    let block_file = fs::read_to_string("./test_blocks_temp_2/block_1.json")
+    let block_file = fs::read_to_string("./test_blocks_temp_2/proof_1.json")
         .expect("Could not read latest block");
-    let latest_block: Block =
+    let latest_block: VDFProof =
         serde_json::from_str(&block_file).expect("could not deserialize latest block");
     // Test the file is read, and blockheight is 0
     assert_eq!(latest_block.height, 1, "Not the droid you are looking for.");
@@ -290,7 +296,6 @@ fn test_mine_genesis() {
     // if no file is found, the block height is 0
     //let blocks_dir = Path::new("./test_blocks");
     let configs_fixture = test_make_configs_fixture();
-    dbg!(&configs_fixture);
 
     //clear from sideffects.
     test_helper_clear_block_dir(&configs_fixture.get_block_dir());
@@ -299,17 +304,17 @@ fn test_mine_genesis() {
     write_genesis(&configs_fixture);
     // read file
     let block_file =
-        // TODO: make this work: let latest_block_path = &configs_fixture.chain_info.block_dir.to_string().push(format!("block_0.json"));
-        fs::read_to_string("./test_blocks_temp_1/block_0.json").expect("Could not read latest block");
+        // TODO: make this work: let latest_block_path = &configs_fixture.chain_info.block_dir.to_string().push(format!("proof_0.json"));
+        fs::read_to_string("./test_blocks_temp_1/proof_0.json").expect("Could not read latest block");
 
-    let latest_block: Block =
+    let latest_block: VDFProof =
         serde_json::from_str(&block_file).expect("could not deserialize latest block");
 
     // Test the file is read, and blockheight is 0
     assert_eq!(latest_block.height, 0, "test");
 
     // Test the expected proof is writtent to file correctly.
-    let correct_proof = "0016f43606b957ab9d93046cdffa73a1e6be4f21f3848eb7b55b81756f7d31919affef388c0d92ca7d68232de4fea46884186c23ef1d6c86f63f5c586000048bce05";
+    let correct_proof = "00292f460bffb29e5d3a7fe5fe7a560104b83a48a236a819812b33e5a3ef2ec266fff30541a78101b4e266358c965ac65830f955842c6dee22b103b50f4709a51655";
     assert_eq!(hex::encode(&latest_block.proof), correct_proof, "test");
 
     test_helper_clear_block_dir(&configs_fixture.get_block_dir());
@@ -326,7 +331,7 @@ fn test_parse_no_files() {
 fn test_parse_one_file() {
     // create a file temporarily in ./test_blocks with height 33
     let current_block_number = 33;
-    let block = Block {
+    let block = VDFProof {
         height: current_block_number,
         elapsed_secs: 0u64,
         preimage: Vec::new(),
@@ -342,7 +347,7 @@ fn test_parse_one_file() {
 
     fs::create_dir(&blocks_dir).unwrap();
     let mut latest_block_path = blocks_dir.clone();
-    latest_block_path.push(format!("block_{}.json", current_block_number));
+    latest_block_path.push(format!("proof_{}.json", current_block_number));
     let mut file = fs::File::create(&latest_block_path).unwrap();
     file.write_all(serde_json::to_string(&block).unwrap().as_bytes())
         .expect("Could not write block");
@@ -359,92 +364,8 @@ pub fn test_make_configs_fixture() -> AppCfg {
     cfg.workspace.node_home = PathBuf::from(".");
     cfg.workspace.block_dir = "test_blocks_temp_1".to_owned();
     cfg.chain_info.chain_id = "0L testnet".to_owned();
-    cfg.profile.auth_key =
-        "3e4629ba1e63114b59a161e89ad4a083b3a31b5fd59e39757c493e96398e4df2".parse().unwrap();
-    cfg
-}
-
-/// Format the config file data into a fixed byte structure for easy parsing in Move/other languages
-pub fn genesis_preimage(cfg: &AppCfg) -> Vec<u8> {
-    const AUTH_KEY_BYTES: usize = 32;
-    const CHAIN_ID_BYTES: usize = 64;
-    const STATEMENT_BYTES: usize = 1008;
-
-    let mut preimage: Vec<u8> = vec![];
-
-    let mut padded_key_bytes = match decode(cfg.profile.auth_key.clone().to_string()) {
-        Err(x) => panic!("Invalid 0L Auth Key: {}", x),
-        Ok(key_bytes) => {
-            if key_bytes.len() != AUTH_KEY_BYTES {
-                panic!(
-                    "Expected a {} byte 0L Auth Key. Got {} bytes",
-                    AUTH_KEY_BYTES,
-                    key_bytes.len()
-                );
-            }
-            key_bytes
-        }
-    };
-
-    preimage.append(&mut padded_key_bytes);
-
-    let mut padded_chain_id_bytes = {
-        let mut chain_id_bytes = cfg.chain_info.chain_id.clone().into_bytes();
-
-        match chain_id_bytes.len() {
-            d if d > CHAIN_ID_BYTES => panic!(
-                "Chain Id is longer than {} bytes. Got {} bytes",
-                CHAIN_ID_BYTES,
-                chain_id_bytes.len()
-            ),
-            d if d < CHAIN_ID_BYTES => {
-                let padding_length = CHAIN_ID_BYTES - chain_id_bytes.len() as usize;
-                let mut padding_bytes: Vec<u8> = vec![0; padding_length];
-                padding_bytes.append(&mut chain_id_bytes);
-                padding_bytes
-            }
-            d if d == CHAIN_ID_BYTES => chain_id_bytes,
-            _ => unreachable!(),
-        }
-    };
-
-    preimage.append(&mut padded_chain_id_bytes);
-
-    preimage
-        .write_u64::<LittleEndian>(delay_difficulty())
+    cfg.profile.auth_key = "3e4629ba1e63114b59a161e89ad4a083b3a31b5fd59e39757c493e96398e4df2"
+        .parse()
         .unwrap();
-
-    let mut padded_statements_bytes = {
-        let mut statement_bytes = cfg.profile.statement.clone().into_bytes();
-
-        match statement_bytes.len() {
-            d if d > STATEMENT_BYTES => panic!(
-                "Chain Id is longer than 1008 bytes. Got {} bytes",
-                statement_bytes.len()
-            ),
-            d if d < STATEMENT_BYTES => {
-                let padding_length = STATEMENT_BYTES - statement_bytes.len() as usize;
-                let mut padding_bytes: Vec<u8> = vec![0; padding_length];
-                padding_bytes.append(&mut statement_bytes);
-                padding_bytes
-            }
-            d if d == STATEMENT_BYTES => statement_bytes,
-            _ => unreachable!(),
-        }
-    };
-
-    preimage.append(&mut padded_statements_bytes);
-
-    assert_eq!(
-        preimage.len(),
-        (
-            AUTH_KEY_BYTES // 0L Auth_Key
-            + CHAIN_ID_BYTES // chain_id
-            + 8 // iterations/difficulty
-            + STATEMENT_BYTES
-            // statement
-        ),
-        "Preimage is the incorrect byte length"
-    );
-    return preimage;
+    cfg
 }
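The chaining rule that `mine_once` now applies through `get_latest_proof` is the heart of the tower: the preimage of proof N+1 is the SHA3-256 digest of the `proof` bytes of proof N, while proof 0 alone uses the 1024-byte genesis preimage from `preimage.rs`. A standalone sketch of that step, substituting the `sha3` and `hex` crates for `diem_crypto::hash::HashValue` purely so it compiles outside this repo:

```
// Sketch of the proof-chaining step: the next proof's preimage is the
// SHA3-256 digest of the previous proof's bytes.
use sha3::{Digest, Sha3_256};

fn next_preimage(previous_proof_bytes: &[u8]) -> Vec<u8> {
    Sha3_256::digest(previous_proof_bytes).to_vec()
}

fn main() {
    // Hypothetical previous proof bytes (a real proof is much longer).
    let proof_0 = vec![0x00, 0x16, 0xf4, 0x36];
    let preimage_1 = next_preimage(&proof_0);
    assert_eq!(preimage_1.len(), 32);
    println!("preimage for proof_1: {}", hex::encode(preimage_1));
}
```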
diff --git a/ol/tower/tests/config.rs b/ol/tower/tests/config.rs
index 56cc4b252f..62b5b4a418 100644
--- a/ol/tower/tests/config.rs
+++ b/ol/tower/tests/config.rs
@@ -2,18 +2,15 @@
 
 #![forbid(unsafe_code)]
 
-
-
-
 #[test]
 fn test_genesis_preimage() {
-    let configs = tower::block::test_make_configs_fixture();
+    let configs = tower::proof::test_make_configs_fixture();
 
-    let first_preimage = tower::block::genesis_preimage(&configs);
+    let first_preimage = tower::preimage::genesis_preimage(&configs);
     let encoded = hex::encode(&first_preimage);
     println!("proof:\n{:?}", encoded);
 
-    let correct_preimage = "3e4629ba1e63114b59a161e89ad4a083b3a31b5fd59e39757c493e96398e4df2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000304c20746573746e65746400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000050726f74657374732072616765206163726f737320746865206e6174696f6e".to_owned();
+    let correct_preimage = "3e4629ba1e63114b59a161e89ad4a083b3a31b5fd59e39757c493e96398e4df2000000000000304c20746573746e657464000000000000000002000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000050726f74657374732072616765206163726f737320746865206e6174696f6e".to_owned();
 
     assert_eq!(encoded, correct_preimage, "Preimages do not match")
 }
diff --git a/ol/tower/tests/integration-simple.rs b/ol/tower/tests/integration-simple.rs
index ca6e584fbc..164207827c 100644
--- a/ol/tower/tests/integration-simple.rs
+++ b/ol/tower/tests/integration-simple.rs
@@ -10,10 +10,10 @@ pub fn integration() {
     // PREPARE FIXTURES
     // the transactions will always abort if the fixtures are incorrect.
     // in swarm, all validators in genesis used NodeConfig.defaul() preimage and proofs.
-    // these are equivalent to fixtures/block_0.json.test.alice 
+    // these are equivalent to ol/fixtures/vdf_proofs/test/alice/proof_0.json
     // for the test to work:
 
-    // the miner needs to start producing block_1.json. If block_1.json is not 
+    // the miner needs to start producing proof_1.json. If proof_1.json is not 
     // successful, then block_2 cannot be either, because it depends on certain 
     // on-chain state from block_1 correct submission.
     let miner_source_path = Path::new(env!("CARGO_MANIFEST_DIR"));
@@ -24,7 +24,7 @@ pub fn integration() {
     fs::remove_dir_all(&swarm_configs_path).unwrap();
 
     let node_exec = &root_source_path.join("target/debug/diem-node");
-    // TODO: Assert that block_0.json is in blocks folder.
+    // TODO: Assert that proof_0.json is in the vdf_proofs folder.
     std::env::set_var("RUST_LOG", "debug");
     let mut swarm_cmd = Command::new("cargo");
     swarm_cmd.current_dir(&root_source_path.as_os_str());
@@ -67,13 +67,6 @@ pub fn integration() {
                     .spawn()
                     .unwrap();
             init_child.wait().unwrap();
-            // // copy fixtures
-            // fs::create_dir_all(swarm_configs_path.join("blocks")).unwrap();
-            // // copy fixtures/block_0.json.test.alice -> blocks/block_0.json
-            // fs::copy(
-            //   root_source_path.join("ol/fixtures/blocks/test/alice/block_0.json"), 
-            //   swarm_configs_path.join("blocks/block_0.json")
-            // ).unwrap();
 
             // start the miner swarm test helper.
             let mut miner_cmd = Command::new("cargo");
diff --git a/ol/tower/tests/integration-submit-tx.rs b/ol/tower/tests/integration-submit-tx.rs
index e0c8acc158..3318940b3b 100644
--- a/ol/tower/tests/integration-submit-tx.rs
+++ b/ol/tower/tests/integration-submit-tx.rs
@@ -14,7 +14,7 @@ use anyhow::{bail, Error};
 /// the port in between and testing the connectivity. 
 pub fn integration_submit_tx() {
     
-    // the miner needs to start producing block_1.json. If block_1.json is not successful, then block_2 cannot be either, because it depends on certain on-chain state from block_1 correct submission.
+    // the miner needs to start producing proof_1.json. If proof_1.json is not successful, then proof_2 cannot be either, because it depends on certain on-chain state from the successful submission of proof_1.
     let miner_source_path = Path::new(env!("CARGO_MANIFEST_DIR"));
     let root_source_path = miner_source_path.parent().unwrap().parent().unwrap();
     let home = dirs::home_dir().unwrap();
@@ -26,7 +26,7 @@ pub fn integration_submit_tx() {
     }
 
     let node_exec = &root_source_path.join("target/debug/diem-node");
-    // TODO: Assert that block_0.json is in blocks folder.
+    // TODO: Assert that proof_0.json is in the vdf_proofs folder.
     std::env::set_var("RUST_LOG", "debug");
     let mut swarm_cmd = Command::new("cargo");
     swarm_cmd.current_dir(&root_source_path.as_os_str());
@@ -101,7 +101,7 @@ pub fn integration_submit_tx() {
             blocks_dir.push(&config.workspace.block_dir);
 
 
-            let (current_block_number, _current_block_path) = tower::block::parse_block_height(&blocks_dir);
+            let (current_block_number, _current_block_path) = tower::proof::parse_block_height(&blocks_dir);
             let block_number_before_block = current_block_number.unwrap();
             
 
@@ -134,7 +134,7 @@ pub fn integration_submit_tx() {
             let test_timeout = Duration::from_secs(240); // To let the timeout happen and continue mining. 
             thread::sleep(test_timeout);
             
-            let (current_block_number, _current_block_path) = tower::block::parse_block_height(&blocks_dir);
+            let (current_block_number, _current_block_path) = tower::proof::parse_block_height(&blocks_dir);
             let block_number_after_unblock = current_block_number.unwrap();
             
             // Miner should have continued mining. +1 to consider atleast 2 blocks mined. 
@@ -210,7 +210,7 @@ fn check_node_sync(tx_params: &TxParams, config: &AppCfg) -> Result<(), Error> {
 
     let mut blocks_dir = config.workspace.node_home.clone();
     blocks_dir.push(&config.workspace.block_dir);
-    let (current_block_number, _current_block_path) = tower::block::parse_block_height(&blocks_dir);
+    let (current_block_number, _current_block_path) = tower::proof::parse_block_height(&blocks_dir);
     let current_block_number = current_block_number.unwrap();
     println!("Local tower height: {}", current_block_number);
 
diff --git a/ol/types/Cargo.toml b/ol/types/Cargo.toml
index 997bf19fb7..65ac5d4502 100644
--- a/ol/types/Cargo.toml
+++ b/ol/types/Cargo.toml
@@ -27,4 +27,5 @@ once_cell = "1.7.2"
 ol-keys = { path = "../keys" }
 move-core-types = { path = "../../language/move-core/types" }
 sha2 = "0.9.1"
-num-format = "0.4.0"
\ No newline at end of file
+num-format = "0.4.0"
+glob = "0.3"
diff --git a/ol/types/src/account.rs b/ol/types/src/account.rs
index da0f53f71d..695d4ee959 100644
--- a/ol/types/src/account.rs
+++ b/ol/types/src/account.rs
@@ -1,5 +1,5 @@
 //! Formatters for libra account creation
-use crate::{block::Block, config::IS_TEST};
+use crate::{block::VDFProof, config::IS_TEST};
 use dialoguer::Confirm;
 use diem_crypto::x25519::PublicKey;
 use diem_types::{
@@ -23,8 +23,8 @@ use std::{fs::File, io::Write, path::PathBuf, process::exit};
 #[derive(Serialize, Deserialize, Debug, Clone)]
 /// Configuration data necessary to initialize a validator.
 pub struct ValConfigs {
-    /// Block zero of the onboarded miner
-    pub block_zero: Block,
+    /// Proof zero of the onboarded miner
+    pub block_zero: VDFProof,
     /// Human readable name of Owner account
     pub ow_human_name: String,
     /// IP address of Operator
@@ -54,8 +54,8 @@ pub struct ValConfigs {
 #[derive(Serialize, Deserialize, Debug)]
 /// Configuration data necessary to initialize an end user.
 pub struct UserConfigs {
-    /// Block zero of the onboarded miner
-    pub block_zero: Block,
+    /// Proof zero of the onboarded miner
+    pub block_zero: VDFProof,
 }
 // TODO: Duplicated from block.rs
 fn as_hex<S>(data: &[u8], serializer: S) -> Result<S::Ok, S::Error>
@@ -77,7 +77,7 @@ where
 impl ValConfigs {
     /// New val config.
     pub fn new(
-        block: Block,
+        block: VDFProof,
         keys: KeyScheme,
         ip_address: String,
         autopay_instructions: Option<Vec<PayInstruction>>,
@@ -119,7 +119,7 @@ impl ValConfigs {
         let fn_addr_obj = fn_addr_obj.append_prod_protos(fn_pubkey, 0);
         
         Self {
-            /// Block zero of the onboarded miner
+            /// Proof zero of the onboarded miner
             block_zero: block,
             ow_human_name: owner_address.clone(),
             op_address: keys.child_1_operator.get_address().to_string(),
@@ -148,7 +148,7 @@ impl ValConfigs {
         println!("account manifest created, file saved to: {:?}", json_path);
     }
 
-    /// Extract the preimage and proof from a genesis proof block_0.json
+    /// Extract the preimage and proof from a genesis proof proof_0.json
     pub fn get_init_data(path: &PathBuf) -> Result {
         let file = std::fs::File::open(path)?;
         let reader = std::io::BufReader::new(file);
@@ -196,9 +196,9 @@ impl ValConfigs {
 
 impl UserConfigs {
     /// New user configs
-    pub fn new(block: Block) -> UserConfigs {
+    pub fn new(block: VDFProof) -> UserConfigs {
         UserConfigs {
-            /// Block zero of the onboarded miner
+            /// Proof zero of the onboarded miner
             block_zero: block,
         }
     }
@@ -213,7 +213,7 @@ impl UserConfigs {
             .expect("Could not write account.json");
         println!("Account manifest saved to: {:?}", json_path);
     }
-    /// Extract the preimage and proof from a genesis proof block_0.json
+    /// Extract the preimage and proof from a genesis proof proof_0.json
     pub fn get_init_data(path: &PathBuf) -> Result {
         let file = std::fs::File::open(path)?;
         let reader = std::io::BufReader::new(file);
@@ -250,7 +250,7 @@ fn test_parse_account_file() {
 fn val_config_ip_address() {
     use diem_types::network_address::encrypted::EncNetworkAddress;
 
-    let block = Block {
+    let block = VDFProof {
         height: 0u64,
         elapsed_secs: 0u64,
         preimage: Vec::new(),
diff --git a/ol/types/src/block.rs b/ol/types/src/block.rs
index 0f93e68bbf..a7b10e28eb 100644
--- a/ol/types/src/block.rs
+++ b/ol/types/src/block.rs
@@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize};
 use std::{fs, io::BufReader, path::PathBuf};
 /// Data structure and serialization of 0L delay proof.
 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct Block {
-    /// Block Height
+pub struct VDFProof {
+    /// Proof Height
     pub height: u64,
     /// Elapsed Time in seconds
     pub elapsed_secs: u64,
@@ -23,18 +23,18 @@ pub struct Block {
     pub security: Option<u16>,
 }
 
-impl Block {
+impl VDFProof {
 
-    /// Extract the preimage and proof from a genesis proof block_0.json
+    /// Extract the preimage and proof from a genesis proof proof_0.json
     pub fn get_genesis_tx_data(path: &PathBuf) -> Result<(Vec<u8>,Vec<u8>),std::io::Error> {
         let file = std::fs::File::open(path)?;
         let reader = std::io::BufReader::new(file);
-        let block: Block = serde_json::from_reader(reader).expect("Genesis block should deserialize");
+        let block: VDFProof = serde_json::from_reader(reader).expect("Genesis block should deserialize");
         return Ok((block.preimage, block.proof));
     }
 
     /// new object deserialized from file
-    pub fn parse_block_file(path: PathBuf) -> Block{
+    pub fn parse_block_file(path: PathBuf) -> VDFProof{
         let file = fs::File::open(&path).expect(&format!("Could not open block file: {:?}", path.to_str()));
         let reader = BufReader::new(file);
         serde_json::from_reader(reader).unwrap()
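With the rename, downstream code loads a proof from disk through `VDFProof::parse_block_file`, exactly as the onboarding wizards above do for `proof_0.json`. A minimal usage sketch, assuming `ol_types` from this workspace is available as a dependency and using the default 0L home only as an example path:

```
// Usage sketch for the renamed struct: load a genesis proof and inspect it.
use ol_types::block::VDFProof;
use std::path::PathBuf;

fn main() {
    // Example location; substitute your own node home.
    let path = PathBuf::from("/root/.0L/vdf_proofs/proof_0.json");
    let proof: VDFProof = VDFProof::parse_block_file(path);
    println!(
        "height {}, preimage {} bytes, proof {} bytes",
        proof.height,
        proof.preimage.len(),
        proof.proof.len()
    );
}
```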
diff --git a/ol/types/src/config.rs b/ol/types/src/config.rs
index 019950e272..29bd7df1c4 100644
--- a/ol/types/src/config.rs
+++ b/ol/types/src/config.rs
@@ -20,7 +20,7 @@ use std::{
     str::FromStr
 };
 
-use crate::dialogue::{what_home, what_ip, what_statement};
+use crate::dialogue::{add_tower, what_home, what_ip, what_statement};
 
 const BASE_WAYPOINT: &str = "0:683185844ef67e5c8eeaa158e635de2a4c574ce7bbb7f41f787d38db2d623ae2";
 
@@ -146,6 +146,7 @@ impl AppCfg {
             None => what_statement(),
         };
 
+
         default_config.profile.ip = match ip {
             Some(i) => i,
             None => what_ip().unwrap(),
@@ -154,6 +155,11 @@ impl AppCfg {
             what_home(None, None)
         });
 
+        // Add link to previous tower
+        if !*IS_TEST {
+            default_config.profile.tower_link = add_tower(&default_config);
+        }
+        
         if source_path.is_some() {
             // let source_path = what_source();
             default_config.workspace.source_path = source_path.clone();
@@ -340,7 +346,7 @@ impl Default for Workspace {
         Self {
             node_home: dirs::home_dir().unwrap().join(NODE_HOME),
             source_path: None,
-            block_dir: "blocks".to_owned(),
+            block_dir: "vdf_proofs".to_owned(),
             db_path: default_db_path(),
             stdlib_bin_path: None,
         }
@@ -365,7 +371,7 @@ pub struct ChainInfo {
 impl Default for ChainInfo {
     fn default() -> Self {
         Self {
-            chain_id: "experimental".to_owned(),
+            chain_id: "1".to_string(),
             base_epoch: Some(0),
             // Mock Waypoint. Miner complains without.
             base_waypoint: Waypoint::from_str(BASE_WAYPOINT).ok(),
@@ -393,6 +399,9 @@ pub struct Profile {
 
     /// Other nodes to connect for fallback connections
     pub upstream_nodes: Option<Vec<Url>>,
+
+    /// Link to another delay tower.
+    pub tower_link: Option<String>,
 }
 
 impl Default for Profile {
@@ -404,6 +413,7 @@ impl Default for Profile {
             ip: "0.0.0.0".parse().unwrap(),
             default_node: Some("http://localhost:8080".parse().expect("parse url")),
             upstream_nodes: Some(vec!["http://localhost:8080".parse().expect("parse url")]),
+            tower_link: None,
         }
     }
 }
diff --git a/ol/types/src/dialogue.rs b/ol/types/src/dialogue.rs
index 79dc7bdb66..9e7099eb27 100644
--- a/ol/types/src/dialogue.rs
+++ b/ol/types/src/dialogue.rs
@@ -1,21 +1,23 @@
 //! get home path or set it
-use anyhow::Error;
+use anyhow::{bail, Error};
 use dialoguer::{Confirm, Input};
+use diem_crypto::HashValue;
 use diem_global_constants::NODE_HOME;
-use std::{net::Ipv4Addr, path::PathBuf};
+use glob::glob;
+use hex::encode;
+use std::{fs, net::Ipv4Addr, path::PathBuf};
 
-use crate::config::IS_TEST;
+use crate::{block::VDFProof, config::{AppCfg, IS_TEST}};
 
 /// interact with user to get the home path for files
 pub fn what_home(swarm_path: Option<PathBuf>, swarm_persona: Option<String>) -> PathBuf {
-
     // For dev and CI setup
     if let Some(path) = swarm_path {
-      return swarm_home(path, swarm_persona);
+        return swarm_home(path, swarm_persona);
     } else {
-      if *IS_TEST {
-        return dirs::home_dir().unwrap().join(NODE_HOME)
-      }
+        if *IS_TEST {
+            return dirs::home_dir().unwrap().join(NODE_HOME);
+        }
     }
 
     let mut default_home_dir = dirs::home_dir().unwrap();
@@ -62,17 +64,16 @@ pub fn what_source() -> Option {
 
 /// interact with user to get ip address
 pub fn what_ip() -> Result<Ipv4Addr, Error> {
-
     let system_ip = match machine_ip::get() {
         Some(ip) => ip.to_string(),
         None => "127.0.0.1".to_string(),
     };
     let ip = system_ip
-            .parse::<Ipv4Addr>()
-            .expect("Could not parse IP address: {:?}");
+        .parse::<Ipv4Addr>()
+        .expect("Could not parse IP address: {:?}");
 
     if *IS_TEST {
-      return Ok(ip)
+        return Ok(ip);
     }
 
     let txt = &format!(
@@ -98,7 +99,7 @@ pub fn what_ip() -> Result {
 /// interact with user to get a statement
 pub fn what_statement() -> String {
     if *IS_TEST {
-      return "test".to_owned()
+        return "test".to_owned();
     }
     Input::new()
         .with_prompt("Enter a (fun) statement to go into your first transaction")
@@ -107,6 +108,29 @@ pub fn what_statement() -> String {
             "We need some text unique to you which will go into your the first proof of your tower",
         )
 }
+
+/// interact with user to optionally link this tower to the last proof of a previous tower
+pub fn add_tower(config: &AppCfg) -> Option<String> {
+    let block = find_last_legacy_block(&config.workspace.node_home.join("blocks")).unwrap();
+    let hash = hash_last_proof(&block.proof);
+    let txt = "(optional) want to link to another tower's last hash?";
+    match Confirm::new().with_prompt(txt).interact().unwrap() {
+        false => None,
+        true => {
+          let hash_string = encode(hash);
+          let txt = format!("Use this hash as your tower link? {} ", &hash_string);
+          match Confirm::new().with_prompt(txt).interact().unwrap() {
+            true => Some(hash_string),
+            false => { 
+              Input::new()
+                .with_prompt("Enter hash of last proof data")
+                .interact_text()
+                .ok()
+            },
+        }
+    }
+  }
+}
 /// returns node_home
 /// usually something like "/root/.0L"
 /// in case of swarm like "....../swarm_temp/0" for alice
@@ -121,3 +145,43 @@ fn swarm_home(mut swarm_path: PathBuf, swarm_persona: Option) -> PathBuf
     }
     swarm_path
 }
+
+// helper to parse the existing blocks in the miner's path. This function receives any path. Note: the path is configured in miner.toml which abscissa Configurable parses, see commands.rs.
+fn find_last_legacy_block(blocks_dir: &PathBuf) -> Result<VDFProof, Error> {
+    let mut max_block: Option<u64> = None;
+    let mut max_block_path = None;
+    // iterate through all json files in the directory.
+    for entry in glob(&format!("{}/block_*.json", blocks_dir.display()))
+        .expect("Failed to read glob pattern")
+    {
+        if let Ok(entry) = entry {
+            let block_file =
+                fs::read_to_string(&entry).expect("Could not read latest block file in path");
+
+            let block: VDFProof = serde_json::from_str(&block_file)?;
+            let blocknumber = block.height;
+            if max_block.is_none() {
+                max_block = Some(blocknumber);
+                max_block_path = Some(entry);
+            } else {
+                if blocknumber > max_block.unwrap() {
+                    max_block = Some(blocknumber);
+                    max_block_path = Some(entry);
+                }
+            }
+        }
+    }
+    
+    if let Some(p) = max_block_path {
+        let b = fs::read_to_string(p).expect("Could not read latest block file in path");
+        match serde_json::from_str(&b) {
+            Ok(v) => Ok(v),
+            Err(e) => bail!(e),
+        }
+    } else {
+        bail!("cannot find a legacy block in: {:?}", blocks_dir)
+    }
+}
+fn hash_last_proof(proof: &Vec<u8>) -> Vec<u8>{
+  HashValue::sha3_256_of(proof).to_vec()
+}
\ No newline at end of file
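The tower link that `add_tower` offers is just the hex encoding of the SHA3-256 hash of the last legacy proof's `proof` bytes; a 32-byte digest hex-encodes to 64 ASCII characters, which is exactly the `LINK_TO_TOWER` slot reserved in the new genesis preimage. A standalone sketch of the derivation, again substituting the `sha3` and `hex` crates for `diem_crypto::HashValue`:

```
// Sketch of deriving a tower link from the last proof of an existing tower:
// SHA3-256 of the proof bytes, hex-encoded (64 characters).
use sha3::{Digest, Sha3_256};

fn tower_link(last_proof_bytes: &[u8]) -> String {
    hex::encode(Sha3_256::digest(last_proof_bytes))
}

fn main() {
    // Hypothetical proof bytes taken from the highest block_*.json on disk.
    let last_proof = vec![0xAB_u8; 64];
    let link = tower_link(&last_proof);
    assert_eq!(link.len(), 64);
    println!("tower_link = {}", link);
}
```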
diff --git a/ol/types/src/fixtures.rs b/ol/types/src/fixtures.rs
index 884139858f..2ab86f5200 100644
--- a/ol/types/src/fixtures.rs
+++ b/ol/types/src/fixtures.rs
@@ -1,6 +1,7 @@
 //! helper to get fixtures data from files in ol/fixtures folder.
 use std::{fs, path::{Path, PathBuf}};
-use crate::{block::Block, config::{AppCfg, parse_toml}};
+
+use crate::{block::VDFProof, config::{AppCfg, parse_toml}};
 
 /// get mnemonic
 pub fn get_persona_mnem(persona: &str) -> String {
@@ -78,12 +79,12 @@ pub fn get_persona_toml_configs(persona: &str) -> AppCfg {
 
 
 /// get block 0
-pub fn get_persona_block_zero(persona: &str, env: &str) -> Block {
+pub fn get_persona_block_zero(persona: &str, env: &str) -> VDFProof {
   let path= env!("CARGO_MANIFEST_DIR");
   let buf = Path::new(path)
   .parent()
   .unwrap()
-  .join(format!("fixtures/blocks/{}/{}/block_0.json", env, persona));
+  .join(format!("fixtures/vdf_proofs/{}/{}/proof_0.json", env, persona));
 
   let s = fs::read_to_string(&buf).expect("could not find block file");
   serde_json::from_str(&s).expect(&format!("could not parse block from file: {:?}", &buf))
@@ -91,12 +92,12 @@ pub fn get_persona_block_zero(persona: &str, env: &str) -> Block {
 }
 
 /// get block 0
-pub fn get_persona_block_one(persona: &str, env: &str) -> Block {
+pub fn get_persona_block_one(persona: &str, env: &str) -> VDFProof {
   let path= env!("CARGO_MANIFEST_DIR");
   let buf = Path::new(path)
   .parent()
   .unwrap()
-  .join(format!("fixtures/blocks/{}/{}/block_1.json", env, persona));
+  .join(format!("fixtures/vdf_proofs/{}/{}/proof_1.json", env, persona));
 
   let s = fs::read_to_string(&buf).expect("could not find block file");
   serde_json::from_str(&s).expect(&format!("could not parse block from file: {:?}", &buf))
diff --git a/ol/types/src/genesis_proof.rs b/ol/types/src/genesis_proof.rs
index d831f2d977..c23f7290dc 100644
--- a/ol/types/src/genesis_proof.rs
+++ b/ol/types/src/genesis_proof.rs
@@ -18,7 +18,7 @@ pub struct GenesisMiningProof {
 // Default is for Swarm and testing
 impl Default for GenesisMiningProof {
     fn default() -> GenesisMiningProof {
-        // These use "alice" fixtures from ../fixtures/blocks/ and used elsewhere in the project, in both easy(stage) and hard(Prod) mode.
+        // These use "alice" fixtures from ../fixtures/vdf_proofs/ and used elsewhere in the project, in both easy(stage) and hard(Prod) mode.
         let env = if *IS_PROD {
           "prod"
         } else {
diff --git a/ol/verifiable_delay/.gitignore b/ol/verifiable_delay/.gitignore
index d2f12c5267..151d64f8a1 100644
--- a/ol/verifiable_delay/.gitignore
+++ b/ol/verifiable_delay/.gitignore
@@ -39,7 +39,7 @@ rls*.log
 target/
 
 #miner output
-blocks/
+vdf_proofs/
 
 #Mac files
 .DS_Store
diff --git a/testsuite/cli/src/client_proxy.rs b/testsuite/cli/src/client_proxy.rs
index 255b8649db..27560fdde6 100644
--- a/testsuite/cli/src/client_proxy.rs
+++ b/testsuite/cli/src/client_proxy.rs
@@ -62,7 +62,7 @@ use std::{
 };
 
 //////// 0L ////////
-use ol_types::{block::Block, account::ValConfigs};
+use ol_types::{block::VDFProof, account::ValConfigs};
 
 const CLIENT_WALLET_MNEMONIC_FILE: &str = "client.mnemonic";
 const GAS_UNIT_PRICE: u64 = 0;
@@ -457,7 +457,7 @@ impl ClientProxy {
         file.read_to_string(&mut json_string)
             .unwrap_or_else(|err| panic!("Error while reading file: [{}]", err));
 
-        let block: Block = serde_json::from_str(&json_string).expect("could not parse json file");
+        let block: VDFProof = serde_json::from_str(&json_string).expect("could not parse json file");
 
         let (sender_address, _) = self
             .get_account_address_from_parameter(space_delim_strings[1])