Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

This commit adds a migration binary that moves clients from a postgresql… #836

Merged
merged 3 commits into from
Sep 27, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 18 additions & 9 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Check Rita and Rita Exit x86
run: cargo check --all
test:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Run Rita and Rita Exit tests
run: RUST_TEST_THREADS=1 cargo test --verbose --all
rustfmt:
Expand All @@ -37,47 +37,47 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Check for Clippy lints
run: rustup component add clippy && cargo clippy --all --all-targets --all-features -- -D warnings
audit:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Run Cargo Audit
run: cargo install cargo-audit && cargo audit
cross-mips:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Cross test mips
run: cargo install cross && cross test --target mips-unknown-linux-musl --verbose -- --test-threads=1
cross-mipsel:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Cross test mipsel
run: cargo install cross && cross test --target mipsel-unknown-linux-musl --verbose -- --test-threads=1
cross-aarch64:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Cross test aarch64
run: cargo install cross && cross test --target aarch64-unknown-linux-musl --verbose -- --test-threads=1
cross-armv7:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: Swatinem/rust-cache@v1
- uses: Swatinem/rust-cache@v2
- name: Cross test armv7
run: cargo install cross && cross test --target armv7-unknown-linux-musleabihf --verbose -- --test-threads=1
integration-test-five-nodes:
Expand Down Expand Up @@ -135,4 +135,13 @@ jobs:
- name: Install Wireguard
run: sudo apt-get update && sudo apt install -y wireguard linux-source linux-headers-$(uname -r) build-essential && sudo modprobe wireguard
- name: Run integration test
run: bash scripts/integration_tests/all-up-test.sh CONTRACT_TEST
run: bash scripts/integration_tests/all-up-test.sh CONTRACT_TEST
integration-test-db-migration:
needs: integration-test-five-nodes
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Wireguard
run: sudo apt-get update && sudo apt install -y wireguard linux-source linux-headers-$(uname -r) build-essential && sudo modprobe wireguard
- name: Run integration test
run: bash scripts/integration_tests/all-up-test.sh MIGRATION_TEST
24 changes: 24 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ members = [
"rita_exit",
"rita_client",
"rita_client_registration",
"rita_db_migration",
"rita_bin",
"test_runner",
"integration_tests",
Expand Down
1 change: 1 addition & 0 deletions integration_tests/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ rita_client = { path = "../rita_client", features = ["dev_env"] }
rita_common = { path = "../rita_common", features = ["integration_test"] }
rita_exit = { path = "../rita_exit", features = ["dev_env"] }
rita_client_registration = { path = "../rita_client_registration" }
rita_db_migration = { path = "../rita_db_migration" }
ctrlc = { version = "3.2.1", features = ["termination"] }
diesel = { version = "1.4", features = ["postgres", "r2d2"] }
diesel_migrations = { version = "1.4", features = ["postgres"] }
Expand Down
141 changes: 141 additions & 0 deletions integration_tests/src/db_migration_test.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
use std::{
thread,
time::{Duration, Instant},
};

use clarity::{Address, PrivateKey};
use diesel::{PgConnection, RunQueryDsl};
use log::{error, info};
use rita_client_registration::{
client_db::get_all_regsitered_clients, register_client_batch_loop::register_client_batch_loop,
};
use rita_common::usage_tracker::tests::test::random_identity;
use rita_db_migration::{
get_database_connection, models::Client, schema::clients::dsl::clients, start_db_migration,
};
use web30::client::Web3;

use crate::{
payments_eth::{get_miner_key, WEB3_TIMEOUT},
setup_utils::database::start_postgres,
utils::{deploy_contracts, get_eth_node},
};

pub const DB_URI: &str = "postgres://postgres@localhost/test";

/// This tests the rita_db_migration binary and verifies that clients actually migrate from a postgresql db
/// to a smart contract
pub async fn run_db_migration_test() {
info!("Starting db migration test");

info!("Waiting to deploy contracts");
// Deploy the Althea DB smart contract that will receive the migrated clients
let althea_db_addr = deploy_contracts().await;
info!("DB addr is {}", althea_db_addr);

info!("Starting postrgresql db");
start_postgres();

// Open a connection to the local postgres instance started above
let conn = get_database_connection(DB_URI.to_string()).expect("Please fix db path");

let num_clients = 10;
// Add a bunch of dummy clients to the db to migrate
add_dummy_clients_to_db(num_clients, &conn);

// NOTE(review): blocking sleep inside an async fn stalls the executor thread;
// presumably acceptable in this single-task integration test — confirm
thread::sleep(Duration::from_secs(10));

info!("Run migration code");

let miner_private_key: PrivateKey = get_miner_key();
// Start registration loop
register_client_batch_loop(get_eth_node(), althea_db_addr, miner_private_key);

// Read every client out of postgres and queue them for on-chain registration
match start_db_migration(DB_URI.to_string()) {
Ok(_) => println!("Successfully migrated all clients!"),
Err(e) => println!("Failed to migrate clients with {}", e),
}

info!("Waiting for register loop to migrate all clients");
thread::sleep(Duration::from_secs(10));

// Poll the contract until all clients show up (panics on timeout)
validate_db_migration(num_clients, althea_db_addr, miner_private_key).await;
}

/// Inserts `num_of_entries` randomly generated clients into the postgres
/// `clients` table so the migration has data to move.
///
/// Panics if any insert fails, since the test cannot proceed without the
/// dummy data.
fn add_dummy_clients_to_db(num_of_entries: usize, conn: &PgConnection) {
    for idx in 0..num_of_entries {
        let entry = random_db_client();
        info!("Inserting new client {}: {}", idx, entry.wg_pubkey);
        let insert_res = diesel::insert_into(clients).values(&entry).execute(conn);
        if let Err(e) = insert_res {
            panic!("Why did a client {} insert fail? {}", idx, e);
        }
    }
}

/// Builds a `Client` database row from a randomly generated identity.
///
/// Only the identity fields (mesh ip, wireguard pubkey, eth address) carry
/// meaningful values; every other column is zeroed/empty since the migration
/// only needs rows that can be registered on chain.
fn random_db_client() -> Client {
    let random_id = random_identity();
    Client {
        mesh_ip: random_id.mesh_ip.to_string(),
        wg_pubkey: random_id.wg_public_key.to_string(),
        wg_port: 0,
        eth_address: random_id.eth_address.to_string(),
        // String::new() is the idiomatic (and allocation-free) empty string,
        // unlike "".to_string()
        internal_ip: String::new(),
        internet_ipv6: String::new(),
        nickname: String::new(),
        email: String::new(),
        phone: String::new(),
        country: String::new(),
        email_code: String::new(),
        verified: true,
        email_sent_time: 0,
        text_sent: 0,
        last_balance_warning_time: 0,
        last_seen: 0,
    }
}

async fn validate_db_migration(
num_clients: usize,
althea_db_addr: Address,
miner_private_key: PrivateKey,
) {
let miner_pub_key = miner_private_key.to_address();
let contact = Web3::new(&get_eth_node(), WEB3_TIMEOUT);

let start = Instant::now();
loop {
let client_vec = get_all_regsitered_clients(&contact, miner_pub_key, althea_db_addr).await;
if client_vec.is_err() {
if Instant::now() - start > Duration::from_secs(300) {
panic!("Failed to migrate clients after waiting for 5 mins");
}
error!("No clients have been registered so far, waiting..",);
thread::sleep(Duration::from_secs(10));
} else if let Ok(client_list) = client_vec {
if client_list.len() == num_clients {
info!(
"All clients have successuflly migrated from postgresql db to smart contract!"
);
info!("DB clients are :\n");
for id in client_list {
info!("{}", id);
}
break;
} else {
if Instant::now() - start > Duration::from_secs(300) {
panic!(
"Failed to migrate {} clients after waiting for 5 mins. Only migrated {}",
num_clients,
client_list.len()
);
}
error!(
"{} clients have been registered so far, waiting..",
client_list.len()
);
thread::sleep(Duration::from_secs(10));
}
}
}
}
1 change: 1 addition & 0 deletions integration_tests/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use std::time::Duration;

pub mod config;
pub mod contract_test;
pub mod db_migration_test;
pub mod debts;
pub mod five_nodes;
pub mod mutli_exit;
Expand Down
2 changes: 1 addition & 1 deletion integration_tests/src/setup_utils/database.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ pub fn start_postgres() {
const DB_URL_LOCAL: &str = "postgres://[email protected]/test";
// for the rita exit instances
const POSTGRES_DATABASE_LOCATION: &str = "/var/lib/postgresql/data";
let migration_directory = Path::new("/althea_rs/exit_db/migrations/");
let migration_directory = Path::new("/althea_rs/integration_tests/src/setup_utils/migrations/");
let postgres_pid_path: String = format!("{}/postmaster.pid", POSTGRES_DATABASE_LOCATION);

// only init and launch if postgres has not already been started
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- Down migration for the Diesel initial setup: removes the helper functions
-- installed by the matching up.sql.
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
-- Up migration for the Diesel initial setup: installs the standard
-- `updated_at` bookkeeping trigger helpers.
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

-- Trigger body: bumps `updated_at` to now() on any real row change, unless
-- the statement already set `updated_at` itself.
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
-- Drops the `clients` table created by the matching up.sql migration.
DROP TABLE clients;
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-- Creates the exit `clients` table: one row per registered router, keyed by
-- its mesh IP. NOTE(review): `internet_ipv6` is absent here; presumably it is
-- added by a later migration — confirm against the Rust `Client` model.
CREATE TABLE clients
(
mesh_ip varchar(40) CONSTRAINT firstkey PRIMARY KEY,
wg_pubkey varchar(44) NOT NULL,
wg_port integer NOT NULL,
eth_address varchar(64) NOT NULL,
internal_ip varchar(42) NOT NULL,
nickname varchar(32) NOT NULL,
email varchar(512) NOT NULL,
phone varchar(32) NOT NULL,
country varchar(8) NOT NULL,
email_code varchar(16) NOT NULL,
verified boolean DEFAULT FALSE NOT NULL,
email_sent_time bigint DEFAULT 0 NOT NULL,
text_sent integer DEFAULT 0 NOT NULL,
last_seen bigint DEFAULT 0 NOT NULL,
last_balance_warning_time bigint DEFAULT 0 NOT NULL
);
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- This file should undo anything in `up.sql`
-- Reverts the ipv6 migration: removes the `internet_ipv6` column from
-- `clients` and drops the `assigned_ips` table (both presumably created by
-- the matching up.sql, which is not shown here — confirm).
ALTER TABLE clients
DROP COLUMN internet_ipv6
;
DROP TABLE assigned_ips;
Loading