Skip to content

Commit

Permalink
Track loading progress in database.
Browse files Browse the repository at this point in the history
  • Loading branch information
Syfaro committed Jun 1, 2024
1 parent 2247742 commit 5b04421
Show file tree
Hide file tree
Showing 19 changed files with 231 additions and 172 deletions.
56 changes: 0 additions & 56 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@ percent-encoding = "2.2"
prometheus = { version = "0.13", features = ["process"] }
radix_fmt = "1"
rand = "0.8"
redis = { version = "0.25.4", features = ["tokio-comp", "aio", "connection-manager"] }
regex = "1"
reqwest = { version = "0.11", features = ["json", "cookies"] }
roux = "2"
Expand Down
4 changes: 1 addition & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,9 @@ up and running.
It has a few software dependencies to run:

* PostgreSQL with the [bktree] extension as the primary backing datastore
* Redis to send events about user actions
* NATS for pubsub and distributing events
* [Faktory] for managing background tasks
* [faktory-cron] to run scheduled jobs (provided in jobs.yaml)
* NATS for publishing information on newly discovered public images

It also requires credentials or app tokens for the following sites:

Expand All @@ -60,7 +59,6 @@ It also requires credentials or app tokens for the following sites:
* S3-like (endpoint, region, bucket, access, and secret key)
* SMTP (host, username, and password)
* Telegram (token and domain)
* Twitter (API token and secret)

[bktree]: https://github.com/fake-name/pg-spgist_hamming
[faktory]: https://github.com/contribsys/faktory
Expand Down
1 change: 1 addition & 0 deletions migrations/20240601011418_loading_accounts.down.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- Reverts 20240601011418_loading_accounts.up.sql by removing the
-- import-progress tracking table; any in-flight progress data is discarded.
DROP TABLE linked_account_import;
8 changes: 8 additions & 0 deletions migrations/20240601011418_loading_accounts.up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
-- Tracks the progress of loading a linked account's items.
-- One row per linked account, removed automatically with the account.
CREATE TABLE linked_account_import (
    -- Account whose items are being imported.
    linked_account_id uuid PRIMARY KEY REFERENCES linked_account (id) ON DELETE CASCADE,
    -- When the import was started.
    started_at timestamptz NOT NULL DEFAULT now(),
    -- Set when the import finishes; NULL while still in progress.
    completed_at timestamptz,
    -- How many items the import expects to load in total.
    expected_count integer NOT NULL,
    -- Identifiers of every item expected to be loaded.
    expected_ids text[] NOT NULL,
    -- Identifiers of the items loaded so far.
    loaded_ids text[] NOT NULL DEFAULT '{}'::text[]
);
6 changes: 6 additions & 0 deletions queries/linked_account_import/complete.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- Marks a linked account's import as finished by stamping the completion
-- time ($1 = linked_account_id).
-- The completed_at IS NULL guard makes the statement idempotent: a repeated
-- call no longer overwrites the original completion timestamp.
UPDATE
    linked_account_import
SET
    completed_at = current_timestamp
WHERE
    linked_account_id = $1
    AND completed_at IS NULL;
9 changes: 9 additions & 0 deletions queries/linked_account_import/loaded.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
-- Records that one item ($2) was loaded for a linked account's import
-- ($1 = linked_account_id) and returns progress so the caller can tell
-- when loaded_count reaches expected_count.
-- The CASE guard makes the statement idempotent: retrying the same item
-- no longer appends a duplicate id, which would inflate loaded_count past
-- expected_count. A row is still returned either way, so callers that
-- expect exactly one RETURNING row keep working.
UPDATE
    linked_account_import
SET
    loaded_ids = CASE
        WHEN loaded_ids @> ARRAY[$2] THEN loaded_ids
        ELSE array_append(loaded_ids, $2)
    END
WHERE
    linked_account_id = $1
RETURNING
    expected_count,
    cardinality(loaded_ids) loaded_count;
4 changes: 4 additions & 0 deletions queries/linked_account_import/start.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-- Begins tracking an import for a linked account:
--   $1 = linked_account_id, $2 = expected_count, $3 = expected_ids.
-- started_at and loaded_ids fall back to their column defaults
-- (current_timestamp and an empty array).
-- NOTE(review): linked_account_id is the primary key, so starting an
-- import twice for the same account raises a unique-violation error —
-- confirm callers clean up or tolerate this before a re-import/retry.
INSERT INTO
    linked_account_import (linked_account_id, expected_count, expected_ids)
VALUES
    ($1, $2, $3);
133 changes: 133 additions & 0 deletions sqlx-data.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 0 additions & 2 deletions src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ pub enum Error {
Database(#[from] sqlx::Error),
#[error("template error: {0}")]
Template(#[from] askama::Error),
#[error("redis error: {0}")]
Redis(#[from] redis::RedisError),
#[error("job error: {0}")]
Job(#[from] foxlib::jobs::Error),
#[error("actix error: {0}")]
Expand Down
1 change: 0 additions & 1 deletion src/jobs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -508,7 +508,6 @@ impl Display for JobInitiator {
pub struct JobContext {
pub producer: FaktoryProducer,
pub conn: sqlx::PgPool,
pub redis: redis::aio::ConnectionManager,
pub s3: rusoto_s3::S3Client,
pub fuzzysearch: Arc<fuzzysearch::FuzzySearch>,
pub mailer: crate::Mailer,
Expand Down
9 changes: 0 additions & 9 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -604,12 +604,6 @@ async fn main() {
);
let s3 = rusoto_s3::S3Client::new_with(client, provider, region);

let redis_client =
redis::Client::open(config.redis_dsn.clone()).expect("could not create redis client");
let redis_manager = redis::aio::ConnectionManager::new(redis_client.clone())
.await
.expect("could not create redis connection manager");

let fuzzysearch = fuzzysearch::FuzzySearch::new_with_opts(fuzzysearch::FuzzySearchOpts {
endpoint: Some(config.fuzzysearch_host.clone()),
api_key: config.fuzzysearch_api_key.clone(),
Expand Down Expand Up @@ -674,7 +668,6 @@ async fn main() {
let ctx = jobs::JobContext {
producer,
conn: pool,
redis: redis_manager,
s3,
fuzzysearch: std::sync::Arc::new(fuzzysearch),
mailer,
Expand Down Expand Up @@ -745,8 +738,6 @@ async fn main() {
.wrap(actix_web::middleware::Compress::default())
.app_data(web::Data::new(pool.clone()))
.app_data(web::Data::new(s3.clone()))
.app_data(web::Data::new(redis_client.clone()))
.app_data(web::Data::new(redis_manager.clone()))
.app_data(web::Data::new(config.clone()))
.app_data(web::Data::new(telegram_login.clone()))
.app_data(web::Data::new(producer.clone()))
Expand Down
Loading

0 comments on commit 5b04421

Please sign in to comment.