ProcessingJob dataloader
iamvigneshwars committed Apr 10, 2024
1 parent eed8ea4 commit e39250b
Showing 4 changed files with 87 additions and 40 deletions.
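
This commit swaps the per-field database queries in the GraphQL resolvers for async-graphql dataloaders, so sibling fields resolved in the same request are fetched with one batched lookup per loader rather than one query each. The following is a minimal, self-contained sketch of that batching pattern, not code from this repository; it assumes async-graphql 7 with the "dataloader" feature and tokio with the "macros" and "rt-multi-thread" features, and SquareLoader is a made-up stand-in for a database-backed loader.

    use std::collections::HashMap;

    use async_graphql::dataloader::{DataLoader, Loader};

    // Hypothetical loader: squares its keys instead of hitting a database.
    struct SquareLoader;

    impl Loader<u32> for SquareLoader {
        type Value = u32;
        type Error = async_graphql::Error;

        async fn load(&self, keys: &[u32]) -> Result<HashMap<u32, Self::Value>, Self::Error> {
            // One call receives the whole de-duplicated batch of keys.
            Ok(keys.iter().map(|&k| (k, k * k)).collect())
        }
    }

    #[tokio::main]
    async fn main() -> Result<(), async_graphql::Error> {
        let loader = DataLoader::new(SquareLoader, tokio::spawn);
        // Concurrent `load_one` calls are coalesced into a single `load` batch.
        let (a, b) = tokio::join!(loader.load_one(2), loader.load_one(3));
        assert_eq!(a?, Some(4));
        assert_eq!(b?, Some(9));
        Ok(())
    }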
15 changes: 0 additions & 15 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion charts/processed_data/charts/processed_data/Chart.yaml
@@ -5,4 +5,4 @@ type: application
 
 version: 0.1.0
 
-appVersion: 0.1.0-rc5
+appVersion: 0.1.0-rc6
1 change: 0 additions & 1 deletion processed_data/Cargo.toml
@@ -10,7 +10,6 @@ anyhow = "1.0.81"
 async-graphql = { version = "7.0.2", default-features = false, features = [
     "chrono",
     "graphiql",
-    "tracing",
     "dataloader",
 ] }
 async-graphql-axum = { version = "7.0.2" }
109 changes: 86 additions & 23 deletions processed_data/src/graphql/mod.rs
@@ -32,7 +32,15 @@ pub fn root_schema_builder(
 ) -> SchemaBuilder<Query, EmptyMutation, EmptySubscription> {
     Schema::build(Query, EmptyMutation, EmptySubscription)
         .data(DataLoader::new(
-            DataCollectionLoader::new(database.clone()),
+            ProcessedDataLoader::new(database.clone()),
             tokio::spawn,
         ))
+        .data(DataLoader::new(
+            ProcessingJobDataLoader::new(database.clone()),
+            tokio::spawn,
+        ))
+        .data(DataLoader::new(
+            ProcessingJobParameterDataLoader::new(database.clone()),
+            tokio::spawn,
+        ))
         .data(database)
@@ -43,18 +51,33 @@ pub fn root_schema_builder(
 #[derive(Debug, Clone, Default)]
 pub struct Query;
 
-pub struct DataCollectionLoader(DatabaseConnection);
+pub struct ProcessedDataLoader(DatabaseConnection);
+pub struct ProcessingJobDataLoader(DatabaseConnection);
+pub struct ProcessingJobParameterDataLoader(DatabaseConnection);
 
-impl DataCollectionLoader {
+impl ProcessingJobDataLoader {
     fn new(database: DatabaseConnection) -> Self {
         Self(database)
     }
 }
 
-impl Loader<u32> for DataCollectionLoader {
+impl ProcessedDataLoader {
+    fn new(database: DatabaseConnection) -> Self {
+        Self(database)
+    }
+}
+
+impl ProcessingJobParameterDataLoader {
+    fn new(database: DatabaseConnection) -> Self {
+        Self(database)
+    }
+}
+
+impl Loader<u32> for ProcessedDataLoader {
     type Value = DataProcessing;
     type Error = async_graphql::Error;
 
     #[instrument(name = "load_processed_data", skip(self))]
     async fn load(&self, keys: &[u32]) -> Result<HashMap<u32, Self::Value>, Self::Error> {
         let mut results = HashMap::new();
+        let keys_vec: Vec<u32> = keys.iter().cloned().collect();

GitHub Actions / lint annotations on this hunk: missing documentation for a struct (lines 54-56); missing documentation for an associated function (lines 59, 65, 71); called `iter().cloned().collect()` on a slice to create a `Vec`; calling `to_vec()` is both faster and more readable (line 83).
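
As an aside, the `to_vec()` suggestion in the lint annotation above amounts to a one-line change; a tiny runnable illustration of the equivalence (a suggestion only, not part of this commit):

    fn main() {
        let keys: &[u32] = &[1, 2, 3];
        // What the lint suggests: `to_vec()` replaces `iter().cloned().collect()`.
        let via_collect: Vec<u32> = keys.iter().cloned().collect();
        let via_to_vec: Vec<u32> = keys.to_vec();
        assert_eq!(via_collect, via_to_vec);
    }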
@@ -74,30 +97,76 @@ impl Loader<u32> for DataCollectionLoader {
     }
 }
 
+impl Loader<u32> for ProcessingJobDataLoader {
+    type Value = Vec<ProcessingJob>;
+    type Error = async_graphql::Error;
+
+    #[instrument(name = "load_processing_job", skip(self))]
+    async fn load(&self, keys: &[u32]) -> Result<HashMap<u32, Self::Value>, Self::Error> {
+        let mut results = HashMap::new();
+        let keys_vec: Vec<u32> = keys.iter().cloned().collect();
+        let records = processing_job::Entity::find()
+            .filter(processing_job::Column::DataCollectionId.is_in(keys_vec))
+            .all(&self.0)
+            .await?;
+
+        for record in records {
+            let data_collection_id = record.data_collection_id.unwrap();
+            let data = ProcessingJob::from(record);
+
+            results
+                .entry(data_collection_id)
+                .or_insert_with(Vec::new)
+                .push(data)
+        }
+        Ok(results)
+    }
+}
+
+impl Loader<u32> for ProcessingJobParameterDataLoader {
+    type Value = Vec<ProcessingJobParameter>;
+    type Error = async_graphql::Error;
+
+    #[instrument(name = "load_processing_job_parameter", skip(self))]
+    async fn load(&self, keys: &[u32]) -> Result<HashMap<u32, Self::Value>, Self::Error> {
+        let mut results = HashMap::new();
+        let keys_vec: Vec<u32> = keys.iter().cloned().collect();
+        let records = processing_job_parameter::Entity::find()
+            .filter(processing_job_parameter::Column::ProcessingJobId.is_in(keys_vec))
+            .all(&self.0)
+            .await?;
+
+        for record in records {
+            let processing_job_id = record.processing_job_id.unwrap();
+            let data = ProcessingJobParameter::from(record);
+            results
+                .entry(processing_job_id)
+                .or_insert_with(Vec::new)
+                .push(data)
+        }
+
+        Ok(results)
+    }
+}
+
 #[ComplexObject]
 impl DataCollection {
     /// Fetched all the processed data from data collection during a session
     async fn processed_data(
         &self,
         ctx: &Context<'_>,
     ) -> Result<Option<DataProcessing>, async_graphql::Error> {
-        let loader = ctx.data_unchecked::<DataLoader<DataCollectionLoader>>();
+        let loader = ctx.data_unchecked::<DataLoader<ProcessedDataLoader>>();
         Ok(loader.load_one(self.id).await?)
     }
 
     /// Fetched all the processing jobs
     async fn processing_jobs(
         &self,
         ctx: &Context<'_>,
-    ) -> async_graphql::Result<Vec<ProcessingJob>, async_graphql::Error> {
-        let database = ctx.data::<DatabaseConnection>()?;
-        Ok(processing_job::Entity::find()
-            .filter(processing_job::Column::DataCollectionId.eq(self.id))
-            .all(database)
-            .await?
-            .into_iter()
-            .map(ProcessingJob::from)
-            .collect())
+    ) -> async_graphql::Result<Option<Vec<ProcessingJob>>, async_graphql::Error> {
+        let loader = ctx.data_unchecked::<DataLoader<ProcessingJobDataLoader>>();
+        Ok(loader.load_one(self.id).await?)
     }
 
     /// Fetches all the automatic process

GitHub Actions / lint annotations on this hunk: called `iter().cloned().collect()` on a slice to create a `Vec`; calling `to_vec()` is both faster and more readable (lines 107, 133); question mark operator is useless here (line 160).
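
One behavioural nuance of the new loaders, sketched below with plain standard-library types rather than repository code: keys with no matching rows never receive a map entry, so `load_one` yields `None` where the old per-field query returned an empty `Vec`; a caller that wants the old shape can flatten the `Option` with `unwrap_or_default()`.

    use std::collections::HashMap;

    fn main() {
        // Toy rows of (parent id, job name), standing in for processing_job records.
        let rows = [(1_u32, "job-a"), (1, "job-b"), (3, "job-c")];
        let mut by_parent: HashMap<u32, Vec<&str>> = HashMap::new();
        for (parent_id, job) in rows {
            // Same grouping pattern as the loaders: entry + or_insert_with(Vec::new).
            by_parent.entry(parent_id).or_insert_with(Vec::new).push(job);
        }
        assert_eq!(by_parent.get(&1).map(|jobs| jobs.len()), Some(2));
        assert_eq!(by_parent.get(&2), None); // no entry, not an empty Vec
        // Flattening recovers the old "empty Vec when nothing matches" shape.
        let jobs_for_2: Vec<&str> = by_parent.get(&2).cloned().unwrap_or_default();
        assert!(jobs_for_2.is_empty());
    }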
@@ -141,15 +210,9 @@ impl ProcessingJob {
     async fn parameters(
         &self,
         ctx: &Context<'_>,
-    ) -> async_graphql::Result<Vec<ProcessingJobParameter>> {
-        let database = ctx.data::<DatabaseConnection>()?;
-        Ok(processing_job_parameter::Entity::find()
-            .filter(processing_job_parameter::Column::ProcessingJobId.eq(self.processing_job_id))
-            .all(database)
-            .await?
-            .into_iter()
-            .map(ProcessingJobParameter::from)
-            .collect())
+    ) -> async_graphql::Result<Option<Vec<ProcessingJobParameter>>> {
+        let loader = ctx.data_unchecked::<DataLoader<ProcessingJobParameterDataLoader>>();
+        Ok(loader.load_one(self.processing_job_id).await?)
     }
 }

