Skip to content

Commit

Permalink
Help to override the max parallelism
Browse files Browse the repository at this point in the history
Signed-off-by: Bugen Zhao <[email protected]>
  • Loading branch information
BugenZhao committed Oct 23, 2024
1 parent 3cb1641 commit 8451b92
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 6 deletions.
7 changes: 5 additions & 2 deletions src/meta/src/controller/fragment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -720,11 +720,14 @@ impl CatalogController {

pub async fn get_max_parallelism_by_id(&self, job_id: ObjectId) -> MetaResult<usize> {
let inner = self.inner.read().await;
let job = StreamingJob::find_by_id(job_id)
let max_parallelism: i32 = StreamingJob::find_by_id(job_id)
.select_only()
.column(streaming_job::Column::MaxParallelism)
.into_tuple()
.one(&inner.db)
.await?
.ok_or_else(|| anyhow::anyhow!("job {} not found in database", job_id))?;
Ok(job.max_parallelism as usize)
Ok(max_parallelism as usize)
}

/// Get all actor ids in the target streaming jobs.
Expand Down
12 changes: 8 additions & 4 deletions src/meta/src/controller/streaming_job.rs
Original file line number Diff line number Diff line change
Expand Up @@ -723,10 +723,14 @@ impl CatalogController {
.ok_or_else(|| MetaError::catalog_id_not_found(ObjectType::Table.as_str(), id))?;

if original_max_parallelism != max_parallelism as i32 {
bail_invalid_parameter!(
"cannot use a different max parallelism when altering or sinking into an existing table, \
please `SET STREAMING_MAX_PARALLELISM TO {}` first",
original_max_parallelism
// We already override the max parallelism in `StreamFragmentGraph` before entering this function.
// This should not happen in normal cases.
bail!(
"cannot use a different max parallelism \
when altering or creating/dropping a sink into an existing table, \
original: {}, new: {}",
original_max_parallelism,
max_parallelism
);
}

Expand Down
30 changes: 30 additions & 0 deletions src/meta/src/rpc/ddl_controller.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1059,6 +1059,16 @@ impl DdlController {
..
} = replace_table_info;

// Ensure the max parallelism unchanged before replacing table.
let original_max_parallelism = self
.metadata_manager
.get_job_max_parallelism(streaming_job.id().into())
.await?;
let fragment_graph = PbStreamFragmentGraph {
max_parallelism: original_max_parallelism as _,
..fragment_graph
};

let fragment_graph =
StreamFragmentGraph::new(&self.env, fragment_graph, &streaming_job)?;
streaming_job.set_table_fragment_id(fragment_graph.table_fragment_id());
Expand Down Expand Up @@ -1198,6 +1208,16 @@ impl DdlController {
panic!("additional replace table event only occurs when dropping sink into table")
};

// Ensure the max parallelism unchanged before replacing table.
let original_max_parallelism = self
.metadata_manager
.get_job_max_parallelism(streaming_job.id().into())
.await?;
let fragment_graph = PbStreamFragmentGraph {
max_parallelism: original_max_parallelism as _,
..fragment_graph
};

let fragment_graph =
StreamFragmentGraph::new(&self.env, fragment_graph, &streaming_job)?;
streaming_job.set_table_fragment_id(fragment_graph.table_fragment_id());
Expand Down Expand Up @@ -1344,6 +1364,16 @@ impl DdlController {
let _reschedule_job_lock = self.stream_manager.reschedule_lock_read_guard().await;
let ctx = StreamContext::from_protobuf(fragment_graph.get_ctx().unwrap());

// Ensure the max parallelism unchanged before replacing table.
let original_max_parallelism = self
.metadata_manager
.get_job_max_parallelism(streaming_job.id().into())
.await?;
let fragment_graph = PbStreamFragmentGraph {
max_parallelism: original_max_parallelism as _,
..fragment_graph
};

// 1. build fragment graph.
let fragment_graph = StreamFragmentGraph::new(&self.env, fragment_graph, &streaming_job)?;
streaming_job.set_table_fragment_id(fragment_graph.table_fragment_id());
Expand Down

0 comments on commit 8451b92

Please sign in to comment.