feat: support backfill_rate_limit for source backfill #19445

Merged: xxchan merged 1 commit into main from 11-16-feat_support_backfill_rate_limit_for_source_backfill on Nov 20, 2024.
e2e_test/source_inline/kafka/alter/rate_limit_source_kafka_shared.slt (new file, 134 additions, 0 deletions)
@@ -0,0 +1,134 @@
control substitution on

############## Create kafka seed data

statement ok
create table kafka_seed_data (v1 int);

statement ok
insert into kafka_seed_data select * from generate_series(1, 1000);

############## Sink into kafka

statement ok
create sink kafka_sink
from
  kafka_seed_data with (
    ${RISEDEV_KAFKA_WITH_OPTIONS_COMMON},
    topic = 'test_rate_limit_shared',
    type = 'append-only',
    force_append_only='true'
  );

############## Source from kafka (rate_limit = 0)

# Wait for the topic to be created
skipif in-memory
sleep 5s

statement ok
create source kafka_source (v1 int) with (
  ${RISEDEV_KAFKA_WITH_OPTIONS_COMMON},
  topic = 'test_rate_limit_shared',
  source_rate_limit = 0,
) FORMAT PLAIN ENCODE JSON

statement ok
flush;

############## Check data

skipif in-memory
sleep 3s

############## Create MV on source

statement ok
create materialized view rl_mv1 as select count(*) from kafka_source;

############## Although the source is rate limited, the MV's SourceBackfill is not.

statement ok
flush;

query I
select * from rl_mv1;
----
1000

############## Insert more data. It will not reach the MV, because the source itself is still rate limited to 0.

statement ok
insert into kafka_seed_data select * from generate_series(1, 1000);

sleep 3s

query I
select * from rl_mv1;
----
1000

statement ok
SET BACKGROUND_DDL=true;

statement ok
SET BACKFILL_RATE_LIMIT=0;

statement ok
create materialized view rl_mv2 as select count(*) from kafka_source;

sleep 1s

query T
SELECT progress from rw_ddl_progress;
----
0 rows consumed

############## Alter Source (rate_limit = 0 --> rate_limit = 1000)

statement ok
alter source kafka_source set source_rate_limit to 1000;

sleep 3s

query I
select * from rl_mv1;
----
2000

query T
SELECT progress from rw_ddl_progress;
----
0 rows consumed

statement error
alter materialized view rl_mv2 set source_rate_limit = 1000;
----
db error: ERROR: Failed to run the query

Caused by:
  sql parser error: expected SCHEMA/PARALLELISM/BACKFILL_RATE_LIMIT after SET, found: source_rate_limit
LINE 1: alter materialized view rl_mv2 set source_rate_limit = 1000;
                                           ^

statement ok
alter materialized view rl_mv2 set backfill_rate_limit = 2000;

sleep 3s

query ?
select * from rl_mv2;
----
2000

############## Cleanup

statement ok
drop source kafka_source cascade;

statement ok
drop table kafka_seed_data cascade;
Changes to impl CatalogController:

@@ -1317,7 +1317,6 @@ impl CatalogController {
             .map(|(id, mask, stream_node)| (id, mask, stream_node.to_protobuf()))
             .collect_vec();

-        // TODO: limit source backfill?
         fragments.retain_mut(|(_, fragment_type_mask, stream_node)| {
             let mut found = false;
             if *fragment_type_mask & PbFragmentTypeFlag::Source as i32 != 0 {
@@ -1384,7 +1383,7 @@ impl CatalogController {

     // edit the `rate_limit` of the `Chain` node in given `table_id`'s fragments
     // return the actor_ids to be applied
-    pub async fn update_mv_rate_limit_by_job_id(
+    pub async fn update_backfill_rate_limit_by_job_id(
         &self,
         job_id: ObjectId,
         rate_limit: Option<u32>,
@@ -1411,7 +1410,7 @@ impl CatalogController {
         fragments.retain_mut(|(_, fragment_type_mask, stream_node)| {
             let mut found = false;
             if (*fragment_type_mask & PbFragmentTypeFlag::StreamScan as i32 != 0)
-                || (*fragment_type_mask & PbFragmentTypeFlag::Source as i32 != 0)
+                || (*fragment_type_mask & PbFragmentTypeFlag::SourceScan as i32 != 0)

Review comment on this line: The previous code looks wrong: altering the backfill rate limit would also affect an MV on a non-shared source.
Reply: seem so.

             {
                 visit_stream_node(stream_node, |node| match node {
                     PbNodeBody::StreamCdcScan(node) => {
@@ -1422,11 +1421,9 @@ impl CatalogController {
                         node.rate_limit = rate_limit;
                         found = true;
                     }
-                    PbNodeBody::Source(node) => {
-                        if let Some(inner) = node.source_inner.as_mut() {
-                            inner.rate_limit = rate_limit;
-                            found = true;
-                        }
+                    PbNodeBody::SourceBackfill(node) => {
+                        node.rate_limit = rate_limit;
+                        found = true;
                     }
                     _ => {}
                 });
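
For readers unfamiliar with how these hunks fit together: the function keeps only the fragments whose type mask marks them as backfill-related (StreamScan or SourceScan), rewrites rate_limit on the matching plan nodes, and drops fragments where nothing matched. Below is a minimal, self-contained Rust sketch of that retain-and-rewrite pattern. The types (FragmentTypeFlag, NodeBody, Fragment) and the function update_backfill_rate_limit are simplified stand-ins, not the actual prost-generated RisingWave definitions.

enum FragmentTypeFlag {
    StreamScan = 1,
    SourceScan = 2,
    Source = 4,
}

enum NodeBody {
    StreamScan { rate_limit: Option<u32> },
    SourceBackfill { rate_limit: Option<u32> },
    Other,
}

struct Fragment {
    fragment_type_mask: i32,
    nodes: Vec<NodeBody>,
}

fn update_backfill_rate_limit(fragments: &mut Vec<Fragment>, rate_limit: Option<u32>) {
    fragments.retain_mut(|fragment| {
        let mask = fragment.fragment_type_mask;
        // Only fragments flagged as StreamScan or SourceScan are backfill-related;
        // a plain Source fragment is left alone (the point raised in the review above).
        if mask & FragmentTypeFlag::StreamScan as i32 == 0
            && mask & FragmentTypeFlag::SourceScan as i32 == 0
        {
            return false;
        }
        let mut found = false;
        for node in &mut fragment.nodes {
            match node {
                // Set the new rate limit on every backfill node in the fragment.
                NodeBody::StreamScan { rate_limit: rl }
                | NodeBody::SourceBackfill { rate_limit: rl } => {
                    *rl = rate_limit;
                    found = true;
                }
                NodeBody::Other => {}
            }
        }
        // Keep only fragments where a backfill node was actually updated.
        found
    });
}

fn main() {
    let mut fragments = vec![
        Fragment {
            fragment_type_mask: FragmentTypeFlag::SourceScan as i32,
            nodes: vec![NodeBody::SourceBackfill { rate_limit: None }],
        },
        Fragment {
            fragment_type_mask: FragmentTypeFlag::StreamScan as i32,
            nodes: vec![NodeBody::StreamScan { rate_limit: None }],
        },
        Fragment {
            fragment_type_mask: FragmentTypeFlag::Source as i32,
            nodes: vec![NodeBody::Other],
        },
    ];
    update_backfill_rate_limit(&mut fragments, Some(2000));
    assert_eq!(fragments.len(), 2);
    println!("fragments retained: {}", fragments.len());
}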
Review thread:

Comment: shall we directly change the field name to distinguish them?
Reply: It will be a breaking change.
Reply: Why? It keeps the same tag number. https://stackoverflow.com/questions/45431685/protocol-buffer-does-changing-field-name-break-the-message
Reply: IIRC the SQL meta backend stores it with JSON encoding.
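
To make the trade-off in this thread concrete: protobuf's binary wire format identifies fields by tag number, so renaming a field keeps previously written payloads readable, whereas a JSON encoding keys values by field name, so data persisted under the old name is silently dropped after a rename. The Rust sketch below illustrates this using serde_json as a stand-in for a JSON-encoded meta store; the struct and field names are invented for the example and are not RisingWave's (requires serde with the "derive" feature and serde_json).

use serde::{Deserialize, Serialize};

// State as persisted by an older version.
#[derive(Serialize)]
struct StreamSourceV1 {
    rate_limit: Option<u32>,
}

// The same record after a hypothetical field rename in a newer version.
#[derive(Deserialize, Debug)]
struct StreamSourceV2 {
    source_rate_limit: Option<u32>,
}

fn main() {
    let old = StreamSourceV1 { rate_limit: Some(1000) };
    // JSON keys carry the field *name*, so this persists {"rate_limit":1000}.
    let persisted = serde_json::to_string(&old).unwrap();

    // After the rename, the old key is no longer recognized: the unknown field is
    // ignored and the setting silently falls back to None. A tag-based binary
    // encoding would still match the field by its unchanged tag number.
    let reloaded: StreamSourceV2 = serde_json::from_str(&persisted).unwrap();
    assert_eq!(reloaded.source_rate_limit, None);
    println!("persisted = {persisted}; reloaded = {:?}", reloaded);
}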