Skip to content

Commit

Permalink
[fix](partial update) remove CHECK on illegal number of partial columns (apache#22319)
Browse files Browse the repository at this point in the history
  • Loading branch information
zhannngchen authored Jul 28, 2023
1 parent 5b14d9f commit 53d255f
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 58 deletions.
15 changes: 9 additions & 6 deletions be/src/olap/rowset/segment_v2/segment_writer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -320,12 +320,15 @@ void SegmentWriter::_serialize_block_to_row_column(vectorized::Block& block) {
// 3. set columns to data convertor and then write all columns
Status SegmentWriter::append_block_with_partial_content(const vectorized::Block* block,
size_t row_pos, size_t num_rows) {
CHECK(block->columns() > _tablet_schema->num_key_columns() &&
block->columns() < _tablet_schema->num_columns())
<< "block columns: " << block->columns()
<< ", num key columns: " << _tablet_schema->num_key_columns()
<< ", total schema columns: " << _tablet_schema->num_columns();
CHECK(_tablet_schema->keys_type() == UNIQUE_KEYS && _opts.enable_unique_key_merge_on_write);
if (block->columns() <= _tablet_schema->num_key_columns() ||
block->columns() >= _tablet_schema->num_columns()) {
return Status::InternalError(
fmt::format("illegal partial update block columns: {}, num key columns: {}, total "
"schema columns: {}",
block->columns(), _tablet_schema->num_key_columns(),
_tablet_schema->num_columns()));
}
DCHECK(_tablet_schema->keys_type() == UNIQUE_KEYS && _opts.enable_unique_key_merge_on_write);

// find missing column cids
std::vector<uint32_t> missing_cids = _tablet_schema->get_missing_cids();
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1, 1, 1
1, 2, 1, 1
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,15 @@
1 1 1 0 0 0 0 0 0

-- !sql6 --
1 0 0 0 0 0 0 0 0 0
1 2 1 0 0 0 0 1 0

-- !sql7 --
1 1 1.0 0 0 0 0 0 0 0
1 0 0 0 0 0 0 0 0 0

-- !sql8 --
1 1 1.0 0 0 0 0 0 0 0

-- !sql9 --
1

-- !sql10 --
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -212,32 +212,33 @@ suite("test_partial_update_schema_change", "p0") {
}
}
qt_sql5 " select * from ${tableName} order by c0 "

// test load data with delete column
// todo bug
// streamLoad {
// table "${tableName}"

// set 'column_separator', ','
// set 'partial_columns', 'true'
// set 'columns', 'c0, c1, c8'
// test load data with delete column, stream load will ignore the
// non-existing column
streamLoad {
table "${tableName}"

// file 'schema_change/load_without_delete_column.csv'
// time 10000 // limit inflight 10s
set 'column_separator', ','
set 'partial_columns', 'true'
set 'columns', 'c0, c1, c7, c8'

// check { result, exception, startTime, endTime ->
// if (exception != null) {
// throw exception
// }
// // check result, which is fail for loading delete column.
// log.info("Stream load result: ${result}".toString())
// def json = parseJson(result)
// assertEquals("fail", json.Status.toLowerCase())
// assertEquals(1, json.NumberTotalRows)
// assertEquals(1, json.NumberFilteredRows)
// assertEquals(0, json.NumberUnselectedRows)
// }
// }
file 'schema_change/load_without_delete_column.csv'
time 10000 // limit inflight 10s

check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
// check result, which is fail for loading delete column.
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(1, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
assertEquals(0, json.NumberUnselectedRows)
}
}
qt_sql6 " select * from ${tableName} order by c0 "

sql """ DROP TABLE IF EXISTS ${tableName} """

Expand Down Expand Up @@ -285,7 +286,7 @@ suite("test_partial_update_schema_change", "p0") {
assertEquals(0, json.NumberUnselectedRows)
}
}
qt_sql6 " select * from ${tableName} order by c0 "
qt_sql7 " select * from ${tableName} order by c0 "

// schema change
sql " ALTER table ${tableName} MODIFY COLUMN c2 double "
Expand Down Expand Up @@ -323,7 +324,7 @@ suite("test_partial_update_schema_change", "p0") {
assertEquals(0, json.NumberUnselectedRows)
}
}
qt_sql7 " select * from ${tableName} order by c0 "
qt_sql8 " select * from ${tableName} order by c0 "

sql """ DROP TABLE IF EXISTS ${tableName} """

Expand Down Expand Up @@ -363,7 +364,7 @@ suite("test_partial_update_schema_change", "p0") {
assertEquals(0, json.NumberUnselectedRows)
}
}
qt_sql8 " select * from ${tableName} order by c0 "
qt_sql9 " select * from ${tableName} order by c0 "

// schema change
sql " ALTER table ${tableName} ADD COLUMN c1 int key null "
Expand All @@ -390,32 +391,30 @@ suite("test_partial_update_schema_change", "p0") {
try_times--
}

// test load data with all key column
// todo cause core
// streamLoad {
// table "${tableName}"
// test load data with all key column, should fail because
// it don't have any value columns
streamLoad {
table "${tableName}"

// set 'column_separator', ','
// set 'partial_columns', 'true'
// set 'columns', 'c0, c1'
set 'column_separator', ','
set 'partial_columns', 'true'
set 'columns', 'c0, c1'

// file 'schema_change/load_with_key_column.csv'
// time 10000 // limit inflight 10s
file 'schema_change/load_with_key_column.csv'
time 10000 // limit inflight 10s

// check { result, exception, startTime, endTime ->
// if (exception != null) {
// throw exception
// }
// log.info("Stream load result: ${result}".toString())
// def json = parseJson(result)
// assertEquals("success", json.Status.toLowerCase())
// assertEquals(1, json.NumberTotalRows)
// assertEquals(0, json.NumberFilteredRows)
// assertEquals(0, json.NumberUnselectedRows)
// }
// }
// //check data
// qt_sql9 " select * from ${tableName} order by c0 "
check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("fail", json.Status.toLowerCase())
assertEquals(0, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
assertEquals(0, json.NumberUnselectedRows)
}
}

sql """ DROP TABLE IF EXISTS ${tableName} """

Expand Down

0 comments on commit 53d255f

Please sign in to comment.