Skip to content

Commit

Permalink
Fix show column issue (#1961)
Browse files Browse the repository at this point in the history
### What problem does this PR solve?

`SHOW TABLE ... COLUMNS` returned columns named `column_name`, `column_type`,
and `constraint`, which was inconsistent with the rest of the API. This PR
renames the output columns to `name` and `type`, removes the `constraint`
column from the result (left commented out for a possible future re-add), and
updates the HTTP/Python SDK clients, tests, and SLT expectations to match. It
also fixes `PhysicalTableScan` double-incrementing `output_column_id` by
moving the increment out of the per-case statements.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Signed-off-by: Jin Hai <[email protected]>
  • Loading branch information
JinHai-CN authored Oct 2, 2024
1 parent a6c444e commit c7cee7b
Show file tree
Hide file tree
Showing 18 changed files with 62 additions and 73 deletions.
9 changes: 4 additions & 5 deletions python/infinity_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,12 +275,11 @@ def show_columns(self,table_name):
h = self.set_up_header(["accept"])
r = self.request(url, "get", h)
self.raise_exception(r)
res = {"column_name":[], "column_type":[], "constraint":[], "default":[]}
res = {"name":[], "type":[], "default":[]}
print(r.json())
for col in r.json()["columns"]:
res["column_name"].append(col["column_name"])
res["column_type"].append(col["column_type"])
res["constraint"].append(col["constraint"])
res["name"].append(col["name"])
res["type"].append(col["type"])
res["default"].append(col["default"])
res = pl.from_pandas(pd.DataFrame(res))
return res
Expand All @@ -292,7 +291,7 @@ def show_columns_type(self,table_name):
self.raise_exception(r)
res = {}
for col in r.json()["columns"]:
res[col["column_name"]] = col["column_type"]
res[col["name"]] = col["type"]
return res

# not implemented, just to pass test
Expand Down
6 changes: 3 additions & 3 deletions python/test_http_api/test_show.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,9 @@ def test_http_show_columns(self):
self.show_table_columns(db_name, table_name, {
"error_code": 0,
"columns": [
{'column_name': 'num', 'column_type': 'Integer', 'constraint': '', 'default': 'Null'},
{'column_name': 'body', 'column_type': 'Varchar', 'constraint': '', 'default': 'Null'},
{'column_name': 'vec', 'column_type': 'Embedding(float,5)', 'constraint': '', 'default': 'Null'}
{'name': 'num', 'type': 'Integer', 'default': 'Null'},
{'name': 'body', 'type': 'Varchar', 'default': 'Null'},
{'name': 'vec', 'type': 'Embedding(float,5)', 'default': 'Null'}
]
})

Expand Down
2 changes: 1 addition & 1 deletion python/test_pysdk/test_show.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def _test_show_columns(self, suffix):
res = db_obj.show_columns("test_show_columns"+suffix)
print(res)
# check the polars dataframe
assert res.columns == ["column_name", "column_type", "constraint", "default"]
assert res.columns == ["name", "type", "default"]

res = db_obj.drop_table("test_show_columns"+suffix, ConflictType.Error)
assert res.error_code == ErrorCode.OK
Expand Down
4 changes: 2 additions & 2 deletions python/test_pysdk/test_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -377,8 +377,8 @@ def test_create_table_with_upper_column_name(self, suffix):
res = db_obj.show_columns(table_name)
print("\n")
print(res)
assert res["column_name"][0] == "c1"
assert res["column_name"][1] == "c2"
assert res["name"][0] == "c1"
assert res["name"][1] == "c2"
db_obj.drop_table(table_name, ConflictType.Ignore)

@pytest.mark.usefixtures("skip_if_http")
Expand Down
9 changes: 5 additions & 4 deletions src/executor/operator/physical_scan/physical_table_scan.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -170,24 +170,25 @@ void PhysicalTableScan::ExecuteInternal(QueryContext *query_context, TableScanOp
switch(column_id) {
case COLUMN_IDENTIFIER_ROW_ID: {
u32 segment_offset = block_id * DEFAULT_BLOCK_CAPACITY + read_offset;
output_ptr->column_vectors[output_column_id++]->AppendWith(RowID(segment_id, segment_offset), write_size);
output_ptr->column_vectors[output_column_id]->AppendWith(RowID(segment_id, segment_offset), write_size);
break;
}
case COLUMN_IDENTIFIER_CREATE: {
ColumnVector create_ts_vec = current_block_entry->GetCreateTSVector(buffer_mgr, read_offset, write_size);
output_ptr->column_vectors[output_column_id++]->AppendWith(create_ts_vec);
output_ptr->column_vectors[output_column_id]->AppendWith(create_ts_vec);
break;
}
case COLUMN_IDENTIFIER_DELETE: {
ColumnVector delete_ts_vec = current_block_entry->GetDeleteTSVector(buffer_mgr, read_offset, write_size);
output_ptr->column_vectors[output_column_id++]->AppendWith(delete_ts_vec);
output_ptr->column_vectors[output_column_id]->AppendWith(delete_ts_vec);
break;
}
default: {
ColumnVector column_vector = current_block_entry->GetColumnBlockEntry(column_id)->GetConstColumnVector(buffer_mgr);
output_ptr->column_vectors[output_column_id++]->AppendWith(column_vector, read_offset, write_size);
output_ptr->column_vectors[output_column_id]->AppendWith(column_vector, read_offset, write_size);
}
}
++ output_column_id;
}

// write_size = already read size = already write size
Expand Down
44 changes: 22 additions & 22 deletions src/executor/operator/physical_show.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -172,15 +172,15 @@ void PhysicalShow::Init() {
output_names_->reserve(3);
output_types_->reserve(3);

output_names_->emplace_back("column_name");
output_names_->emplace_back("column_type");
output_names_->emplace_back("constraint");
output_names_->emplace_back("name");
output_names_->emplace_back("type");
output_names_->emplace_back("default");
// output_names_->emplace_back("constraint");

output_types_->emplace_back(varchar_type);
output_types_->emplace_back(varchar_type);
output_types_->emplace_back(varchar_type);
output_types_->emplace_back(varchar_type);
// output_types_->emplace_back(varchar_type);
break;
}
case ShowType::kShowIndexes: {
Expand Down Expand Up @@ -1788,10 +1788,10 @@ void PhysicalShow::ExecuteShowColumns(QueryContext *query_context, ShowOperatorS
auto varchar_type = MakeShared<DataType>(LogicalType::kVarchar);

Vector<SharedPtr<ColumnDef>> column_defs = {
MakeShared<ColumnDef>(0, varchar_type, "column_name", std::set<ConstraintType>()),
MakeShared<ColumnDef>(1, varchar_type, "column_type", std::set<ConstraintType>()),
MakeShared<ColumnDef>(2, varchar_type, "constraint", std::set<ConstraintType>()),
MakeShared<ColumnDef>(3, varchar_type, "default", std::set<ConstraintType>()),
MakeShared<ColumnDef>(0, varchar_type, "name", std::set<ConstraintType>()),
MakeShared<ColumnDef>(1, varchar_type, "type", std::set<ConstraintType>()),
MakeShared<ColumnDef>(2, varchar_type, "default", std::set<ConstraintType>()),
// MakeShared<ColumnDef>(3, varchar_type, "constraint", std::set<ConstraintType>())
};

SharedPtr<TableDef> table_def = TableDef::Make(MakeShared<String>("default_db"), MakeShared<String>("Views"), column_defs);
Expand All @@ -1802,7 +1802,7 @@ void PhysicalShow::ExecuteShowColumns(QueryContext *query_context, ShowOperatorS
varchar_type,
varchar_type,
varchar_type,
varchar_type,
// varchar_type,
};
SizeT row_count = 0;
output_block_ptr->Init(column_types);
Expand Down Expand Up @@ -1833,19 +1833,6 @@ void PhysicalShow::ExecuteShowColumns(QueryContext *query_context, ShowOperatorS
value_expr.AppendToChunk(output_block_ptr->column_vectors[output_column_idx]);
}

++output_column_idx;
{
// Append column constraint to the third column
String column_constraint;
for (auto &constraint : column->constraints_) {
column_constraint += " " + ConstrainTypeToString(constraint);
}

Value value = Value::MakeVarchar(column_constraint);
ValueExpression value_expr(value);
value_expr.AppendToChunk(output_block_ptr->column_vectors[output_column_idx]);
}

++output_column_idx;
{
// Append column default value to the third column
Expand All @@ -1855,6 +1842,19 @@ void PhysicalShow::ExecuteShowColumns(QueryContext *query_context, ShowOperatorS
value_expr.AppendToChunk(output_block_ptr->column_vectors[output_column_idx]);
}

// ++output_column_idx;
// {
// // Append column constraint to the third column
// String column_constraint;
// for (auto &constraint : column->constraints_) {
// column_constraint += " " + ConstrainTypeToString(constraint);
// }
//
// Value value = Value::MakeVarchar(column_constraint);
// ValueExpression value_expr(value);
// value_expr.AppendToChunk(output_block_ptr->column_vectors[output_column_idx]);
// }

if (++row_count == output_block_ptr->capacity()) {
output_block_ptr->Finalize();
show_operator_state->output_.emplace_back(std::move(output_block_ptr));
Expand Down
1 change: 0 additions & 1 deletion src/network/infinity_thrift_service.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ import infinity_thrift_types;
import infinity;
import stl;
import infinity_context;
import local_file_system;
import file_system_type;
import file_system;
import file_writer;
Expand Down
5 changes: 1 addition & 4 deletions src/storage/buffer/file_worker/data_file_worker.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ module data_file_worker;

import stl;
import infinity_exception;
import local_file_system;
import third_party;
import status;
import logger;
Expand Down Expand Up @@ -66,7 +65,6 @@ void DataFileWorker::FreeInMemory() {

// FIXME: to_spill
bool DataFileWorker::WriteToFileImpl(bool to_spill, bool &prepare_success, const FileWorkerSaveCtx &ctx) {
LocalFileSystem fs;
// File structure:
// - header: magic number
// - header: buffer size
Expand Down Expand Up @@ -99,7 +97,6 @@ bool DataFileWorker::WriteToFileImpl(bool to_spill, bool &prepare_success, const
}

void DataFileWorker::ReadFromFileImpl(SizeT file_size) {
LocalFileSystem fs;

if (file_size < sizeof(u64) * 3) {
Status status = Status::DataIOError(fmt::format("Incorrect file length {}.", file_size));
Expand Down Expand Up @@ -150,4 +147,4 @@ void DataFileWorker::ReadFromFileImpl(SizeT file_size) {
}
}

} // namespace infinity
} // namespace infinity
2 changes: 1 addition & 1 deletion src/storage/column_vector/column_vector.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1896,7 +1896,7 @@ void ColumnVector::AppendWith(const ColumnVector &other, SizeT from, SizeT count

if (*this->data_type_ != *other.data_type_) {
String error_message =
fmt::format("Attempt to append column vector{} to column vector{}", other.data_type_->ToString(), data_type_->ToString());
fmt::format("Attempt to append column vector {} to column vector {}", other.data_type_->ToString(), data_type_->ToString());
UnrecoverableError(error_message);
}

Expand Down
3 changes: 1 addition & 2 deletions src/storage/fast_rough_filter/fast_rough_filter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import probabilistic_data_filter;
import min_max_data_filter;
import logger;
import third_party;
import local_file_system;
import infinity_exception;
import filter_expression_push_down_helper;

Expand Down Expand Up @@ -139,4 +138,4 @@ bool FastRoughFilter::LoadFromJsonFile(const nlohmann::json &entry_json) {
return load_success;
}

} // namespace infinity
} // namespace infinity
3 changes: 1 addition & 2 deletions src/storage/fast_rough_filter/fast_rough_filter.cppm
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import probabilistic_data_filter;
import min_max_data_filter;
import logger;
import third_party;
import local_file_system;
import infinity_exception;
import filter_expression_push_down_helper;

Expand Down Expand Up @@ -142,4 +141,4 @@ public:
virtual bool EvaluateInner(TxnTimeStamp query_ts, const FastRoughFilter &filter) const = 0;
};

} // namespace infinity
} // namespace infinity
3 changes: 1 addition & 2 deletions src/storage/fast_rough_filter/probabilistic_data_filter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import logical_type;
import binary_fuse_filter;
import file_system;
import file_system_type;
import local_file_system;
import infinity_exception;

namespace infinity {
Expand Down Expand Up @@ -203,4 +202,4 @@ u64 ConvertValueToU64(const Value &value) {
}
}

} // namespace infinity
} // namespace infinity
1 change: 0 additions & 1 deletion src/storage/knn_index/knn_ivf/ivf_index_data.cppm
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ import local_file_handle;
namespace infinity {

class IndexBase;
class FileHandler;
struct SegmentEntry;
class BufferManager;

Expand Down
1 change: 0 additions & 1 deletion src/storage/knn_index/knn_ivf/ivf_index_storage.cppm
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ import knn_expr;
namespace infinity {

class LocalFileHandle;
class FileHandler;

template <EmbeddingDataType t>
struct EmbeddingDataTypeToCppType;
Expand Down
1 change: 0 additions & 1 deletion src/storage/meta/db_meta.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ import status;
import catalog_delta_entry;
import infinity_exception;
import base_entry;
import local_file_system;

namespace infinity {

Expand Down
1 change: 0 additions & 1 deletion src/storage/meta/table_meta.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ import txn_state;
import txn_manager;
import buffer_manager;
import catalog_delta_entry;
import local_file_system;
import third_party;
import status;
import infinity_exception;
Expand Down
28 changes: 14 additions & 14 deletions test/sql/ddl/type/test_sparse.slt
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(FLOAT, 30000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int16,30000) (empty) Null
col1 Integer Null
col2 Sparse(float,int16,30000) Null

################

Expand All @@ -21,8 +21,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(DOUBLE, 30000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(double,int16,30000) (empty) Null
col1 Integer Null
col2 Sparse(double,int16,30000) Null

################

Expand All @@ -35,8 +35,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(FLOAT, 30));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int8,30) (empty) Null
col1 Integer Null
col2 Sparse(float,int8,30) Null

################

Expand All @@ -49,8 +49,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(FLOAT, 100000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int32,100000) (empty) Null
col1 Integer Null
col2 Sparse(float,int32,100000) Null

################

Expand All @@ -63,8 +63,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(FLOAT, 10000000000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int64,10000000000) (empty) Null
col1 Integer Null
col2 Sparse(float,int64,10000000000) Null

################

Expand All @@ -77,8 +77,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(INT, 30000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(int32,int16,30000) (empty) Null
col1 Integer Null
col2 Sparse(int32,int16,30000) Null

################

Expand All @@ -91,8 +91,8 @@ CREATE TABLE test_sparse (col1 INT, col2 SPARSE(BIT, 10000000000));
query I
SHOW TABLE test_sparse COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(bit,int64,10000000000) (empty) Null
col1 Integer Null
col2 Sparse(bit,int64,10000000000) Null

################

Expand Down
12 changes: 6 additions & 6 deletions test/sql/ddl/type/test_sparse_default.slt
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ CREATE TABLE test_sparse_default (col1 INT, col2 SPARSE(FLOAT, 30000) DEFAULT []
query I
SHOW TABLE test_sparse_default COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int16,30000) (empty) (empty)
col3 Sparse(bit,int16,30000) (empty) (empty)
col1 Integer Null
col2 Sparse(float,int16,30000) (empty)
col3 Sparse(bit,int16,30000) (empty)

statement ok
COPY test_sparse_default FROM '/var/infinity/test_data/sparse_default.csv' WITH ( DELIMITER ',', FORMAT CSV );
Expand All @@ -32,9 +32,9 @@ CREATE TABLE test_sparse_default (col1 INT, col2 SPARSE(FLOAT, 30000) DEFAULT [0
query I
SHOW TABLE test_sparse_default COLUMNS;
----
col1 Integer (empty) Null
col2 Sparse(float,int16,30000) (empty) [0.000000:0]
col3 Sparse(bit,int16,30000) (empty) [0]
col1 Integer Null
col2 Sparse(float,int16,30000) [0.000000:0]
col3 Sparse(bit,int16,30000) [0]

statement ok
COPY test_sparse_default FROM '/var/infinity/test_data/sparse_default.csv' WITH ( DELIMITER ',', FORMAT CSV );
Expand Down

0 comments on commit c7cee7b

Please sign in to comment.