diff --git a/.github/workflows/build_and_test.yaml b/.github/workflows/build_and_test.yaml
index d950eea0..80baccd9 100644
--- a/.github/workflows/build_and_test.yaml
+++ b/.github/workflows/build_and_test.yaml
@@ -40,7 +40,7 @@ jobs:
         git clone --branch ${{ matrix.version }} --single-branch --depth 1 https://github.com/postgres/postgres.git
         pushd postgres
         git branch
-        ./configure --prefix=$PWD/inst/ --enable-cassert --enable-debug --with-openssl
+        ./configure --prefix=$PWD/inst/ --enable-cassert --enable-debug --with-openssl --with-icu --with-libxml
         make -j8 install
 
     - name: Build and test pg_quack extension
diff --git a/src/quack_duckdb_connection.cpp b/src/quack_duckdb_connection.cpp
index c5bfbcf7..ac9eb18a 100644
--- a/src/quack_duckdb_connection.cpp
+++ b/src/quack_duckdb_connection.cpp
@@ -1,5 +1,6 @@
 #include "duckdb.hpp"
 #include "duckdb/parser/parsed_data/create_table_function_info.hpp"
+#include "duckdb/main/extension_util.hpp"
 
 #include "quack/quack_duckdb_connection.hpp"
 #include "quack/quack_heap_scan.hpp"
@@ -26,6 +27,7 @@ quack_create_duckdb_connection(List *tables, List *neededColumns, const char *qu
         duckdb::make_uniq_base(
             tables, neededColumns, query));
+
     auto connection = duckdb::make_uniq<duckdb::Connection>(*db);
 
     // Add the postgres_scan inserted by the replacement scan
@@ -35,6 +37,8 @@ quack_create_duckdb_connection(List *tables, List *neededColumns, const char *qu
     auto &catalog = duckdb::Catalog::GetSystemCatalog(context);
     context.transaction.BeginTransaction();
+    auto &instance = *db->instance;
+    duckdb::ExtensionUtil::RegisterType(instance, "UnsupportedPostgresType", duckdb::LogicalTypeId::VARCHAR);
     catalog.CreateTableFunction(context, &heap_scan_info);
     context.transaction.Commit();
diff --git a/src/quack_planner.cpp b/src/quack_planner.cpp
index 2df2b7c2..6b98bcac 100644
--- a/src/quack_planner.cpp
+++ b/src/quack_planner.cpp
@@ -40,24 +40,25 @@ quack_create_plan(Query *parse, const char *query) {
         auto &column = preparedResultTypes[i];
         Oid postgresColumnOid = quack::GetPostgresDuckDBType(column);
 
-        if (OidIsValid(postgresColumnOid)) {
-            HeapTuple tp;
-            Form_pg_type typtup;
+        if (!OidIsValid(postgresColumnOid)) {
+            elog(ERROR, "Could not convert DuckDB to Postgres type, likely because the postgres->duckdb conversion was not supported");
+        }
+        HeapTuple tp;
+        Form_pg_type typtup;
 
-            tp = SearchSysCache1(TYPEOID, ObjectIdGetDatum(postgresColumnOid));
-            if (!HeapTupleIsValid(tp))
-                elog(ERROR, "cache lookup failed for type %u", postgresColumnOid);
+        tp = SearchSysCache1(TYPEOID, ObjectIdGetDatum(postgresColumnOid));
+        if (!HeapTupleIsValid(tp))
+            elog(ERROR, "cache lookup failed for type %u", postgresColumnOid);
 
-            typtup = (Form_pg_type)GETSTRUCT(tp);
+        typtup = (Form_pg_type)GETSTRUCT(tp);
 
-            Var *var = makeVar(INDEX_VAR, i + 1, postgresColumnOid, typtup->typtypmod, typtup->typcollation, 0);
+        Var *var = makeVar(INDEX_VAR, i + 1, postgresColumnOid, typtup->typtypmod, typtup->typcollation, 0);
 
-            quackNode->custom_scan_tlist =
-                lappend(quackNode->custom_scan_tlist,
-                        makeTargetEntry((Expr *)var, i + 1, (char *)preparedQuery->GetNames()[i].c_str(), false));
+        quackNode->custom_scan_tlist =
+            lappend(quackNode->custom_scan_tlist,
+                    makeTargetEntry((Expr *)var, i + 1, (char *)preparedQuery->GetNames()[i].c_str(), false));
 
-            ReleaseSysCache(tp);
-        }
+        ReleaseSysCache(tp);
     }
 
     quackNode->custom_private = list_make2(duckdbConnection.release(), preparedQuery.release());
diff --git a/src/quack_types.cpp b/src/quack_types.cpp
index 6dd57952..2b0523cb 100644
--- a/src/quack_types.cpp
+++ b/src/quack_types.cpp
@@ -509,7 +509,7 @@ ConvertPostgresToDuckColumnType(Form_pg_attribute &attribute) {
         return duck_type;
     }
     default:
-        elog(ERROR, "(DuckDB/ConvertPostgresToDuckColumnType) Unsupported quack type: %d", type);
+        return duckdb::LogicalType::USER("UnsupportedPostgresType");
     }
 }
 
@@ -568,8 +568,7 @@ GetPostgresDuckDBType(duckdb::LogicalType type) {
         }
     }
     default: {
-        elog(ERROR, "(DuckDB/GetPostgresDuckDBType) Unsupported quack type: %s", type.ToString().c_str());
-        break;
+        return InvalidOid;
     }
     }
 }
diff --git a/test/regression/expected/projection_pushdown_unsupported_type.out b/test/regression/expected/projection_pushdown_unsupported_type.out
new file mode 100644
index 00000000..2cbab542
--- /dev/null
+++ b/test/regression/expected/projection_pushdown_unsupported_type.out
@@ -0,0 +1,18 @@
+-- XML is not supported, pushdown should avoid problems
+CREATE TABLE my_table(a TEXT, b XML, c INTEGER);
+INSERT INTO my_table (a, b, c) SELECT * from (
+    VALUES
+    ('a', 'value'::XML, 42),
+    (NULL, NULL, NULL),
+    ('b', 'value'::XML, -128),
+    ('c', 'value'::XML, 2000000)
+) t(a);
+SELECT a, c FROM my_table;
+ a |    c    
+---+---------
+ a |      42
+   |        
+ b |    -128
+ c | 2000000
+(4 rows)
+
diff --git a/test/regression/schedule b/test/regression/schedule
index e5d8b746..c3d3a55d 100644
--- a/test/regression/schedule
+++ b/test/regression/schedule
@@ -4,3 +4,4 @@ test: execution_error
 test: type_support
 test: array_type_support
 test: views
+test: projection_pushdown_unsupported_type
diff --git a/test/regression/sql/projection_pushdown_unsupported_type.sql b/test/regression/sql/projection_pushdown_unsupported_type.sql
new file mode 100644
index 00000000..1de70921
--- /dev/null
+++ b/test/regression/sql/projection_pushdown_unsupported_type.sql
@@ -0,0 +1,10 @@
+-- XML is not supported, pushdown should avoid problems
+CREATE TABLE my_table(a TEXT, b XML, c INTEGER);
+INSERT INTO my_table (a, b, c) SELECT * from (
+    VALUES
+    ('a', 'value'::XML, 42),
+    (NULL, NULL, NULL),
+    ('b', 'value'::XML, -128),
+    ('c', 'value'::XML, 2000000)
+) t(a);
+SELECT a, c FROM my_table;
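
Note on the mechanism (not part of the patch itself): ConvertPostgresToDuckColumnType now maps any Postgres type it cannot translate to a DuckDB USER type named "UnsupportedPostgresType", which the connection setup registers as an alias for VARCHAR; GetPostgresDuckDBType maps that placeholder back to InvalidOid, so quack_create_plan only raises an error when such a column actually survives into the target list. The standalone C++ sketch below illustrates just the registration half against a plain DuckDB instance; the main() scaffold and the CAST query are illustrative assumptions added here, not code from this diff, and they rely on extension-registered type names being resolvable from SQL, which is how such registrations normally behave.

#include "duckdb.hpp"
#include "duckdb/main/extension_util.hpp"

int main() {
    duckdb::DuckDB db(nullptr);

    // Mirrors the call added in quack_create_duckdb_connection(): register a named
    // placeholder type that resolves to VARCHAR, so binding can succeed even when
    // the underlying Postgres type has no real DuckDB equivalent.
    duckdb::ExtensionUtil::RegisterType(*db.instance, "UnsupportedPostgresType",
                                        duckdb::LogicalTypeId::VARCHAR);

    duckdb::Connection con(db);

    // The placeholder only matters if a query actually touches such a column;
    // referencing the type by name works because of the registration above.
    auto result = con.Query("SELECT CAST(NULL AS UnsupportedPostgresType) AS b");
    result->Print();
    return 0;
}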