diff --git a/src/it/scala/com/databricks/spark/redshift/RedshiftReadSuite.scala b/src/it/scala/com/databricks/spark/redshift/RedshiftReadSuite.scala
index ec2779ab..d3d489ee 100644
--- a/src/it/scala/com/databricks/spark/redshift/RedshiftReadSuite.scala
+++ b/src/it/scala/com/databricks/spark/redshift/RedshiftReadSuite.scala
@@ -245,4 +245,18 @@ class RedshiftReadSuite extends IntegrationSuiteBase {
       .load()
     assert(df.schema.fields(0).dataType === LongType)
   }
+
+  test("read result of count() query (a BigInt) returned as LongType (regression for #310)") {
+    val df = read
+      .option("query", s"select count(testbool) as c from $test_table")
+      .load()
+    assert(df.schema.fields(0).dataType === LongType)
+  }
+
+  test("read result returning a BigInt becomes a LongType (regression for #311)") {
+    val df = read
+      .option("query", s"select testlong::BigInt as c from $test_table")
+      .load()
+    assert(df.schema.fields(0).dataType === LongType)
+  }
 }
diff --git a/src/main/scala/com/databricks/spark/redshift/RedshiftJDBCWrapper.scala b/src/main/scala/com/databricks/spark/redshift/RedshiftJDBCWrapper.scala
index dc72dccf..4253ac79 100644
--- a/src/main/scala/com/databricks/spark/redshift/RedshiftJDBCWrapper.scala
+++ b/src/main/scala/com/databricks/spark/redshift/RedshiftJDBCWrapper.scala
@@ -301,7 +301,7 @@ private[redshift] class JDBCWrapper {
     val answer = sqlType match {
       // scalastyle:off
       case java.sql.Types.ARRAY => null
-      case java.sql.Types.BIGINT => if (signed) { LongType } else { DecimalType(20,0) }
+      case java.sql.Types.BIGINT => LongType
      case java.sql.Types.BINARY => BinaryType
      case java.sql.Types.BIT => BooleanType // @see JdbcDialect for quirks
      case java.sql.Types.BLOB => BinaryType
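
The same behavior the new integration tests cover can also be checked through the public read path. A minimal sketch, assuming a live SparkSession named `spark` and placeholder connection settings (`jdbcUrl`, `s3TempDir`, and `some_table` are illustrative names, not taken from the diff; credential options are omitted):

```scala
import org.apache.spark.sql.types.LongType

// Hypothetical end-to-end check: with the BIGINT -> LongType mapping above,
// the result of a count() query should appear as LongType in the DataFrame schema.
val df = spark.read
  .format("com.databricks.spark.redshift")
  .option("url", jdbcUrl)        // placeholder Redshift JDBC URL
  .option("tempdir", s3TempDir)  // placeholder S3 staging directory
  .option("query", "select count(*) as c from some_table")
  .load()

assert(df.schema("c").dataType == LongType)
```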