diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala
index 37395a72d2f..c05cc0dbc60 100644
--- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala
+++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala
@@ -121,16 +121,18 @@ case class GpuDataSource(
       data: LogicalPlan,
       outputColumnNames: Seq[String]): BaseRelation = {
-    val outputColumns = DataWritingCommand.logicalPlanOutputWithNames(data, outputColumnNames)
-    if (outputColumns.map(_.dataType).exists(_.isInstanceOf[CalendarIntervalType])) {
-      throw QueryCompilationErrors.cannotSaveIntervalIntoExternalStorageError()
-    }
-
     val format = originalProvidingInstance()
     if (!format.isInstanceOf[FileFormat]) {
       throw new IllegalArgumentException(s"Original provider does not extend FileFormat: $format")
     }
+    val outputColumns = DataWritingCommand.logicalPlanOutputWithNames(data, outputColumnNames)
+    outputColumns.toStructType.foreach { field =>
+      if (field.dataType.isInstanceOf[CalendarIntervalType]) {
+        throw QueryCompilationErrors.dataTypeUnsupportedByDataSourceError(format.toString, field)
+      }
+    }
+
     val cmd = planForWritingFileFormat(format.asInstanceOf[FileFormat], mode, data)
     // Spark 3.4 doesn't need the child physical plan for metrics anymore, this is now
     // cleaned up, so we need to run the DataWritingCommand using SparkSession. This actually
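
A minimal standalone sketch of the validation order this diff introduces: the provider is resolved and checked against FileFormat first, so the per-field CalendarIntervalType check can name the concrete format in its error message (via dataTypeUnsupportedByDataSourceError) instead of the generic cannotSaveIntervalIntoExternalStorageError. All names below (Field, UnsupportedTypeException, validate) are illustrative stand-ins, not Spark or spark-rapids APIs.

// Self-contained sketch; mirrors the reordered check, not the real implementation.
object IntervalCheckSketch {
  sealed trait DataType
  case object IntType extends DataType
  case object CalendarIntervalType extends DataType

  final case class Field(name: String, dataType: DataType)

  // Stand-in for QueryCompilationErrors.dataTypeUnsupportedByDataSourceError:
  // the message can reference the format because the format is known by now.
  final class UnsupportedTypeException(format: String, field: Field)
    extends IllegalArgumentException(
      s"Data source $format does not support ${field.dataType} of column ${field.name}")

  def validate(format: String, outputColumns: Seq[Field]): Unit = {
    // Mirrors outputColumns.toStructType.foreach { field => ... } in the diff:
    // fail on the first output column whose type is CalendarIntervalType.
    outputColumns.foreach { field =>
      if (field.dataType == CalendarIntervalType) {
        throw new UnsupportedTypeException(format, field)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    validate("Parquet", Seq(Field("id", IntType))) // passes silently
    try {
      validate("Parquet", Seq(Field("span", CalendarIntervalType)))
    } catch {
      case e: UnsupportedTypeException => println(e.getMessage)
    }
  }
}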