Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 78 additions & 0 deletions native/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions native/core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ tempfile = "3.24.0"
itertools = "0.14.0"
paste = "1.0.14"
datafusion = { workspace = true, features = ["parquet_encryption", "sql"] }
datafusion-datasource = { workspace = true }
datafusion-spark = { workspace = true }
once_cell = "1.18.0"
regex = { workspace = true }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,6 @@ case class CometScanRule(session: SparkSession) extends Rule[SparkPlan] with Com
case SCAN_AUTO =>
// TODO add support for native_datafusion in the future
nativeIcebergCompatScan(session, scanExec, r, hadoopConf)
.orElse(nativeCometScan(session, scanExec, r, hadoopConf))
.getOrElse(scanExec)
case SCAN_NATIVE_DATAFUSION =>
nativeDataFusionScan(session, scanExec, r, hadoopConf).getOrElse(scanExec)
Expand Down
86 changes: 51 additions & 35 deletions spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -187,53 +187,69 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}

test("basic data type support") {
// this test requires native_comet scan due to unsigned u8/u16 issue
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we divert the scan type to native_comet only for u8/u16? So that the rest of the types can be tested for iceberg_compat?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sure. I will make that change. Thanks for the review!

// Pin the scan implementation to native_comet; per the comment above, the default
// scan cannot yet read the unsigned u8/u16 columns this file contains.
withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET) {
// Exercise both dictionary-encoded and plain Parquet encodings.
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test.parquet")
// Write 10000 rows covering every supported primitive type.
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
withParquetTable(path.toString, "tbl") {
// Verify both the result and that the query runs on the Comet native operator.
checkSparkAnswerAndOperator("select * FROM tbl WHERE _2 > 100")
}
}
}
}
}

test("basic data type support - excluding u8/u16") {
// variant that skips _9 (UINT_8) and _10 (UINT_16) for default scan impl
// Exercise both dictionary-encoded and plain Parquet encodings.
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test.parquet")
// Write 10000 rows covering every supported primitive type.
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
// Keep incompatible-scan fallback disabled so the default scan must handle
// every selected column natively.
withSQLConf(CometConf.COMET_SCAN_ALLOW_INCOMPATIBLE.key -> "false") {
withParquetTable(path.toString, "tbl") {
// select all columns except _9 (UINT_8) and _10 (UINT_16)
checkSparkAnswerAndOperator(
"""select _1, _2, _3, _4, _5, _6, _7, _8, _11, _12, _13, _14, _15, _16, _17,
|_18, _19, _20, _21, _id FROM tbl WHERE _2 > 100""".stripMargin)
}
}
}
}
// NOTE(review): the test's own closing brace is not visible here — it appears to
// have been truncated by the diff rendering; confirm against the actual source file.

test("uint data type support") {
// this test requires native_comet scan due to unsigned u8/u16 issue
// Pin the scan implementation to native_comet so the unsigned u8/u16 columns
// (_9/_10, per the sibling tests) can be read.
withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET) {
// Exercise both dictionary-encoded and plain Parquet encodings.
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "testuint.parquet")
// Generate all primitive types over the full signed-byte value range,
// which covers the interesting boundary values for the unsigned columns.
makeParquetFileAllPrimitiveTypes(
path,
dictionaryEnabled = dictionaryEnabled,
Byte.MinValue,
Byte.MaxValue)
withParquetTable(path.toString, "tbl") {
// NOTE(review): this span is a rendered diff — the next statement and the
// _9 query below look like the old and new bodies interleaved; confirm
// which survives in the actual source file.
checkSparkAnswerAndOperator("select * FROM tbl WHERE _2 > 100")
// _9 is a UINT_8 column (see the sibling test's comment); order by _11 for
// a deterministic comparison against Spark.
val qry = "select _9 from tbl order by _11"
checkSparkAnswerAndOperator(qry)
}
}
}
}
}

test("uint data type support") {
test("uint data type support - excluding u8/u16") {
// variant that tests UINT_32 and UINT_64, skipping _9 (UINT_8) and _10 (UINT_16)
Seq(true, false).foreach { dictionaryEnabled =>
// TODO: Once the question of what to get back from uint_8, uint_16 types is resolved,
// we can also update this test to check for COMET_SCAN_ALLOW_INCOMPATIBLE=true
Seq(false).foreach { allowIncompatible =>
{
withSQLConf(CometConf.COMET_SCAN_ALLOW_INCOMPATIBLE.key -> allowIncompatible.toString) {
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "testuint.parquet")
makeParquetFileAllPrimitiveTypes(
path,
dictionaryEnabled = dictionaryEnabled,
Byte.MinValue,
Byte.MaxValue)
withParquetTable(path.toString, "tbl") {
val qry = "select _9 from tbl order by _11"
if (usingDataSourceExec(conf)) {
if (!allowIncompatible) {
checkSparkAnswerAndOperator(qry)
} else {
// need to convert the values to unsigned values
val expected = (Byte.MinValue to Byte.MaxValue)
.map(v => {
if (v < 0) Byte.MaxValue.toShort - v else v
})
.toDF("a")
checkAnswer(sql(qry), expected)
}
} else {
checkSparkAnswerAndOperator(qry)
}
}
}
}
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "testuint.parquet")
makeParquetFileAllPrimitiveTypes(
path,
dictionaryEnabled = dictionaryEnabled,
Byte.MinValue,
Byte.MaxValue)
withParquetTable(path.toString, "tbl") {
// test UINT_32 (_11) and UINT_64 (_12) only
checkSparkAnswerAndOperator("select _11, _12 from tbl order by _11")
}
}
}
Expand Down
Loading