Skip to content

Commit 35f38c7

Browse files
committed
enable_spark_tests_comet_native_writer_fix_spark_4
1 parent 385c407 commit 35f38c7

3 files changed

Lines changed: 38 additions & 38 deletions

File tree

dev/diffs/3.4.3.diff

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1902,7 +1902,7 @@ index f77b6336b81..b703603d26b 100644
19021902

19031903
- test("SPARK-33901: ctas should should not change table's schema") {
19041904
+ test("SPARK-33901: ctas should should not change table's schema",
1905-
+ IgnoreComet("comet native writer does not support empty dir / table creation yet")) {
1905+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3418")) {
19061906
withTable("t1", "t2") {
19071907
sql(s"CREATE TABLE t1(i CHAR(5), c VARCHAR(4)) USING $format")
19081908
sql(s"CREATE TABLE t2 USING $format AS SELECT * FROM t1")
@@ -1912,7 +1912,7 @@ index f77b6336b81..b703603d26b 100644
19121912

19131913
- test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR") {
19141914
+ test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR",
1915-
+ IgnoreComet("comet native writer does not support empty dir/table creation yet")) {
1915+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3419")) {
19161916
withTable("t1", "t2") {
19171917
sql(s"CREATE TABLE t1(col CHAR(5)) USING $format")
19181918
checkTableSchemaTypeStr("t1", Seq(Row("char(5)")))
@@ -1935,7 +1935,7 @@ index 343b59a311e..9d5789c1d91 100644
19351935

19361936
- test("empty file should be skipped while write to file") {
19371937
+ test("empty file should be skipped while write to file",
1938-
+ IgnoreComet("comet native writer does not create empty files / dir")) {
1938+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3417")) {
19391939
withTempPath { path =>
19401940
spark.range(100).repartition(10).where("id = 50").write.parquet(path.toString)
19411941
val partFiles = path.listFiles()
@@ -2179,7 +2179,7 @@ index 8670d95c65e..3fe49802309 100644
21792179
- test("Write Spark version into Parquet metadata") {
21802180
+// TODO : Comet native writer to add spark / comet version into parquet metadata
21812181
+ test("Write Spark version into Parquet metadata",
2182-
+ IgnoreComet("Comet doesn't support DELTA encoding yet")) {
2182+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3427")) {
21832183
withTempPath { dir =>
21842184
spark.range(1).repartition(1).write.parquet(dir.getAbsolutePath)
21852185
assert(getMetaData(dir)(SPARK_VERSION_METADATA_KEY) === SPARK_VERSION_SHORT)
@@ -2203,7 +2203,7 @@ index 8b386e8f689..28ced6209e0 100644
22032203
- test("parquet timestamp conversion") {
22042204
+ // TODO : Support legacy timestamps conversion /cast in comet native writer
22052205
+ test("parquet timestamp conversion",
2206-
+ IgnoreComet("timestamp96 conversion failed with the native writer")) {
2206+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3425")) {
22072207
// Make a table with one parquet file written by impala, and one parquet file written by spark.
22082208
// We should only adjust the timestamps in the impala file, and only if the conf is set
22092209
val impalaFile = "test-data/impala_timestamp.parq"
@@ -2739,7 +2739,7 @@ index 2207661478d..dc4e4b4240c 100644
27392739

27402740
- test("INSERT INTO TABLE - complex type but different names") {
27412741
+ test("INSERT INTO TABLE - complex type but different names",
2742-
+ IgnoreComet("comet native writer issue")) {
2742+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3426")) {
27432743
val tab1 = "tab1"
27442744
val tab2 = "tab2"
27452745
withTable(tab1, tab2) {
@@ -2749,7 +2749,7 @@ index 2207661478d..dc4e4b4240c 100644
27492749

27502750
- test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests") {
27512751
+ test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests",
2752-
+ IgnoreComet("comet native writer insert overwrite bug")) {
2752+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3421")) {
27532753
// When the USE_NULLS_FOR_MISSING_DEFAULT_COLUMN_VALUES configuration is enabled, and no
27542754
// explicit DEFAULT value is available when the INSERT INTO statement provides fewer
27552755
// values than expected, NULL values are appended in their place.
@@ -2759,7 +2759,7 @@ index 2207661478d..dc4e4b4240c 100644
27592759

27602760
- test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests") {
27612761
+ test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests",
2762-
+ IgnoreComet("bug with insert overwrite mode comet native writer")) {
2762+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3422")) {
27632763
// There is a complex expression in the default value.
27642764
val createTableBooleanCol = "create table t(i boolean) using parquet"
27652765
val createTableIntCol = "create table t(i int) using parquet"
@@ -2769,7 +2769,7 @@ index 2207661478d..dc4e4b4240c 100644
27692769

27702770
- test("SPARK-43071: INSERT INTO from queries whose final operators are not projections") {
27712771
+ test("SPARK-43071: INSERT INTO from queries whose final operators are not projections",
2772-
+ IgnoreComet("comet writer issues with insert overwrite")) {
2772+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3423")) {
27732773
def runTest(insert: String, expected: Seq[Row]): Unit = {
27742774
withTable("t1", "t2") {
27752775
sql("create table t1(i boolean, s bigint default 42) using parquet")
@@ -2779,7 +2779,7 @@ index 2207661478d..dc4e4b4240c 100644
27792779

27802780
- test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
27812781
+ test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source",
2782-
+ IgnoreComet("comet native writer does not support insert overwrite")) {
2782+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3420")) {
27832783
withTempPath { dir =>
27842784
val path = dir.toURI.getPath
27852785
sql(s"""create table tab1 ( a int) using parquet location '$path'""")
@@ -2963,7 +2963,7 @@ index 44c9fbadfac..5f98bb9be17 100644
29632963

29642964
- test("write path implements onTaskCommit API correctly") {
29652965
+ test("write path implements onTaskCommit API correctly",
2966-
+ IgnoreComet("Comet native writer partial support")) {
2966+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3428")) {
29672967
withSQLConf(
29682968
SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
29692969
classOf[MessageCapturingCommitProtocol].getCanonicalName) {
@@ -2973,7 +2973,7 @@ index 44c9fbadfac..5f98bb9be17 100644
29732973

29742974
- test("Insert overwrite table command should output correct schema: basic") {
29752975
+ test("Insert overwrite table command should output correct schema: basic",
2976-
+ IgnoreComet("Comet native writer does not support insert overwrite mode")) {
2976+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3424")) {
29772977
withTable("tbl", "tbl2") {
29782978
withView("view1") {
29792979
val df = spark.range(10).toDF("id")

dev/diffs/3.5.8.diff

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1906,7 +1906,7 @@ index 12d5870309f..51429bf4039 100644
19061906

19071907
- test("SPARK-33901: ctas should should not change table's schema") {
19081908
+ test("SPARK-33901: ctas should should not change table's schema",
1909-
+ IgnoreComet("comet native writer does not support empty dir / table creation yet")) {
1909+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3418")) {
19101910
withTable("t1", "t2") {
19111911
sql(s"CREATE TABLE t1(i CHAR(5), c VARCHAR(4)) USING $format")
19121912
sql(s"CREATE TABLE t2 USING $format AS SELECT * FROM t1")
@@ -1916,7 +1916,7 @@ index 12d5870309f..51429bf4039 100644
19161916

19171917
- test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR") {
19181918
+ test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR",
1919-
+ IgnoreComet("comet native writer does not support empty dir/table creation yet")) {
1919+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3419")) {
19201920
withTable("t1", "t2") {
19211921
sql(s"CREATE TABLE t1(col CHAR(5)) USING $format")
19221922
checkTableSchemaTypeStr("t1", Seq(Row("char(5)")))
@@ -1939,7 +1939,7 @@ index 343b59a311e..9d5789c1d91 100644
19391939

19401940
- test("empty file should be skipped while write to file") {
19411941
+ test("empty file should be skipped while write to file",
1942-
+ IgnoreComet("comet native writer does not create empty files / dir")) {
1942+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3417")) {
19431943
withTempPath { path =>
19441944
spark.range(100).repartition(10).where("id = 50").write.parquet(path.toString)
19451945
val partFiles = path.listFiles()
@@ -2177,7 +2177,7 @@ index 4f8a9e39716..5da031994ff 100644
21772177

21782178
- test("Write Spark version into Parquet metadata") {
21792179
+ test("Write Spark version into Parquet metadata",
2180-
+ IgnoreComet("comet does not write spark version in parquet metadata")) {
2180+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3427")) {
21812181
withTempPath { dir =>
21822182
spark.range(1).repartition(1).write.parquet(dir.getAbsolutePath)
21832183
assert(getMetaData(dir)(SPARK_VERSION_METADATA_KEY) === SPARK_VERSION_SHORT)
@@ -2200,7 +2200,7 @@ index 8b386e8f689..67a41628a3b 100644
22002200

22012201
- test("parquet timestamp conversion") {
22022202
+ test("parquet timestamp conversion",
2203-
+ IgnoreComet("timestamp96 conversion failed with the native writer")) {
2203+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3425")) {
22042204
// Make a table with one parquet file written by impala, and one parquet file written by spark.
22052205
// We should only adjust the timestamps in the impala file, and only if the conf is set
22062206
val impalaFile = "test-data/impala_timestamp.parq"
@@ -2734,7 +2734,7 @@ index 7b1a5a32037..f163a9733a5 100644
27342734

27352735
- test("INSERT INTO TABLE - complex type but different names") {
27362736
+ test("INSERT INTO TABLE - complex type but different names",
2737-
+ IgnoreComet("comet native writer issue")) {
2737+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3426")) {
27382738
val tab1 = "tab1"
27392739
val tab2 = "tab2"
27402740
withTable(tab1, tab2) {
@@ -2744,7 +2744,7 @@ index 7b1a5a32037..f163a9733a5 100644
27442744

27452745
- test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests") {
27462746
+ test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests",
2747-
+ IgnoreComet("comet native writer insert overwrite bug")) {
2747+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3421")) {
27482748
// When the INSERT INTO statement provides fewer values than expected, NULL values are appended
27492749
// in their place.
27502750
withTable("t") {
@@ -2754,7 +2754,7 @@ index 7b1a5a32037..f163a9733a5 100644
27542754

27552755
- test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests") {
27562756
+ test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests",
2757-
+ IgnoreComet("bug with insert overwrite mode comet native writer")) {
2757+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3422")) {
27582758
// There is a complex expression in the default value.
27592759
val createTableBooleanCol = "create table t(i boolean) using parquet"
27602760
val createTableIntCol = "create table t(i int) using parquet"
@@ -2764,7 +2764,7 @@ index 7b1a5a32037..f163a9733a5 100644
27642764

27652765
- test("SPARK-43071: INSERT INTO from queries whose final operators are not projections") {
27662766
+ test("SPARK-43071: INSERT INTO from queries whose final operators are not projections",
2767-
+ IgnoreComet("comet writer issues with insert overwrite")) {
2767+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3423")) {
27682768
def runTest(insert: String, expected: Seq[Row]): Unit = {
27692769
withTable("t1", "t2") {
27702770
sql("create table t1(i boolean, s bigint default 42) using parquet")
@@ -2774,7 +2774,7 @@ index 7b1a5a32037..f163a9733a5 100644
27742774

27752775
- test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
27762776
+ test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source",
2777-
+ IgnoreComet("comet native writer does not support insert overwrite")) {
2777+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3420")) {
27782778
withTempPath { dir =>
27792779
val path = dir.toURI.getPath
27802780
sql(s"""create table tab1 ( a int) using parquet location '$path'""")
@@ -2945,7 +2945,7 @@ index b40f9210a68..34aa6287eb5 100644
29452945

29462946
- test("write path implements onTaskCommit API correctly") {
29472947
+ test("write path implements onTaskCommit API correctly",
2948-
+ IgnoreComet("Comet native writer partial support")) {
2948+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3428")) {
29492949
withSQLConf(
29502950
SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
29512951
classOf[MessageCapturingCommitProtocol].getCanonicalName) {
@@ -2955,7 +2955,7 @@ index b40f9210a68..34aa6287eb5 100644
29552955

29562956
- test("Insert overwrite table command should output correct schema: basic") {
29572957
+ test("Insert overwrite table command should output correct schema: basic",
2958-
+ IgnoreComet("Comet native writer does not support insert overwrite mode")) {
2958+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3424")) {
29592959
withTable("tbl", "tbl2") {
29602960
withView("view1") {
29612961
val df = spark.range(10).toDF("id")

dev/diffs/4.0.1.diff

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1127,7 +1127,7 @@ index 818c7ec65b1..0d333807b70 100644
11271127
}
11281128

11291129
- test("SPARK-48817: test multi inserts") {
1130-
+ test("SPARK-48817: test multi inserts", IgnoreComet("comet parquet native writer bug")) {
1130+
+ test("SPARK-48817: test multi inserts", IgnoreComet("https://github.com/apache/datafusion-comet/issues/3430")) {
11311131
withTable("t1", "t2", "t3") {
11321132
createTable("t1", Seq("i"), Seq("int"))
11331133
createTable("t2", Seq("i"), Seq("int"))
@@ -1465,7 +1465,7 @@ index 77ed8b411e3..b82f70c0071 100644
14651465
}
14661466

14671467
- test("ctas with union") {
1468-
+ test("ctas with union", IgnoreComet("comet native writer bug when shuffle / AQE enabled")) {
1468+
+ test("ctas with union", IgnoreComet("https://github.com/apache/datafusion-comet/issues/3429")) {
14691469
withTable(testTable) {
14701470
sql(s"""
14711471
|CREATE TABLE $testTable USING $dataSource AS
@@ -2623,7 +2623,7 @@ index a5bb3058bed..de51b21f21a 100644
26232623
- test("SPARK-33901: ctas should should not change table's schema") {
26242624
+// TODO support CTAS in comet native parquet writer
26252625
+ test("SPARK-33901: ctas should should not change table's schema",
2626-
+ IgnoreComet("comet native writer does not support empty dir / table creation yet")) {
2626+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3418")) {
26272627
withTable("t1", "t2") {
26282628
sql(s"CREATE TABLE t1(i CHAR(5), c VARCHAR(4)) USING $format")
26292629
sql(s"CREATE TABLE t2 USING $format AS SELECT * FROM t1")
@@ -2633,7 +2633,7 @@ index a5bb3058bed..de51b21f21a 100644
26332633

26342634
- test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR") {
26352635
+ test("SPARK-37160: CREATE TABLE AS SELECT with CHAR_AS_VARCHAR",
2636-
+ IgnoreComet("comet native writer does not support empty dir/table creation yet")) {
2636+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3419")) {
26372637
withTable("t1", "t2") {
26382638
sql(s"CREATE TABLE t1(col CHAR(5)) USING $format")
26392639
checkTableSchemaTypeStr("t1", Seq(Row("char(5)")))
@@ -2657,7 +2657,7 @@ index 343b59a311e..05f3cab13f9 100644
26572657
- test("empty file should be skipped while write to file") {
26582658
+// TODO: comet native writer does not write empty files / dirs
26592659
+ test("empty file should be skipped while write to file",
2660-
+ IgnoreComet("comet native writer does not create empty files / dir")) {
2660+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3417")) {
26612661
withTempPath { path =>
26622662
spark.range(100).repartition(10).where("id = 50").write.parquet(path.toString)
26632663
val partFiles = path.listFiles()
@@ -2909,7 +2909,7 @@ index 4474ec1fd42..d0366be3597 100644
29092909
- test("Write Spark version into Parquet metadata") {
29102910
+// TODO : Comet native writer to add spark / comet version into parquet metadata
29112911
+ test("Write Spark version into Parquet metadata",
2912-
+ IgnoreComet("Comet doesn't support DELTA encoding yet")) {
2912+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3427")) {
29132913
withTempPath { dir =>
29142914
spark.range(1).repartition(1).write.parquet(dir.getAbsolutePath)
29152915
assert(getMetaData(dir)(SPARK_VERSION_METADATA_KEY) === SPARK_VERSION_SHORT)
@@ -2933,7 +2933,7 @@ index baa11df302b..c88f66d5c77 100644
29332933
- test("parquet timestamp conversion") {
29342934
+// TODO : Support legacy timestamps conversion /cast in comet native writer
29352935
+ test("parquet timestamp conversion",
2936-
+ IgnoreComet("timestamp96 conversion failed with the native writer")) {
2936+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3425")) {
29372937
// Make a table with one parquet file written by impala, and one parquet file written by spark.
29382938
// We should only adjust the timestamps in the impala file, and only if the conf is set
29392939
val impalaFile = "test-data/impala_timestamp.parq"
@@ -3479,7 +3479,7 @@ index baf99798965..e23d8223e05 100644
34793479

34803480
- test("INSERT INTO TABLE - complex type but different names") {
34813481
+ test("INSERT INTO TABLE - complex type but different names",
3482-
+ IgnoreComet("comet native writer issue")) {
3482+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3426")) {
34833483
val tab1 = "tab1"
34843484
val tab2 = "tab2"
34853485
withTable(tab1, tab2) {
@@ -3489,7 +3489,7 @@ index baf99798965..e23d8223e05 100644
34893489

34903490
- test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests") {
34913491
+ test("SPARK-38336 INSERT INTO statements with tables with default columns: positive tests",
3492-
+ IgnoreComet("comet native writer insert overwrite bug")) {
3492+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3421")) {
34933493
// When the INSERT INTO statement provides fewer values than expected, NULL values are appended
34943494
// in their place.
34953495
withTable("t") {
@@ -3499,7 +3499,7 @@ index baf99798965..e23d8223e05 100644
34993499

35003500
- test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests") {
35013501
+ test("SPARK-38811 INSERT INTO on columns added with ALTER TABLE ADD COLUMNS: Positive tests",
3502-
+ IgnoreComet("bug with insert overwrite mode comet native writer")) {
3502+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3422")) {
35033503
// There is a complex expression in the default value.
35043504
val createTableBooleanCol = "create table t(i boolean) using parquet"
35053505
val createTableIntCol = "create table t(i int) using parquet"
@@ -3510,7 +3510,7 @@ index baf99798965..e23d8223e05 100644
35103510
- test("SPARK-43071: INSERT INTO from queries whose final operators are not projections") {
35113511
+// TODO : fix overwrite mode issues comet native writer
35123512
+ test("SPARK-43071: INSERT INTO from queries whose final operators are not projections",
3513-
+ IgnoreComet("comet writer issues with insert overwrite")) {
3513+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3423")) {
35143514
def runTest(insert: String, expected: Seq[Row]): Unit = {
35153515
withTable("t1", "t2") {
35163516
sql("create table t1(i boolean, s bigint default 42) using parquet")
@@ -3521,7 +3521,7 @@ index baf99798965..e23d8223e05 100644
35213521
- test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
35223522
+// TODO : Insert overwrite not supported comet native writer
35233523
+ test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source",
3524-
+ IgnoreComet("comet native writer does not support insert overwrite")) {
3524+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3420")) {
35253525
withTempPath { dir =>
35263526
val path = dir.toURI.getPath
35273527
sql(s"""create table tab1 ( a int) using parquet location '$path'""")
@@ -3717,7 +3717,7 @@ index 300807cf058..8dab4f9a2af 100644
37173717
+
37183718
+// TODO : fix test for native writer
37193719
+ test("write path implements onTaskCommit API correctly",
3720-
+ IgnoreComet("Comet native writer partial support")) {
3720+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3428")) {
37213721
withSQLConf(
37223722
SQLConf.FILE_COMMIT_PROTOCOL_CLASS.key ->
37233723
classOf[MessageCapturingCommitProtocol].getCanonicalName) {
@@ -3728,7 +3728,7 @@ index 300807cf058..8dab4f9a2af 100644
37283728
- test("Insert overwrite table command should output correct schema: basic") {
37293729
+// TODO: support various write modes in comet native writer
37303730
+ test("Insert overwrite table command should output correct schema: basic",
3731-
+ IgnoreComet("Comet native writer does not support insert overwrite mode")) {
3731+
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/3424")) {
37323732
withTable("tbl", "tbl2") {
37333733
withView("view1") {
37343734
val df = spark.range(10).toDF("id")

0 commit comments

Comments
 (0)