Commit af8172a

fix: Fix 3.5.0 compile issues

Parent: cda2140

13 files changed: +115 -55 lines

build.sc

Lines changed: 18 additions & 47 deletions
@@ -8,50 +8,25 @@ trait SparkModule extends Cross.Module2[String, String] with SbtModule with CiRe
   outer =>
   override def scalaVersion = crossValue
   val sparkVersion = crossValue2
+  val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
+  val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"

   override def millSourcePath = super.millSourcePath / os.up

-  // Custom source layout for Spark Data Source API 2
-  val sparkVersionSpecificSources = if (sparkVersion >= "3.4.0") {
-    Seq("scala", "3.0_and_up/scala", "3.1_and_up/scala", "3.2_and_up/scala", "3.3_and_up/scala", "3.4_and_up/scala")
-  } else if (sparkVersion >= "3.3.0") {
-    Seq(
-      "scala",
-      "3.0_3.1_3.2_3.3/scala",
-      "3.0_and_up/scala",
-      "3.1_and_up/scala",
-      "3.2_and_up/scala",
-      "3.3_and_up/scala"
-    )
-  } else if (sparkVersion >= "3.2.0") {
-    Seq(
-      "scala",
-      "3.0_3.1_3.2/scala",
-      "3.0_3.1_3.2_3.3/scala",
-      "3.0_and_up/scala",
-      "3.1_and_up/scala",
-      "3.2_and_up/scala"
-    )
-  } else if (sparkVersion >= "3.1.0") {
-    Seq(
-      "scala",
-      "3.1/scala",
-      "3.0_3.1/scala",
-      "3.0_3.1_3.2_3.3/scala",
-      "3.0_3.1_3.2/scala",
-      "3.0_and_up/scala",
-      "3.1_and_up/scala"
-    )
-  } else if (sparkVersion >= "3.0.0") {
-    Seq("scala", "3.0/scala", "3.0_3.1/scala", "3.0_3.1_3.2_3.3/scala", "3.0_3.1_3.2/scala", "3.0_and_up/scala")
-  } else if (sparkVersion >= "2.4.0") {
-    Seq("scala", "2.4/scala")
-  } else {
-    throw new UnsupportedOperationException(s"sparkVersion ${sparkVersion} is not supported")
+  def sparkVersionSpecificSources = T {
+    val versionSpecificDirs = os.list(os.pwd / "src" / "main")
+    val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
+    val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
+    versionSpecificDirs.filter(_.last match {
+      case "scala" => true
+      case `sparkBinaryVersion` => true
+      case s"${sparkMaj}.${sparkMin}_and_up" => sparkMaj == sparkMajor && sparkMin <= sparkMinor
+      case s"${sparkLow}_to_${sparkHigh}" => sparkLow <= sparkVersion && sparkHigh >= sparkBinaryVersion
+      case _ => false
+    })
   }
-
   override def sources = T.sources {
-    super.sources() ++ sparkVersionSpecificSources.map(s => PathRef(millSourcePath / "src" / "main" / os.RelPath(s)))
+    super.sources() ++ sparkVersionSpecificSources().map(PathRef(_))
   }

   override def docSources = T.sources(Seq[PathRef]())
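The rewritten sparkVersionSpecificSources drops the hard-coded per-version directory lists in favor of a naming convention on the folders under src/main. Below is a minimal standalone sketch of that matching rule; the object and method names are mine, build.sc matches on os.Path#last rather than plain strings, and the s"..." pattern extractor assumes Scala 2.13, which the Mill build language provides.

object SourceDirFilter {
  // Mirrors the new filter in build.sc: does a version-specific source
  // directory apply to the Spark version being built?
  def matches(dirName: String, sparkVersion: String): Boolean = {
    val Array(sparkMajor, sparkMinor, _) = sparkVersion.split("\\.")
    val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
    dirName match {
      case "scala" => true // shared sources, always compiled
      case `sparkBinaryVersion` => true // exact binary version, e.g. "3.5"
      // String comparisons are lexicographic, which is safe while
      // major/minor numbers stay single-digit.
      case s"${maj}.${min}_and_up" => maj == sparkMajor && min <= sparkMinor
      case s"${low}_to_${high}" => low <= sparkVersion && high >= sparkBinaryVersion
      case _ => false
    }
  }

  def main(args: Array[String]): Unit = {
    assert(matches("3.5_and_up", "3.5.0"))  // the new 3.5 sources kick in
    assert(matches("3.0_to_3.4", "3.2.4"))  // range directories cover 3.0..3.4
    assert(!matches("3.5_and_up", "3.4.1")) // 3.4 builds skip 3.5-only sources
  }
}

Supporting a future Spark version then means adding a directory that follows the convention instead of editing an if/else chain.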
@@ -156,14 +131,10 @@ val spark32 = List("3.2.4")
 val spark33 = List("3.3.3")
 val spark34 = List("3.4.1")
 val spark35 = List("3.5.0")
+val sparkVersions = spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35
+val crossMatrix =
+  sparkVersions.map(spark => (scala212, spark)) ++
+    sparkVersions.filter(_ >= "3.2").map(spark => (scala213, spark))

-val crossMatrix = {
-
-  (spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35).map(spark =>
-    (scala212, spark)
-  ) ++ (spark32 ++ spark33 ++ spark34 ++ spark35).map(spark => (scala213, spark))
-
-  // (spark34).map(spark => (scala212, spark))
-}

 object `spark-excel` extends Cross[SparkModule](crossMatrix) {}
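The flattened crossMatrix produces the same pairs as the block it replaces: every Spark version against scala212, plus Spark 3.2 and later against scala213. A worksheet-style check of the 2.13 gate; the 2.4/3.0/3.1 values below are hypothetical stand-ins, since only 3.2.4, 3.3.3, 3.4.1 and 3.5.0 are visible in this diff:

val sparkVersions = List("2.4.8", "3.0.3", "3.1.3", "3.2.4", "3.3.3", "3.4.1", "3.5.0")
val scala213Sparks = sparkVersions.filter(_ >= "3.2") // lexicographic compare
// scala213Sparks == List("3.2.4", "3.3.3", "3.4.1", "3.5.0")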
…/com/crealytics/spark/excel/v2/ExcelParserBase.scala (new file)

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2022 Martin Mauch (@nightscape)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.crealytics.spark.excel.v2
+
+import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
+import org.apache.spark.unsafe.types.UTF8String
+import org.apache.spark.sql.catalyst.InternalRow
+
+trait ExcelParserBase {
+
+  protected def getCurrentInput: UTF8String
+  def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
+    BadRecordException(() => getCurrentInput, () => partialResults.headOption, baseException)
+}
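ExcelParserBase isolates the one parser construct whose Spark interface differs across versions: building a BadRecordException. ExcelParser (further down) calls badRecord instead of invoking the constructor directly, and each version-specific source directory carries its own copy of this trait. The copy above wraps the first partial result in the Option[InternalRow] that Spark versions before 3.5 expect.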
…/com/crealytics/spark/excel/v2/ExcelParserBase.scala (new file, second version-specific copy)

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2022 Martin Mauch (@nightscape)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.crealytics.spark.excel.v2
+
+import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
+import org.apache.spark.unsafe.types.UTF8String
+import org.apache.spark.sql.catalyst.InternalRow
+
+trait ExcelParserBase {
+
+  protected def getCurrentInput: UTF8String
+  def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
+    BadRecordException(() => getCurrentInput, () => partialResults.headOption, baseException)
+}

src/main/3.4_and_up/scala/com/crealytics/spark/excel/v2/ExcelPartitionReaderFactory.scala renamed to src/main/3.0_to_3.4/scala/com/crealytics/spark/excel/v2/ExcelPartitionReaderFactory.scala

Lines changed: 3 additions & 1 deletion
@@ -26,6 +26,8 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.sources.Filter
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.SerializableConfiguration
+
+import java.net.URI
 import scala.util.control.NonFatal

 /** A factory used to create Excel readers.
@@ -75,7 +77,7 @@ case class ExcelPartitionReaderFactory(
     requiredSchema: StructType
   ): SheetData[InternalRow] = {
     val excelHelper = ExcelHelper(options)
-    val sheetData = excelHelper.getSheetData(conf, file.filePath.toUri)
+    val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath.toString))
     try {
       SheetData(
         ExcelParser.parseIterator(sheetData.rowIterator, parser, headerChecker, requiredSchema),
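The rename from 3.4_and_up to 3.0_to_3.4 works because of the one-line change above: PartitionedFile.filePath is a plain String up to Spark 3.3 and became a SparkPath in 3.4, so the 3.4-only file.filePath.toUri call is replaced by URI.create(file.filePath.toString), which compiles against both representations and lets a single copy of the file serve 3.0 through 3.4.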
…/com/crealytics/spark/excel/v2/ExcelParserBase.scala (new file, third version-specific copy)

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2022 Martin Mauch (@nightscape)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.crealytics.spark.excel.v2
+
+import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
+import org.apache.spark.unsafe.types.UTF8String
+import org.apache.spark.sql.catalyst.InternalRow
+
+trait ExcelParserBase {
+
+  protected def getCurrentInput: UTF8String
+  def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
+    BadRecordException(() => getCurrentInput, () => partialResults, baseException)
+}
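This copy differs from the two above in a single line: it hands the whole partial-results array to BadRecordException rather than partialResults.headOption. That matches Spark 3.5's change of the constructor's second parameter from () => Option[InternalRow] to () => Array[InternalRow], one of the compile errors this commit fixes, so this is presumably the copy placed in the new 3.5_and_up directory.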

src/main/3.0_3.1_3.2_3.3/scala/com/crealytics/spark/excel/v2/ExcelPartitionReaderFactory.scala renamed to src/main/3.5_and_up/scala/com/crealytics/spark/excel/v2/ExcelPartitionReaderFactory.scala

Lines changed: 6 additions & 3 deletions
@@ -18,7 +18,7 @@ package com.crealytics.spark.excel.v2

 import org.apache.hadoop.conf.Configuration
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.{InternalRow, FileSourceOptions}
 import org.apache.spark.sql.connector.read.PartitionReader
 import org.apache.spark.sql.execution.datasources.PartitionedFile
 import org.apache.spark.sql.execution.datasources.v2._
@@ -54,7 +54,10 @@ case class ExcelPartitionReaderFactory(
   parsedOptions: ExcelOptions,
   filters: Seq[Filter]
 ) extends FilePartitionReaderFactory {
-
+  protected def options: FileSourceOptions = new FileSourceOptions(Map(
+    FileSourceOptions.IGNORE_CORRUPT_FILES -> "true",
+    FileSourceOptions.IGNORE_MISSING_FILES -> "true"
+  ))
   override def buildReader(file: PartitionedFile): PartitionReader[InternalRow] = {
     val conf = broadcastedConf.value.value
     val actualDataSchema =
@@ -77,7 +80,7 @@ case class ExcelPartitionReaderFactory(
     requiredSchema: StructType
   ): SheetData[InternalRow] = {
     val excelHelper = ExcelHelper(parsedOptions)
-    val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath))
+    val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath.toString))
     try {
       SheetData(
         ExcelParser.parseIterator(sheetData.rowIterator, parser, headerChecker, requiredSchema),
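The new options override exists because Spark 3.5's FilePartitionReaderFactory expects its subclasses to supply an options: FileSourceOptions member, which the signature this file previously compiled against did not have. Setting IGNORE_CORRUPT_FILES and IGNORE_MISSING_FILES to "true" opts this reader out of failing hard on unreadable or vanished input files.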

src/main/scala/com/crealytics/spark/excel/v2/ExcelParser.scala

Lines changed: 4 additions & 4 deletions
@@ -42,7 +42,7 @@ import org.apache.poi.ss.usermodel.DateUtil
  * The pushdown filters that should be applied to converted values.
  */
 class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val options: ExcelOptions, filters: Seq[Filter])
-    extends Logging {
+    extends Logging with ExcelParserBase {
   require(
     requiredSchema.toSet.subsetOf(dataSchema.toSet),
     s"requiredSchema (${requiredSchema.catalogString}) should be the subset of " +
@@ -95,7 +95,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option
   private val pushedFilters = new ExcelFilters(filters, requiredSchema)

   /* Retrieve the raw record string. */
-  private def getCurrentInput: UTF8String = UTF8String
+  protected def getCurrentInput: UTF8String = UTF8String
     .fromString("TODO: how to show the corrupted record?")

   /** This parser first picks some tokens from the input tokens, according to the required schema, then parse these
@@ -352,7 +352,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option

   private def convert(tokens: Vector[Cell]): Option[InternalRow] = {
     if (tokens == null) {
-      throw BadRecordException(() => getCurrentInput, () => None, new RuntimeException("Malformed Excel record"))
+      throw badRecord(Array.empty, new RuntimeException("Malformed Excel record"))
     }

     var badRecordException: Option[Throwable] =
@@ -396,7 +396,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option
     if (skipRow) { noRows }
     else {
       if (badRecordException.isDefined) {
-        throw BadRecordException(() => getCurrentInput, () => requiredRow.headOption, badRecordException.get)
+        throw badRecord(requiredRow.toArray, badRecordException.get)
       } else { requiredRow }
     }
   }
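With these changes every BadRecordException in the parser flows through badRecord, so ExcelParser itself compiles unchanged on all supported Spark versions; only the small per-directory ExcelParserBase trait varies between 3.5 and earlier releases.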

0 commit comments

Comments
 (0)