Skip to content
This repository was archived by the owner on Feb 27, 2025. It is now read-only.

Commit 5bf3a53

Browse files
committed
minor fix: drop redundant empty parentheses from `columnsToWriteSet.isEmpty` calls (Scala `isEmpty` is a parameterless accessor)
1 parent ef94937 commit 5bf3a53

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/main/scala/com/microsoft/sqlserver/jdbc/spark/utils/BulkCopyUtils.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -300,7 +300,7 @@ object BulkCopyUtils extends Logging {
300300
assertIfCheckEnabled(dfCols.length + autoCols.length == tableCols.length, strictSchemaCheck,
301301
s"${prefix} numbers of columns")
302302

303-
if (columnsToWriteSet.isEmpty()) {
303+
if (columnsToWriteSet.isEmpty) {
304304
val result = new Array[ColumnMetadata](tableCols.length - autoCols.length)
305305
} else {
306306
val result = new Array[ColumnMetadata](columnsToWriteSet.size)
@@ -311,7 +311,7 @@ object BulkCopyUtils extends Logging {
311311
for (i <- 0 to tableCols.length-1) {
312312
val tableColName = tableCols(i).name
313313
var dfFieldIndex = -1
314-
if (!columnsToWriteSet.isEmpty() && !columnsToWriteSet.contains(tableColName)) {
314+
if (!columnsToWriteSet.isEmpty && !columnsToWriteSet.contains(tableColName)) {
315315
// if columnsToWrite provided, and column name not in it, skip column mapping and ColumnMetadata
316316
logDebug(s"skipping col index $i col name $tableColName, user not provided in columnsToWrite list")
317317
} else if (autoCols.contains(tableColName)) {

0 commit comments

Comments (0)