This repository was archived by the owner on Feb 27, 2025. It is now read-only.
File tree (Expand file tree / Collapse file tree): 1 file changed, +2 −2 lines changed
src/main/scala/com/microsoft/sqlserver/jdbc/spark/utils — 1 file changed, +2 −2 lines changed. Original file line number / Diff line number / Diff line change: @@ -300,7 +300,7 @@ object BulkCopyUtils extends Logging {
300
300
assertIfCheckEnabled(dfCols.length + autoCols.length == tableCols.length, strictSchemaCheck,
301
301
s " ${prefix} numbers of columns " )
302
302
303
- if (columnsToWriteSet.isEmpty() ) {
303
+ if (columnsToWriteSet.isEmpty) {
304
304
val result = new Array [ColumnMetadata ](tableCols.length - autoCols.length)
305
305
} else {
306
306
val result = new Array [ColumnMetadata ](columnsToWriteSet.size)
@@ -311,7 +311,7 @@ object BulkCopyUtils extends Logging {
311
311
for (i <- 0 to tableCols.length- 1 ) {
312
312
val tableColName = tableCols(i).name
313
313
var dfFieldIndex = - 1
314
- if (! columnsToWriteSet.isEmpty() && ! columnsToWriteSet.contains(tableColName)) {
314
+ if (! columnsToWriteSet.isEmpty && ! columnsToWriteSet.contains(tableColName)) {
315
315
// if columnsToWrite provided, and column name not in it, skip column mapping and ColumnMetadata
316
316
logDebug(s " skipping col index $i col name $tableColName, user not provided in columnsToWrite list " )
317
317
} else if (autoCols.contains(tableColName)) {
You can’t perform that action at this time.
0 commit comments