Skip to content
This repository was archived by the owner on Nov 22, 2022. It is now read-only.

Commit 02c32ec

Browse files
authored
Backport DataFrameWriter argument changes (#556)
Resolves #555
1 parent d49a5d1 commit 02c32ec

File tree

1 file changed

+11
-10
lines changed

1 file changed

+11
-10
lines changed

third_party/3/pyspark/sql/readwriter.pyi

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,14 @@
22
#
33

44
from typing import overload
5-
from typing import Any, Dict, List, Optional, Tuple, Union
5+
from typing import Dict, List, Optional, Tuple, Union
66

77
from pyspark.sql._typing import OptionalPrimitiveType
88
from pyspark.sql.dataframe import DataFrame
99
from pyspark.rdd import RDD
10+
from pyspark.sql.column import Column
1011
from pyspark.sql.context import SQLContext
11-
from pyspark.sql.types import *
12+
from pyspark.sql.types import StructType
1213

1314
PathOrPaths = Union[str, List[str]]
1415
TupleOrListOfString = Union[List[str], Tuple[str, ...]]
@@ -25,7 +26,7 @@ class DataFrameReader(OptionUtils):
2526
self,
2627
path: Optional[PathOrPaths] = ...,
2728
format: Optional[str] = ...,
28-
schema: Optional[StructType] = ...,
29+
schema: Optional[Union[StructType, str]] = ...,
2930
**options: OptionalPrimitiveType
3031
) -> DataFrame: ...
3132
def json(
@@ -109,8 +110,8 @@ class DataFrameReader(OptionUtils):
109110
url: str,
110111
table: str,
111112
column: str,
112-
lowerBound: int,
113-
upperBound: int,
113+
lowerBound: Union[int, str],
114+
upperBound: Union[int, str],
114115
numPartitions: int,
115116
*,
116117
properties: Optional[Dict[str, str]] = ...
@@ -150,7 +151,7 @@ class DataFrameWriter(OptionUtils):
150151
path: Optional[str] = ...,
151152
format: Optional[str] = ...,
152153
mode: Optional[str] = ...,
153-
partitionBy: Optional[List[str]] = ...,
154+
partitionBy: Optional[Union[str, List[str]]] = ...,
154155
**options: OptionalPrimitiveType
155156
) -> None: ...
156157
def insertInto(self, tableName: str, overwrite: Optional[bool] = ...) -> None: ...
@@ -159,7 +160,7 @@ class DataFrameWriter(OptionUtils):
159160
name: str,
160161
format: Optional[str] = ...,
161162
mode: Optional[str] = ...,
162-
partitionBy: Optional[List[str]] = ...,
163+
partitionBy: Optional[Union[str, List[str]]] = ...,
163164
**options: OptionalPrimitiveType
164165
) -> None: ...
165166
def json(
@@ -171,13 +172,13 @@ class DataFrameWriter(OptionUtils):
171172
timestampFormat: Optional[str] = ...,
172173
lineSep: Optional[str] = ...,
173174
encoding: Optional[str] = ...,
174-
ignoreNullFields: Optional[bool] = ...,
175+
ignoreNullFields: Optional[Union[bool, str]] = ...,
175176
) -> None: ...
176177
def parquet(
177178
self,
178179
path: str,
179180
mode: Optional[str] = ...,
180-
partitionBy: Optional[List[str]] = ...,
181+
partitionBy: Optional[Union[str, List[str]]] = ...,
181182
compression: Optional[str] = ...,
182183
) -> None: ...
183184
def text(
@@ -208,7 +209,7 @@ class DataFrameWriter(OptionUtils):
208209
self,
209210
path: str,
210211
mode: Optional[str] = ...,
211-
partitionBy: Optional[List[str]] = ...,
212+
partitionBy: Optional[Union[str, List[str]]] = ...,
212213
compression: Optional[str] = ...,
213214
) -> None: ...
214215
def jdbc(

0 commit comments

Comments (0)