diff --git a/databricks/koalas/frame.py b/databricks/koalas/frame.py
index 0a83f56d9f..337d7c5b15 100644
--- a/databricks/koalas/frame.py
+++ b/databricks/koalas/frame.py
@@ -3278,7 +3278,7 @@ def cache(self):
         """
         return _CachedDataFrame(self._internal)
 
-    def to_table(self, name: str, format: Optional[str] = None, mode: str = 'error',
+    def to_table(self, name: str, format: Optional[str] = None, mode: str = 'overwrite',
                  partition_cols: Union[str, List[str], None] = None, **options):
         """
@@ -3297,8 +3297,9 @@ def to_table(self, name: str, format: Optional[str] = None, mode: str = 'error',
            - 'json'
            - 'csv'
-        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default 'error'.
-            Specifies the behavior of the save operation when the table exists already.
+        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default
+            'overwrite'. Specifies the behavior of the save operation when the table exists
+            already.
 
            - 'append': Append the new data to existing data.
            - 'overwrite': Overwrite existing data.
@@ -3333,7 +3334,7 @@ def to_table(self, name: str, format: Optional[str] = None, mode: str = 'error',
         self.to_spark().write.saveAsTable(name=name, format=format, mode=mode,
                                           partitionBy=partition_cols, **options)
 
-    def to_delta(self, path: str, mode: str = 'error',
+    def to_delta(self, path: str, mode: str = 'overwrite',
                  partition_cols: Union[str, List[str], None] = None, **options):
         """
         Write the DataFrame out as a Delta Lake table.
@@ -3342,8 +3343,9 @@ def to_delta(self, path: str, mode: str = 'error',
        ----------
        path : str, required
            Path to write to.
-        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default 'error'.
-            Specifies the behavior of the save operation when the destination exists already.
+        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default
+            'overwrite'. Specifies the behavior of the save operation when the destination
+            exists already.
 
            - 'append': Append the new data to existing data.
            - 'overwrite': Overwrite existing data.
@@ -3391,7 +3393,7 @@ def to_delta(self, path: str, mode: str = 'error',
         self.to_spark_io(
             path=path, mode=mode, format="delta", partition_cols=partition_cols, **options)
 
-    def to_parquet(self, path: str, mode: str = 'error',
+    def to_parquet(self, path: str, mode: str = 'overwrite',
                    partition_cols: Union[str, List[str], None] = None,
                    compression: Optional[str] = None):
         """
@@ -3401,8 +3403,9 @@ def to_parquet(self, path: str, mode: str = 'error',
        ----------
        path : str, required
            Path to write to.
-        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default 'error'.
-            Specifies the behavior of the save operation when the destination exists already.
+        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'},
+            default 'overwrite'. Specifies the behavior of the save operation when the
+            destination exists already.
 
            - 'append': Append the new data to existing data.
            - 'overwrite': Overwrite existing data.
@@ -3445,7 +3448,7 @@ def to_parquet(self, path: str, mode: str = 'error',
             path=path, mode=mode, partitionBy=partition_cols, compression=compression)
 
     def to_spark_io(self, path: Optional[str] = None, format: Optional[str] = None,
-                    mode: str = 'error', partition_cols: Union[str, List[str], None] = None,
+                    mode: str = 'overwrite', partition_cols: Union[str, List[str], None] = None,
                     **options):
         """Write the DataFrame out to a Spark data source.
@@ -3461,8 +3464,8 @@ def to_spark_io(self, path: Optional[str] = None, format: Optional[str] = None,
            - 'orc'
            - 'json'
            - 'csv'
-        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default 'error'.
-            Specifies the behavior of the save operation when data already.
+        mode : str {'append', 'overwrite', 'ignore', 'error', 'errorifexists'}, default
+            'overwrite'. Specifies the behavior of the save operation when data already exists.
 
            - 'append': Append the new data to existing data.
            - 'overwrite': Overwrite existing data.
diff --git a/docs/source/reference/io.rst b/docs/source/reference/io.rst
index 59d309e282..17f40c015b 100644
--- a/docs/source/reference/io.rst
+++ b/docs/source/reference/io.rst
@@ -75,6 +75,7 @@ JSON
    :toctree: api/
 
    read_json
+   DataFrame.to_json
 
 HTML
 ----
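
Rough usage sketch of the changed default (not part of the patch): with this change, calling any of the writers above (to_table, to_delta, to_parquet, to_spark_io) without an explicit mode overwrites existing output instead of raising. Assumes a running Spark session; the output path below is purely illustrative.

    import databricks.koalas as ks

    kdf = ks.DataFrame({'id': [1, 2, 3], 'value': ['a', 'b', 'c']})

    # The default mode is now 'overwrite', so repeating the call silently
    # replaces the previous output instead of raising an error.
    kdf.to_parquet('/tmp/koalas_demo_parquet')
    kdf.to_parquet('/tmp/koalas_demo_parquet')

    # The old behavior is still available by passing mode explicitly:
    # kdf.to_parquet('/tmp/koalas_demo_parquet', mode='error')  # would raise: path exists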