@@ -38,6 +38,8 @@ if typing.TYPE_CHECKING:
3838 CsvCompression ,
3939 HiveTypes ,
4040 ColumnsTypes ,
41+ ProfilerFormat ,
42+ ParquetCompression ,
4143 )
4244 from ._enums import ExplainTypeLiteral , CSVLineTerminatorLiteral , RenderModeLiteral
4345 from duckdb import sqltypes , func
@@ -302,7 +304,7 @@ class DuckDBPyConnection:
302304 filename : bool = False ,
303305 hive_partitioning : bool = False ,
304306 union_by_name : bool = False ,
305- compression : str | None = None ,
307+ compression : ParquetCompression | None = None ,
306308 ) -> DuckDBPyRelation : ...
307309 @typing .overload
308310 def from_parquet (
@@ -314,7 +316,7 @@ class DuckDBPyConnection:
314316 filename : bool = False ,
315317 hive_partitioning : bool = False ,
316318 union_by_name : bool = False ,
317- compression : str | None = None ,
319+ compression : ParquetCompression | None = None ,
318320 ) -> DuckDBPyRelation : ...
319321 def from_query (self , query : str , * , alias : str = "" , params : object = None ) -> DuckDBPyRelation : ...
320322 def get_table_names (self , query : str , * , qualified : bool = False ) -> set [str ]: ...
@@ -327,7 +329,7 @@ class DuckDBPyConnection:
327329 repository_url : str | None = None ,
328330 version : str | None = None ,
329331 ) -> None : ...
330- def get_profiling_information (self , format : str = "json" ) -> str : ...
332+ def get_profiling_information (self , format : ProfilerFormat = "json" ) -> str : ...
331333 def enable_profiling (self ) -> None : ...
332334 def disable_profiling (self ) -> None : ...
333335 def interrupt (self ) -> None : ...
@@ -426,7 +428,7 @@ class DuckDBPyConnection:
426428 filename : bool = False ,
427429 hive_partitioning : bool = False ,
428430 union_by_name : bool = False ,
429- compression : str | None = None ,
431+ compression : ParquetCompression | None = None ,
430432 ) -> DuckDBPyRelation : ...
431433 @typing .overload
432434 def read_parquet (
@@ -438,7 +440,7 @@ class DuckDBPyConnection:
438440 filename : bool = False ,
439441 hive_partitioning : bool = False ,
440442 union_by_name : bool = False ,
441- compression : typing . Any = None ,
443+ compression : ParquetCompression | None = None ,
442444 ) -> DuckDBPyRelation : ...
443445 def register (self , view_name : str , python_object : object ) -> DuckDBPyConnection : ...
444446 def register_filesystem (self , filesystem : fsspec .AbstractFileSystem ) -> None : ...
@@ -725,7 +727,7 @@ class DuckDBPyRelation:
725727 self ,
726728 file_name : str ,
727729 * ,
728- compression : str | None = None ,
730+ compression : ParquetCompression | None = None ,
729731 field_ids : ParquetFieldsOptions | None = None ,
730732 row_group_size_bytes : int | str | None = None ,
731733 row_group_size : int | None = None ,
@@ -781,7 +783,7 @@ class DuckDBPyRelation:
781783 self ,
782784 file_name : str ,
783785 * ,
784- compression : str | None = None ,
786+ compression : ParquetCompression | None = None ,
785787 field_ids : ParquetFieldsOptions | None = None ,
786788 row_group_size_bytes : str | int | None = None ,
787789 row_group_size : int | None = None ,
@@ -1039,7 +1041,7 @@ def from_parquet(
10391041 filename : bool = False ,
10401042 hive_partitioning : bool = False ,
10411043 union_by_name : bool = False ,
1042- compression : str | None = None ,
1044+ compression : ParquetCompression | None = None ,
10431045 connection : DuckDBPyConnection | None = None ,
10441046) -> DuckDBPyRelation : ...
10451047@typing .overload
@@ -1051,7 +1053,7 @@ def from_parquet(
10511053 filename : bool = False ,
10521054 hive_partitioning : bool = False ,
10531055 union_by_name : bool = False ,
1054- compression : typing . Any = None ,
1056+ compression : ParquetCompression | None = None ,
10551057 connection : DuckDBPyConnection | None = None ,
10561058) -> DuckDBPyRelation : ...
10571059def from_query (
@@ -1081,7 +1083,9 @@ def limit(
10811083 * ,
10821084 connection : DuckDBPyConnection | None = None ,
10831085) -> DuckDBPyRelation : ...
1084- def get_profiling_information (* , connection : DuckDBPyConnection | None = None , format : str = "json" ) -> str : ...
1086+ def get_profiling_information (
1087+ * , connection : DuckDBPyConnection | None = None , format : ProfilerFormat = "json"
1088+ ) -> str : ...
10851089def enable_profiling (* , connection : DuckDBPyConnection | None = None ) -> None : ...
10861090def disable_profiling (* , connection : DuckDBPyConnection | None = None ) -> None : ...
10871091def list_filesystems (* , connection : DuckDBPyConnection | None = None ) -> list [str ]: ...
@@ -1208,7 +1212,7 @@ def read_parquet(
12081212 filename : bool = False ,
12091213 hive_partitioning : bool = False ,
12101214 union_by_name : bool = False ,
1211- compression : str | None = None ,
1215+ compression : ParquetCompression | None = None ,
12121216 connection : DuckDBPyConnection | None = None ,
12131217) -> DuckDBPyRelation : ...
12141218@typing .overload
@@ -1220,7 +1224,7 @@ def read_parquet(
12201224 filename : bool = False ,
12211225 hive_partitioning : bool = False ,
12221226 union_by_name : bool = False ,
1223- compression : typing . Any = None ,
1227+ compression : ParquetCompression | None = None ,
12241228 connection : DuckDBPyConnection | None = None ,
12251229) -> DuckDBPyRelation : ...
12261230def register (
0 commit comments