@@ -95,6 +95,8 @@ __all__: list[str] = [
9595 "execute" ,
9696 "executemany" ,
9797 "extract_statements" ,
98+ "to_arrow_reader" ,
99+ "to_arrow_table" ,
98100 "fetch_arrow_table" ,
99101 "fetch_df" ,
100102 "fetch_df_chunk" ,
@@ -194,7 +196,11 @@ class DuckDBPyConnection:
194196 def __exit__ (self , exc_type : object , exc : object , traceback : object ) -> None : ...
195197 def append (self , table_name : str , df : pandas .DataFrame , * , by_name : bool = False ) -> DuckDBPyConnection : ...
196198 def array_type (self , type : sqltypes .DuckDBPyType , size : pytyping .SupportsInt ) -> sqltypes .DuckDBPyType : ...
197- def arrow (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
199+ def arrow (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader :
200+ """Alias of to_arrow_reader(). We recommend using to_arrow_reader() instead."""
201+ ...
202+ def to_arrow_reader (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
203+ def to_arrow_table (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table : ...
198204 def begin (self ) -> DuckDBPyConnection : ...
199205 def checkpoint (self ) -> DuckDBPyConnection : ...
200206 def close (self ) -> None : ...
@@ -222,12 +228,16 @@ class DuckDBPyConnection:
222228 def execute (self , query : Statement | str , parameters : object = None ) -> DuckDBPyConnection : ...
223229 def executemany (self , query : Statement | str , parameters : object = None ) -> DuckDBPyConnection : ...
224230 def extract_statements (self , query : str ) -> list [Statement ]: ...
225- def fetch_arrow_table (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table : ...
231+ def fetch_arrow_table (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table :
232+ """Deprecated: use to_arrow_table() instead."""
233+ ...
226234 def fetch_df (self , * , date_as_object : bool = False ) -> pandas .DataFrame : ...
227235 def fetch_df_chunk (
228236 self , vectors_per_chunk : pytyping .SupportsInt = 1 , * , date_as_object : bool = False
229237 ) -> pandas .DataFrame : ...
230- def fetch_record_batch (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
238+ def fetch_record_batch (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader :
239+ """Deprecated: use to_arrow_reader() instead."""
240+ ...
231241 def fetchall (self ) -> list [tuple [pytyping .Any , ...]]: ...
232242 def fetchdf (self , * , date_as_object : bool = False ) -> pandas .DataFrame : ...
233243 def fetchmany (self , size : pytyping .SupportsInt = 1 ) -> list [tuple [pytyping .Any , ...]]: ...
@@ -487,7 +497,11 @@ class DuckDBPyRelation:
487497 def arg_min (
488498 self , arg_column : str , value_column : str , groups : str = "" , window_spec : str = "" , projected_columns : str = ""
489499 ) -> DuckDBPyRelation : ...
490- def arrow (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
500+ def arrow (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader :
501+ """Alias of to_arrow_reader(). We recommend using to_arrow_reader() instead."""
502+ ...
503+ def to_arrow_reader (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
504+ def to_arrow_table (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table : ...
491505 def avg (
492506 self , column : str , groups : str = "" , window_spec : str = "" , projected_columns : str = ""
493507 ) -> DuckDBPyRelation : ...
@@ -533,12 +547,18 @@ class DuckDBPyRelation:
533547 def favg (
534548 self , column : str , groups : str = "" , window_spec : str = "" , projected_columns : str = ""
535549 ) -> DuckDBPyRelation : ...
536- def fetch_arrow_reader (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
537- def fetch_arrow_table (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table : ...
550+ def fetch_arrow_reader (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader :
551+ """Deprecated: use to_arrow_reader() instead."""
552+ ...
553+ def fetch_arrow_table (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table :
554+ """Deprecated: use to_arrow_table() instead."""
555+ ...
538556 def fetch_df_chunk (
539557 self , vectors_per_chunk : pytyping .SupportsInt = 1 , * , date_as_object : bool = False
540558 ) -> pandas .DataFrame : ...
541- def fetch_record_batch (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader : ...
559+ def fetch_record_batch (self , rows_per_batch : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .RecordBatchReader :
560+ """Deprecated: use to_arrow_reader() instead."""
561+ ...
542562 def fetchall (self ) -> list [tuple [pytyping .Any , ...]]: ...
543563 def fetchdf (self , * , date_as_object : bool = False ) -> pandas .DataFrame : ...
544564 def fetchmany (self , size : pytyping .SupportsInt = 1 ) -> list [tuple [pytyping .Any , ...]]: ...
@@ -656,7 +676,6 @@ class DuckDBPyRelation:
656676 def query (self , virtual_table_name : str , sql_query : str ) -> DuckDBPyRelation : ...
657677 def rank (self , window_spec : str , projected_columns : str = "" ) -> DuckDBPyRelation : ...
658678 def rank_dense (self , window_spec : str , projected_columns : str = "" ) -> DuckDBPyRelation : ...
659- def record_batch (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .RecordBatchReader : ...
660679 def row_number (self , window_spec : str , projected_columns : str = "" ) -> DuckDBPyRelation : ...
661680 def select (self , * args : str | Expression , groups : str = "" ) -> DuckDBPyRelation : ...
662681 def select_dtypes (self , types : pytyping .List [sqltypes .DuckDBPyType | str ]) -> DuckDBPyRelation : ...
@@ -692,7 +711,6 @@ class DuckDBPyRelation:
692711 self , column : str , groups : str = "" , window_spec : str = "" , projected_columns : str = ""
693712 ) -> DuckDBPyRelation : ...
694713 def tf (self ) -> dict [str , tensorflow .Tensor ]: ...
695- def to_arrow_table (self , batch_size : pytyping .SupportsInt = 1000000 ) -> pyarrow .lib .Table : ...
696714 def to_csv (
697715 self ,
698716 file_name : str ,
@@ -1067,9 +1085,18 @@ def array_type(
10671085@pytyping .overload
10681086def arrow (
10691087 rows_per_batch : pytyping .SupportsInt = 1000000 , * , connection : DuckDBPyConnection | None = None
1070- ) -> pyarrow .lib .RecordBatchReader : ...
1088+ ) -> pyarrow .lib .RecordBatchReader :
1089+ """Alias of to_arrow_reader(). We recommend using to_arrow_reader() instead."""
1090+ ...
1091+
10711092@pytyping .overload
10721093def arrow (arrow_object : pytyping .Any , * , connection : DuckDBPyConnection | None = None ) -> DuckDBPyRelation : ...
1094+ def to_arrow_reader (
1095+ batch_size : pytyping .SupportsInt = 1000000 , * , connection : DuckDBPyConnection | None = None
1096+ ) -> pyarrow .lib .RecordBatchReader : ...
1097+ def to_arrow_table (
1098+ batch_size : pytyping .SupportsInt = 1000000 , * , connection : DuckDBPyConnection | None = None
1099+ ) -> pyarrow .lib .Table : ...
10731100def begin (* , connection : DuckDBPyConnection | None = None ) -> DuckDBPyConnection : ...
10741101def checkpoint (* , connection : DuckDBPyConnection | None = None ) -> DuckDBPyConnection : ...
10751102def close (* , connection : DuckDBPyConnection | None = None ) -> None : ...
@@ -1128,7 +1155,10 @@ def executemany(
11281155def extract_statements (query : str , * , connection : DuckDBPyConnection | None = None ) -> list [Statement ]: ...
11291156def fetch_arrow_table (
11301157 rows_per_batch : pytyping .SupportsInt = 1000000 , * , connection : DuckDBPyConnection | None = None
1131- ) -> pyarrow .lib .Table : ...
1158+ ) -> pyarrow .lib .Table :
1159+ """Deprecated: use to_arrow_table() instead."""
1160+ ...
1161+
11321162def fetch_df (* , date_as_object : bool = False , connection : DuckDBPyConnection | None = None ) -> pandas .DataFrame : ...
11331163def fetch_df_chunk (
11341164 vectors_per_chunk : pytyping .SupportsInt = 1 ,
@@ -1138,7 +1168,10 @@ def fetch_df_chunk(
11381168) -> pandas .DataFrame : ...
11391169def fetch_record_batch (
11401170 rows_per_batch : pytyping .SupportsInt = 1000000 , * , connection : DuckDBPyConnection | None = None
1141- ) -> pyarrow .lib .RecordBatchReader : ...
1171+ ) -> pyarrow .lib .RecordBatchReader :
1172+ """Deprecated: use to_arrow_reader() instead."""
1173+ ...
1174+
11421175def fetchall (* , connection : DuckDBPyConnection | None = None ) -> list [tuple [pytyping .Any , ...]]: ...
11431176def fetchdf (* , date_as_object : bool = False , connection : DuckDBPyConnection | None = None ) -> pandas .DataFrame : ...
11441177def fetchmany (