 from bigframes import version
 import bigframes._config.bigquery_options as bigquery_options
 import bigframes.clients
-from bigframes.core import blocks, log_adapter
+import bigframes.constants
+from bigframes.core import blocks, log_adapter, utils
 import bigframes.core.pyformat

 # Even though the ibis.backends.bigquery import is unused, it's needed
@@ -248,13 +249,6 @@ def __init__(
         self._temp_storage_manager = (
             self._session_resource_manager or self._anon_dataset_manager
         )
-        self._executor: executor.Executor = bq_caching_executor.BigQueryCachingExecutor(
-            bqclient=self._clients_provider.bqclient,
-            bqstoragereadclient=self._clients_provider.bqstoragereadclient,
-            storage_manager=self._temp_storage_manager,
-            strictly_ordered=self._strictly_ordered,
-            metrics=self._metrics,
-        )
         self._loader = bigframes.session.loader.GbqDataLoader(
             session=self,
             bqclient=self._clients_provider.bqclient,
@@ -265,6 +259,14 @@ def __init__(
             force_total_order=self._strictly_ordered,
             metrics=self._metrics,
         )
+        self._executor: executor.Executor = bq_caching_executor.BigQueryCachingExecutor(
+            bqclient=self._clients_provider.bqclient,
+            bqstoragereadclient=self._clients_provider.bqstoragereadclient,
+            loader=self._loader,
+            storage_manager=self._temp_storage_manager,
+            strictly_ordered=self._strictly_ordered,
+            metrics=self._metrics,
+        )

     def __del__(self):
         """Automatic cleanup of internal resources."""
@@ -937,15 +939,15 @@ def _read_pandas(
         if write_engine == "default":
             write_engine = (
                 "bigquery_load"
-                if mem_usage > MAX_INLINE_DF_BYTES
+                if mem_usage > bigframes.constants.MAX_INLINE_BYTES
                 else "bigquery_inline"
             )

         if write_engine == "bigquery_inline":
-            if mem_usage > MAX_INLINE_DF_BYTES:
+            if mem_usage > bigframes.constants.MAX_INLINE_BYTES:
                 raise ValueError(
                     f"DataFrame size ({mem_usage} bytes) exceeds the maximum allowed "
-                    f"for inline data ({MAX_INLINE_DF_BYTES} bytes)."
+                    f"for inline data ({bigframes.constants.MAX_INLINE_BYTES} bytes)."
                 )
             return self._read_pandas_inline(pandas_dataframe)
         elif write_engine == "bigquery_load":
@@ -954,6 +956,10 @@ def _read_pandas(
             return self._loader.read_pandas(pandas_dataframe, method="stream")
         elif write_engine == "bigquery_write":
             return self._loader.read_pandas(pandas_dataframe, method="write")
+        elif write_engine == "_deferred":
+            import bigframes.dataframe as dataframe
+
+            return dataframe.DataFrame(blocks.Block.from_local(pandas_dataframe, self))
         else:
             raise ValueError(f"Got unexpected write_engine '{write_engine}'")

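
The default-engine selection above now keys off the shared `bigframes.constants.MAX_INLINE_BYTES` instead of the module-local `MAX_INLINE_DF_BYTES`, and the dispatch grows a `"_deferred"` branch that wraps the pandas DataFrame in a local `Block` without uploading it. A runnable sketch of just the selection logic, using an illustrative threshold rather than the real constant:

```python
# Illustrative threshold; bigframes.constants.MAX_INLINE_BYTES is the real one.
MAX_INLINE_BYTES = 5_000

def choose_engine(mem_usage: int, write_engine: str = "default") -> str:
    # "default" picks an engine by size; explicit engines pass through,
    # including the new "_deferred" option that keeps data local.
    if write_engine == "default":
        return "bigquery_load" if mem_usage > MAX_INLINE_BYTES else "bigquery_inline"
    return write_engine

assert choose_engine(100) == "bigquery_inline"
assert choose_engine(100_000) == "bigquery_load"
assert choose_engine(100, write_engine="_deferred") == "_deferred"
```
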
@@ -1102,11 +1108,8 @@ def _read_csv_w_bigquery_engine(
         native CSV loading capabilities, making it suitable for large datasets
         that may not fit into local memory.
         """
-        if dtype is not None:
-            raise NotImplementedError(
-                f"BigQuery engine does not support the `dtype` argument."
-                f"{constants.FEEDBACK_LINK}"
-            )
+        if dtype is not None and not utils.is_dict_like(dtype):
+            raise ValueError("dtype should be a dict-like object.")

         if names is not None:
             if len(names) != len(set(names)):
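
Previously any `dtype` argument was rejected outright with `NotImplementedError`; now only non-dict-like values are rejected. Assuming `utils.is_dict_like` matches the pandas helper of the same name, the accepted and rejected shapes look like this:

```python
# Assumption: bigframes' utils.is_dict_like behaves like the pandas helper.
from pandas.api.types import is_dict_like

assert is_dict_like({"a": "Int64", "b": "Float64"})  # per-column mapping: accepted
assert not is_dict_like("Int64")                     # bare dtype string: rejected
```
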
@@ -1161,10 +1164,16 @@ def _read_csv_w_bigquery_engine(
             job_config.skip_leading_rows = header + 1

         table_id = self._loader.load_file(filepath_or_buffer, job_config=job_config)
-        return self._loader.read_gbq_table(
+        df = self._loader.read_gbq_table(
             table_id, index_col=index_col, columns=columns, names=names
         )

+        if dtype is not None:
+            for column, dtype in dtype.items():
+                if column in df.columns:
+                    df[column] = df[column].astype(dtype)
+        return df
+
     def read_pickle(
         self,
         filepath_or_buffer: FilePath | ReadPickleBuffer,
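
The requested dtypes are applied after the load, by casting each named column on the DataFrame read back from the table; dtype keys that name columns absent from the result are silently skipped. (Note the loop reuses the name `dtype` for each mapping value, shadowing the mapping itself; this works only because the `items()` iterator is created before the first reassignment.) A plain-pandas mirror of the loop, assuming bigframes `astype` behaves like pandas':

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2], "b": ["x", "y"]})
dtypes = {"a": "Float64", "missing": "Int64"}  # "missing" is skipped

for column, dtype in dtypes.items():
    if column in df.columns:  # ignore dtype keys not present in the result
        df[column] = df[column].astype(dtype)

assert str(df["a"].dtype) == "Float64"
assert "missing" not in df.columns
```
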