@@ -210,6 +210,50 @@ def test_create_dataset_from_bigframes(client, is_replay_mode):
210210 )
211211
212212
@pytest.mark.skipif(
    sys.version_info < (3, 10),
    reason="bigframes requires python 3.10 or higher",
)
@pytest.mark.usefixtures("mock_bigquery_client", "mock_import_bigframes")
def test_create_dataset_from_bigframes_preserves_other_metadata(
    client, is_replay_mode
):
    """create_from_bigframes keeps caller-supplied multimodal_dataset fields.

    Passing a ``multimodal_dataset`` dict with a display name and a
    ``gemini_request_read_config`` must not be clobbered by the
    ``input_config`` that the client derives from the target table.
    """
    import bigframes.pandas

    dataframe = pd.DataFrame(
        {
            "col1": ["col1"],
            "col2": ["col2"],
        }
    )
    if is_replay_mode:
        # Replay mode has no live BigQuery backend: stand in a mock whose
        # to_gbq() returns a fixed temp-table id instead of uploading.
        bf_dataframe = mock.MagicMock()
        bf_dataframe.to_gbq.return_value = "temp_table_id"
    else:
        bf_dataframe = bigframes.pandas.DataFrame(dataframe)

    dataset = client.datasets.create_from_bigframes(
        dataframe=bf_dataframe,
        target_table_id=BIGQUERY_TABLE_NAME,
        multimodal_dataset={
            "display_name": "test-from-bigframes",
            "metadata": {
                "gemini_request_read_config": {
                    "assembled_request_column_name": "test_column"
                }
            },
        },
    )

    assert isinstance(dataset, types.MultimodalDataset)
    # Caller-supplied fields survive...
    assert dataset.display_name == "test-from-bigframes"
    assert (
        dataset.metadata.gemini_request_read_config.assembled_request_column_name
        == "test_column"
    )
    # ...and the derived BigQuery input source is still filled in.
    assert dataset.metadata.input_config.bigquery_source.uri == (
        f"bq://{BIGQUERY_TABLE_NAME}"
    )
256+
213257pytestmark = pytest_helper .setup (
214258 file = __file__ ,
215259 globals_for_file = globals (),
@@ -371,3 +415,50 @@ async def test_create_dataset_from_bigframes_async(client, is_replay_mode):
371415 pd .testing .assert_frame_equal (
372416 rows .to_dataframe (), dataframe , check_index_type = False
373417 )
418+
419+
@pytest.mark.skipif(
    sys.version_info < (3, 10),
    reason="bigframes requires python 3.10 or higher",
)
@pytest.mark.asyncio
@pytest.mark.usefixtures("mock_bigquery_client", "mock_import_bigframes")
async def test_create_dataset_from_bigframes_preserves_other_metadata_async(
    client, is_replay_mode
):
    """Async variant: create_from_bigframes keeps caller-supplied metadata.

    Mirrors the sync test but goes through ``client.aio.datasets`` to
    cover the awaitable code path.
    """
    import bigframes.pandas

    dataframe = pd.DataFrame(
        {
            "col1": ["col1"],
            "col2": ["col2"],
        }
    )
    if is_replay_mode:
        # Replay mode has no live BigQuery backend: stand in a mock whose
        # to_gbq() returns a fixed temp-table id instead of uploading.
        bf_dataframe = mock.MagicMock()
        bf_dataframe.to_gbq.return_value = "temp_table_id"
    else:
        bf_dataframe = bigframes.pandas.DataFrame(dataframe)

    dataset = await client.aio.datasets.create_from_bigframes(
        dataframe=bf_dataframe,
        target_table_id=BIGQUERY_TABLE_NAME,
        multimodal_dataset={
            "display_name": "test-from-bigframes",
            "metadata": {
                "gemini_request_read_config": {
                    "assembled_request_column_name": "test_column"
                }
            },
        },
    )

    assert isinstance(dataset, types.MultimodalDataset)
    # Caller-supplied fields survive...
    assert dataset.display_name == "test-from-bigframes"
    assert (
        dataset.metadata.gemini_request_read_config.assembled_request_column_name
        == "test_column"
    )
    # ...and the derived BigQuery input source is still filled in.
    assert dataset.metadata.input_config.bigquery_source.uri == (
        f"bq://{BIGQUERY_TABLE_NAME}"
    )
0 commit comments