Skip to content

Commit 4fdae8e

Browse files
cleop-google and copybara-github
authored and committed
chore: GenAI SDK client(multimodal) - Refactor create_from_pandas to use create_from_bigframes.
PiperOrigin-RevId: 890548230
1 parent e164b19 commit 4fdae8e

File tree

1 file changed

+10
-72
lines changed

1 file changed

+10
-72
lines changed

vertexai/_genai/datasets.py

Lines changed: 10 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -846,53 +846,23 @@ def create_from_pandas(
846846
multimodal_dataset = types.MultimodalDataset()
847847

848848
bigframes = _datasets_utils._try_import_bigframes()
849-
bigquery = _datasets_utils._try_import_bigquery()
850849
project = self._api_client.project
851850
location = self._api_client.location
852851
credentials = self._api_client._credentials
853852

854-
if target_table_id:
855-
target_table_id = _datasets_utils._normalize_and_validate_table_id(
856-
table_id=target_table_id,
857-
project=project,
858-
location=location,
859-
credentials=credentials,
860-
)
861-
else:
862-
dataset_id = _datasets_utils._create_default_bigquery_dataset_if_not_exists(
863-
project=project, location=location, credentials=credentials
864-
)
865-
target_table_id = _datasets_utils._generate_target_table_id(dataset_id)
866-
867853
session_options = bigframes.BigQueryOptions(
868854
credentials=credentials,
869855
project=project,
870856
location=location,
871857
)
872858
with bigframes.connect(session_options) as session:
873-
temp_bigframes_df = session.read_pandas(dataframe)
874-
client = bigquery.Client(project=project, credentials=credentials)
875-
_datasets_utils.save_dataframe_to_bigquery(
876-
temp_bigframes_df,
877-
target_table_id,
878-
client,
859+
return self.create_from_bigframes(
860+
dataframe=session.read_pandas(dataframe),
861+
multimodal_dataset=multimodal_dataset,
862+
target_table_id=target_table_id,
863+
config=config,
879864
)
880865

881-
return self.create_from_bigquery(
882-
multimodal_dataset=multimodal_dataset.model_copy(
883-
update={
884-
"metadata": types.SchemaTablesDatasetMetadata(
885-
input_config=types.SchemaTablesDatasetMetadataInputConfig(
886-
bigquery_source=types.SchemaTablesDatasetMetadataBigQuerySource(
887-
uri=f"bq://{target_table_id}"
888-
)
889-
)
890-
)
891-
}
892-
),
893-
config=config,
894-
)
895-
896866
def create_from_bigframes(
897867
self,
898868
*,
@@ -1987,55 +1957,23 @@ async def create_from_pandas(
19871957
multimodal_dataset = types.MultimodalDataset()
19881958

19891959
bigframes = _datasets_utils._try_import_bigframes()
1990-
bigquery = _datasets_utils._try_import_bigquery()
19911960
project = self._api_client.project
19921961
location = self._api_client.location
19931962
credentials = self._api_client._credentials
19941963

1995-
if target_table_id:
1996-
target_table_id = (
1997-
await _datasets_utils._normalize_and_validate_table_id_async(
1998-
table_id=target_table_id,
1999-
project=project,
2000-
location=location,
2001-
credentials=credentials,
2002-
)
2003-
)
2004-
else:
2005-
dataset_id = await _datasets_utils._create_default_bigquery_dataset_if_not_exists_async(
2006-
project=project, location=location, credentials=credentials
2007-
)
2008-
target_table_id = _datasets_utils._generate_target_table_id(dataset_id)
2009-
20101964
session_options = bigframes.BigQueryOptions(
20111965
credentials=credentials,
20121966
project=project,
20131967
location=location,
20141968
)
20151969
with bigframes.connect(session_options) as session:
2016-
temp_bigframes_df = session.read_pandas(dataframe)
2017-
client = bigquery.Client(project=project, credentials=credentials)
2018-
await _datasets_utils.save_dataframe_to_bigquery_async(
2019-
temp_bigframes_df,
2020-
target_table_id,
2021-
client,
1970+
return await self.create_from_bigframes(
1971+
dataframe=session.read_pandas(dataframe),
1972+
multimodal_dataset=multimodal_dataset,
1973+
target_table_id=target_table_id,
1974+
config=config,
20221975
)
20231976

2024-
return await self.create_from_bigquery(
2025-
multimodal_dataset=multimodal_dataset.model_copy(
2026-
update={
2027-
"metadata": types.SchemaTablesDatasetMetadata(
2028-
input_config=types.SchemaTablesDatasetMetadataInputConfig(
2029-
bigquery_source=types.SchemaTablesDatasetMetadataBigQuerySource(
2030-
uri=f"bq://{target_table_id}"
2031-
)
2032-
)
2033-
)
2034-
}
2035-
),
2036-
config=config,
2037-
)
2038-
20391977
async def create_from_bigframes(
20401978
self,
20411979
*,

0 commit comments

Comments (0)