From 00cdc0d77fdc43d0950c6de37a00401e75b81eed Mon Sep 17 00:00:00 2001 From: Jasper Bernhardt Date: Wed, 17 Dec 2025 15:34:41 +0000 Subject: [PATCH 1/2] Makefile uv compatible I cannot test the poetry part at the moment, but it should be unchanged --- Makefile | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 80729f2..e0dda9e 100644 --- a/Makefile +++ b/Makefile @@ -17,6 +17,17 @@ ifneq (,$(wildcard ./.env)) export endif +UV_EXISTS := $(shell command -v uv 2> /dev/null) +ifdef UV_EXISTS + PYTHON_TOOL_RUN := uv run + PYTHON_TOOL_SYNC := uv sync + PYTHON_TOOL_LOCK_CHECK := uv lock --check +else + PYTHON_TOOL_RUN := poetry run python + PYTHON_TOOL_SYNC := poetry install + PYTHON_TOOL_LOCK_CHECK := poetry check --lock +endif + ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) TESTPATH := $(ROOT_DIR)/tests/ @@ -24,17 +35,17 @@ TESTPATH := $(ROOT_DIR)/tests/ .PHONY: install install: # Install virtual environment with poetry @echo "🚀 Installing dependencies using Poetry" - @poetry install + @$(PYTHON_TOOL_SYNC) .PHONY: check check: # Check lock file consistency @echo "🚀 Checking lock file consistency with 'pyproject.toml'" - @poetry check --lock + @$(PYTHON_TOOL_LOCK_CHECK) .PHONY: generate-data generate-data: # Generate synthetic PDF medical records for testing @echo "🚀 Generating synthetic data..." - @poetry run python scripts/generate_data.py + @$(PYTHON_TOOL_RUN) scripts/generate_data.py .PHONY: enable-apis enable-apis: # Enable required Google Cloud APIs @@ -51,7 +62,7 @@ create-datastore: enable-apis # Create the Vertex AI Search Data Store using the .PHONY: create-engine create-engine: # Create the Enterprise Search App (Engine) using the provided script @echo "🚀 Creating Enterprise Search App (Engine)..."
- @poetry run python scripts/create_enterprise_engine.py + @$(PYTHON_TOOL_RUN) scripts/create_enterprise_engine.py .PHONY: create-gcs-bucket create-gcs-bucket: # Create the GCS bucket for document ingestion From b363b803dfe0b5d635d18aca1c897e07965f5b93 Mon Sep 17 00:00:00 2001 From: Jasper Bernhardt Date: Wed, 17 Dec 2025 15:36:37 +0000 Subject: [PATCH 2/2] Add PEP-723 headers to script files This allows direct "uv run" with temporary virtual envs These are mostly autogenerated; I tried to test them, but I am missing the setup for a couple of them --- scripts/create_enterprise_engine.py | 15 ++++++++++--- scripts/generate_data.py | 12 +++++++++- scripts/generate_golden_dataset.py | 12 ++++++++-- scripts/run_evaluation.py | 35 ++++++++++++++++++++--------- 4 files changed, 58 insertions(+), 16 deletions(-) diff --git a/scripts/create_enterprise_engine.py b/scripts/create_enterprise_engine.py index 9e2ae87..5207527 100644 --- a/scripts/create_enterprise_engine.py +++ b/scripts/create_enterprise_engine.py @@ -11,14 +11,23 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
-import sys + +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "google-cloud-discoveryengine", +# "python-dotenv", +# ] +# /// + import os +import sys + +from dotenv import load_dotenv from google.api_core.client_options import ClientOptions from google.api_core.exceptions import NotFound from google.cloud import discoveryengine_v1 as discoveryengine -from dotenv import load_dotenv - # --- Configuration --- load_dotenv() PROJECT_ID = os.getenv("PROJECT_ID") diff --git a/scripts/generate_data.py b/scripts/generate_data.py index dd7ba2a..af10a4b 100644 --- a/scripts/generate_data.py +++ b/scripts/generate_data.py @@ -10,9 +10,19 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. +# limitations under the License. + +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "faker", +# "reportlab", +# ] +# /// + import os import random + from faker import Faker from reportlab.lib.pagesizes import LETTER from reportlab.pdfgen import canvas diff --git a/scripts/generate_golden_dataset.py b/scripts/generate_golden_dataset.py index c83921f..39554df 100644 --- a/scripts/generate_golden_dataset.py +++ b/scripts/generate_golden_dataset.py @@ -11,11 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
+ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "google-cloud-aiplatform", +# "python-dotenv", +# "pypdf", +# ] +# /// + import os import glob import json -import pandas as pd -from google.cloud import aiplatform from vertexai.generative_models import GenerativeModel import vertexai from dotenv import load_dotenv diff --git a/scripts/run_evaluation.py b/scripts/run_evaluation.py index b1327d3..b2e0a40 100755 --- a/scripts/run_evaluation.py +++ b/scripts/run_evaluation.py @@ -11,29 +11,44 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "pandas", +# "google-cloud-aiplatform", +# "google-cloud-aiplatform[evaluation]", +# "python-dotenv", +# "google-adk", +# "google-genai", +# ] +# /// + import json -import pandas as pd -import vertexai -from vertexai.generative_models import GenerativeModel -from vertexai.evaluation import EvalTask import os import sys -from dotenv import load_dotenv import uuid + +import pandas as pd +import vertexai +from dotenv import load_dotenv from google.genai import types +from vertexai.evaluation import EvalTask +from vertexai.generative_models import GenerativeModel # Load environment variables load_dotenv() # Add 'src' to path so we can import the tool sys.path.append(os.path.join(os.path.dirname(__file__), '..')) -from src.agents.adk_agent import system_prompt -from src.agents.tools import search_knowledge_base +import asyncio + +from google.adk.artifacts import InMemoryArtifactService from google.adk.runners import Runner from google.adk.sessions import InMemorySessionService -from google.adk.artifacts import InMemoryArtifactService -from src.agents.adk_agent import agent_config, app_name -import asyncio + +from src.agents.adk_agent import agent_config, app_name, system_prompt +from src.agents.tools import 
search_knowledge_base # Configuration PROJECT_ID = os.getenv("PROJECT_ID")