|
6 | 6 | _API_FLAVOR_TO_PROVIDER, |
7 | 7 | _DEFAULT_API_FLAVOR, |
8 | 8 | _compute_vendor_and_api_flavor, |
| 9 | + get_chat_model, |
9 | 10 | ) |
10 | 11 | from uipath_langchain.chat.types import APIFlavor, LLMProvider |
11 | 12 |
|
@@ -320,3 +321,147 @@ def test_default_flavors_map_back_to_same_provider(self): |
320 | 321 | f"Default flavor {default_flavor} for {provider} " |
321 | 322 | f"maps to {mapped_provider} instead" |
322 | 323 | ) |
| 324 | + |
| 325 | + |
class TestGetChatModelTemperatureGating:
    """End-to-end tests that call ``get_chat_model`` and assert how
    ``temperature`` is forwarded to the underlying LangChain chat class.

    The gate is driven by discovery's ``modelDetails.shouldSkipTemperature``:
    when True, ``temperature`` must be omitted from the constructor kwargs;
    when False/absent, it must be passed through as-is.
    """

    @staticmethod
    def _leaf_kwargs(mocker, *, model_info, leaf_path, model, temperature,
                     max_tokens=4096):
        """Patch discovery + the leaf chat class, invoke the factory, and
        return the kwargs the leaf class was constructed with.

        Args:
            mocker: the pytest-mock fixture.
            model_info: fake discovery payload returned by ``_get_model_info``.
            leaf_path: dotted path of the leaf chat class to patch.
            model: model name passed to ``get_chat_model``.
            temperature: caller temperature passed to ``get_chat_model``.
            max_tokens: token budget passed through to the factory.

        Returns:
            dict: keyword arguments the (mocked) leaf class received.
        """
        # Discovery is mocked at the factory module so no network call happens.
        mocker.patch(
            "uipath_langchain.chat.chat_model_factory._get_model_info",
            return_value=model_info,
        )
        mock_cls = mocker.patch(leaf_path)

        get_chat_model(
            model=model,
            temperature=temperature,
            max_tokens=max_tokens,
            agenthub_config="cfg",
        )

        _, kwargs = mock_cls.call_args
        return kwargs

    def test_opus_4_7_bedrock_converse_omits_temperature(self, mocker):
        """flag=True + Bedrock Converse: UiPathChatBedrockConverse must be
        instantiated without a ``temperature`` kwarg."""
        pytest.importorskip("langchain_aws")
        kwargs = self._leaf_kwargs(
            mocker,
            model_info={
                "modelName": "anthropic.claude-opus-4-7",
                "vendor": "AwsBedrock",
                "apiFlavor": "AwsBedrockConverse",
                "modelDetails": {"shouldSkipTemperature": True},
            },
            leaf_path="uipath_langchain.chat.bedrock.UiPathChatBedrockConverse",
            model="anthropic.claude-opus-4-7",
            temperature=0.0,
        )
        assert "temperature" not in kwargs

    def test_sonnet_4_5_bedrock_converse_forwards_temperature(self, mocker):
        """flag=False: UiPathChatBedrockConverse receives the exact caller
        temperature."""
        pytest.importorskip("langchain_aws")
        kwargs = self._leaf_kwargs(
            mocker,
            model_info={
                "modelName": "anthropic.claude-sonnet-4-5-20250929-v1:0",
                "vendor": "AwsBedrock",
                "apiFlavor": "AwsBedrockConverse",
                "modelDetails": {"shouldSkipTemperature": False},
            },
            leaf_path="uipath_langchain.chat.bedrock.UiPathChatBedrockConverse",
            model="anthropic.claude-sonnet-4-5-20250929-v1:0",
            temperature=0.7,
        )
        assert kwargs.get("temperature") == 0.7

    def test_gpt_openai_responses_forwards_temperature_when_flag_absent(self, mocker):
        """Older discovery payloads have ``modelDetails: null``; the gate
        must default to not-skipping and UiPathChatOpenAI must receive the
        caller temperature."""
        pytest.importorskip("langchain_openai")
        kwargs = self._leaf_kwargs(
            mocker,
            model_info={
                "modelName": "gpt-5-2025-08-07",
                "vendor": "OpenAi",
                "apiFlavor": "OpenAiResponses",
                "modelDetails": None,
            },
            leaf_path="uipath_langchain.chat.openai.UiPathChatOpenAI",
            model="gpt-5-2025-08-07",
            temperature=0.3,
            max_tokens=2048,
        )
        assert kwargs.get("temperature") == 0.3

    def test_byom_custom_name_honors_discovery_flag(self, mocker):
        """BYOM display names don't match any known alias, but the discovery
        flag still identifies the underlying model — the gate must use it
        and the leaf client must be built without a temperature kwarg."""
        pytest.importorskip("langchain_aws")
        kwargs = self._leaf_kwargs(
            mocker,
            model_info={
                "modelName": "Custom BYOM Opus 4.7",
                "vendor": "AwsBedrock",
                "apiFlavor": "AwsBedrockConverse",
                "modelDetails": {"shouldSkipTemperature": True},
            },
            leaf_path="uipath_langchain.chat.bedrock.UiPathChatBedrockConverse",
            model="Custom BYOM Opus 4.7",
            temperature=0.7,
        )
        assert "temperature" not in kwargs

    def test_gemini_vertex_forwards_temperature(self, mocker):
        """Third vendor path: flag=False on a Vertex Gemini model must
        forward the caller temperature to UiPathChatVertex."""
        pytest.importorskip("langchain_google_genai")
        pytest.importorskip("google.genai")
        kwargs = self._leaf_kwargs(
            mocker,
            model_info={
                "modelName": "gemini-2.5-pro",
                "vendor": "VertexAi",
                "apiFlavor": "GeminiGenerateContent",
                "modelDetails": {"shouldSkipTemperature": False},
            },
            leaf_path="uipath_langchain.chat.vertex.UiPathChatVertex",
            model="gemini-2.5-pro",
            temperature=0.5,
            max_tokens=2048,
        )
        assert kwargs.get("temperature") == 0.5
0 commit comments