diff --git a/docs-website/docs/pipeline-components/connectors/openapiserviceconnector.mdx b/docs-website/docs/pipeline-components/connectors/openapiserviceconnector.mdx
index e743f45806..886b7568f5 100644
--- a/docs-website/docs/pipeline-components/connectors/openapiserviceconnector.mdx
+++ b/docs-website/docs/pipeline-components/connectors/openapiserviceconnector.mdx
@@ -14,8 +14,8 @@ description: "`OpenAPIServiceConnector` is a component that acts as an interface
| | |
| --- | --- |
| **Most common position in a pipeline** | Flexible |
-| **Mandatory run variables** | `messages`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where the last message is expected to carry parameter invocation payload.
`service_openapi_spec`: OpenAPI specification of the service being invoked. It can be YAML/JSON, and all ref values must be resolved.
`service_credentials`: Authentication credentials for the service. We currently support two OpenAPI spec v3 security schemes:
1. http – for Basic, Bearer, and other HTTP authentication schemes;
2. apiKey – for API keys and cookie authentication. |
-| **Output variables** | `service_response`: A dictionary that is a list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where each message corresponds to a function invocation.
If a user specifies multiple function calling requests, there will be multiple responses. |
+| **Mandatory run variables** | `messages`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where the last message must be from the assistant and contain tool calls.
`service_openapi_spec`: OpenAPI specification of the service being invoked. It can be YAML/JSON, and all ref values must be resolved.
`service_credentials`: Authentication credentials for the service. We currently support two OpenAPI spec v3 security schemes:
1. http – for Basic, Bearer, and other HTTP authentication schemes;
2. apiKey – for API keys and cookie authentication. |
+| **Output variables** | `service_response`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where each message corresponds to a tool call invocation.
If a message contains multiple tool calls, there will be multiple responses. |
| **API reference** | [Connectors](/reference/connectors-api) |
| **GitHub link** | https://github.com/deepset-ai/haystack/blob/main/haystack/components/connectors/openapi_service.py |
@@ -37,13 +37,13 @@ pip install openapi3
### On its own
-This component is primarily meant to be used in pipelines, as [`OpenAPIServiceToFunctions`](../converters/openapiservicetofunctions.mdx), in tandem with the function calling model, resolves the actual function calling parameters that are injected as invocation parameters for `OpenAPIServiceConnector`.
+This component is primarily meant to be used in pipelines, as [`OpenAPIServiceToFunctions`](../converters/openapiservicetofunctions.mdx), in tandem with an LLM with tool calling capabilities, resolves the actual tool call parameters that are injected as invocation parameters for `OpenAPIServiceConnector`.
### In a pipeline
-Let's say we're linking the Serper search engine to a pipeline. Here, `OpenAPIServiceConnector` uses the abilities of `OpenAPIServiceToFunctions`. `OpenAPIServiceToFunctions` first fetches and changes the [Serper's OpenAPI specification](https://bit.ly/serper_dev_spec) into a format that OpenAI's function calling mechanism can understand. Then, `OpenAPIServiceConnector` activates the Serper service using this specification.
+Let's say we're linking the Serper search engine to a pipeline. Here, `OpenAPIServiceConnector` uses the abilities of `OpenAPIServiceToFunctions`. `OpenAPIServiceToFunctions` first fetches and changes the [Serper's OpenAPI specification](https://bit.ly/serper_dev_spec) into function definitions that an LLM with tool calling capabilities can understand. Then, `OpenAPIServiceConnector` activates the Serper service using this specification.
-More precisely, `OpenAPIServiceConnector` dynamically calls methods defined in the Serper OpenAPI specification. This involves reading chat messages or other inputs to extract function call parameters, handling authentication with the Serper service, and making the right API calls. The connector makes sure that the method call follows the Serper API requirements, such as correct formatting requests and handling responses.
+More precisely, `OpenAPIServiceConnector` dynamically calls methods defined in the Serper OpenAPI specification. This involves reading chat messages to extract tool call parameters, handling authentication with the Serper service, and making the right API calls. The connector makes sure that the method call follows the Serper API requirements, such as correctly formatting requests and handling responses.
Note that we used Serper just as an example here. This could be any OpenAPI-compliant service.
@@ -55,57 +55,86 @@ To run the following code snippet, note that you have to have your own Serper an
import json
import requests
-from typing import Dict, Any, List
+from typing import Any
+
from haystack import Pipeline
-from haystack.components.generators.utils import print_streaming_chunk
+from haystack.components.connectors import OpenAPIServiceConnector
from haystack.components.converters import OpenAPIServiceToFunctions, OutputAdapter
from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.connectors import OpenAPIServiceConnector
-from haystack.components.fetchers import LinkContentFetcher
-from haystack.dataclasses import ChatMessage, ByteStream
-from haystack.utils import Secret
+from haystack.dataclasses import ChatMessage
+from haystack.dataclasses.byte_stream import ByteStream
-def prepare_fc_params(openai_functions_schema: Dict[str, Any]) -> Dict[str, Any]:
+
+def prepare_fc_params(openai_functions_schema: dict[str, Any]) -> dict[str, Any]:
return {
- "tools": [{
- "type": "function",
- "function": openai_functions_schema
- }],
+ "tools": [{"type": "function", "function": openai_functions_schema}],
"tool_choice": {
"type": "function",
- "function": {"name": openai_functions_schema["name"]}
- }
+ "function": {"name": openai_functions_schema["name"]},
+ },
}
-system_prompt = requests.get("https://bit.ly/serper_dev_system_prompt").text
-serper_spec = requests.get("https://bit.ly/serper_dev_spec").text
-
-pipe = Pipeline()
-pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
-pipe.add_component("functions_llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-3.5-turbo-0613"))
-pipe.add_component("openapi_container", OpenAPIServiceConnector())
-pipe.add_component("a1", OutputAdapter("{{functions[0] | prepare_fc}}", Dict[str, Any], {"prepare_fc": prepare_fc_params}))
-pipe.add_component("a2", OutputAdapter("{{specs[0]}}", Dict[str, Any]))
-pipe.add_component("a3", OutputAdapter("{{system_message + service_response}}", List[ChatMessage]))
-pipe.add_component("llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-4-1106-preview", streaming_callback=print_streaming_chunk))
-
-pipe.connect("spec_to_functions.functions", "a1.functions")
-pipe.connect("spec_to_functions.openapi_specs", "a2.specs")
-pipe.connect("a1", "functions_llm.generation_kwargs")
-pipe.connect("functions_llm.replies", "openapi_container.messages")
-pipe.connect("a2", "openapi_container.service_openapi_spec")
-pipe.connect("openapi_container.service_response", "a3.service_response")
-pipe.connect("a3", "llm.messages")
+serperdev_spec = requests.get("https://bit.ly/serper_dev_spec").json()
+system_prompt = requests.get("https://bit.ly/serper_dev_system").text
user_prompt = "Why was Sam Altman ousted from OpenAI?"
-result = pipe.run(data={"functions_llm": {"messages":[ChatMessage.from_system("Only do function calling"), ChatMessage.from_user(user_prompt)]},
- "openapi_container": {"service_credentials": serper_dev_key},
- "spec_to_functions": {"sources": [ByteStream.from_string(serper_spec)]},
- "a3": {"system_message": [ChatMessage.from_system(system_prompt)]}})
-
->Sam Altman was ousted from OpenAI on November 17, 2023, following
->a "deliberative review process" by the board of directors. The board concluded
->that he was not "consistently candid in his communications". However, he
->returned as CEO just days after his ouster.
+pipe = Pipeline()
+pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
+pipe.add_component(
+ "prepare_fc_adapter",
+ OutputAdapter(
+ "{{functions[0] | prepare_fc}}",
+ dict[str, Any],
+ {"prepare_fc": prepare_fc_params},
+ ),
+)
+pipe.add_component("functions_llm", OpenAIChatGenerator())
+pipe.add_component("openapi_connector", OpenAPIServiceConnector())
+pipe.add_component(
+ "message_adapter",
+ OutputAdapter(
+ "{{system_message + service_response}}",
+ list[ChatMessage],
+ unsafe=True,
+ ),
+)
+pipe.add_component("llm", OpenAIChatGenerator())
+
+pipe.connect("spec_to_functions.functions", "prepare_fc_adapter.functions")
+pipe.connect(
+ "spec_to_functions.openapi_specs",
+ "openapi_connector.service_openapi_spec",
+)
+pipe.connect("prepare_fc_adapter", "functions_llm.generation_kwargs")
+pipe.connect("functions_llm.replies", "openapi_connector.messages")
+pipe.connect("openapi_connector.service_response", "message_adapter.service_response")
+pipe.connect("message_adapter", "llm.messages")
+
+result = pipe.run(
+ data={
+ "functions_llm": {
+ "messages": [
+ ChatMessage.from_system("Only do tool/function calling"),
+ ChatMessage.from_user(user_prompt),
+ ],
+ },
+ "openapi_connector": {
+ "service_credentials": serper_dev_key,
+ },
+ "spec_to_functions": {
+ "sources": [ByteStream.from_string(json.dumps(serperdev_spec))],
+ },
+ "message_adapter": {
+ "system_message": [ChatMessage.from_system(system_prompt)],
+ },
+ },
+)
+
+print(result["llm"]["replies"][0].text)
+
+# Sam Altman was ousted from OpenAI on November 17, 2023, following
+# a "deliberative review process" by the board of directors. The board concluded
+# that he was not "consistently candid in his communications". However, he
+# returned as CEO just days after his ouster.
```
diff --git a/docs-website/docs/pipeline-components/converters/openapiservicetofunctions.mdx b/docs-website/docs/pipeline-components/converters/openapiservicetofunctions.mdx
index aca1d2bfec..755060affc 100644
--- a/docs-website/docs/pipeline-components/converters/openapiservicetofunctions.mdx
+++ b/docs-website/docs/pipeline-components/converters/openapiservicetofunctions.mdx
@@ -2,12 +2,12 @@
title: "OpenAPIServiceToFunctions"
id: openapiservicetofunctions
slug: "/openapiservicetofunctions"
-description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with OpenAI's function calling mechanism."
+description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with LLM tool calling."
---
# OpenAPIServiceToFunctions
-`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with OpenAI's function calling mechanism.
+`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with LLM tool calling.
@@ -15,7 +15,7 @@ description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI
| --- | --- |
| **Most common position in a pipeline** | Flexible |
| **Mandatory run variables** | `sources`: A list of OpenAPI specification sources, which can be file paths or [`ByteStream`](../../concepts/data-classes.mdx#bytestream) objects |
-| **Output variables** | `functions`: A list of JSON OpenAI function calling definitions objects. For each path definition in OpenAPI specification, a corresponding OpenAI function calling definitions is generated.
`openapi_specs`: A list of JSON/YAML objects with references resolved. Such OpenAPI spec (with references resolved) can, in turn, be used as input to OpenAPIServiceConnector. |
+| **Output variables** | `functions`: A list of JSON function definition objects. For each path definition in the OpenAPI specification, a corresponding function definition is generated.
`openapi_specs`: A list of JSON/YAML objects with references resolved. Such OpenAPI spec (with references resolved) can, in turn, be used as input to OpenAPIServiceConnector. |
| **API reference** | [Converters](/reference/converters-api) |
| **GitHub link** | https://github.com/deepset-ai/haystack/blob/main/haystack/components/converters/openapi_functions.py |
@@ -23,9 +23,9 @@ description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI
## Overview
-`OpenAPIServiceToFunctions` transforms OpenAPI service specifications into an OpenAI function calling format. It takes an OpenAPI specification, processes it to extract function definitions, and formats these definitions to be compatible with OpenAI's function calling JSON format.
+`OpenAPIServiceToFunctions` transforms OpenAPI service specifications into a function calling format suitable for LLM tool calling. It takes an OpenAPI specification, processes it to extract function definitions, and formats these definitions to be compatible with LLM tool calling.
-`OpenAPIServiceToFunctions` is valuable when used together with [`OpenAPIServiceConnector`](../connectors/openapiserviceconnector.mdx) component. It converts OpenAPI specifications into definitions suitable for OpenAI's function calls, allowing `OpenAPIServiceConnector` to handle input parameters for the OpenAPI specification and facilitate their use in REST API calls through `OpenAPIServiceConnector`.
+`OpenAPIServiceToFunctions` is valuable when used together with [`OpenAPIServiceConnector`](../connectors/openapiserviceconnector.mdx) component. It converts OpenAPI specifications into function definitions, allowing `OpenAPIServiceConnector` to handle input parameters for the OpenAPI specification and facilitate their use in REST API calls through `OpenAPIServiceConnector`.
To use `OpenAPIServiceToFunctions`, you need to install an optional `jsonref` dependency with:
@@ -39,11 +39,11 @@ pip install jsonref
### On its own
-This component is primarily meant to be used in pipelines. Using this component alone is useful when you want to convert OpenAPI specification into OpenAI's function call specification and then perhaps save it in a file and subsequently use it in function calling.
+This component is primarily meant to be used in pipelines. Using this component alone is useful when you want to convert OpenAPI specification into function definitions and then perhaps save them in a file and subsequently use them for tool calling.
### In a pipeline
-In a pipeline context, `OpenAPIServiceToFunctions` is most valuable when used alongside `OpenAPIServiceConnector`. For instance, let’s consider integrating [serper.dev](http://serper.dev/) search engine bridge into a pipeline. `OpenAPIServiceToFunctions` retrieves the OpenAPI specification of Serper from https://bit.ly/serper_dev_spec, converts this specification into a format that OpenAI's function calling mechanism can understand, and then seamlessly passes this translated specification as `generation_kwargs` for LLM function calling invocation.
+In a pipeline context, `OpenAPIServiceToFunctions` is most valuable when used alongside `OpenAPIServiceConnector`. For instance, let’s consider integrating [serper.dev](http://serper.dev/) search engine bridge into a pipeline. `OpenAPIServiceToFunctions` retrieves the OpenAPI specification of Serper from https://bit.ly/serper_dev_spec, converts this specification into function definitions that an LLM with tool calling capabilities can understand, and then seamlessly passes these definitions as `generation_kwargs` to the Chat Generator component.
:::info
To run the following code snippet, note that you have to have your own Serper and OpenAI API keys.
@@ -53,57 +53,86 @@ To run the following code snippet, note that you have to have your own Serper an
import json
import requests
-from typing import Dict, Any, List
+from typing import Any
+
from haystack import Pipeline
-from haystack.components.generators.utils import print_streaming_chunk
+from haystack.components.connectors import OpenAPIServiceConnector
from haystack.components.converters import OpenAPIServiceToFunctions, OutputAdapter
from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.connectors import OpenAPIServiceConnector
-from haystack.components.fetchers import LinkContentFetcher
-from haystack.dataclasses import ChatMessage, ByteStream
-from haystack.utils import Secret
+from haystack.dataclasses import ChatMessage
+from haystack.dataclasses.byte_stream import ByteStream
-def prepare_fc_params(openai_functions_schema: Dict[str, Any]) -> Dict[str, Any]:
+
+def prepare_fc_params(openai_functions_schema: dict[str, Any]) -> dict[str, Any]:
return {
- "tools": [{
- "type": "function",
- "function": openai_functions_schema
- }],
+ "tools": [{"type": "function", "function": openai_functions_schema}],
"tool_choice": {
"type": "function",
- "function": {"name": openai_functions_schema["name"]}
- }
+ "function": {"name": openai_functions_schema["name"]},
+ },
}
-system_prompt = requests.get("https://bit.ly/serper_dev_system_prompt").text
-serper_spec = requests.get("https://bit.ly/serper_dev_spec").text
-
-pipe = Pipeline()
-pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
-pipe.add_component("functions_llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-3.5-turbo-0613"))
-pipe.add_component("openapi_container", OpenAPIServiceConnector())
-pipe.add_component("a1", OutputAdapter("{{functions[0] | prepare_fc}}", Dict[str, Any], {"prepare_fc": prepare_fc_params}))
-pipe.add_component("a2", OutputAdapter("{{specs[0]}}", Dict[str, Any]))
-pipe.add_component("a3", OutputAdapter("{{system_message + service_response}}", List[ChatMessage]))
-pipe.add_component("llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-4-1106-preview", streaming_callback=print_streaming_chunk))
-
-pipe.connect("spec_to_functions.functions", "a1.functions")
-pipe.connect("spec_to_functions.openapi_specs", "a2.specs")
-pipe.connect("a1", "functions_llm.generation_kwargs")
-pipe.connect("functions_llm.replies", "openapi_container.messages")
-pipe.connect("a2", "openapi_container.service_openapi_spec")
-pipe.connect("openapi_container.service_response", "a3.service_response")
-pipe.connect("a3", "llm.messages")
+serperdev_spec = requests.get("https://bit.ly/serper_dev_spec").json()
+system_prompt = requests.get("https://bit.ly/serper_dev_system").text
user_prompt = "Why was Sam Altman ousted from OpenAI?"
-result = pipe.run(data={"functions_llm": {"messages":[ChatMessage.from_system("Only do function calling"), ChatMessage.from_user(user_prompt)]},
- "openapi_container": {"service_credentials": serper_dev_key},
- "spec_to_functions": {"sources": [ByteStream.from_string(serper_spec)]},
- "a3": {"system_message": [ChatMessage.from_system(system_prompt)]}})
-
->Sam Altman was ousted from OpenAI on November 17, 2023, following
->a "deliberative review process" by the board of directors. The board concluded
->that he was not "consistently candid in his communications". However, he
->returned as CEO just days after his ouster.
+pipe = Pipeline()
+pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
+pipe.add_component(
+ "prepare_fc_adapter",
+ OutputAdapter(
+ "{{functions[0] | prepare_fc}}",
+ dict[str, Any],
+ {"prepare_fc": prepare_fc_params},
+ ),
+)
+pipe.add_component("functions_llm", OpenAIChatGenerator())
+pipe.add_component("openapi_connector", OpenAPIServiceConnector())
+pipe.add_component(
+ "message_adapter",
+ OutputAdapter(
+ "{{system_message + service_response}}",
+ list[ChatMessage],
+ unsafe=True,
+ ),
+)
+pipe.add_component("llm", OpenAIChatGenerator())
+
+pipe.connect("spec_to_functions.functions", "prepare_fc_adapter.functions")
+pipe.connect(
+ "spec_to_functions.openapi_specs",
+ "openapi_connector.service_openapi_spec",
+)
+pipe.connect("prepare_fc_adapter", "functions_llm.generation_kwargs")
+pipe.connect("functions_llm.replies", "openapi_connector.messages")
+pipe.connect("openapi_connector.service_response", "message_adapter.service_response")
+pipe.connect("message_adapter", "llm.messages")
+
+result = pipe.run(
+ data={
+ "functions_llm": {
+ "messages": [
+ ChatMessage.from_system("Only do tool/function calling"),
+ ChatMessage.from_user(user_prompt),
+ ],
+ },
+ "openapi_connector": {
+ "service_credentials": serper_dev_key,
+ },
+ "spec_to_functions": {
+ "sources": [ByteStream.from_string(json.dumps(serperdev_spec))],
+ },
+ "message_adapter": {
+ "system_message": [ChatMessage.from_system(system_prompt)],
+ },
+ },
+)
+
+print(result["llm"]["replies"][0].text)
+
+# Sam Altman was ousted from OpenAI on November 17, 2023, following
+# a "deliberative review process" by the board of directors. The board concluded
+# that he was not "consistently candid in his communications". However, he
+# returned as CEO just days after his ouster.
```
diff --git a/docs-website/versioned_docs/version-2.26/pipeline-components/connectors/openapiserviceconnector.mdx b/docs-website/versioned_docs/version-2.26/pipeline-components/connectors/openapiserviceconnector.mdx
index e743f45806..886b7568f5 100644
--- a/docs-website/versioned_docs/version-2.26/pipeline-components/connectors/openapiserviceconnector.mdx
+++ b/docs-website/versioned_docs/version-2.26/pipeline-components/connectors/openapiserviceconnector.mdx
@@ -14,8 +14,8 @@ description: "`OpenAPIServiceConnector` is a component that acts as an interface
| | |
| --- | --- |
| **Most common position in a pipeline** | Flexible |
-| **Mandatory run variables** | `messages`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where the last message is expected to carry parameter invocation payload.
`service_openapi_spec`: OpenAPI specification of the service being invoked. It can be YAML/JSON, and all ref values must be resolved.
`service_credentials`: Authentication credentials for the service. We currently support two OpenAPI spec v3 security schemes:
1. http – for Basic, Bearer, and other HTTP authentication schemes;
2. apiKey – for API keys and cookie authentication. |
-| **Output variables** | `service_response`: A dictionary that is a list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where each message corresponds to a function invocation.
If a user specifies multiple function calling requests, there will be multiple responses. |
+| **Mandatory run variables** | `messages`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where the last message must be from the assistant and contain tool calls.
`service_openapi_spec`: OpenAPI specification of the service being invoked. It can be YAML/JSON, and all ref values must be resolved.
`service_credentials`: Authentication credentials for the service. We currently support two OpenAPI spec v3 security schemes:
1. http – for Basic, Bearer, and other HTTP authentication schemes;
2. apiKey – for API keys and cookie authentication. |
+| **Output variables** | `service_response`: A list of [`ChatMessage`](../../concepts/data-classes/chatmessage.mdx) objects where each message corresponds to a tool call invocation.
If a message contains multiple tool calls, there will be multiple responses. |
| **API reference** | [Connectors](/reference/connectors-api) |
| **GitHub link** | https://github.com/deepset-ai/haystack/blob/main/haystack/components/connectors/openapi_service.py |
@@ -37,13 +37,13 @@ pip install openapi3
### On its own
-This component is primarily meant to be used in pipelines, as [`OpenAPIServiceToFunctions`](../converters/openapiservicetofunctions.mdx), in tandem with the function calling model, resolves the actual function calling parameters that are injected as invocation parameters for `OpenAPIServiceConnector`.
+This component is primarily meant to be used in pipelines, as [`OpenAPIServiceToFunctions`](../converters/openapiservicetofunctions.mdx), in tandem with an LLM with tool calling capabilities, resolves the actual tool call parameters that are injected as invocation parameters for `OpenAPIServiceConnector`.
### In a pipeline
-Let's say we're linking the Serper search engine to a pipeline. Here, `OpenAPIServiceConnector` uses the abilities of `OpenAPIServiceToFunctions`. `OpenAPIServiceToFunctions` first fetches and changes the [Serper's OpenAPI specification](https://bit.ly/serper_dev_spec) into a format that OpenAI's function calling mechanism can understand. Then, `OpenAPIServiceConnector` activates the Serper service using this specification.
+Let's say we're linking the Serper search engine to a pipeline. Here, `OpenAPIServiceConnector` uses the abilities of `OpenAPIServiceToFunctions`. `OpenAPIServiceToFunctions` first fetches and changes the [Serper's OpenAPI specification](https://bit.ly/serper_dev_spec) into function definitions that an LLM with tool calling capabilities can understand. Then, `OpenAPIServiceConnector` activates the Serper service using this specification.
-More precisely, `OpenAPIServiceConnector` dynamically calls methods defined in the Serper OpenAPI specification. This involves reading chat messages or other inputs to extract function call parameters, handling authentication with the Serper service, and making the right API calls. The connector makes sure that the method call follows the Serper API requirements, such as correct formatting requests and handling responses.
+More precisely, `OpenAPIServiceConnector` dynamically calls methods defined in the Serper OpenAPI specification. This involves reading chat messages to extract tool call parameters, handling authentication with the Serper service, and making the right API calls. The connector makes sure that the method call follows the Serper API requirements, such as correctly formatting requests and handling responses.
Note that we used Serper just as an example here. This could be any OpenAPI-compliant service.
@@ -55,57 +55,86 @@ To run the following code snippet, note that you have to have your own Serper an
import json
import requests
-from typing import Dict, Any, List
+from typing import Any
+
from haystack import Pipeline
-from haystack.components.generators.utils import print_streaming_chunk
+from haystack.components.connectors import OpenAPIServiceConnector
from haystack.components.converters import OpenAPIServiceToFunctions, OutputAdapter
from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.connectors import OpenAPIServiceConnector
-from haystack.components.fetchers import LinkContentFetcher
-from haystack.dataclasses import ChatMessage, ByteStream
-from haystack.utils import Secret
+from haystack.dataclasses import ChatMessage
+from haystack.dataclasses.byte_stream import ByteStream
-def prepare_fc_params(openai_functions_schema: Dict[str, Any]) -> Dict[str, Any]:
+
+def prepare_fc_params(openai_functions_schema: dict[str, Any]) -> dict[str, Any]:
return {
- "tools": [{
- "type": "function",
- "function": openai_functions_schema
- }],
+ "tools": [{"type": "function", "function": openai_functions_schema}],
"tool_choice": {
"type": "function",
- "function": {"name": openai_functions_schema["name"]}
- }
+ "function": {"name": openai_functions_schema["name"]},
+ },
}
-system_prompt = requests.get("https://bit.ly/serper_dev_system_prompt").text
-serper_spec = requests.get("https://bit.ly/serper_dev_spec").text
-
-pipe = Pipeline()
-pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
-pipe.add_component("functions_llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-3.5-turbo-0613"))
-pipe.add_component("openapi_container", OpenAPIServiceConnector())
-pipe.add_component("a1", OutputAdapter("{{functions[0] | prepare_fc}}", Dict[str, Any], {"prepare_fc": prepare_fc_params}))
-pipe.add_component("a2", OutputAdapter("{{specs[0]}}", Dict[str, Any]))
-pipe.add_component("a3", OutputAdapter("{{system_message + service_response}}", List[ChatMessage]))
-pipe.add_component("llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-4-1106-preview", streaming_callback=print_streaming_chunk))
-
-pipe.connect("spec_to_functions.functions", "a1.functions")
-pipe.connect("spec_to_functions.openapi_specs", "a2.specs")
-pipe.connect("a1", "functions_llm.generation_kwargs")
-pipe.connect("functions_llm.replies", "openapi_container.messages")
-pipe.connect("a2", "openapi_container.service_openapi_spec")
-pipe.connect("openapi_container.service_response", "a3.service_response")
-pipe.connect("a3", "llm.messages")
+serperdev_spec = requests.get("https://bit.ly/serper_dev_spec").json()
+system_prompt = requests.get("https://bit.ly/serper_dev_system").text
user_prompt = "Why was Sam Altman ousted from OpenAI?"
-result = pipe.run(data={"functions_llm": {"messages":[ChatMessage.from_system("Only do function calling"), ChatMessage.from_user(user_prompt)]},
- "openapi_container": {"service_credentials": serper_dev_key},
- "spec_to_functions": {"sources": [ByteStream.from_string(serper_spec)]},
- "a3": {"system_message": [ChatMessage.from_system(system_prompt)]}})
-
->Sam Altman was ousted from OpenAI on November 17, 2023, following
->a "deliberative review process" by the board of directors. The board concluded
->that he was not "consistently candid in his communications". However, he
->returned as CEO just days after his ouster.
+pipe = Pipeline()
+pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
+pipe.add_component(
+ "prepare_fc_adapter",
+ OutputAdapter(
+ "{{functions[0] | prepare_fc}}",
+ dict[str, Any],
+ {"prepare_fc": prepare_fc_params},
+ ),
+)
+pipe.add_component("functions_llm", OpenAIChatGenerator())
+pipe.add_component("openapi_connector", OpenAPIServiceConnector())
+pipe.add_component(
+ "message_adapter",
+ OutputAdapter(
+ "{{system_message + service_response}}",
+ list[ChatMessage],
+ unsafe=True,
+ ),
+)
+pipe.add_component("llm", OpenAIChatGenerator())
+
+pipe.connect("spec_to_functions.functions", "prepare_fc_adapter.functions")
+pipe.connect(
+ "spec_to_functions.openapi_specs",
+ "openapi_connector.service_openapi_spec",
+)
+pipe.connect("prepare_fc_adapter", "functions_llm.generation_kwargs")
+pipe.connect("functions_llm.replies", "openapi_connector.messages")
+pipe.connect("openapi_connector.service_response", "message_adapter.service_response")
+pipe.connect("message_adapter", "llm.messages")
+
+result = pipe.run(
+ data={
+ "functions_llm": {
+ "messages": [
+ ChatMessage.from_system("Only do tool/function calling"),
+ ChatMessage.from_user(user_prompt),
+ ],
+ },
+ "openapi_connector": {
+ "service_credentials": serper_dev_key,
+ },
+ "spec_to_functions": {
+ "sources": [ByteStream.from_string(json.dumps(serperdev_spec))],
+ },
+ "message_adapter": {
+ "system_message": [ChatMessage.from_system(system_prompt)],
+ },
+ },
+)
+
+print(result["llm"]["replies"][0].text)
+
+# Sam Altman was ousted from OpenAI on November 17, 2023, following
+# a "deliberative review process" by the board of directors. The board concluded
+# that he was not "consistently candid in his communications". However, he
+# returned as CEO just days after his ouster.
```
diff --git a/docs-website/versioned_docs/version-2.26/pipeline-components/converters/openapiservicetofunctions.mdx b/docs-website/versioned_docs/version-2.26/pipeline-components/converters/openapiservicetofunctions.mdx
index aca1d2bfec..755060affc 100644
--- a/docs-website/versioned_docs/version-2.26/pipeline-components/converters/openapiservicetofunctions.mdx
+++ b/docs-website/versioned_docs/version-2.26/pipeline-components/converters/openapiservicetofunctions.mdx
@@ -2,12 +2,12 @@
title: "OpenAPIServiceToFunctions"
id: openapiservicetofunctions
slug: "/openapiservicetofunctions"
-description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with OpenAI's function calling mechanism."
+description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with LLM tool calling."
---
# OpenAPIServiceToFunctions
-`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with OpenAI's function calling mechanism.
+`OpenAPIServiceToFunctions` is a component that transforms OpenAPI service specifications into a format compatible with LLM tool calling.
@@ -15,7 +15,7 @@ description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI
| --- | --- |
| **Most common position in a pipeline** | Flexible |
| **Mandatory run variables** | `sources`: A list of OpenAPI specification sources, which can be file paths or [`ByteStream`](../../concepts/data-classes.mdx#bytestream) objects |
-| **Output variables** | `functions`: A list of JSON OpenAI function calling definitions objects. For each path definition in OpenAPI specification, a corresponding OpenAI function calling definitions is generated.
`openapi_specs`: A list of JSON/YAML objects with references resolved. Such OpenAPI spec (with references resolved) can, in turn, be used as input to OpenAPIServiceConnector. |
+| **Output variables** | `functions`: A list of JSON function definition objects. For each path definition in the OpenAPI specification, a corresponding function definition is generated.
`openapi_specs`: A list of JSON/YAML objects with references resolved. Such OpenAPI spec (with references resolved) can, in turn, be used as input to OpenAPIServiceConnector. |
| **API reference** | [Converters](/reference/converters-api) |
| **GitHub link** | https://github.com/deepset-ai/haystack/blob/main/haystack/components/converters/openapi_functions.py |
@@ -23,9 +23,9 @@ description: "`OpenAPIServiceToFunctions` is a component that transforms OpenAPI
## Overview
-`OpenAPIServiceToFunctions` transforms OpenAPI service specifications into an OpenAI function calling format. It takes an OpenAPI specification, processes it to extract function definitions, and formats these definitions to be compatible with OpenAI's function calling JSON format.
+`OpenAPIServiceToFunctions` transforms OpenAPI service specifications into a function calling format suitable for LLM tool calling. It takes an OpenAPI specification, processes it to extract function definitions, and formats these definitions so that an LLM with tool calling capabilities can consume them.
-`OpenAPIServiceToFunctions` is valuable when used together with [`OpenAPIServiceConnector`](../connectors/openapiserviceconnector.mdx) component. It converts OpenAPI specifications into definitions suitable for OpenAI's function calls, allowing `OpenAPIServiceConnector` to handle input parameters for the OpenAPI specification and facilitate their use in REST API calls through `OpenAPIServiceConnector`.
+`OpenAPIServiceToFunctions` is valuable when used together with [`OpenAPIServiceConnector`](../connectors/openapiserviceconnector.mdx) component. It converts OpenAPI specifications into function definitions, allowing `OpenAPIServiceConnector` to handle input parameters for the OpenAPI specification and facilitate their use in REST API calls through `OpenAPIServiceConnector`.
To use `OpenAPIServiceToFunctions`, you need to install an optional `jsonref` dependency with:
@@ -39,11 +39,11 @@ pip install jsonref
### On its own
-This component is primarily meant to be used in pipelines. Using this component alone is useful when you want to convert OpenAPI specification into OpenAI's function call specification and then perhaps save it in a file and subsequently use it in function calling.
+This component is primarily meant to be used in pipelines. Using this component alone is useful when you want to convert an OpenAPI specification into function definitions and then perhaps save them in a file and subsequently use them for tool calling.
### In a pipeline
-In a pipeline context, `OpenAPIServiceToFunctions` is most valuable when used alongside `OpenAPIServiceConnector`. For instance, let’s consider integrating [serper.dev](http://serper.dev/) search engine bridge into a pipeline. `OpenAPIServiceToFunctions` retrieves the OpenAPI specification of Serper from https://bit.ly/serper_dev_spec, converts this specification into a format that OpenAI's function calling mechanism can understand, and then seamlessly passes this translated specification as `generation_kwargs` for LLM function calling invocation.
+In a pipeline context, `OpenAPIServiceToFunctions` is most valuable when used alongside `OpenAPIServiceConnector`. For instance, let’s consider integrating [serper.dev](http://serper.dev/) search engine bridge into a pipeline. `OpenAPIServiceToFunctions` retrieves the OpenAPI specification of Serper from https://bit.ly/serper_dev_spec, converts this specification into function definitions that an LLM with tool calling capabilities can understand, and then seamlessly passes these definitions as `generation_kwargs` to the Chat Generator component.
:::info
To run the following code snippet, note that you have to have your own Serper and OpenAI API keys.
@@ -53,57 +53,86 @@ To run the following code snippet, note that you have to have your own Serper an
import json
import requests
-from typing import Dict, Any, List
+from typing import Any
+
from haystack import Pipeline
-from haystack.components.generators.utils import print_streaming_chunk
+from haystack.components.connectors import OpenAPIServiceConnector
from haystack.components.converters import OpenAPIServiceToFunctions, OutputAdapter
from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.connectors import OpenAPIServiceConnector
-from haystack.components.fetchers import LinkContentFetcher
-from haystack.dataclasses import ChatMessage, ByteStream
-from haystack.utils import Secret
+from haystack.dataclasses import ChatMessage
+from haystack.dataclasses.byte_stream import ByteStream
-def prepare_fc_params(openai_functions_schema: Dict[str, Any]) -> Dict[str, Any]:
+
+def prepare_fc_params(openai_functions_schema: dict[str, Any]) -> dict[str, Any]:
return {
- "tools": [{
- "type": "function",
- "function": openai_functions_schema
- }],
+ "tools": [{"type": "function", "function": openai_functions_schema}],
"tool_choice": {
"type": "function",
- "function": {"name": openai_functions_schema["name"]}
- }
+ "function": {"name": openai_functions_schema["name"]},
+ },
}
-system_prompt = requests.get("https://bit.ly/serper_dev_system_prompt").text
-serper_spec = requests.get("https://bit.ly/serper_dev_spec").text
-
-pipe = Pipeline()
-pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
-pipe.add_component("functions_llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-3.5-turbo-0613"))
-pipe.add_component("openapi_container", OpenAPIServiceConnector())
-pipe.add_component("a1", OutputAdapter("{{functions[0] | prepare_fc}}", Dict[str, Any], {"prepare_fc": prepare_fc_params}))
-pipe.add_component("a2", OutputAdapter("{{specs[0]}}", Dict[str, Any]))
-pipe.add_component("a3", OutputAdapter("{{system_message + service_response}}", List[ChatMessage]))
-pipe.add_component("llm", OpenAIChatGenerator(api_key=Secret.from_token(llm_api_key), model="gpt-4-1106-preview", streaming_callback=print_streaming_chunk))
-
-pipe.connect("spec_to_functions.functions", "a1.functions")
-pipe.connect("spec_to_functions.openapi_specs", "a2.specs")
-pipe.connect("a1", "functions_llm.generation_kwargs")
-pipe.connect("functions_llm.replies", "openapi_container.messages")
-pipe.connect("a2", "openapi_container.service_openapi_spec")
-pipe.connect("openapi_container.service_response", "a3.service_response")
-pipe.connect("a3", "llm.messages")
+serperdev_spec = requests.get("https://bit.ly/serper_dev_spec").json()
+system_prompt = requests.get("https://bit.ly/serper_dev_system_prompt").text
user_prompt = "Why was Sam Altman ousted from OpenAI?"
-result = pipe.run(data={"functions_llm": {"messages":[ChatMessage.from_system("Only do function calling"), ChatMessage.from_user(user_prompt)]},
- "openapi_container": {"service_credentials": serper_dev_key},
- "spec_to_functions": {"sources": [ByteStream.from_string(serper_spec)]},
- "a3": {"system_message": [ChatMessage.from_system(system_prompt)]}})
-
->Sam Altman was ousted from OpenAI on November 17, 2023, following
->a "deliberative review process" by the board of directors. The board concluded
->that he was not "consistently candid in his communications". However, he
->returned as CEO just days after his ouster.
+pipe = Pipeline()
+pipe.add_component("spec_to_functions", OpenAPIServiceToFunctions())
+pipe.add_component(
+ "prepare_fc_adapter",
+ OutputAdapter(
+ "{{functions[0] | prepare_fc}}",
+ dict[str, Any],
+ {"prepare_fc": prepare_fc_params},
+ ),
+)
+pipe.add_component("functions_llm", OpenAIChatGenerator())
+pipe.add_component("openapi_connector", OpenAPIServiceConnector())
+pipe.add_component(
+ "message_adapter",
+ OutputAdapter(
+ "{{system_message + service_response}}",
+ list[ChatMessage],
+ unsafe=True,
+ ),
+)
+pipe.add_component("llm", OpenAIChatGenerator())
+
+pipe.connect("spec_to_functions.functions", "prepare_fc_adapter.functions")
+pipe.connect(
+ "spec_to_functions.openapi_specs",
+ "openapi_connector.service_openapi_spec",
+)
+pipe.connect("prepare_fc_adapter", "functions_llm.generation_kwargs")
+pipe.connect("functions_llm.replies", "openapi_connector.messages")
+pipe.connect("openapi_connector.service_response", "message_adapter.service_response")
+pipe.connect("message_adapter", "llm.messages")
+
+result = pipe.run(
+ data={
+ "functions_llm": {
+ "messages": [
+ ChatMessage.from_system("Only do tool/function calling"),
+ ChatMessage.from_user(user_prompt),
+ ],
+ },
+ "openapi_connector": {
+ "service_credentials": serper_dev_key,
+ },
+ "spec_to_functions": {
+ "sources": [ByteStream.from_string(json.dumps(serperdev_spec))],
+ },
+ "message_adapter": {
+ "system_message": [ChatMessage.from_system(system_prompt)],
+ },
+ },
+)
+
+print(result["llm"]["replies"][0].text)
+
+# Sam Altman was ousted from OpenAI on November 17, 2023, following
+# a "deliberative review process" by the board of directors. The board concluded
+# that he was not "consistently candid in his communications". However, he
+# returned as CEO just days after his ouster.
```
diff --git a/haystack/components/connectors/openapi_service.py b/haystack/components/connectors/openapi_service.py
index 7165e691c1..6ddd295ae7 100644
--- a/haystack/components/connectors/openapi_service.py
+++ b/haystack/components/connectors/openapi_service.py
@@ -151,11 +151,9 @@ class OpenAPIServiceConnector:
The `OpenAPIServiceConnector` component connects the Haystack framework to OpenAPI services, enabling it to call
operations as defined in the OpenAPI specification of the service.
- It integrates with `ChatMessage` dataclass, where the payload in messages is used to determine the method to be
- called and the parameters to be passed. The message payload should be an OpenAI JSON formatted function calling
- string consisting of the method name and the parameters to be passed to the method. The method name and parameters
- are then used to invoke the method on the OpenAPI service. The response from the service is returned as a
- `ChatMessage`.
+ It integrates with `ChatMessage` dataclass, where the `ToolCall` entries in messages are used to determine the
+ method to be called and the parameters to be passed. The method name and parameters are then used to invoke the
+ method on the OpenAPI service. The response from the service is returned as a `ChatMessage`.
Before using this component, users usually resolve service endpoint parameters with a help of
`OpenAPIServiceToFunctions` component.
@@ -164,9 +162,9 @@ class OpenAPIServiceConnector:
service specified via OpenAPI specification.
Note, however, that `OpenAPIServiceConnector` is usually not meant to be used directly, but rather as part of a
- pipeline that includes the `OpenAPIServiceToFunctions` component and an `OpenAIChatGenerator` component using LLM
- with the function calling capabilities. In the example below we use the function calling payload directly, but in a
- real-world scenario, the function calling payload would usually be generated by the `OpenAIChatGenerator` component.
+ pipeline that includes the `OpenAPIServiceToFunctions` component and a Chat Generator component using an LLM
+ with tool calling capabilities. In the example below we use the tool call payload directly, but in a
+ real-world scenario, the tool calls would usually be generated by the Chat Generator component.
Usage example:
@@ -175,23 +173,29 @@ class OpenAPIServiceConnector:
import requests
from haystack.components.connectors import OpenAPIServiceConnector
- from haystack.dataclasses import ChatMessage
+ from haystack.dataclasses import ChatMessage, ToolCall
- fc_payload = [{'function': {'arguments': '{"q": "Why was Sam Altman ousted from OpenAI?"}', 'name': 'search'},
- 'id': 'call_PmEBYvZ7mGrQP5PUASA5m9wO', 'type': 'function'}]
+ tool_call = ToolCall(
+ tool_name="search",
+ arguments={"q": "Why was Sam Altman ousted from OpenAI?"},
+ )
+ message = ChatMessage.from_assistant(tool_calls=[tool_call])
- serper_token =
+ serper_token = "your_serper_dev_token"
serperdev_openapi_spec = json.loads(requests.get("https://bit.ly/serper_dev_spec").text)
service_connector = OpenAPIServiceConnector()
- result = service_connector.run(messages=[ChatMessage.from_assistant(json.dumps(fc_payload))],
- service_openapi_spec=serperdev_openapi_spec, service_credentials=serper_token)
+ result = service_connector.run(
+ messages=[message],
+ service_openapi_spec=serperdev_openapi_spec,
+ service_credentials=serper_token,
+ )
print(result)
- >> {'service_response': [ChatMessage(_role=, _content=[TextContent(text=
- >> '{"searchParameters": {"q": "Why was Sam Altman ousted from OpenAI?",
- >> "type": "search", "engine": "google"}, "answerBox": {"snippet": "Concerns over AI safety and OpenAI\'s role
- >> in protecting were at the center of Altman\'s brief ouster from the company."...
+ # {'service_response': [ChatMessage(_role=, _content=[TextContent(text=
+ # '{"searchParameters": {"q": "Why was Sam Altman ousted from OpenAI?",
+ # "type": "search", "engine": "google"}, "answerBox": {"snippet": "Concerns over AI safety and OpenAI\'s role
+ # in protecting were at the center of Altman\'s brief ouster from the company."...
```
"""
diff --git a/haystack/components/converters/openapi_functions.py b/haystack/components/converters/openapi_functions.py
index 9c24152e70..bcc08a53ae 100644
--- a/haystack/components/converters/openapi_functions.py
+++ b/haystack/components/converters/openapi_functions.py
@@ -37,9 +37,13 @@ class OpenAPIServiceToFunctions:
Usage example:
```python
from haystack.components.converters import OpenAPIServiceToFunctions
+ from haystack.dataclasses.byte_stream import ByteStream
converter = OpenAPIServiceToFunctions()
- result = converter.run(sources=["path/to/openapi_definition.yaml"])
+ spec = ByteStream.from_string(
+ '{"openapi":"3.0.0","info":{"title":"API","version":"1.0.0"},"paths":{"/search":{"get":{"operationId":"search","summary":"Search","parameters":[{"name":"q","in":"query","required":true,"schema":{"type":"string"}}]}}}}'
+ )
+ result = converter.run(sources=[spec])
assert result["functions"]
```
"""