@@ -122,7 +122,7 @@ pipe = Pipeline()

pipe.add_component("retriever", InMemoryBM25Retriever(document_store=docstore))
pipe.add_component("prompt_builder", PromptBuilder(template=template))
- pipe.add_component("llm", OpenAIGenerator(api_key=Secret.from_token("<your-api-key>"))
+ pipe.add_component("llm", OpenAIGenerator(api_key=Secret.from_token("<your-api-key>")))
pipe.connect("retriever", "prompt_builder.documents")
pipe.connect("prompt_builder", "llm")

@@ -137,3 +137,58 @@ res=pipe.run({

print(res)
```

### In YAML

This is the YAML representation of the RAG pipeline shown above. It retrieves documents based on a query, constructs a prompt using a template, and generates an answer using an OpenAI model.

```yaml
components:
  llm:
    init_parameters:
      api_base_url: null
      api_key:
        type: token
        value: <your-api-key>
      generation_kwargs: {}
      http_client_kwargs: null
      max_retries: null
      model: gpt-4o-mini
      organization: null
      streaming_callback: null
      system_prompt: null
      timeout: null
    type: haystack.components.generators.openai.OpenAIGenerator
  prompt_builder:
    init_parameters:
      required_variables: null
      template: "\nGiven the following information, answer the question.\n\nContext:\n\
        {% for document in documents %}\n{{ document.content }}\n{% endfor %}\n\n\
        Question: {{ query }}?\n"
      variables: null
    type: haystack.components.builders.prompt_builder.PromptBuilder
  retriever:
    init_parameters:
      document_store:
        init_parameters:
          bm25_algorithm: BM25L
          bm25_parameters: {}
          bm25_tokenization_regex: (?u)\b\w+\b
          embedding_similarity_function: dot_product
          index: 64e4f9ab-87fb-47fd-b390-dabcfda61447
          return_embedding: true
        type: haystack.document_stores.in_memory.document_store.InMemoryDocumentStore
      filter_policy: replace
      filters: null
      scale_score: false
      top_k: 10
    type: haystack.components.retrievers.in_memory.bm25_retriever.InMemoryBM25Retriever
connection_type_validation: true
connections:
- receiver: prompt_builder.documents
  sender: retriever.documents
- receiver: llm.prompt
  sender: prompt_builder.prompt
max_runs_per_component: 100
metadata: {}
```
0 commit comments