@@ -122,7 +122,7 @@ pipe = Pipeline()
 
 pipe.add_component("retriever", InMemoryBM25Retriever(document_store=docstore))
 pipe.add_component("prompt_builder", PromptBuilder(template=template))
-pipe.add_component("llm", OpenAIGenerator(api_key=Secret.from_token("<your-api-key>")))
+pipe.add_component("llm", OpenAIGenerator(api_key=Secret.from_env_var("OPENAI_API_KEY")))
 pipe.connect("retriever", "prompt_builder.documents")
 pipe.connect("prompt_builder", "llm")
 
@@ -137,3 +137,60 @@ res=pipe.run({
 
 print(res)
 ```
+
+### In YAML
+
+This is the YAML representation of the RAG pipeline shown above. It retrieves documents based on a query, constructs a prompt using a template, and generates an answer using a chat model.
+
+```yaml
+components:
+  llm:
+    init_parameters:
+      api_base_url: null
+      api_key:
+        env_vars:
+        - OPENAI_API_KEY
+        strict: true
+        type: env_var
+      generation_kwargs: {}
+      http_client_kwargs: null
+      max_retries: null
+      model: gpt-5-mini
+      organization: null
+      streaming_callback: null
+      system_prompt: null
+      timeout: null
+    type: haystack.components.generators.openai.OpenAIGenerator
+  prompt_builder:
+    init_parameters:
+      required_variables: null
+      template: "\nGiven the following information, answer the question.\n\nContext:\n\
+        {% for document in documents %}\n{{ document.content }}\n{% endfor %}\n\n\
+        Question: {{ query }}?\n"
+      variables: null
+    type: haystack.components.builders.prompt_builder.PromptBuilder
+  retriever:
+    init_parameters:
+      document_store:
+        init_parameters:
+          bm25_algorithm: BM25L
+          bm25_parameters: {}
+          bm25_tokenization_regex: (?u)\b\w+\b
+          embedding_similarity_function: dot_product
+          index: 64e4f9ab-87fb-47fd-b390-dabcfda61447
+          return_embedding: true
+        type: haystack.document_stores.in_memory.document_store.InMemoryDocumentStore
+      filter_policy: replace
+      filters: null
+      scale_score: false
+      top_k: 10
+    type: haystack.components.retrievers.in_memory.bm25_retriever.InMemoryBM25Retriever
+connection_type_validation: true
+connections:
+- receiver: prompt_builder.documents
+  sender: retriever.documents
+- receiver: llm.prompt
+  sender: prompt_builder.prompt
+max_runs_per_component: 100
+metadata: {}
+```
0 commit comments