|
#!/usr/bin/env python3
"""DSPy smoke test: configure an OpenAI-compatible LM endpoint, then exercise
raw LM calls and a couple of simple DSPy modules."""

import os

print("Loading...")

import dspy  # imported after the progress print: dspy import is slow

print("Connecting...")

# Build the LM against an OpenAI-compatible endpoint (e.g. a local
# llama-stack / vLLM server). Model name and endpoint are overridable via
# environment variables; the api_key is a placeholder since local endpoints
# typically ignore it.
#
# NOTE(review): the fallback "gemini/models/gemini-flash-latest" produces the
# combined model id "openai/gemini/models/gemini-flash-latest", which mixes
# two provider prefixes and is almost certainly not a valid model string for
# the default endpoint — confirm the intended default with the deployment.
lm = dspy.LM(
    model="openai/" + os.getenv("INFERENCE_MODEL", "gemini/models/gemini-flash-latest"),
    api_base=os.getenv("OPENAI_EP", "http://localhost:8321/v1"),
    api_key="none",
)

# No calls have been made yet, so this should print 0.
print(len(lm.history))
dspy.configure(lm=lm, track_usage=True)
| 22 | + |
# Demonstrate the two raw-LM calling conventions: a plain prompt string with a
# per-call sampling kwarg, and an explicit chat-message list.
completion_from_prompt = lm("Say this is a test!", temperature=0.7)
print("Result 1:", completion_from_prompt)

completion_from_messages = lm(messages=[{"role": "user", "content": "Say this is a test!"}])
print("Result 2:", completion_from_messages)
| 28 | + |
# A ChainOfThought module with a question -> answer signature. It runs
# against the LM configured globally with dspy.configure above.
cot_qa = dspy.ChainOfThought('question -> answer')

# Ask one question and show both the intermediate reasoning and the answer.
prediction = cot_qa(question="How many floors are in the castle David Gregory inherited?")
print(prediction.reasoning)
print(prediction.answer)

# A bare Predict module whose output field is typed as bool.
sentiment_classifier = dspy.Predict('sentence -> sentiment: bool')
print(sentiment_classifier(sentence="it's a charming and often affecting journey.").sentiment)
| 39 | + |
| 40 | + |
# Multi-hop retrieval demo. Kept disabled: it depends on a `search(query, k=...)`
# retrieval helper that is not defined anywhere in this script, so enabling it
# as-is would raise NameError at the first hop.
RUN_HOP_DEMO = False

if RUN_HOP_DEMO:
    class Hop(dspy.Module):
        """Iteratively generate search queries about a claim and accumulate
        notes and source titles over a fixed number of retrieval hops."""

        def __init__(self, num_docs=10, num_hops=4):
            # BUG FIX: dspy.Module subclasses must call super().__init__()
            # so that sub-modules are registered with the framework.
            super().__init__()
            self.num_docs, self.num_hops = num_docs, num_hops
            self.generate_query = dspy.ChainOfThought('claim, notes -> query')
            self.append_notes = dspy.ChainOfThought(
                'claim, notes, context -> new_notes: list[str], titles: list[str]'
            )

        def forward(self, claim: str):
            # NOTE: the original annotated the return type as list[str], but
            # the method actually returns a dspy.Prediction — annotation removed.
            notes = []
            titles = []

            for _ in range(self.num_hops):
                query = self.generate_query(claim=claim, notes=notes).query
                # `search` is undefined in this file — must be provided before
                # this demo can run.
                context = search(query, k=self.num_docs)
                prediction = self.append_notes(claim=claim, notes=notes, context=context)
                notes.extend(prediction.new_notes)
                titles.extend(prediction.titles)

            # De-duplicate titles; note that set() does not preserve order.
            return dspy.Prediction(notes=notes, titles=list(set(titles)))

    hop = Hop()
    print(hop(claim="Stephen Curry is the best 3 pointer shooter ever in the human history"))
| 62 | + |
# Total number of LM calls recorded during this run (history was 0 at startup).
print(len(lm.history))
# 0 commit comments