Python package localllm provides functionalities to use a local LLM object alongside dspy. This looks like this.

from localllm import localllm_connect
llm = localllm_connect("localllm/gemma-4-E2B-it-Q8_0")

localllm_connect will download the model (if it is not downloaded yet) and loads the model.
import dspy
class Go(dspy.Signature):
    # dspy Signature mapping a question string to a list-of-strings answer.
    # NOTE(review): deliberately no docstring — dspy treats a Signature
    # docstring as prompt instructions, which would change model behavior.
    sentence: str = dspy.InputField(desc = "A question")
    value: list[str] = dspy.OutputField(desc = "The answer")
predict = dspy.ChainOfThought(Go)
out = predict(sentence = "What are the 10 most common first names in Japan")
out
Prediction(
    reasoning='Determining the absolute "10 most common first names in Japan" is complex as naming trends can shift, and data sources vary. However, based on general demographic data and historical popularity, certain names consistently rank high. Common Japanese names often follow traditional patterns or have meanings related to nature or virtues. I will list some of the most frequently cited and traditional popular names.',
    value=['Sota', 'Yuki', 'Haruto', 'Ren', 'Riku', 'Hiroto', 'Kaito', 'Taro', 'Yuto', 'Asahi']
)
class YourProgram(dspy.Signature):
    # dspy Signature mapping a question string to an integer answer plus a
    # float confidence score.
    # NOTE(review): deliberately no docstring — dspy treats a Signature
    # docstring as prompt instructions, which would change model behavior.
    sentence: str = dspy.InputField(desc = "A question")
    value: int = dspy.OutputField(desc = "The answer")
    confidence: float = dspy.OutputField(desc = "Give a confidence score about your answer")
predict = dspy.Predict(YourProgram)
out = predict(sentence = "How many r's can you detect in the word strawberrrry? Please provide 1 number.")
out
Prediction(
    value=5,
    confidence=1.0
)