
python3 examples/docqa/fn-call-local-simple.py

To change the local model, use the optional arg -m <local_model>.
See this [script](https://github.com/langroid/langroid-examples/blob/main/examples/docqa/rag-local-simple.py)
for other ways to specify the local_model.

"""
2125import os
2226from typing import List
27+ import fire
2328
2429from pydantic import BaseModel , Field
2530import langroid as lr
3035
3136os .environ ["TOKENIZERS_PARALLELISM" ] = "false"
3237
33- # Create the llm config object.
34- # Note: if instead of ollama you've spun up your local LLM to listen at
35- # an OpenAI-Compatible Endpoint like `localhost:8000`, then you can set
36- # chat_model="local/localhost:8000"; carefully note there's no http in this,
37- # and if the endpoint is localhost:8000/v1, then you must set
38- # chat_model="local/localhost:8000/v1"
39- # Similarly if your endpoint is `http://128.0.4.5:8000/v1`, then you must set
40- # chat_model="local/128.0.4.5:8000/v1"
41- llm_cfg = lm .OpenAIGPTConfig (
42- chat_model = "litellm/ollama/mistral:7b-instruct-v0.2-q4_K_M" ,
43- chat_context_length = 4096 , # set this based on model
44- max_output_tokens = 100 ,
45- temperature = 0.2 ,
46- stream = True ,
47- timeout = 45 ,
48- )
49-
50- # Recommended: First test if basic chat works with this llm setup as below:
51- # Once this works, then you can try the rest of the example.
52- #
53- # agent = lr.ChatAgent(
54- # lr.ChatAgentConfig(
55- # llm=llm_cfg,
56- # )
57- # )
58- #
59- # agent.llm_response("What is 3 + 4?")
60- #
61- # task = lr.Task(agent)
62- # verify you can interact with this in a chat loop on cmd line:
63- # task.run("Concisely answer some questions")
64-
# (1) Define the desired structure via Pydantic.
# Here we define a nested structure for City information.
# The "Field" annotations are optional, and are included in the system message
# if provided, and help with generation accuracy.

70-
7143class CityData (BaseModel ):
7244 population : int = Field (..., description = "population of city" )
7345 country : str = Field (..., description = "country of city" )
@@ -90,9 +62,9 @@ def handle(self) -> str:
9062 """Handle LLM's structured output if it matches City structure"""
9163 print ("SUCCESS! Got Valid City Info" )
9264 return """
93- Thanks! ask me for another city name, do not say anything else
94- until you get a city name.
95- """
65+ Thanks! ask me for another city name, do not say anything else
66+ until you get a city name.
67+ """
9668
9769 @staticmethod
9870 def handle_message_fallback (
@@ -101,10 +73,10 @@ def handle_message_fallback(
10173 """Fallback method when LLM forgets to generate a tool"""
10274 if isinstance (msg , ChatDocument ) and msg .metadata .sender == "LLM" :
10375 return """
104- You must use the `city_tool` to generate city information.
105- You either forgot to use it, or you used it with the wrong format.
106- Make sure all fields are filled out.
107- """
76+ You must use the `city_tool` to generate city information.
77+ You either forgot to use it, or you used it with the wrong format.
78+ Make sure all fields are filled out.
79+ """
10880
10981 @classmethod
11082 def examples (cls ) -> List ["ToolMessage" ]:
@@ -122,33 +94,66 @@ def examples(cls) -> List["ToolMessage"]:
12294 ]
12395
12496
125- # (3) Define a ChatAgentConfig and ChatAgent
126-
127- config = lr .ChatAgentConfig (
128- llm = llm_cfg ,
129- system_message = """
130- You are an expert on world city information.
131- The user will give you a city name, and you should use the `city_tool` to
132- generate information about the city, and present it to the user.
133- Make up the values if you don't know them exactly, but make sure
134- the structure is as specified in the `city_tool` JSON definition.
135-
136- DO NOT SAY ANYTHING ELSE BESIDES PROVIDING THE CITY INFORMATION.
137-
138- START BY ASKING ME TO GIVE YOU A CITY NAME.
139- DO NOT GENERATE ANYTHING YOU GET A CITY NAME.
140-
141- Once you've generated the city information using `city_tool`,
142- ask for another city name, and so on.
143- """ ,
144- )
145-
146- agent = lr .ChatAgent (config )
147-
148- # (4) Enable the Tool for this agent --> this auto-inserts JSON instructions
149- # and few-shot examples into the system message
150- agent .enable_message (CityTool )
151-
152- # (5) Create task and run it to start an interactive loop
153- task = lr .Task (agent )
154- task .run ()
def app(
    m: str = "litellm/ollama/mistral:7b-instruct-v0.2-q4_K_M",
):
    """Run an interactive loop extracting structured city info from a local LLM.

    Args:
        m: local model spec, e.g. "litellm/ollama/<model>". An empty value
           (e.g. `-m ""` on the CLI) falls back to the default model below.
    """
    # (2) Create the LLM config. `m or ...` guards against an empty -m arg.
    llm_cfg = lm.OpenAIGPTConfig(
        chat_model=m or "litellm/ollama/mistral:7b-instruct-v0.2-q4_K_M",
        chat_context_length=4096,  # set this based on model
        max_output_tokens=100,
        temperature=0.2,
        stream=True,
        timeout=45,
    )

    # Recommended: first test that basic chat works with this llm setup:
    #
    # agent = lr.ChatAgent(lr.ChatAgentConfig(llm=llm_cfg))
    # agent.llm_response("What is 3 + 4?")
    #
    # then verify you can interact with it in a chat loop on the cmd line:
    # task = lr.Task(agent)
    # task.run("Concisely answer some questions")

    # (3) Define a ChatAgentConfig and ChatAgent
    config = lr.ChatAgentConfig(
        llm=llm_cfg,
        system_message="""
        You are an expert on world city information.
        The user will give you a city name, and you should use the `city_tool` to
        generate information about the city, and present it to the user.
        Make up the values if you don't know them exactly, but make sure
        the structure is as specified in the `city_tool` JSON definition.

        DO NOT SAY ANYTHING ELSE BESIDES PROVIDING THE CITY INFORMATION.

        START BY ASKING ME TO GIVE YOU A CITY NAME.
        DO NOT GENERATE ANYTHING UNTIL YOU GET A CITY NAME.

        Once you've generated the city information using `city_tool`,
        ask for another city name, and so on.
        """,
    )

    agent = lr.ChatAgent(config)

    # (4) Enable the Tool for this agent --> this auto-inserts JSON instructions
    # and few-shot examples into the system message
    agent.enable_message(CityTool)

    # (5) Create task and run it to start an interactive loop
    task = lr.Task(agent)
    task.run()


if __name__ == "__main__":
    fire.Fire(app)
0 commit comments