-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgroq_code_function_calling.py
More file actions
100 lines (78 loc) · 2.64 KB
/
groq_code_function_calling.py
File metadata and controls
100 lines (78 loc) · 2.64 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
import os
import json
from dotenv import load_dotenv
from groq import Groq
# Load variables from a local .env file into the process environment
# (this is where GROQ_API_KEY is expected to come from).
load_dotenv()
# Module-level Groq client; every chat-completion request below goes through it.
# NOTE(review): if GROQ_API_KEY is unset, getenv returns None and the first
# API call will fail with an authentication error.
client = Groq(api_key=os.getenv("GROQ_API_KEY"))
# ---------------------------------------------
# 1. Python function (Tool)
# ---------------------------------------------
def get_patient_symptoms(disease: str):
    """Return the locally known symptom list for *disease*.

    The lookup is case-insensitive. Unknown diseases yield the
    single-element fallback list ``["No data found"]``.
    """
    symptom_table = {
        "diabetes": ["Frequent urination", "Excessive thirst", "Fatigue", "Slow wound healing"],
        "flu": ["Fever", "Cough", "Body aches"],
        "covid": ["Fever", "Cough", "Loss of smell", "Breathing difficulty"],
    }
    key = disease.lower()
    if key in symptom_table:
        return symptom_table[key]
    return ["No data found"]
# ---------------------------------------------
# 2. LLM Request
# ---------------------------------------------
MODEL = "openai/gpt-oss-120b"  # single source of truth for both API calls

# Keep the whole conversation in one list so the follow-up request replays
# the exact same history. (The original code sent a *different* user question
# in step 5 than in step 2, which desynchronised the tool-call context.)
messages = [
    {"role": "user", "content": "What are the symptoms of denuge?"}
]

completion = client.chat.completions.create(
    model=MODEL,
    messages=messages,
    tools=[{
        "type": "function",
        "function": {
            "name": "get_patient_symptoms",
            "description": "Returns symptoms for a disease name",
            "parameters": {
                "type": "object",
                "properties": {
                    "disease": {"type": "string"}
                },
                "required": ["disease"]
            }
        }
    }],
    tool_choice="auto"
)

message = completion.choices[0].message

# ---------------------------------------------
# 3. Check if LLM called a tool
# ---------------------------------------------
if not message.tool_calls:
    # Model answered directly without requesting a tool — nothing more to do.
    print("LLM response:", message.content)
    exit()

# The assistant message that *requested* the tool calls must precede the
# tool-result messages in the history sent back to the model.
messages.append(message)

# ---------------------------------------------
# 4. Run the local Python function for EVERY requested tool call.
#    (The original code executed only the first call; if the model asked
#    for several, the follow-up would be missing tool responses.)
# ---------------------------------------------
for tool_call in message.tool_calls:
    tool_name = tool_call.function.name
    tool_args = json.loads(tool_call.function.arguments)
    print("🔧 Tool requested:", tool_name)
    print("📥 Args:", tool_args)

    if tool_name == "get_patient_symptoms":
        function_result = get_patient_symptoms(**tool_args)
    else:
        # Unknown tool names get an explicit error payload instead of crashing.
        function_result = {"error": "unknown tool"}
    print("🧪 Function Output:", function_result)

    # One "tool" message per call, matched to the request by tool_call_id.
    messages.append({
        "role": "tool",
        "tool_call_id": tool_call.id,
        "content": json.dumps(function_result)
    })

# ---------------------------------------------
# 5. Send tool result(s) back to LLM for the final natural-language answer
# ---------------------------------------------
followup = client.chat.completions.create(
    model=MODEL,
    messages=messages
)
print("\n💬 Final Answer:\n")
print(followup.choices[0].message.content)