AI.py
import os

import openai
import requests


class AI:
    def __init__(self, model="gpt-3.5-turbo-0613"):
        try:
            # Get the OPENAI_API_KEY from the environment (openai < 1.0 style).
            openai.api_key = os.getenv("OPENAI_API_KEY")
            # Verify that the key works and the requested model is available.
            openai.Model.retrieve(model)
            self.model = model
        except (openai.error.AuthenticationError, openai.error.InvalidRequestError):
            print("Please set up the OPENAI_API_KEY in the environment.")

    def get_completion_from_messages(self, messages,
                                     model="gpt-3.5-turbo-0613",
                                     temperature=0,
                                     max_tokens=500):
        # Plain chat completion: returns the assistant's text reply.
        response = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            # functions=functions,
            # function_call="auto"
        )
        return response.choices[0].message["content"]

    def get_completion_from_messages_function(self, messages,
                                               functions,
                                               model="gpt-3.5-turbo-0613",
                                               temperature=0,
                                               max_tokens=500):
        # Chat completion with function definitions; the model decides whether
        # to answer directly or to request a function call.
        response = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            functions=functions,
            function_call="auto",
        )
        message = response.choices[0].message
        # When the model requests a function call, "content" is None and
        # "function_call" holds the name and JSON-encoded arguments.
        if message["content"] is None:
            return message["function_call"]
        else:
            return message["content"]

    def chat_completion_request(self, messages, functions=None, function_call=None,
                                model="gpt-3.5-turbo-0613"):
        # Call the chat completions endpoint directly over HTTP with requests.
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + openai.api_key,
        }
        json_data = {"model": model, "messages": messages}
        if functions is not None:
            json_data.update({"functions": functions})
        if function_call is not None:
            json_data.update({"function_call": function_call})
        try:
            response = requests.post(
                "https://api.openai.com/v1/chat/completions",
                headers=headers,
                json=json_data,
            )
            return response
        except Exception as e:
            print("Unable to generate ChatCompletion response")
            print(f"Exception: {e}")
            return e

if __name__ == "__main__":
    ai = AI()
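
    # Illustrative sketch (not part of the original script): a plain chat
    # completion without function definitions, using the helper defined above.
    greeting = ai.get_completion_from_messages(
        [{"role": "user", "content": "Say hello in one short sentence."}]
    )
    print(greeting)
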
    functions = [
        {
            "name": "get_current_weather",
            "description": "Get the current weather",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "format": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "The temperature unit to use. Infer this from the user's location.",
                    },
                },
                "required": ["location", "format"],
            },
        },
        {
            "name": "get_n_day_weather_forecast",
            "description": "Get an N-day weather forecast",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "format": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "The temperature unit to use. Infer this from the user's location.",
                    },
                    "num_days": {
                        "type": "integer",
                        "description": "The number of days to forecast",
                    },
                },
                "required": ["location", "format", "num_days"],
            },
        },
    ]
    MA = []
    MA.append({"role": "system", "content": "Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous."})
    MA.append({"role": "user", "content": "What's the weather like today"})
    chat_response = ai.chat_completion_request(
        messages=MA, functions=functions)
    assistant_message = chat_response.json()["choices"][0]["message"]
    print(assistant_message)
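
    # Illustrative follow-up (not part of the original script): when the model
    # requests a function call, its arguments arrive as a JSON string that can
    # be decoded before dispatching to a real weather lookup.
    import json  # local import so this sketch stays self-contained

    function_call = assistant_message.get("function_call")
    if function_call is not None:
        arguments = json.loads(function_call["arguments"])
        print(f"Model wants to call {function_call['name']} with {arguments}")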