openai_prompt_caching.py
from typing import List

import ell


@ell.simple(model="gpt-4o-2024-08-06", store=True)
def cached_chat(history: List[str], new_message: str) -> str:
    """You are a helpful assistant who chats with the user.
    Your response should be < 2 sentences."""
    # Join outside the f-string: backslash escapes inside f-string
    # expressions require Python 3.12+.
    joined_history = "\n".join(history)
    return f"""Here is the chat history: {joined_history}.
Please respond to this message:
{new_message}"""
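

# The helper below is a minimal sketch, not part of the original example, showing one
# way to confirm that OpenAI's automatic prompt caching is kicking in. Because
# @ell.simple returns only the response text, it calls the `openai` SDK directly and
# reads `usage.prompt_tokens_details.cached_tokens`, which reports how many prompt
# tokens were served from the cache. The function name and its presence here are
# illustrative assumptions, not part of ell's API.
def report_cached_tokens(messages: List[dict]) -> int:
    from openai import OpenAI  # assumes the `openai` package is installed

    client = OpenAI()
    response = client.chat.completions.create(
        model="gpt-4o-2024-08-06",
        messages=messages,
    )
    details = getattr(response.usage, "prompt_tokens_details", None)
    # A value > 0 means part of the prompt prefix was served from the cache
    # (caching only applies once the prompt exceeds roughly 1024 tokens).
    if details and details.cached_tokens is not None:
        return details.cached_tokens
    return 0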


if __name__ == "__main__":
    ell.init(verbose=True, store='./logdir')

    # Replay the same conversation repeatedly so the shared prompt prefix
    # stays warm in OpenAI's prompt cache; stop with Ctrl+C.
    while True:
        history = []
        simulate_user_messages = [
            "Hello, how are you?",
            "What's the weather like today?",
            "Can you recommend a good book?",
            "Tell me a joke.",
            "What's your favorite color?",
            "How do you make pancakes?",
        ]
        for message in simulate_user_messages:
            response = cached_chat(history, message)
            history.append("User: " + message + "\n")
            history.append("Assistant: " + response + "\n")