-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathbasic_chat.py
More file actions
48 lines (35 loc) · 1.15 KB
/
basic_chat.py
File metadata and controls
48 lines (35 loc) · 1.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
"""Basic agentcache usage: create a session, send a message, check cache status."""
import asyncio
import os
from dotenv import load_dotenv
load_dotenv()
from agentcache import AgentSession, LiteLLMSDKProvider
async def main():
    """Demonstrate agentcache: one chat turn, then cache status and break analysis."""
    provider = LiteLLMSDKProvider()

    # Probe API keys in priority order and take the first provider that is configured.
    candidates = (
        ("OPENAI_API_KEY", "gpt-4o-mini"),
        ("GEMINI_API_KEY", "gemini/gemini-2.5-flash"),
        ("ANTHROPIC_API_KEY", "anthropic/claude-sonnet-4-20250514"),
    )
    model = next((name for key, name in candidates if os.getenv(key)), None)
    if model is None:
        print("Set OPENAI_API_KEY, GEMINI_API_KEY, or ANTHROPIC_API_KEY in .env")
        return

    session = AgentSession(
        model=model,
        provider=provider,
        system_prompt="You are a careful code assistant.",
    )

    # Single round-trip through the session.
    reply = await session.respond("Analyze my cache layer and suggest improvements.")
    print("Reply:", reply.text)
    print()

    # Show the cache state after the exchange.
    print(session.cache_status().pretty())
    print()

    # Explain why the prompt cache broke, if it did on the last turn.
    explanation = session.explain_last_cache_break()
    if explanation:
        print(explanation.pretty())
    else:
        print("No cache break detected.")
# Script entry point: drive the async example with the default event loop.
if __name__ == "__main__":
    asyncio.run(main())