import agentops
from memori import Memori
from openai import OpenAI
# Start a trace to group related operations under one AgentOps session.
agentops.start_trace("memori_conversation_flow", tags=["memori_memory_example"])
try:
    # Initialize OpenAI client (reads OPENAI_API_KEY from the environment).
    openai_client = OpenAI()

    # Initialize Memori with conscious ingestion enabled.
    # AgentOps tracks the memory configuration; conversations are persisted
    # to a local SQLite database.
    memori = Memori(
        database_connect="sqlite:///agentops_example.db",
        conscious_ingest=True,
        auto_ingest=True,
    )
    memori.enable()

    # First conversation - AgentOps tracks the LLM call and Memori records
    # the exchange into memory.
    response1 = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "user", "content": "I'm working on a Python FastAPI project"}
        ],
    )
    print("Assistant:", response1.choices[0].message.content)

    # Second conversation - AgentOps tracks memory retrieval and context
    # injection: Memori should supply the FastAPI context automatically.
    response2 = openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Help me add user authentication"}],
    )
    print("Assistant:", response2.choices[0].message.content)
    print("💡 Notice: Memori automatically provided FastAPI project context!")

    # End trace - AgentOps aggregates all operations under a success state.
    agentops.end_trace(end_state="success")
except Exception as e:
    # Record the failed run in AgentOps, then surface the error instead of
    # silently swallowing it (the original bound `e` but never used it).
    agentops.end_trace(end_state="error")
    print(f"Error: {e}")
    raise