intermediate
Build a Customer Support Agent
Create an AI-powered customer support agent that remembers customer history and provides personalized assistance
What You’ll Build
A customer support AI agent that:
- Remembers customer interactions and preferences
- Provides context-aware responses based on history
- Tracks issues and resolutions per customer
- Streams responses for better UX
Prerequisites
- Python 3.8 or higher
- OpenAI API key
- Memoid API key (get one at memoid.dev)
Setup
Install the required packages:
pip install openai requests
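The agent reads OPENAI_API_KEY and MEMOID_API_KEY from environment variables (see the configuration block in the implementation below). An optional startup check along these lines fails fast if either key is missing:

import os

# Abort early with a clear message rather than failing later with an auth error.
for var in ("OPENAI_API_KEY", "MEMOID_API_KEY"):
    if not os.environ.get(var):
        raise RuntimeError(f"{var} is not set; export it before running the agent")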
Full Implementation

import os
import requests
from openai import OpenAI

# Configuration
MEMOID_API_KEY = os.environ.get("MEMOID_API_KEY")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
MEMOID_BASE_URL = "https://api.memoid.dev/v1"


class CustomerSupportAgent:
    def __init__(self):
        self.openai = OpenAI(api_key=OPENAI_API_KEY)
        self.headers = {
            "Authorization": f"Bearer {MEMOID_API_KEY}",
            "Content-Type": "application/json"
        }

    def get_customer_context(self, customer_id: str, query: str) -> str:
        """Retrieve relevant memories for the customer."""
        response = requests.post(
            f"{MEMOID_BASE_URL}/memories/search",
            headers=self.headers,
            json={
                "query": query,
                "user_id": customer_id,
                "limit": 5
            }
        )
        if response.status_code == 200:
            memories = response.json().get("results", [])
            if memories:
                context = "\n".join(f"- {m['memory']}" for m in memories)
                return f"Customer History:\n{context}"
        return ""

    def store_interaction(self, customer_id: str, query: str, response: str):
        """Store the interaction in memory."""
        requests.post(
            f"{MEMOID_BASE_URL}/memories",
            headers=self.headers,
            json={
                "messages": [
                    {"role": "user", "content": query},
                    {"role": "assistant", "content": response}
                ],
                "user_id": customer_id,
                "metadata": {"type": "support_interaction"}
            }
        )

    def handle_query(self, customer_id: str, query: str) -> str:
        """Handle a customer support query."""
        # Get customer context
        context = self.get_customer_context(customer_id, query)

        # Build the prompt
        system_prompt = """You are a helpful customer support agent.
        Use the customer's history to provide personalized assistance.
        Be empathetic, professional, and solution-oriented."""

        messages = [{"role": "system", "content": system_prompt}]
        if context:
            messages.append({
                "role": "system",
                "content": context
            })
        messages.append({"role": "user", "content": query})

        # Generate response with streaming
        response_text = ""
        stream = self.openai.chat.completions.create(
            model="gpt-4",
            messages=messages,
            stream=True
        )

        print("Agent: ", end="", flush=True)
        for chunk in stream:
            if chunk.choices[0].delta.content:
                content = chunk.choices[0].delta.content
                print(content, end="", flush=True)
                response_text += content
        print()

        # Store the interaction
        self.store_interaction(customer_id, query, response_text)
        return response_text

    def get_customer_history(self, customer_id: str):
        """Get all memories for a customer."""
        response = requests.get(
            f"{MEMOID_BASE_URL}/memories",
            headers=self.headers,
            params={"user_id": customer_id}
        )
        if response.status_code == 200:
            return response.json().get("results", [])
        return []


def main():
    agent = CustomerSupportAgent()
    customer_id = "customer_jane_doe"

    print("Customer Support Agent")
    print("=" * 40)
    print("Type 'quit' to exit, 'history' to see memories\n")

    while True:
        query = input("Customer: ").strip()

        if query.lower() == "quit":
            break

        if query.lower() == "history":
            memories = agent.get_customer_history(customer_id)
            print("\nCustomer Memories:")
            for m in memories:
                print(f"  - {m['memory']}")
            print()
            continue

        if query:
            agent.handle_query(customer_id, query)
            print()


if __name__ == "__main__":
    main()

Example Conversation
Customer Support Agent
========================================
Type 'quit' to exit, 'history' to see memories
Customer: Hi, I ordered a laptop last week but it hasn't arrived yet.
Agent: I'm sorry to hear your laptop hasn't arrived yet. I'd be happy
to help you track your order. Could you please provide your order
number so I can look into this for you?
Customer: It's ORDER-12345. I'm really frustrated because I needed
it for work.
Agent: I completely understand your frustration, especially when
you need it for work. Let me check on ORDER-12345 right away...
Customer: history
Customer Memories:
- Customer ordered a laptop last week that hasn't arrived
- Order number is ORDER-12345
- Customer needs the laptop for work and is frustrated

Key Concepts
Memory-Augmented Responses
The agent retrieves relevant customer history before generating responses:
context = self.get_customer_context(customer_id, query)

This enables personalized responses that reference past interactions.
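For illustration, here is roughly how retrieved memories become the context string. The response shape below is only an assumption that mirrors what get_customer_context expects (a results list of items with a memory field); consult the Memoid API reference for the actual schema.

# Illustrative only: mirrors the shape get_customer_context expects;
# the real Memoid response may include additional fields.
search_response = {
    "results": [
        {"memory": "Customer ordered a laptop last week that hasn't arrived"},
        {"memory": "Order number is ORDER-12345"},
    ]
}

memories = search_response["results"]
context = "Customer History:\n" + "\n".join(f"- {m['memory']}" for m in memories)
print(context)
# Customer History:
# - Customer ordered a laptop last week that hasn't arrived
# - Order number is ORDER-12345

In handle_query, this context string is injected as an extra system message ahead of the customer's question.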
Automatic Memory Storage
Every interaction is stored for future context:
self.store_interaction(customer_id, query, response_text)

Memoid automatically extracts key facts from the conversation.
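One way to see what was kept is to read the memories back with get_customer_history right after an interaction. The output shown here is illustrative, based on the example conversation above:

# Read the memories back after an interaction to see what Memoid kept.
agent = CustomerSupportAgent()
agent.handle_query("customer_jane_doe", "My laptop order ORDER-12345 still hasn't arrived.")

for m in agent.get_customer_history("customer_jane_doe"):
    print("-", m["memory"])

# Illustrative output:
# - Customer ordered a laptop last week that hasn't arrived
# - Order number is ORDER-12345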
Streaming Responses
Using OpenAI’s streaming API provides a better user experience:
stream = self.openai.chat.completions.create(
    model="gpt-4",
    messages=messages,
    stream=True
)

Enhancements
Add Ticket Tracking
# Add this method to CustomerSupportAgent; it needs import time at the top of the file.
def create_ticket(self, customer_id: str, issue: str):
    """Create a support ticket and store in memory."""
    ticket_id = f"TICKET-{int(time.time())}"
    requests.post(
        f"{MEMOID_BASE_URL}/memories",
        headers=self.headers,
        json={
            "messages": [{"role": "user", "content": f"Opened ticket {ticket_id}: {issue}"}],
            "user_id": customer_id,
            "metadata": {
                "type": "ticket",
                "ticket_id": ticket_id,
                "status": "open"
            }
        }
    )
    return ticket_id
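Because the ticket is written to the same memory store, later context lookups for that customer can surface it. A quick usage sketch (the issue text is invented for illustration):

agent = CustomerSupportAgent()
ticket_id = agent.create_ticket(
    "customer_jane_doe",
    "Laptop order ORDER-12345 has not been delivered"
)
print(f"Created {ticket_id}")  # e.g. TICKET-1712345678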
Add Sentiment Analysis

def analyze_sentiment(self, message: str) -> str:
    """Detect customer sentiment for escalation."""
    response = self.openai.chat.completions.create(
        model="gpt-4",
        messages=[{
            "role": "user",
            "content": f"Rate sentiment (positive/neutral/negative): {message}"
        }]
    )
    return response.choices[0].message.content.lower()

Next Steps
- Add multi-channel support (email, chat, phone)
- Implement escalation to human agents (a starter sketch follows this list)
- Add knowledge base integration
- Build a dashboard for support metrics
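As a starting point for escalation, the two enhancement methods above can be combined: run analyze_sentiment on the incoming message and open a ticket when it comes back negative. This is a minimal sketch, assuming both methods have been added to CustomerSupportAgent and that notifying the human queue happens elsewhere:

# Hypothetical wrapper around handle_query: escalate when sentiment is negative.
def handle_query_with_escalation(agent: CustomerSupportAgent, customer_id: str, query: str) -> str:
    sentiment = agent.analyze_sentiment(query)
    if "negative" in sentiment:
        # Open a ticket so a human can follow up; the ticket is also stored in memory.
        ticket_id = agent.create_ticket(customer_id, query)
        print(f"(escalated to a human agent as {ticket_id})")
    return agent.handle_query(customer_id, query)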