Python Integration
Build AI applications with Python, FastAPI, and Super Agent Stack.
Setup
Install Dependencies
bash
pip install openai fastapi uvicorn python-dotenv

Environment Variables
.env
OPENROUTER_KEY=your_openrouter_key_here
SUPER_AGENT_KEY=your_super_agent_key_here

Basic Usage
chat.py
from openai import OpenAI
import os
from dotenv import load_dotenv
load_dotenv()
# OpenAI-compatible client pointed at the Super Agent Stack gateway.
# The OpenRouter key authenticates the request; the superAgentKey
# default header identifies the Super Agent Stack account on every call.
client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)
def chat(message: str, session_id: str) -> str:
    """Send one user message and return the assistant's reply text.

    Args:
        message: The user's message.
        session_id: Conversation identifier used by the gateway for
            memory and RAG lookups.

    Returns:
        The assistant's reply content.
    """
    completion = client.chat.completions.create(
        model="anthropic/claude-3-sonnet",
        messages=[{"role": "user", "content": message}],
        # Gateway-specific parameters are not part of the OpenAI SDK's
        # typed signature; the v1 SDK raises TypeError on unknown
        # keyword arguments, so they must ride in extra_body.
        extra_body={
            "session_id": session_id,
            "save_to_memory": True,
            "use_rag": True,
        },
    )
    return completion.choices[0].message.content
# Usage
# One-off example call; "user-123" is the session (conversation) id.
response = chat("Hello!", "user-123")
print(response)

FastAPI Application
main.py
from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from openai import OpenAI
import os
app = FastAPI()

# Shared OpenAI-compatible client for the Super Agent Stack gateway;
# auth is the OpenRouter API key plus the superAgentKey default header.
client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)
class ChatRequest(BaseModel):
    """Request body for POST /chat."""
    message: str  # user message text
    session_id: str  # conversation identifier for memory/RAG
    stream: bool = False  # when true, reply as a plain-text stream
@app.post("/chat")
async def chat(request: ChatRequest):
    """Chat endpoint.

    Returns a JSON object with the reply (and gateway metadata when
    present), or a streamed plain-text response when request.stream
    is true. Upstream failures surface as HTTP 500.
    """
    try:
        if request.stream:
            return StreamingResponse(
                stream_chat(request.message, request.session_id),
                media_type="text/plain"
            )
        completion = client.chat.completions.create(
            model="anthropic/claude-3-sonnet",
            messages=[{"role": "user", "content": request.message}],
            # Gateway-specific params must be sent via extra_body; the
            # OpenAI v1 SDK raises TypeError on unknown kwargs.
            extra_body={
                "session_id": request.session_id,
                "save_to_memory": True,
                "use_rag": True,
            },
        )
        return {
            "response": completion.choices[0].message.content,
            # _metadata is a gateway extension; fall back to {} if absent.
            "metadata": getattr(completion, '_metadata', {})
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
def stream_chat(message: str, session_id: str):
    """Yield reply text chunks from a streaming completion.

    Used as the body generator for StreamingResponse; skips chunks
    whose delta carries no content.
    """
    stream = client.chat.completions.create(
        model="anthropic/claude-3-sonnet",
        messages=[{"role": "user", "content": message}],
        stream=True,
        # Gateway-specific params must be sent via extra_body; the
        # OpenAI v1 SDK raises TypeError on unknown kwargs.
        extra_body={
            "session_id": session_id,
            "save_to_memory": True,
        },
    )
    for chunk in stream:
        if chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)

Run the Server
bash
python main.py

Flask Application
app.py
from flask import Flask, request, jsonify, Response
from openai import OpenAI
import os
app = Flask(__name__)

# Shared OpenAI-compatible client for the Super Agent Stack gateway;
# auth is the OpenRouter API key plus the superAgentKey default header.
client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)
@app.route('/chat', methods=['POST'])
def chat():
    """Flask chat endpoint.

    Expects a JSON body with "message" and "session_id"; returns the
    assistant reply as JSON, or an error payload with HTTP 500 when
    the upstream call fails.
    """
    data = request.json
    message = data.get('message')
    session_id = data.get('session_id')
    try:
        completion = client.chat.completions.create(
            model="anthropic/claude-3-sonnet",
            messages=[{"role": "user", "content": message}],
            # Gateway-specific params must be sent via extra_body; the
            # OpenAI v1 SDK raises TypeError on unknown kwargs.
            extra_body={
                "session_id": session_id,
                "save_to_memory": True,
                "use_rag": True,
            },
        )
        return jsonify({
            "response": completion.choices[0].message.content
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500
if __name__ == '__main__':
    app.run(debug=True, port=5000)

CLI Application
cli_chat.py
from openai import OpenAI
import os
import time
# OpenAI-compatible client pointed at the Super Agent Stack gateway;
# auth is the OpenRouter API key plus the superAgentKey default header.
client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)

# One session per process run, keyed by start time, so the whole CLI
# conversation shares server-side memory.
session_id = f"user-{int(time.time())}"
def chat(message: str) -> str:
    """Send one message in the CLI session.

    Returns the assistant's reply text, or a human-readable error
    string if the request fails (the CLI loop prints either one).
    """
    try:
        completion = client.chat.completions.create(
            model="anthropic/claude-3-sonnet",
            messages=[{"role": "user", "content": message}],
            # Gateway-specific params must be sent via extra_body; the
            # OpenAI v1 SDK raises TypeError on unknown kwargs.
            extra_body={
                "session_id": session_id,
                "save_to_memory": True,
                "use_rag": True,
            },
        )
        return completion.choices[0].message.content
    except Exception as e:
        return f"Error: {e}"
def main():
    """Run the interactive chat loop; 'exit' (any case) ends the session."""
    print("Chat started! Type 'exit' to quit.\n")
    # Read until the user types 'exit'; each other line is sent to the model.
    while (user_input := input("You: ")).lower() != 'exit':
        print(f"\nAI: {chat(user_input)}\n")
    print("Goodbye!")
if __name__ == "__main__":
    main()

Django Integration
views.py
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from openai import OpenAI
import json
import os
# OpenAI-compatible client pointed at the Super Agent Stack gateway;
# auth is the OpenRouter API key plus the superAgentKey default header.
client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)
@csrf_exempt
def chat_view(request):
    """Django chat endpoint.

    POST with a JSON body of {"message", "session_id"}; replies with
    the assistant text as JSON, or {"error": ...} with HTTP 500 on
    upstream failure. Non-POST methods fall through to the 405 reply.
    """
    if request.method == 'POST':
        data = json.loads(request.body)
        message = data.get('message')
        session_id = data.get('session_id')
        try:
            completion = client.chat.completions.create(
                model="anthropic/claude-3-sonnet",
                messages=[{"role": "user", "content": message}],
                # Gateway-specific params must be sent via extra_body;
                # the OpenAI v1 SDK raises TypeError on unknown kwargs.
                extra_body={
                    "session_id": session_id,
                    "save_to_memory": True,
                    "use_rag": True,
                },
            )
            return JsonResponse({
                "response": completion.choices[0].message.content
            })
        except Exception as e:
            return JsonResponse({"error": str(e)}, status=500)
    return JsonResponse({"error": "Method not allowed"}, status=405)

Best Practices
- Use environment variables: Never hardcode API keys
- Implement error handling: Catch and handle exceptions properly
- Add rate limiting: Protect your endpoints from abuse
- Use async/await: For better performance with FastAPI
- Log requests: Monitor API usage and errors