Basic Chat Example

Build a simple chat application with Super Agent Stack.

What You'll Build

A basic chat application that sends messages to the AI and displays responses. This example covers the fundamentals without streaming or advanced features.

Node.js / TypeScript

Complete Example

basic-chat.ts
import OpenAI from 'openai';
import * as readline from 'readline';

const client = new OpenAI({
  baseURL: 'https://superagentstack.orionixtech.com/api/v1',
  apiKey: process.env.OPENROUTER_KEY!,
  defaultHeaders: {
    'superAgentKey': process.env.SUPER_AGENT_KEY!,
  },
});

// Store conversation history
const messages: OpenAI.Chat.ChatCompletionMessageParam[] = [];

async function chat(userMessage: string) {
  // Add user message to history
  messages.push({
    role: 'user',
    content: userMessage,
  });

  try {
    // Get AI response
    const completion = await client.chat.completions.create({
      model: 'anthropic/claude-3-sonnet',
      messages,
      temperature: 0.7,
      max_tokens: 1000,
    });

    const assistantMessage = completion.choices[0].message.content ?? '';

    // Add assistant response to history
    messages.push({
      role: 'assistant',
      content: assistantMessage,
    });

    return assistantMessage;
  } catch (error: any) {
    console.error('Error:', error.message);
    throw error;
  }
}

// Interactive CLI
async function main() {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  console.log('Chat started! Type "exit" to quit.\n');

  const askQuestion = () => {
    rl.question('You: ', async (input) => {
      if (input.toLowerCase() === 'exit') {
        console.log('Goodbye!');
        rl.close();
        return;
      }

      try {
        const response = await chat(input);
        console.log(`\nAI: ${response}\n`);
      } catch (error) {
        console.error('Failed to get response');
      }

      askQuestion();
    });
  };

  askQuestion();
}

main();

Run the Example

bash
# Install dependencies
npm install openai

# Set environment variables
export OPENROUTER_KEY="your_openrouter_key"
export SUPER_AGENT_KEY="your_super_agent_key"

# Run
npx tsx basic-chat.ts

Python

Complete Example

basic_chat.py
from openai import OpenAI
import os

client = OpenAI(
    base_url="https://superagentstack.orionixtech.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers={
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    }
)

# Store conversation history
messages = []

def chat(user_message: str) -> str:
    """Send a message and get AI response"""
    # Add user message to history
    messages.append({
        "role": "user",
        "content": user_message
    })
    
    try:
        # Get AI response
        completion = client.chat.completions.create(
            model="anthropic/claude-3-sonnet",
            messages=messages,
            temperature=0.7,
            max_tokens=1000
        )
        
        assistant_message = completion.choices[0].message.content or ""
        
        # Add assistant response to history
        messages.append({
            "role": "assistant",
            "content": assistant_message
        })
        
        return assistant_message
    except Exception as e:
        print(f"Error: {e}")
        raise

def main():
    """Interactive chat loop"""
    print("Chat started! Type 'exit' to quit.\n")
    
    while True:
        user_input = input("You: ")
        
        if user_input.lower() == 'exit':
            print("Goodbye!")
            break
        
        try:
            response = chat(user_input)
            print(f"\nAI: {response}\n")
        except Exception:
            print("Failed to get response\n")

if __name__ == "__main__":
    main()

Run the Example

bash
# Install dependencies
pip install openai

# Set environment variables
export OPENROUTER_KEY="your_openrouter_key"
export SUPER_AGENT_KEY="your_super_agent_key"

# Run
python basic_chat.py

Adding Session Memory

Enable persistent memory across sessions:

chat-with-memory.ts
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'https://superagentstack.orionixtech.com/api/v1',
  apiKey: process.env.OPENROUTER_KEY!,
  defaultHeaders: {
    'superAgentKey': process.env.SUPER_AGENT_KEY!,
  },
});

// Generate or use existing session ID
const sessionId = `user-${Date.now()}`;

async function chat(userMessage: string) {
  const completion = await client.chat.completions.create({
    model: 'anthropic/claude-3-sonnet',
    messages: [
      { role: 'user', content: userMessage }
    ],
    sessionId,        // Enable memory
    saveToMemory: true,  // Save this conversation
  });

  return completion.choices[0].message.content;
}

// First conversation
console.log(await chat('My name is Sarah'));
// "Nice to meet you, Sarah!"

// Later conversation (same session)
console.log(await chat('What is my name?'));
// "Your name is Sarah!"

Adding Error Handling

chat-with-errors.ts
async function chat(userMessage: string, retries = 3) {
  for (let attempt = 0; attempt < retries; attempt++) {
    try {
      const completion = await client.chat.completions.create({
        model: 'anthropic/claude-3-sonnet',
        messages: [{ role: 'user', content: userMessage }],
      });

      return completion.choices[0].message.content;
    } catch (error: any) {
      console.error(`Attempt ${attempt + 1} failed:`, error.message);

      // Don't retry on authentication errors
      if (error.status === 401) {
        throw new Error('Invalid API keys');
      }

      // Retry on rate limits and server errors
      if (attempt < retries - 1 && (error.status === 429 || error.status >= 500)) {
        const delay = 1000 * Math.pow(2, attempt);
        await new Promise(resolve => setTimeout(resolve, delay));
        continue;
      }

      throw error;
    }
  }
}

Simple Web Interface

chat.html
<!DOCTYPE html>
<html>
<head>
  <title>Basic Chat</title>
  <style>
    body { font-family: Arial, sans-serif; max-width: 600px; margin: 50px auto; }
    #messages { height: 400px; border: 1px solid #ccc; padding: 10px; overflow-y: auto; margin-bottom: 10px; }
    .message { margin: 10px 0; padding: 8px; border-radius: 5px; }
    .user { background: #e3f2fd; text-align: right; }
    .assistant { background: #f5f5f5; }
    #input { width: 80%; padding: 10px; }
    #send { padding: 10px 20px; }
  </style>
</head>
<body>
  <h1>Basic Chat</h1>
  <div id="messages"></div>
  <input type="text" id="input" placeholder="Type your message..." />
  <button id="send">Send</button>

  <script>
    const messagesDiv = document.getElementById('messages');
    const input = document.getElementById('input');
    const sendBtn = document.getElementById('send');

    async function sendMessage() {
      const message = input.value.trim();
      if (!message) return;

      // Display user message
      addMessage('user', message);
      input.value = '';

      try {
        // Call your backend API
        const response = await fetch('/api/chat', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ message }),
        });

        const data = await response.json();
        addMessage('assistant', data.response);
      } catch (error) {
        addMessage('assistant', 'Error: Failed to get response');
      }
    }

    function addMessage(role, content) {
      const div = document.createElement('div');
      div.className = `message ${role}`;
      div.textContent = content;
      messagesDiv.appendChild(div);
      messagesDiv.scrollTop = messagesDiv.scrollHeight;
    }

    sendBtn.addEventListener('click', sendMessage);
    input.addEventListener('keydown', (e) => {
      if (e.key === 'Enter') sendMessage();
    });
  </script>
</body>
</html>
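
The page above posts to a /api/chat endpoint on your own server so your API keys never reach the browser. A minimal backend sketch, assuming Express and assuming you export the chat() helper from basic-chat.ts (adapt to whatever framework you use):

server.ts
import express from 'express';
// Assumes chat() from basic-chat.ts is exported; it holds the OpenAI client setup
import { chat } from './basic-chat';

const app = express();
app.use(express.json());

// The endpoint the web page above calls
app.post('/api/chat', async (req, res) => {
  try {
    const response = await chat(req.body.message);
    res.json({ response });
  } catch (error) {
    res.status(500).json({ error: 'Failed to get response' });
  }
});

app.listen(3000, () => console.log('Listening on http://localhost:3000'));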

Next Steps

This basic example is great for learning, but for production apps, consider:
  • Using streaming for better UX (see the sketch after this list)
  • Adding proper error handling
  • Implementing rate limiting
  • Adding user authentication
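
If you want to try streaming, here is a minimal sketch, assuming the gateway passes through the standard OpenAI-style stream option (reuse the client from basic-chat.ts):

streaming-chat.ts
// Request a streamed response instead of waiting for the full completion
const stream = await client.chat.completions.create({
  model: 'anthropic/claude-3-sonnet',
  messages: [{ role: 'user', content: 'Tell me a short story' }],
  stream: true,
});

for await (const chunk of stream) {
  // Each chunk carries an incremental piece of the response text
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
}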
