Next.js Integration

Build a complete AI-powered application with Next.js and Super Agent Stack.

Overview

This guide shows you how to build a production-ready AI chat application using the Next.js 14+ App Router, Server Actions, the Vercel AI SDK, and Super Agent Stack.

Project Setup

Create Next.js Project

bash
npx create-next-app@latest my-ai-app
cd my-ai-app
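
The interactive prompts will ask about TypeScript, Tailwind CSS, and the App Router; this guide assumes all three (the chat UI below uses Tailwind classes). If you prefer to skip the prompts, flags along these lines should work:

bash
npx create-next-app@latest my-ai-app --typescript --app --tailwind --eslint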

Install Dependencies

bash
# Pin the AI SDK to v3: the OpenAIStream / StreamingTextResponse helpers used below were removed in v4
npm install openai ai@3

Environment Variables

.env.local
OPENROUTER_KEY=your_openrouter_key_here
SUPER_AGENT_KEY=your_super_agent_key_here
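
Both keys are only read on the server. As a minimal sketch (the file name lib/env.ts and helper below are hypothetical, not part of Super Agent Stack), you can fail fast at startup if a key is missing instead of debugging confusing 401s later:

lib/env.ts
// Hypothetical helper: validates required environment variables once, at import time
function requireEnv(name: string): string {
  const value = process.env[name];
  if (!value) {
    throw new Error(`Missing required environment variable: ${name}`);
  }
  return value;
}

export const env = {
  OPENROUTER_KEY: requireEnv('OPENROUTER_KEY'),
  SUPER_AGENT_KEY: requireEnv('SUPER_AGENT_KEY'),
};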

Create API Route

app/api/chat/route.ts
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse } from 'ai';

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

const client = new OpenAI({
  baseURL: 'https://superagentstack.orionixtech.com/api/v1',
  apiKey: process.env.OPENROUTER_KEY!,
  defaultHeaders: {
    'superAgentKey': process.env.SUPER_AGENT_KEY!,
  },
});

export async function POST(req: Request) {
  try {
    const { messages, sessionId } = await req.json();

    const response = await client.chat.completions.create({
      model: 'anthropic/claude-3-sonnet',
      messages,
      stream: true,
      // Super Agent Stack parameters (passed through in the request body)
      sessionId,
      saveToMemory: true,
      useRAG: true,
    });

    const stream = OpenAIStream(response);
    return new StreamingTextResponse(stream);
  } catch (error: any) {
    console.error('Chat error:', error);
    return new Response(
      JSON.stringify({ error: error.message }),
      { status: 500, headers: { 'Content-Type': 'application/json' } }
    );
  }
}
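
If you want to sanity-check the route outside the UI, a plain fetch call works, since the route streams plain text. The sketch below (session ID and prompt are placeholders) reads the stream chunk by chunk from any client-side script:

// Quick manual test of /api/chat (assumes the route defined above)
async function testChatRoute() {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      messages: [{ role: 'user', content: 'Hello!' }],
      sessionId: 'test-session',
    }),
  });

  if (!res.ok || !res.body) {
    throw new Error(`Request failed: ${res.status}`);
  }

  // Read the streamed response incrementally
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let text = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
  }
  console.log(text);
}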

Chat Component

app/chat/page.tsx
'use client';

import { useChat } from 'ai/react';
import { useEffect, useRef } from 'react';

// Reuse one session ID per browser so Super Agent Stack memory persists across visits
function getSessionId() {
  if (typeof window === 'undefined') return `user-${Date.now()}`;
  let id = localStorage.getItem('sessionId');
  if (!id) {
    id = `user-${Date.now()}`;
    localStorage.setItem('sessionId', id);
  }
  return id;
}

export default function ChatPage() {
  const { messages, input, handleInputChange, handleSubmit, isLoading, error } = useChat({
    api: '/api/chat',
    body: {
      sessionId: getSessionId(),
    },
  });

  const messagesEndRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
  }, [messages]);

  return (
    <div className="flex flex-col h-screen max-w-4xl mx-auto p-4">
      {/* Header */}
      <div className="mb-4">
        <h1 className="text-3xl font-bold">AI Chat</h1>
        <p className="text-gray-600">Powered by Super Agent Stack</p>
      </div>

      {/* Messages */}
      <div className="flex-1 overflow-y-auto mb-4 space-y-4 bg-gray-50 rounded-lg p-4">
        {messages.length === 0 && (
          <div className="text-center text-gray-500 mt-8">
            <p className="text-lg mb-2">👋 Welcome!</p>
            <p>Start a conversation by typing a message below.</p>
          </div>
        )}

        {messages.map((m) => (
          <div
            key={m.id}
            className={`flex ${m.role === 'user' ? 'justify-end' : 'justify-start'}`}
          >
            <div
              className={`max-w-[80%] rounded-lg p-4 ${
                m.role === 'user'
                  ? 'bg-blue-500 text-white'
                  : 'bg-white border border-gray-200'
              }`}
            >
              <div className="font-semibold mb-1">
                {m.role === 'user' ? 'You' : 'AI'}
              </div>
              <div className="whitespace-pre-wrap">{m.content}</div>
            </div>
          </div>
        ))}

        {isLoading && (
          <div className="flex justify-start">
            <div className="max-w-[80%] rounded-lg p-4 bg-white border border-gray-200">
              <div className="font-semibold mb-1">AI</div>
              <div className="flex space-x-2">
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce" />
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce delay-100" />
                <div className="w-2 h-2 bg-gray-400 rounded-full animate-bounce delay-200" />
              </div>
            </div>
          </div>
        )}

        {error && (
          <div className="bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded">
            <strong>Error:</strong> {error.message}
          </div>
        )}

        <div ref={messagesEndRef} />
      </div>

      {/* Input Form */}
      <form onSubmit={handleSubmit} className="flex gap-2">
        <input
          value={input}
          onChange={handleInputChange}
          placeholder="Type your message..."
          className="flex-1 p-3 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500"
          disabled={isLoading}
        />
        <button
          type="submit"
          disabled={isLoading || !input.trim()}
          className="px-6 py-3 bg-blue-500 text-white rounded-lg hover:bg-blue-600 disabled:bg-gray-300 disabled:cursor-not-allowed transition-colors"
        >
          {isLoading ? 'Sending...' : 'Send'}
        </button>
      </form>
    </div>
  );
}

Using Server Actions

For non-streaming use cases, use Server Actions:

app/actions.ts
'use server';

import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'https://superagentstack.orionixtech.com/api/v1',
  apiKey: process.env.OPENROUTER_KEY!,
  defaultHeaders: {
    'superAgentKey': process.env.SUPER_AGENT_KEY!,
  },
});

export async function sendMessage(message: string, sessionId: string) {
  try {
    const completion = await client.chat.completions.create({
      model: 'anthropic/claude-3-sonnet',
      messages: [{ role: 'user', content: message }],
      sessionId,
      saveToMemory: true,
      useRAG: true,
    });

    return {
      success: true,
      response: completion.choices[0].message.content,
      metadata: (completion as any)._metadata,
    };
  } catch (error: any) {
    return {
      success: false,
      error: error.message,
    };
  }
}

Using the Server Action

app/simple-chat/page.tsx
'use client';

import { useState } from 'react';
import { sendMessage } from '../actions';

export default function SimpleChatPage() {
  const [messages, setMessages] = useState<Array<{ role: string; content: string }>>([]);
  const [input, setInput] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  // Lazy initializer so the session ID stays stable across re-renders
  const [sessionId] = useState(() => `user-${Date.now()}`);

  async function handleSubmit(e: React.FormEvent) {
    e.preventDefault();
    if (!input.trim() || isLoading) return;

    const userMessage = input;
    setInput('');
    setMessages(prev => [...prev, { role: 'user', content: userMessage }]);
    setIsLoading(true);

    const result = await sendMessage(userMessage, sessionId);

    if (result.success) {
      setMessages(prev => [...prev, { role: 'assistant', content: result.response! }]);
    } else {
      alert(`Error: ${result.error}`);
    }

    setIsLoading(false);
  }

  return (
    <div className="max-w-2xl mx-auto p-4">
      <h1 className="text-2xl font-bold mb-4">Simple Chat</h1>
      
      <div className="space-y-4 mb-4">
        {messages.map((m, i) => (
          <div key={i} className={`p-4 rounded-lg ${m.role === 'user' ? 'bg-blue-100' : 'bg-gray-100'}`}>
            <strong>{m.role === 'user' ? 'You' : 'AI'}:</strong> {m.content}
          </div>
        ))}
      </div>

      <form onSubmit={handleSubmit} className="flex gap-2">
        <input
          value={input}
          onChange={(e) => setInput(e.target.value)}
          placeholder="Type your message..."
          className="flex-1 p-3 border rounded-lg"
          disabled={isLoading}
        />
        <button
          type="submit"
          disabled={isLoading}
          className="px-6 py-3 bg-blue-500 text-white rounded-lg"
        >
          {isLoading ? 'Sending...' : 'Send'}
        </button>
      </form>
    </div>
  );
}

Adding Authentication

middleware.ts
import { NextResponse } from 'next/server';
import type { NextRequest } from 'next/server';

export function middleware(request: NextRequest) {
  // Check if the user is authenticated
  const session = request.cookies.get('session');

  if (!session) {
    // API requests get a 401; page requests are redirected to the login screen
    if (request.nextUrl.pathname.startsWith('/api/')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    return NextResponse.redirect(new URL('/login', request.url));
  }

  return NextResponse.next();
}

export const config = {
  matcher: ['/chat/:path*', '/api/chat/:path*'],
};
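
The middleware only checks for a session cookie; how that cookie is issued depends on your auth provider. As a minimal sketch (the app/api/login/route.ts path and the credential check below are placeholders, not a real auth implementation), a login route could set the cookie like this:

app/api/login/route.ts
import { NextResponse } from 'next/server';
import { randomUUID } from 'node:crypto';

export async function POST(req: Request) {
  const { email, password } = await req.json();

  // Placeholder: replace with a real credential check or auth provider
  const isValid = Boolean(email && password);
  if (!isValid) {
    return NextResponse.json({ error: 'Invalid credentials' }, { status: 401 });
  }

  // Set the 'session' cookie that the middleware above looks for
  const res = NextResponse.json({ ok: true });
  res.cookies.set('session', randomUUID(), {
    httpOnly: true,
    secure: true,
    sameSite: 'lax',
    path: '/',
    maxAge: 60 * 60 * 24 * 7, // one week
  });
  return res;
}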

Deployment

Vercel Deployment

bash
# Install Vercel CLI
npm i -g vercel

# Deploy
vercel

# Set environment variables
vercel env add OPENROUTER_KEY
vercel env add SUPER_AGENT_KEY

# Deploy to production
vercel --prod

Environment Variables

Make sure to add your API keys in the Vercel dashboard under Settings → Environment Variables.

Best Practices

  • Use streaming: Better UX with real-time responses
  • Implement rate limiting: Protect your API from abuse (see the sketch after this list)
  • Add authentication: Secure your chat endpoints
  • Handle errors gracefully: Show user-friendly error messages
  • Store session IDs: Use localStorage or cookies for persistence
  • Monitor usage: Track API calls and costs
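
As a starting point for rate limiting, the sketch below keeps a per-session counter in memory. This only works for a single long-running server instance; on serverless or multi-instance deployments, back it with a shared store such as Redis instead. The file name and limits are placeholders.

// lib/rate-limit.ts (hypothetical) — simple fixed-window, in-memory limiter
const WINDOW_MS = 60_000;  // 1-minute window
const MAX_REQUESTS = 20;   // per session per window

const hits = new Map<string, { count: number; windowStart: number }>();

export function isRateLimited(sessionId: string): boolean {
  const now = Date.now();
  const entry = hits.get(sessionId);

  // Start a new window if none exists or the old one expired
  if (!entry || now - entry.windowStart > WINDOW_MS) {
    hits.set(sessionId, { count: 1, windowStart: now });
    return false;
  }

  entry.count += 1;
  return entry.count > MAX_REQUESTS;
}

// Usage inside app/api/chat/route.ts, before calling the model:
// if (isRateLimited(sessionId)) {
//   return new Response(JSON.stringify({ error: 'Too many requests' }), { status: 429 });
// }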

Next Steps