Python SDK
Full Python SDK reference with configuration and examples.
AI agents are hard to debug. Requests fly by, context builds up, and when something fails you’re left guessing. SideSeat captures every LLM call, tool call, and agent decision, then displays them in a web UI as they happen.
Start the server
npx sideseat

Install the SDK

pip install strands-agents sideseat
# or
uv add strands-agents sideseat

npm install ai @ai-sdk/openai @sideseat/sdk

Initialize in your code
from strands import Agent
from sideseat import SideSeat, Frameworks

SideSeat(framework=Frameworks.Strands)  # add this

agent = Agent()  # uses Amazon Bedrock by default
agent("Analyze this dataset...")

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { init } from "@sideseat/sdk";

init(); // add this

const { text } = await generateText({
  model: openai("gpt-5-mini"),
  prompt: "Analyze this dataset...",
  experimental_telemetry: { isEnabled: true },
});

Open http://localhost:5388. You’ll see a live timeline of each prompt, tool call, and model response.
from sideseat import SideSeat, Frameworks
from strands import Agent

SideSeat(framework=Frameworks.Strands)

agent = Agent()
response = agent("What is 2+2?")
print(response)

import { init } from '@sideseat/sdk';
import { generateText } from 'ai';
import { bedrock } from '@ai-sdk/amazon-bedrock';
init();
const { text } = await generateText({
  model: bedrock('anthropic.claude-sonnet-4-5-20250929-v1:0'),
  prompt: 'What is 2+2?',
  experimental_telemetry: { isEnabled: true },
});
console.log(text);

import asyncio

from sideseat import SideSeat, Frameworks
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.genai import types
SideSeat(framework=Frameworks.GoogleADK)
agent = Agent(
    model="gemini-2.5-flash",
    name="assistant",
    instruction="You are a helpful assistant.",
)

async def main():
    session_service = InMemorySessionService()
    runner = Runner(agent=agent, app_name="my_app", session_service=session_service)
    session = await session_service.create_session(app_name="my_app", user_id="user")
    message = types.Content(role="user", parts=[types.Part(text="What is 2+2?")])
    async for event in runner.run_async(
        session_id=session.id, user_id="user", new_message=message
    ):
        if event.content and event.content.parts:
            for part in event.content.parts:
                if hasattr(part, "text") and part.text:
                    print(part.text)

asyncio.run(main())

from sideseat import SideSeat, Frameworks
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
SideSeat(framework=Frameworks.LangGraph)
llm = ChatOpenAI(model="gpt-5-mini")
agent = create_react_agent(llm, tools=[])
result = agent.invoke({"messages": [("user", "What is 2+2?")]})
print(result["messages"][-1].content)

from sideseat import SideSeat, Frameworks
from crewai import Agent, Task, Crew
SideSeat(framework=Frameworks.CrewAI)
researcher = Agent( role="Researcher", goal="Find information", backstory="Expert researcher",)
task = Task( description="Research AI trends", expected_output="Summary of trends", agent=researcher,)
crew = Crew(agents=[researcher], tasks=[task])
result = crew.kickoff()
print(result)

from sideseat import SideSeat, Frameworks
from autogen import AssistantAgent, UserProxyAgent
SideSeat(framework=Frameworks.AutoGen)
llm_config = {"config_list": [{"model": "gpt-5-mini"}]}
assistant = AssistantAgent("assistant", llm_config=llm_config)
user = UserProxyAgent("user", human_input_mode="NEVER")
user.initiate_chat(assistant, message="Hello!")

from sideseat import SideSeat, Frameworks
from agents import Agent, Runner
SideSeat(framework=Frameworks.OpenAIAgents)
agent = Agent(name="Assistant", instructions="You are helpful.")
result = Runner.run_sync(agent, "What is the capital of France?")
print(result.final_output)

SideSeat includes a built-in MCP server that gives your coding agent direct access to your agent’s traces, conversations, and costs. Connect it and let your coding tool optimize prompts, debug failures, and reduce costs using real data.
# Kiro CLI
kiro-cli mcp add --name sideseat --url http://localhost:5388/api/v1/projects/default/mcp

# Claude Code
claude mcp add --transport http sideseat http://localhost:5388/api/v1/projects/default/mcp

# OpenAI Codex
codex mcp add --transport http sideseat http://localhost:5388/api/v1/projects/default/mcp

See the MCP Server guide for Kiro, Cursor, and other clients.
Python SDK
Full Python SDK reference with configuration and examples.
JavaScript SDK
Full JavaScript SDK reference for Node.js apps.
Integrations
Connect your framework — Strands, LangGraph, CrewAI, and more.
Concepts