Supyagent
Tools

Example Tools

Annotated examples of common supypowers tool patterns including API clients, file processors, and data transformers.

Example Tools

These annotated examples cover the most common patterns for building supypowers tools. Each one is a complete, runnable script.

HTTP API Client

Fetch data from a REST API. This pattern works for any JSON API.

powers/github_api.py
# /// script
# dependencies = ["pydantic", "httpx"]
# ///
"""
GitHub API client. Fetch repository information.

Run with: supypowers run github_api:get_repo '{"owner": "python", "repo": "cpython"}'
"""
from typing import Optional

import httpx
from pydantic import BaseModel, Field


class GetRepoInput(BaseModel):
    """Input for fetching a GitHub repository."""
    # Both fields are required (Field(...)); the descriptions end up in the
    # generated JSON schema that describes this tool.
    owner: str = Field(..., description="Repository owner (user or org)")
    repo: str = Field(..., description="Repository name")


class GetRepoOutput(BaseModel):
    """Repository information.

    `ok` signals success; on failure only `error` is populated and the
    remaining fields keep their None defaults.
    """
    ok: bool
    name: Optional[str] = None  # full "owner/repo" name
    description: Optional[str] = None
    stars: Optional[int] = None  # stargazers_count from the API
    language: Optional[str] = None  # primary language; may be absent
    open_issues: Optional[int] = None
    url: Optional[str] = None  # html_url (web page, not the API URL)
    error: Optional[str] = None  # set only when ok is False


def get_repo(input: GetRepoInput) -> GetRepoOutput:
    """Fetch public information about a GitHub repository via the REST API."""
    endpoint = f"https://api.github.com/repos/{input.owner}/{input.repo}"
    try:
        response = httpx.get(
            endpoint,
            headers={"Accept": "application/vnd.github.v3+json"},
            timeout=15,  # bound the request so the tool never hangs
        )
        # A missing repo gets its own human-readable error message.
        if response.status_code == 404:
            return GetRepoOutput(ok=False, error=f"Repository not found: {input.owner}/{input.repo}")
        response.raise_for_status()
        payload = response.json()
        return GetRepoOutput(
            ok=True,
            name=payload["full_name"],
            description=payload.get("description"),
            stars=payload["stargazers_count"],
            language=payload.get("language"),
            open_issues=payload["open_issues_count"],
            url=payload["html_url"],
        )
    except Exception as e:
        # Network failures, non-2xx statuses, and malformed payloads all
        # become structured errors instead of raised exceptions.
        return GetRepoOutput(ok=False, error=str(e))

Key patterns: Error handling returns structured errors instead of raising. HTTP timeout prevents hanging. Status code 404 is handled separately with a clear message.


File Processor

Read a file, process it, and write the result. Useful for data transformations.

powers/csv_processor.py
# /// script
# dependencies = ["pydantic"]
# ///
"""
CSV file processor. Count rows, extract columns, compute statistics.

Run with: supypowers run csv_processor:summarize '{"path": "data.csv"}'
"""
import csv
import os
from io import StringIO
from pathlib import Path
from typing import Dict, List, Optional

from pydantic import BaseModel, Field


class SummarizeInput(BaseModel):
    """Input for CSV summarization."""
    # Path may start with "~"; it is expanded with os.path.expanduser.
    path: str = Field(..., description="Path to the CSV file")
    delimiter: str = Field(default=",", description="Column delimiter")


class SummarizeOutput(BaseModel):
    """CSV summary statistics.

    On failure `ok` is False, `error` is set, and the data fields keep
    their empty defaults. (Mutable defaults are safe here: pydantic copies
    field defaults per instance.)
    """
    ok: bool
    rows: int = 0  # total data rows (header excluded)
    columns: List[str] = []  # header names in file order
    sample_rows: List[Dict[str, str]] = []  # at most the first 5 rows
    error: Optional[str] = None


def summarize(input: SummarizeInput) -> SummarizeOutput:
    """Read a CSV file and return its shape, column names, and a sample of rows.

    Streams the file row by row instead of loading it entirely into memory:
    only a count and the first 5 rows are retained, so memory use stays
    constant regardless of file size.

    Returns a structured error (ok=False) for missing files or parse failures.
    """
    try:
        p = Path(os.path.expanduser(input.path))
        if not p.exists():
            return SummarizeOutput(ok=False, error=f"File not found: {input.path}")

        # newline="" lets the csv module do its own line-ending handling,
        # which is required for fields containing embedded newlines.
        with p.open(newline="", encoding="utf-8") as f:
            reader = csv.DictReader(f, delimiter=input.delimiter)
            columns = reader.fieldnames or []

            row_count = 0
            sample = []  # keep only the first 5 rows as a bounded sample
            for row in reader:
                if row_count < 5:
                    sample.append(dict(row))
                row_count += 1

        return SummarizeOutput(
            ok=True,
            rows=row_count,
            columns=list(columns),
            sample_rows=sample,
        )
    except Exception as e:
        return SummarizeOutput(ok=False, error=str(e))

Key patterns: Uses standard library only (no pandas needed for basic CSV). Returns a bounded sample instead of the entire dataset. Path expansion with os.path.expanduser for ~ support.


Database Query

Run SQL queries against a PostgreSQL database.

powers/database.py
# /// script
# dependencies = ["pydantic", "psycopg2-binary"]
# ///
"""
PostgreSQL query tool. Run read-only SQL queries.

Run with: supypowers run database:query '{"sql": "SELECT * FROM users LIMIT 5"}' --secrets DATABASE_URL=postgresql://...
"""
import os
from typing import Any, Dict, List, Optional

import psycopg2
import psycopg2.extras
from pydantic import BaseModel, Field


class QueryInput(BaseModel):
    """Input for database query."""
    sql: str = Field(..., description="SQL query to execute (SELECT only)")
    # Positional parameters substituted by the driver -- safe parameterization
    # instead of string interpolation.
    params: Optional[List[Any]] = Field(
        default=None, description="Query parameters for parameterized queries"
    )
    # Applied only when the query text contains no LIMIT of its own.
    limit: int = Field(default=100, description="Maximum rows to return")


class QueryOutput(BaseModel):
    """Query results.

    On failure `ok` is False and `error` describes the problem; the data
    fields keep their empty defaults.
    """
    ok: bool
    rows: List[Dict[str, Any]] = []  # one dict per row, keyed by column name
    columns: List[str] = []  # column names in result order
    row_count: int = 0  # len(rows)
    error: Optional[str] = None


def query(input: QueryInput) -> QueryOutput:
    """Execute a read-only SQL query and return the results as structured data.

    Safety measures:
      * Only statements starting with SELECT or WITH are accepted.
      * A LIMIT clause is appended when none is present, bounding result size.
      * The connection string comes from the DATABASE_URL environment variable
        (passed as a secret), never from tool input.
    """
    try:
        # Safety check: only allow SELECT queries.
        # NOTE: these checks are heuristics, not a SQL parser -- e.g. "LIMIT"
        # inside a string literal would also be detected.
        sql_upper = input.sql.strip().upper()
        if not sql_upper.startswith(("SELECT", "WITH")):
            return QueryOutput(ok=False, error="Only SELECT and WITH queries are allowed")

        database_url = os.environ.get("DATABASE_URL")
        if not database_url:
            return QueryOutput(ok=False, error="DATABASE_URL environment variable not set")

        # Strip any trailing semicolon so an appended LIMIT stays valid SQL
        # ("SELECT ...; LIMIT 100" would be a syntax error).
        sql = input.sql.strip().rstrip(";").rstrip()
        if "LIMIT" not in sql_upper:
            sql = f"{sql} LIMIT {input.limit}"

        conn = psycopg2.connect(database_url)
        try:
            # RealDictCursor yields rows as dicts keyed by column name.
            with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                cur.execute(sql, input.params)
                rows = [dict(row) for row in cur.fetchall()]
                columns = [desc[0] for desc in cur.description] if cur.description else []
        finally:
            # Always release the connection, even when the query fails.
            conn.close()

        return QueryOutput(
            ok=True,
            rows=rows,
            columns=columns,
            row_count=len(rows),
        )
    except Exception as e:
        return QueryOutput(ok=False, error=str(e))

Key patterns: Read-only enforcement (rejects non-SELECT queries). Automatic LIMIT to prevent returning millions of rows. Secrets passed via environment variables. Connection is always closed.


Web Scraper

Fetch a web page and extract structured data from it.

powers/scraper.py
# /// script
# dependencies = ["pydantic", "httpx", "beautifulsoup4"]
# ///
"""
Web scraper. Extract headings, links, and text content from a URL.

Run with: supypowers run scraper:extract '{"url": "https://example.com"}'
"""
from typing import List, Optional

import httpx
from bs4 import BeautifulSoup
from pydantic import BaseModel, Field


class ExtractInput(BaseModel):
    """Input for web extraction."""
    url: str = Field(..., description="URL to scrape")
    # When set, extraction is limited to the first element matching this
    # selector; title is still taken from the whole page.
    selector: Optional[str] = Field(
        default=None, description="CSS selector to narrow extraction (e.g., 'article', '#main')"
    )


class LinkInfo(BaseModel):
    """A single hyperlink: visible text plus its href target."""
    text: str  # anchor text, clipped to 100 chars by the extractor
    href: str  # raw href attribute (may be relative)


class ExtractOutput(BaseModel):
    """Extracted page data.

    All collections are bounded by the extractor (30 headings, 50 links,
    2000-char text preview) to keep tool output small.
    """
    ok: bool
    title: Optional[str] = None  # contents of the page <title>, if any
    headings: List[str] = []  # h1/h2/h3 text, document order
    links: List[LinkInfo] = []  # non-empty links only
    text_preview: Optional[str] = None  # first 2000 chars of visible text
    error: Optional[str] = None  # set only when ok is False


def extract(input: ExtractInput) -> ExtractOutput:
    """Fetch a web page and extract its headings, links, and text content."""
    try:
        response = httpx.get(
            input.url,
            headers={"User-Agent": "Mozilla/5.0 (compatible; supypowers/1.0)"},
            follow_redirects=True,
            timeout=20,
        )
        response.raise_for_status()

        soup = BeautifulSoup(response.text, "html.parser")

        # Default scope is the whole document; an optional CSS selector
        # narrows extraction to one matched element.
        target = soup
        if input.selector:
            target = soup.select_one(input.selector)
            if not target:
                return ExtractOutput(ok=False, error=f"Selector '{input.selector}' not found on page")

        # The title always comes from the full document, not the selection.
        title_tag = soup.find("title")
        page_title = title_tag.get_text(strip=True) if title_tag else None

        # Collect up to 50 links with non-empty text; clip text to 100 chars.
        link_items = []
        for anchor in target.find_all("a", href=True)[:50]:
            label = anchor.get_text(strip=True)
            if label:
                link_items.append(LinkInfo(text=label[:100], href=anchor["href"]))

        heading_texts = [h.get_text(strip=True) for h in target.find_all(["h1", "h2", "h3"])]

        body_text = target.get_text(separator="\n", strip=True)

        return ExtractOutput(
            ok=True,
            title=page_title,
            headings=heading_texts[:30],
            links=link_items,
            text_preview=body_text[:2000] if body_text else None,
        )
    except Exception as e:
        return ExtractOutput(ok=False, error=str(e))

Key patterns: Bounds on extracted data (50 links, 30 headings, 2000 char preview). Optional CSS selector for targeting specific page sections. Follows redirects.


Math / Calculation Tool

Perform calculations that the LLM might get wrong on its own.

powers/calculator.py
# /// script
# dependencies = ["pydantic"]
# ///
"""
Calculator tool. Safe arithmetic and math operations.

Run with: supypowers run calculator:calculate '{"expression": "2**32 - 1"}'
"""
import math
from typing import Optional

from pydantic import BaseModel, Field


class CalculateInput(BaseModel):
    """Input for calculation."""
    # Evaluated with a restricted eval(); only whitelisted math names work.
    expression: str = Field(
        ..., description="Mathematical expression to evaluate (Python syntax: +, -, *, /, **, %, sqrt(), sin(), etc.)"
    )


class CalculateOutput(BaseModel):
    """Calculation result.

    The result is always coerced to float; the original expression is
    echoed back so callers can verify what was evaluated.
    """
    ok: bool
    result: Optional[float] = None
    expression: Optional[str] = None  # echo of the evaluated expression
    error: Optional[str] = None  # set only when ok is False


# Whitelist of safe functions
# These are the only names visible to eval() inside calculate(); a mix of
# builtins (abs, round, ...) and math-module functions/constants. Anything
# not listed here is undefined inside an expression.
_SAFE_NAMES = {
    "abs": abs, "round": round, "min": min, "max": max,
    "sqrt": math.sqrt, "log": math.log, "log10": math.log10, "log2": math.log2,
    "sin": math.sin, "cos": math.cos, "tan": math.tan,
    "pi": math.pi, "e": math.e, "inf": math.inf,
    "ceil": math.ceil, "floor": math.floor,
    "pow": pow, "sum": sum,
}


def calculate(input: CalculateInput) -> CalculateOutput:
    """Evaluate a mathematical expression safely.

    Supports basic arithmetic, exponents, and the whitelisted math functions
    in _SAFE_NAMES. Returns a structured error for anything that fails to
    evaluate or is rejected by the sandbox checks.
    """
    expression = input.expression
    # eval() with empty __builtins__ is NOT a real sandbox: attribute chains
    # such as ().__class__.__mro__ can climb back to arbitrary code. Rejecting
    # double underscores closes that common escape route, and no legitimate
    # math expression needs them.
    if "__" in expression:
        return CalculateOutput(ok=False, error="Invalid expression: double underscores are not allowed")
    try:
        # Restricted eval: no builtins, only the whitelisted math names.
        result = eval(expression, {"__builtins__": {}}, _SAFE_NAMES)
        return CalculateOutput(
            ok=True,
            result=float(result),
            expression=expression,
        )
    except Exception as e:
        return CalculateOutput(ok=False, error=f"Invalid expression: {e}")

Key patterns: Restricted eval with no builtins and a whitelist of safe math functions. Returns the expression alongside the result for verification.


Data Transformer

Convert data between formats (JSON, CSV, YAML).

powers/converter.py
# /// script
# dependencies = ["pydantic", "pyyaml"]
# ///
"""
Data format converter. Transform between JSON, CSV, and YAML.

Run with: supypowers run converter:json_to_csv '{"json_data": "[{\"name\": \"Alice\", \"age\": 30}]"}'
"""
import csv
import json
from io import StringIO
from typing import Optional

import yaml
from pydantic import BaseModel, Field


class JsonToCsvInput(BaseModel):
    """Convert a JSON array of objects to CSV."""
    # String-based I/O so the LLM can pass data inline.
    json_data: str = Field(..., description="JSON string containing an array of objects")


class JsonToCsvOutput(BaseModel):
    """Result of a JSON-to-CSV conversion."""
    ok: bool
    csv_data: Optional[str] = None  # CSV text including the header row
    row_count: int = 0  # number of data rows written
    error: Optional[str] = None  # set only when ok is False


def json_to_csv(input: JsonToCsvInput) -> JsonToCsvOutput:
    """Convert a JSON array of objects to CSV format.

    Columns are the union of keys across all rows in first-seen order, so
    objects with differing keys are handled: missing values become empty
    cells. Returns a structured error for invalid JSON or non-object rows.
    """
    try:
        data = json.loads(input.json_data)
        if not isinstance(data, list) or not data:
            return JsonToCsvOutput(ok=False, error="Input must be a non-empty JSON array")
        if not all(isinstance(item, dict) for item in data):
            return JsonToCsvOutput(ok=False, error="Every element of the array must be a JSON object")

        # Union of keys across all rows, preserving first-seen order.
        # Using only data[0].keys() would make DictWriter raise a ValueError
        # for any later row carrying extra keys.
        fieldnames = {}
        for item in data:
            fieldnames.update(dict.fromkeys(item))

        output = StringIO()
        # restval="" writes an empty cell for keys a given row lacks.
        writer = csv.DictWriter(output, fieldnames=list(fieldnames), restval="")
        writer.writeheader()
        writer.writerows(data)

        return JsonToCsvOutput(
            ok=True,
            csv_data=output.getvalue(),
            row_count=len(data),
        )
    except json.JSONDecodeError as e:
        return JsonToCsvOutput(ok=False, error=f"Invalid JSON: {e}")
    except Exception as e:
        return JsonToCsvOutput(ok=False, error=str(e))


class JsonToYamlInput(BaseModel):
    """Convert JSON to YAML."""
    # Any valid JSON value (object, array, scalar) is accepted.
    json_data: str = Field(..., description="JSON string to convert")


class JsonToYamlOutput(BaseModel):
    """Result of a JSON-to-YAML conversion."""
    ok: bool
    yaml_data: Optional[str] = None  # YAML text in block (not flow) style
    error: Optional[str] = None  # set only when ok is False


def json_to_yaml(input: JsonToYamlInput) -> JsonToYamlOutput:
    """Convert a JSON string into an equivalent YAML document."""
    try:
        parsed = json.loads(input.json_data)
        # Block style and original key order keep the YAML readable and
        # faithful to the input.
        rendered = yaml.dump(parsed, default_flow_style=False, sort_keys=False)
        return JsonToYamlOutput(ok=True, yaml_data=rendered)
    except json.JSONDecodeError as e:
        # Distinguish malformed input from serialization failures.
        return JsonToYamlOutput(ok=False, error=f"Invalid JSON: {e}")
    except Exception as e:
        return JsonToYamlOutput(ok=False, error=str(e))

Key patterns: Multiple functions in one script -- each becomes a separate tool. String-based I/O so the LLM can pass data directly. Clear error messages for invalid input.


Image Generator (Media Output)

Tools that generate files should use the _media convention:

powers/chart.py
# /// script
# dependencies = ["pydantic", "matplotlib"]
# ///
"""
Chart generator. Create bar and line charts from data.

Run with: supypowers run chart:bar_chart '{"title": "Sales", "labels": ["Q1","Q2","Q3"], "values": [100,200,150]}'
"""
import hashlib
from pathlib import Path
from typing import List, Optional

import matplotlib
matplotlib.use("Agg")  # Non-interactive backend
import matplotlib.pyplot as plt
from pydantic import BaseModel, Field


class BarChartInput(BaseModel):
    """Input for bar chart generation."""
    title: str = Field(..., description="Chart title")
    labels: List[str] = Field(..., description="Category labels")
    # Must be the same length as labels (one bar per label).
    values: List[float] = Field(..., description="Values for each category")
    # Created on demand; the output filename is derived from the title hash.
    output_dir: str = Field(default="/tmp/charts", description="Directory to save the chart")


class BarChartOutput(BaseModel):
    """Result of chart generation, including the _media file convention."""
    ok: bool
    path: Optional[str] = None  # absolute path of the saved PNG
    # List of {"path": ..., "type": "image"} entries for the agent framework.
    # NOTE(review): pydantic treats leading-underscore fields as private
    # attributes excluded from normal serialization -- confirm the framework
    # actually receives this field as intended.
    _media: list = []
    error: Optional[str] = None  # set only when ok is False


def bar_chart(input: BarChartInput) -> BarChartOutput:
    """Generate a bar chart image from labels and values.

    Saves a PNG under input.output_dir and returns its absolute path, also
    advertising the file via the _media list so the agent framework can pick
    it up. Returns a structured error on failure.
    """
    try:
        # Fail early with a clear message instead of whatever opaque error
        # matplotlib would raise on mismatched inputs.
        if len(input.labels) != len(input.values):
            return BarChartOutput(
                ok=False,
                error=(
                    f"labels and values must have the same length "
                    f"({len(input.labels)} labels, {len(input.values)} values)"
                ),
            )

        out_dir = Path(input.output_dir)
        out_dir.mkdir(parents=True, exist_ok=True)

        fig, ax = plt.subplots(figsize=(10, 6))
        ax.bar(input.labels, input.values)
        ax.set_title(input.title)
        ax.set_ylabel("Value")

        # Deterministic filename derived from the title: re-running with the
        # same title overwrites the previous chart.
        filename = hashlib.md5(input.title.encode()).hexdigest()[:8] + ".png"
        path = out_dir / filename
        fig.savefig(str(path), dpi=100, bbox_inches="tight")
        plt.close(fig)  # release figure memory; Agg backend never displays it

        abs_path = str(path.resolve())
        return BarChartOutput(
            ok=True,
            path=abs_path,
            _media=[{"path": abs_path, "type": "image"}],
        )
    except Exception as e:
        return BarChartOutput(ok=False, error=str(e))

Key patterns: The _media list tells the agent framework about generated files. Non-interactive matplotlib backend (Agg). Output directory is configurable with a sensible default.

What's Next