Building Blocks
Agents
The core container that hosts reasoners and skills and connects your code to the AgentField control plane
The top-level container that turns your code into a discoverable, governed, production microservice.
Without Agent, you would wire an HTTP server, registration, routing, identity, tracing, memory access, and cross-agent calls separately. With Agent, that infrastructure boundary is the object you instantiate.
from agentfield import Agent, AIConfig
from pydantic import BaseModel

# One Agent instance hosts both the AI-powered reasoner and the
# deterministic skill below, and handles registration/serving itself.
app = Agent(
    node_id="support-triage",  # unique ID in the network
    ai_config=AIConfig(model="anthropic/claude-sonnet-4-20250514"),
)


class TicketClassification(BaseModel):
    # Typed output contract for the reasoner; validated by the SDK.
    priority: str  # "critical" | "high" | "normal" | "low"
    department: str  # route to the right team
    summary: str  # one-line summary for the queue


@app.reasoner()  # AI-powered — gets an LLM client automatically
async def classify_ticket(subject: str, body: str, customer_id: str) -> TicketClassification:
    result = await app.ai(
        system="You triage customer support tickets.",
        user=f"Subject: {subject}\n\n{body}",
        schema=TicketClassification,  # validated, typed output
    )
    # Persist the latest priority in shared memory, keyed per customer.
    await app.memory.set(f"ticket:{customer_id}:last_priority", result.priority)
    return result


@app.skill()  # deterministic — no AI, just business logic
def escalation_policy(priority: str) -> dict:
    sla = {"critical": 15, "high": 60, "normal": 240, "low": 1440}
    return {"sla_minutes": sla.get(priority, 240)}  # unknown priorities fall back to "normal"


app.run()  # starts HTTP server + registers with control plane
# POST /reasoners/classify_ticket → AI classification
# POST /skills/escalation_policy → SLA lookup

import { Agent } from '@agentfield/sdk';
import { z } from 'zod';

const agent = new Agent({
  nodeId: 'support-triage', // unique ID in the network
  aiConfig: { provider: 'anthropic', model: 'claude-sonnet-4-20250514' },
});

// Typed output contract for the reasoner; parsed and validated by the SDK.
const TicketClassification = z.object({
  priority: z.enum(['critical', 'high', 'normal', 'low']),
  department: z.string(), // route to the right team
  summary: z.string(), // one-line summary for the queue
});

// AI-powered — an LLM client arrives on the context automatically.
agent.reasoner('classifyTicket', async (ctx) => {
  const { subject, body, customerId } = ctx.input;
  const classification = await ctx.ai(
    `Subject: ${subject}\n\n${body}`,
    {
      system: 'You triage customer support tickets.',
      schema: TicketClassification, // validated, typed output
    },
  );
  // Persist the latest priority in shared memory, keyed per customer.
  await ctx.memory.set(`ticket:${customerId}:lastPriority`, classification.priority);
  return classification;
});

// Deterministic — plain business logic, no model call.
agent.skill('escalationPolicy', (ctx) => {
  const slaTable: Record<string, number> = { critical: 15, high: 60, normal: 240, low: 1440 };
  return { slaMinutes: slaTable[ctx.input.priority] ?? 240 };
});

agent.serve(); // starts HTTP server + registers with control plane
agent.serve(); // starts HTTP server + registers with control plane

package main
import (
"context"
"log"
"github.com/Agent-Field/agentfield/sdk/go/agent"
"github.com/Agent-Field/agentfield/sdk/go/ai"
)
func main() {
a, _ := agent.New(agent.Config{
NodeID: "support-triage", // unique ID in the network
Version: "1.0.0",
AgentFieldURL: "http://localhost:8080",
AIConfig: &ai.Config{Model: "anthropic/claude-sonnet-4-20250514"},
})
// AI-powered — gets an LLM client automatically
a.RegisterReasoner("classify_ticket", func(ctx context.Context, input map[string]any) (any, error) {
subject, _ := input["subject"].(string)
body, _ := input["body"].(string)
return map[string]any{"priority": "high", "department": "billing", "summary": subject}, nil
})
// Deterministic — no AI, just business logic
a.RegisterSkill("escalation_policy", func(ctx context.Context, input map[string]any) (any, error) {
sla := map[string]int{"critical": 15, "high": 60, "normal": 240, "low": 1440}
priority, _ := input["priority"].(string)
return map[string]any{"sla_minutes": sla[priority]}, nil
})
log.Fatal(a.Run(context.Background())) // starts HTTP server + registers with control plane
}

What just happened
- One Agent instance exposed both AI and deterministic operations
- The reasoner got model access, validation, and workflow context automatically
- The deterministic function became a separate callable endpoint without extra server code
- In all three SDKs, deterministic endpoints can be registered separately from AI-powered reasoners
- The memory write used the same execution context as the reasoner
Example generated surface:
Python/TypeScript:
POST /reasoners/classify_ticket
POST /skills/escalation_policy
target: support-triage.classify_ticket
Go equivalent:
POST /reasoners/classify_ticket
POST /skills/escalation_policy