Skip to content

Commit c90e7b5

Browse files
committed
docs(api): comprehensive API reference update with all endpoints
**Problem:** API Reference documentation was outdated and missing many endpoints added since December. Endpoint parameters weren't verified against actual implementation. **Solution:** - Added new 'Advanced Endpoints' tab documenting: • POST /api/chat/autonomous (multi-step orchestration) • POST /api/chat/tool-response (interactive tool execution) • GET /api/tool_result (large output retrieval with pagination) • Prompt discovery endpoints (/api/prompts/system, /api/prompts/mini, /api/topics) • Debug endpoints (/debug/mcp/tools, /debug/mcp/execute, /debug/tools/available) - Enhanced Models API with DELETE /api/models/download/:id - Expanded Chat Completions parameters: • SAM-specific: topic, mini_prompts, sam_config • Standard: top_p, repetition_penalty • SAM Config object: systemPromptId, maxIterations, workingDirectory, etc. - Updated provider list to include Google Gemini - Fixed stream parameter default (true - SAM is streaming-first) - Corrected endpoint signatures by verifying against source: • Tool response: userInput + toolCallId (not just response) • Tool result: query parameters with pagination support **Testing:** ✅ Build: PASS ✅ Line-by-line verification against SAMAPIServer.swift ✅ Parameter types cross-referenced with OpenAIChatRequest struct ✅ All 20 endpoints now documented with accurate signatures
1 parent af685a7 commit c90e7b5

1 file changed

Lines changed: 177 additions & 4 deletions

File tree

Sources/UserInterface/Help/APIReferenceView.swift

Lines changed: 177 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ struct APIReferenceView: View {
5656
enum APISection: String, CaseIterable, Identifiable {
5757
case overview
5858
case chatCompletions
59+
case advancedEndpoints
5960
case models
6061
case conversations
6162
case mcpTools
@@ -71,6 +72,7 @@ enum APISection: String, CaseIterable, Identifiable {
7172
switch self {
7273
case .overview: return "Overview"
7374
case .chatCompletions: return "Chat Completions"
75+
case .advancedEndpoints: return "Advanced Endpoints"
7476
case .models: return "Models API"
7577
case .conversations: return "Conversations API"
7678
case .mcpTools: return "MCP Tools"
@@ -86,6 +88,7 @@ enum APISection: String, CaseIterable, Identifiable {
8688
switch self {
8789
case .overview: return "book.fill"
8890
case .chatCompletions: return "message.fill"
91+
case .advancedEndpoints: return "gearshape.2.fill"
8992
case .models: return "cpu"
9093
case .conversations: return "bubble.left.and.bubble.right.fill"
9194
case .mcpTools: return "hammer.fill"
@@ -106,6 +109,9 @@ enum APISection: String, CaseIterable, Identifiable {
106109
case .chatCompletions:
107110
ChatCompletionsContent()
108111

112+
case .advancedEndpoints:
113+
AdvancedEndpointsContent()
114+
109115
case .models:
110116
ModelsAPIContent()
111117

@@ -160,7 +166,7 @@ struct OverviewContent: View {
160166
FeatureRow(icon: "checkmark.circle.fill", text: "OpenAI Chat Completions API compatible", color: .green)
161167
FeatureRow(icon: "checkmark.circle.fill", text: "Streaming responses with Server-Sent Events", color: .green)
162168
FeatureRow(icon: "checkmark.circle.fill", text: "MCP (Model Context Protocol) tool execution", color: .green)
163-
FeatureRow(icon: "checkmark.circle.fill", text: "Multiple AI provider support (OpenAI, Anthropic, Copilot, local models)", color: .green)
169+
FeatureRow(icon: "checkmark.circle.fill", text: "Multiple AI provider support (OpenAI, Anthropic, Google Gemini, GitHub Copilot, local models)", color: .green)
164170
}
165171

166172
SectionHeader(title: "API Endpoints")
@@ -189,6 +195,24 @@ struct OverviewContent: View {
189195
description: "Health check endpoint"
190196
)
191197

198+
EndpointCard(
199+
method: "POST",
200+
path: "/api/chat/autonomous",
201+
description: "Multi-step autonomous agent orchestration"
202+
)
203+
204+
EndpointCard(
205+
method: "POST",
206+
path: "/api/chat/tool-response",
207+
description: "Submit user response for interactive tool execution"
208+
)
209+
210+
EndpointCard(
211+
method: "GET",
212+
path: "/api/tool_result",
213+
description: "Retrieve large tool outputs by result ID"
214+
)
215+
192216
SectionHeader(title: "Response Formats")
193217
Text("All responses follow standard HTTP status codes:")
194218
VStack(alignment: .leading, spacing: 4) {
@@ -217,14 +241,40 @@ struct ChatCompletionsContent: View {
217241

218242
SectionHeader(title: "Request Body")
219243

244+
Text("**Standard OpenAI Parameters:**")
245+
.fontWeight(.semibold)
246+
220247
ParameterRow(name: "model", type: "string", required: true, description: "AI model identifier (e.g., 'gpt-4', 'claude-3-5-sonnet', 'copilot')")
221248
ParameterRow(name: "messages", type: "array", required: true, description: "Array of message objects with 'role' and 'content' fields")
222-
ParameterRow(name: "stream", type: "boolean", required: false, description: "Enable streaming responses (default: false)")
249+
ParameterRow(name: "stream", type: "boolean", required: false, description: "Enable streaming responses (default: true)")
223250
ParameterRow(name: "temperature", type: "number", required: false, description: "Sampling temperature 0.0-2.0 (default: 1.0)")
224251
ParameterRow(name: "max_tokens", type: "number", required: false, description: "Maximum tokens in response")
225-
ParameterRow(name: "conversationId", type: "string", required: false, description: "UUID of existing conversation for context")
252+
ParameterRow(name: "top_p", type: "number", required: false, description: "Nucleus sampling parameter")
253+
ParameterRow(name: "repetition_penalty", type: "number", required: false, description: "Repetition penalty for local models")
226254
ParameterRow(name: "tools", type: "array", required: false, description: "Array of tool definitions for function calling")
227255

256+
Text("**SAM-Specific Parameters:**")
257+
.fontWeight(.semibold)
258+
.padding(.top, 8)
259+
260+
ParameterRow(name: "conversation_id", type: "string", required: false, description: "UUID of existing conversation (maps to ConversationModel.id)")
261+
ParameterRow(name: "session_id", type: "string", required: false, description: "Alternative session identifier")
262+
ParameterRow(name: "context_id", type: "string", required: false, description: "Shared memory context identifier")
263+
ParameterRow(name: "topic", type: "string", required: false, description: "Topic folder ID for conversation organization")
264+
ParameterRow(name: "mini_prompts", type: "array", required: false, description: "Array of mini-prompt names to enable")
265+
ParameterRow(name: "sam_config", type: "object", required: false, description: "Advanced SAM configuration (see SAM Config below)")
266+
267+
SectionHeader(title: "SAM Config Object")
268+
Text("Optional configuration object for advanced features:")
269+
.foregroundColor(.secondary)
270+
.font(.caption)
271+
272+
ParameterRow(name: "systemPromptId", type: "string", required: false, description: "System prompt UUID or name ('sam_default', 'autonomous_editor')")
273+
ParameterRow(name: "maxIterations", type: "number", required: false, description: "Maximum workflow iterations (default: 300)")
274+
ParameterRow(name: "workingDirectory", type: "string", required: false, description: "Working directory for file operations")
275+
ParameterRow(name: "enableReasoning", type: "boolean", required: false, description: "Enable extended reasoning for complex tasks")
276+
ParameterRow(name: "enableWorkflowMode", type: "boolean", required: false, description: "Enable autonomous workflow orchestration")
277+
228278
SectionHeader(title: "Request Example (Non-Streaming)")
229279
CodeBlock(code: """
230280
curl -X POST http://localhost:8080/v1/chat/completions \\
@@ -348,6 +398,101 @@ struct ChatCompletionsContent: View {
348398
}
349399
}
350400

401+
// MARK: - Advanced Endpoints Content
402+
struct AdvancedEndpointsContent: View {
403+
var body: some View {
404+
VStack(alignment: .leading, spacing: 16) {
405+
Text("Advanced Endpoints")
406+
.font(.title2)
407+
.fontWeight(.bold)
408+
409+
Text("Specialized endpoints for autonomous workflows, tool interaction, and agent introspection.")
410+
.foregroundColor(.secondary)
411+
412+
SectionHeader(title: "Autonomous Workflow")
413+
CodeBlock(code: "POST /api/chat/autonomous")
414+
415+
Text("Enables multi-step autonomous agent orchestration. The agent can execute multiple iterations with tool calls automatically until task completion.")
416+
.foregroundColor(.secondary)
417+
418+
ParameterRow(name: "model", type: "string", required: true, description: "AI model identifier")
419+
ParameterRow(name: "messages", type: "array", required: true, description: "Initial conversation messages")
420+
ParameterRow(name: "max_iterations", type: "number", required: false, description: "Maximum workflow iterations (default: 300)")
421+
ParameterRow(name: "conversationId", type: "string", required: false, description: "Existing conversation context")
422+
423+
CodeBlock(code: """
424+
curl -X POST http://localhost:8080/api/chat/autonomous \\
425+
-H "Content-Type: application/json" \\
426+
-d '{
427+
"model": "gpt-4",
428+
"messages": [
429+
{"role": "user", "content": "Research Swift 6 concurrency and create a summary document"}
430+
],
431+
"max_iterations": 50
432+
}'
433+
""")
434+
435+
Text("The agent will autonomously research, plan, and create the document with multiple tool calls.")
436+
.font(.caption)
437+
.foregroundColor(.secondary)
438+
439+
SectionHeader(title: "Tool Response Submission")
440+
CodeBlock(code: "POST /api/chat/tool-response")
441+
442+
Text("Submit user response when a tool requires interactive input (e.g., user_collaboration tool).")
443+
.foregroundColor(.secondary)
444+
445+
ParameterRow(name: "conversationId", type: "string", required: true, description: "Conversation UUID")
446+
ParameterRow(name: "toolCallId", type: "string", required: true, description: "Tool call identifier waiting for response")
447+
ParameterRow(name: "userInput", type: "string", required: true, description: "User's response text")
448+
449+
CodeBlock(code: """
450+
curl -X POST http://localhost:8080/api/chat/tool-response \\
451+
-H "Content-Type: application/json" \\
452+
-d '{
453+
"conversationId": "abc-123-def-456",
454+
"toolCallId": "call_abc123",
455+
"userInput": "Approve"
456+
}'
457+
""")
458+
459+
SectionHeader(title: "Tool Result Retrieval")
460+
CodeBlock(code: "GET /api/tool_result")
461+
462+
Text("Retrieve large tool outputs that were persisted instead of included inline. Supports pagination for very large results.")
463+
.foregroundColor(.secondary)
464+
465+
ParameterRow(name: "conversationId", type: "string", required: true, description: "Conversation UUID (query parameter)")
466+
ParameterRow(name: "toolCallId", type: "string", required: true, description: "Tool call identifier (query parameter)")
467+
ParameterRow(name: "offset", type: "number", required: false, description: "Character offset to start reading from (default: 0)")
468+
ParameterRow(name: "length", type: "number", required: false, description: "Characters to read (default: 8192, max: 32768)")
469+
470+
CodeBlock(code: """
471+
curl "http://localhost:8080/api/tool_result?conversationId=abc-123&toolCallId=call_xyz&offset=0&length=8192"
472+
""")
473+
474+
SectionHeader(title: "Prompt Discovery")
475+
Text("Endpoints for agent awareness and configuration discovery:")
476+
477+
EndpointCard(method: "GET", path: "/api/prompts/system", description: "List available system prompts")
478+
EndpointCard(method: "GET", path: "/api/prompts/mini", description: "List mini-prompt configurations")
479+
EndpointCard(method: "GET", path: "/api/topics", description: "List shared topics")
480+
481+
SectionHeader(title: "Debug Endpoints")
482+
Text("Development and debugging tools:")
483+
484+
EndpointCard(method: "GET", path: "/debug/mcp/tools", description: "List all MCP tools with schemas")
485+
EndpointCard(method: "POST", path: "/debug/mcp/execute", description: "Execute MCP tool directly")
486+
EndpointCard(method: "GET", path: "/debug/tools/available", description: "Tool registry status")
487+
488+
Text("Debug endpoints are for development only and may change without notice.")
489+
.font(.caption)
490+
.foregroundColor(.secondary)
491+
.italic()
492+
}
493+
}
494+
}
495+
351496
// MARK: - Models API Content
352497
struct ModelsAPIContent: View {
353498
var body: some View {
@@ -399,15 +544,43 @@ struct ModelsAPIContent: View {
399544
SectionHeader(title: "Download Local Models")
400545
CodeBlock(code: "POST /api/models/download")
401546

547+
Text("Download GGUF or MLX models from HuggingFace.")
548+
.foregroundColor(.secondary)
549+
402550
ParameterRow(name: "modelUrl", type: "string", required: true, description: "HuggingFace model URL or identifier")
403551

552+
CodeBlock(code: """
553+
curl -X POST http://localhost:8080/api/models/download \\
554+
-H "Content-Type: application/json" \\
555+
-d '{
556+
"modelUrl": "https://huggingface.co/mlx-community/Qwen2.5-3B-Instruct-4bit"
557+
}'
558+
""")
559+
404560
SectionHeader(title: "Check Download Status")
405561
CodeBlock(code: "GET /api/models/download/{downloadId}/status")
406562

563+
Text("Monitor download progress and completion status.")
564+
.foregroundColor(.secondary)
565+
566+
CodeBlock(code: """
567+
curl http://localhost:8080/api/models/download/abc-123/status
568+
""")
569+
570+
SectionHeader(title: "Cancel Download")
571+
CodeBlock(code: "DELETE /api/models/download/{downloadId}")
572+
573+
Text("Cancel an in-progress model download.")
574+
.foregroundColor(.secondary)
575+
576+
CodeBlock(code: """
577+
curl -X DELETE http://localhost:8080/api/models/download/abc-123
578+
""")
579+
407580
SectionHeader(title: "List Installed Models")
408581
CodeBlock(code: "GET /api/models")
409582

410-
Text("Returns locally installed MLX models in ~/Library/Caches/sam/models/")
583+
Text("Returns locally installed GGUF, MLX, and Stable Diffusion models in ~/Library/Caches/sam/models/")
411584
.font(.caption)
412585
.foregroundColor(.secondary)
413586
}

0 commit comments

Comments
 (0)