/*
orca.ai/pkg/actor/subagent.go

Commit e18dde7c15 by 大森 (2026-05-10 14:28:17 +08:00):
feat: implement TUI with bubbletea and multi-agent collaboration

- Add bubbletea/lipgloss/glamour dependencies for TUI
- Create internal/tui package with EventWriter, styles, and bubbletea Model
- Support streaming output display in conversation window
- Add right panel with statistics and active agent status
- Implement multi-agent collaboration with sub-agents
- Add AgentCallTool for delegating tasks to sub-agents
- Support parallel tool execution with goroutines
- Auto-discover sub-agents from ~/.orca/prompts/ directory
- Fix orchestrator routing based on msg.To field
- Add non-blocking event writer with timeout to prevent blocking

(150 lines, 3.0 KiB, Go)
*/

package actor
import (
"context"
"fmt"
"io"
"strings"
"github.com/orca/orca/pkg/bus"
"github.com/orca/orca/pkg/llm"
)
// SubAgent is an actor that answers delegated tasks by sending them to an
// LLM backend with a configurable system prompt. It embeds *BaseAgent for
// identity and message-loop plumbing.
type SubAgent struct {
	*BaseAgent
	llmBackend llm.LLM // backend used to generate replies
	systemPrompt string // system message prepended to every task conversation
	role string // role label reported back in response metadata ("agent_role")
	streamWriter io.Writer // optional sink for live chunk output; nil disables streaming display
}
// SubAgentOption customizes a SubAgent during construction; pass to NewSubAgent.
type SubAgentOption func(*SubAgent)
// WithSubAgentSystemPrompt replaces the default system prompt that is sent
// to the LLM ahead of every task.
func WithSubAgentSystemPrompt(prompt string) SubAgentOption {
	return func(sub *SubAgent) { sub.systemPrompt = prompt }
}
// WithSubAgentRole sets the role label reported in response metadata.
func WithSubAgentRole(role string) SubAgentOption {
	return func(sub *SubAgent) { sub.role = role }
}
// WithSubAgentStreamWriter sets the writer that receives streamed LLM
// output chunks as they arrive.
func WithSubAgentStreamWriter(w io.Writer) SubAgentOption {
	return func(sub *SubAgent) { sub.streamWriter = w }
}
// NewSubAgent builds a SubAgent around the given id and LLM backend,
// applies the supplied options, wires its message handler, and starts it.
//
// NOTE(review): a failed Start panics rather than returning an error, so
// construction is only safe at program startup; consider a (*SubAgent, error)
// variant if agents are ever created at runtime.
func NewSubAgent(id string, llmBackend llm.LLM, opts ...SubAgentOption) *SubAgent {
	// Defaults below may be overridden by the options.
	sa := &SubAgent{
		BaseAgent:    NewBaseAgent(id, "subagent"),
		llmBackend:   llmBackend,
		systemPrompt: "你是一个专业的AI助手。",
		role:         "assistant",
	}
	for _, apply := range opts {
		apply(sa)
	}
	sa.SetHandler(sa.handleMessage)
	if err := sa.Start(); err != nil {
		panic(fmt.Sprintf("subagent: failed to start %s: %v", id, err))
	}
	return sa
}
// Role returns the agent's role label (e.g. "assistant").
func (sa *SubAgent) Role() string {
	return sa.role
}
// SystemPrompt returns the system prompt sent to the LLM for every task.
func (sa *SubAgent) SystemPrompt() string {
	return sa.systemPrompt
}
// SetStreamWriter replaces the destination for streamed output chunks.
// Pass nil to disable streaming display.
// NOTE(review): no synchronization guards this field — confirm it is not
// called concurrently with an in-flight streamChat.
func (sa *SubAgent) SetStreamWriter(w io.Writer) {
	sa.streamWriter = w
}
// handleMessage is the bus entry point: it dispatches task requests and
// system messages to their handlers and rejects anything else.
func (sa *SubAgent) handleMessage(ctx context.Context, msg bus.Message) (bus.Message, error) {
	switch msg.Type {
	case bus.MsgTypeSystem:
		return sa.handleSystem(ctx, msg)
	case bus.MsgTypeTaskRequest:
		return sa.handleTask(ctx, msg)
	}
	return bus.Message{}, fmt.Errorf("subagent %s: unsupported message type %s", sa.ID(), msg.Type)
}
// handleTask renders msg.Content as the user turn, streams an LLM reply,
// and returns it as a task-response addressed back to the sender.
func (sa *SubAgent) handleTask(ctx context.Context, msg bus.Message) (bus.Message, error) {
	userTurn := fmt.Sprintf("%v", msg.Content)
	conversation := []llm.Message{
		{Role: "system", Content: sa.systemPrompt},
		{Role: "user", Content: userTurn},
	}

	reply, err := sa.streamChat(ctx, conversation)
	if err != nil {
		return bus.Message{}, fmt.Errorf("subagent %s: LLM call failed: %w", sa.ID(), err)
	}

	response := bus.Message{
		ID:      msg.ID + "-response",
		Type:    bus.MsgTypeTaskResponse,
		From:    sa.ID(),
		To:      msg.From,
		Content: reply,
		Metadata: map[string]string{
			"processed_by": sa.ID(),
			"agent_role":   sa.role,
		},
	}
	return response, nil
}
// streamChat runs a streaming LLM call, accumulating the full reply while
// mirroring each chunk to the stream writer (when one is set). A "[id] "
// banner precedes the stream and a newline terminates it.
func (sa *SubAgent) streamChat(ctx context.Context, messages []llm.Message) (string, error) {
	var reply strings.Builder
	if sa.streamWriter != nil {
		fmt.Fprintf(sa.streamWriter, "\n[%s] ", sa.ID())
	}

	// Each chunk is both buffered and echoed live.
	onChunk := func(chunk string) error {
		reply.WriteString(chunk)
		if sa.streamWriter != nil {
			fmt.Fprint(sa.streamWriter, chunk)
		}
		return nil
	}
	if err := sa.llmBackend.Stream(ctx, messages, onChunk); err != nil {
		return "", err
	}

	if sa.streamWriter != nil {
		fmt.Fprintln(sa.streamWriter)
	}
	return reply.String(), nil
}
// handleSystem acknowledges a system message by echoing a system-typed
// reply back to the sender.
func (sa *SubAgent) handleSystem(ctx context.Context, msg bus.Message) (bus.Message, error) {
	ack := bus.Message{
		ID:      msg.ID + "-ack",
		Type:    bus.MsgTypeSystem,
		From:    sa.ID(),
		To:      msg.From,
		Content: fmt.Sprintf("subagent %s acknowledged", sa.ID()),
	}
	return ack, nil
}