service

package
v1.13.11 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 7, 2026 License: Apache-2.0 Imports: 55 Imported by: 0

Documentation

Index

Constants

View Source
const (
	// ToolRespConfirmShell is the template for the response to the user before executing a command.
	ToolRespConfirmShell = "```\n%s\n```\n%s"

	// ToolRespShellOutput is the template for the response to the user after executing a command.
	ToolRespShellOutput = `shell executed: %s
Status:
%s
%s`

	ToolUserConfirmPrompt = "Do you want to proceed?"

	// ToolRespDiscardEditFile is the template for the response to the user when a file edit is cancelled, stating that no changes were applied.
	ToolRespDiscardEditFile = "Based on your request, the OPERATION is CANCELLED: " +
		"Cancel edit file: %s\n" +
		"The user has explicitly declined to apply these file edits. The file will remain unchanged. Do not proceed with any file modifications or ask for further confirmation without explicit new user instruction."
)
View Source
const (
	// Model types
	ModelProviderGemini           string = "gemini" // for google gemini models
	ModelProviderOpenAI           string = "openai"
	ModelProviderOpenAICompatible string = "openai-compatible"
	ModelProviderAnthropic        string = "anthropic" // for anthropic models (official sdk)
	ModelProviderUnknown          string = "unknown"
)
View Source
const (
	TavilyUrl          = "https://api.tavily.com/search"
	GoogleSearchEngine = "google"
	BingSearchEngine   = "bing"
	TavilySearchEngine = "tavily"
	NoneSearchEngine   = "none"
)
View Source
const (
	CharsPerTokenEnglish  = 4.0 // Average for English text
	CharsPerTokenChinese  = 2.5 // Tuned: 3 bytes/char / 2.5 = 1.2 tokens/char (balanced)
	CharsPerTokenJapanese = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char
	CharsPerTokenKorean   = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char
	CharsPerTokenCode     = 3.5 // Tuned: Code is dense. 3.5 chars/token.
	CharsPerTokenJSON     = 3.7 // JSON: Typically 3.5-4 characters per token. Tuned: 3.7 chars/token.
	CharsPerTokenDefault  = 4.0 // Default fallback
	MessageOverheadTokens = 3   // Standard overhead per message (<|start|>role and <|end|>)
	ToolCallOverhead      = 24  // Reduced from 100 to 24 (closer to reality for JSON overhead)

	// Media Token Costs (Heuristics)
	// 1MB = 1000 tokens
	TokenCostImageDefault = 1000 // Safe upper bound average for high-res images (OpenAI high detail is ~1105, low is 85)
	TokenCostImageGemini  = 1000 // Fixed cost for Gemini images <= 384px (often tiled, but 258 is the base unit)

	// Video/Audio Heuristics (Tokens per MB - heavily estimated as we don't have duration)
	// Assumptions:
	// - Video: 2Mbps (.25MB/s). 1MB = 4s. Gemini Video: 263 tokens/s. 4s * 263 = 1052 tokens.
	// - Audio: 128kbps (16KB/s). 1MB = 64s. Gemini Audio: 32 tokens/s. 64s * 32 = 2048 tokens.
	TokenCostVideoPerMBGemini   = 1000
	TokenCostVideoPerMBOpenChat = 1000 // For base64 encoded video
	TokenCostAudioPerMBGemini   = 2000
)

Token estimation constants These are refined based on modern tokenizer behavior (cl100k_base, qwen, etc.):

  • English: ~4 chars/token (ASCII)
  • Chinese: ~0.6-2.0 tokens/char (Qwen is efficient, OpenAI is 2.0). Due to the different tokenization methods used by different models, the conversion ratios can vary. We use 2.5 bytes/token => ~1.2 tokens/char as a balanced estimate.
  • Japanese/Korean: ~1.5 tokens/char. 3 bytes/char / 2.0 => 1.5 tokens/char.
  • Tool Calls: JSON structure overhead is small (~20 tokens), not 100.
View Source
const (
	DefaultShellTimeout = 60 * time.Second
	MaxFileSize         = 20 * 1024 * 1024 // 20MB
)

Tool robustness constants

View Source
const (
	WokflowConfirmPrompt = "\033[96mDo you want to proceed with this agent? (y/N):\033[0m "         // used to wait for user confirmation
	WokflowProceedPrompt = "\033[96mDoes that work for you? Proceed with next step? (y/N):\033[0m " // used to wait for the proceed confirmation
	WokflowModifyPrompt  = "\033[96mPlease specify any changes you would like to make:\033[0m "     // used to wait for the user's modification input
)
View Source
const DefaultMaxCacheSize = 10000

DefaultMaxCacheSize is the default maximum number of entries in the cache

Variables

View Source
var (
	// RoleColors for message roles (initialized in init)
	RoleColors map[string]string

	// ContentTypeColors for special content (initialized in init)
	ContentTypeColors map[string]string
)
View Source
var DefaultLimitsLegacy = ModelLimits{

	ContextWindow:   32000,
	MaxOutputTokens: 4096,
}

DefaultLimitsLegacy is the fallback for unknown models

View Source
var DefaultLimitsModern = ModelLimits{

	ContextWindow:   128000,
	MaxOutputTokens: 8192,
}
View Source
var DefaultModelLimits = map[string]ModelLimits{}/* 126 elements not displayed */

DefaultModelLimits is the registry of known model limits. Context window values must be from official documentation or verified by tests

View Source
var ExecutorPath string

ExecutorPath is the path to the executable to run for filtering. Defaults to os.Executable(). Can be overridden for testing.

Functions

func AvailableEmbeddingTool added in v1.9.2

func AvailableEmbeddingTool(toolName string) bool

func AvailableMCPTool added in v1.11.4

func AvailableMCPTool(toolName string, client *MCPClient) bool

func AvailableSearchTool added in v1.9.12

func AvailableSearchTool(toolName string) bool

func BuildAnthropicMessages added in v1.13.10

func BuildAnthropicMessages(messages []UniversalMessage) []anthropic.MessageParam

BuildAnthropicMessages converts universal messages to Anthropic format. Handles: System role is inlined into the first user message. Preserves: OfText, OfThinking blocks

func BuildGeminiMessages added in v1.13.10

func BuildGeminiMessages(messages []UniversalMessage) []*gemini.Content

BuildGeminiMessages converts universal messages to Gemini format. Handles: System role is inlined into the first user message. Preserves: Parts with Text, Thought Maps: "assistant" → "model"

func BuildOpenAIMessages added in v1.13.10

func BuildOpenAIMessages(messages []UniversalMessage) []openai.ChatCompletionMessage

BuildOpenAIMessages converts universal messages to OpenAI format. Preserves: system role, Content, ReasoningContent

func BuildOpenChatMessages added in v1.13.10

func BuildOpenChatMessages(messages []UniversalMessage) []*model.ChatCompletionMessage

BuildOpenChatMessages converts universal messages to OpenChat (Volcengine) format. Preserves: system role, Content, ReasoningContent

func CallAgent added in v1.9.4

func CallAgent(op *AgentOptions) error

func CheckIfImageFromBytes

func CheckIfImageFromBytes(data []byte) (bool, string, error)

func CheckIfImageFromPath

func CheckIfImageFromPath(filePath string) (bool, string, error)

CheckIfImageFromPath attempts to decode a file as an image

func ClearTokenCache added in v1.12.14

func ClearTokenCache()

ClearTokenCache clears the global token cache (useful for testing)

func Contains added in v1.7.8

func Contains(list []string, item string) bool

func ConvertMessages added in v1.13.10

func ConvertMessages(data []byte, sourceProvider, targetProvider string) ([]byte, error)

ConvertMessages parses source provider data and builds target provider messages. Returns the converted data encoded as JSON.

Supported source/target providers: - ModelProviderOpenAI - ModelProviderOpenAICompatible (OpenChat) - ModelProviderAnthropic - ModelProviderGemini

func Debugf added in v1.2.0

func Debugf(format string, args ...interface{})

func Debugln added in v1.13.2

func Debugln(args ...interface{})

func DetectAnthropicKeyMessage added in v1.13.10

func DetectAnthropicKeyMessage(msg *anthropic.MessageParam) bool

Detects if a message is definitely an Anthropic message

func DetectGeminiKeyMessage added in v1.13.10

func DetectGeminiKeyMessage(msg *gemini.Content) bool

Detects if a message is definitely a Gemini message

func DetectMessageProvider added in v1.5.1

func DetectMessageProvider(data []byte) string

* Detects the conversation provider based on message format * OpenAICompatible: OpenAI messages that are pure text content * OpenAI: OpenAI messages that are unique to OpenAI * Anthropic: Anthropic messages that are unique to Anthropic * Gemini: Gemini messages that are unique to Gemini

func DetectModelProvider added in v1.6.0

func DetectModelProvider(endPoint string, modelName string) string

DetectModelProvider detects the model provider based on endpoint and model name. It first checks the endpoint domain, then falls back to model name patterns. This dual detection handles Chinese models hosted on US platforms (AWS, CoreWeave, etc.)

func DetectOpenAIKeyMessage added in v1.13.10

func DetectOpenAIKeyMessage(msg *openai.ChatCompletionMessage) bool

Detects if a message is definitely an OpenAI message

func Diff added in v1.11.10

func Diff(content1, content2, file1, file2 string, contextLines int) string

func DisableCodeExecution added in v1.7.1

func DisableCodeExecution()

func EnableCodeExecution added in v1.7.1

func EnableCodeExecution()

func EndWithNewline added in v1.9.7

func EndWithNewline(s string) bool

func Errorf added in v1.4.0

func Errorf(format string, args ...interface{})

func Errorln added in v1.13.2

func Errorln(args ...interface{})

func EstimateAnthropicMessageTokens added in v1.13.5

func EstimateAnthropicMessageTokens(msg anthropic.MessageParam) int

EstimateAnthropicMessageTokens estimates tokens for an Anthropic message.

func EstimateAnthropicMessagesTokens added in v1.13.5

func EstimateAnthropicMessagesTokens(messages []anthropic.MessageParam) int

EstimateAnthropicMessagesTokens estimates total tokens for a slice of Anthropic messages.

func EstimateAnthropicToolTokens added in v1.13.5

func EstimateAnthropicToolTokens(tools []anthropic.ToolUnionParam) int

EstimateAnthropicToolTokens estimates tokens for a slice of Anthropic tools.

func EstimateGeminiMessageTokens added in v1.12.14

func EstimateGeminiMessageTokens(msg *genai.Content) int

EstimateGeminiMessageTokens estimates tokens for a Gemini content message.

func EstimateGeminiMessagesTokens added in v1.12.14

func EstimateGeminiMessagesTokens(messages []*genai.Content) int

EstimateGeminiMessagesTokens estimates total tokens for a slice of Gemini messages.

func EstimateGeminiToolTokens added in v1.12.14

func EstimateGeminiToolTokens(tools []*genai.Tool) int

EstimateGeminiToolTokens estimates tokens for a slice of Gemini tools

func EstimateJSONTokens added in v1.12.14

func EstimateJSONTokens(data interface{}) int

EstimateJSONTokens estimates tokens for arbitrary JSON data. Useful for estimating tool results or complex structured content.

func EstimateOpenAIMessageTokens added in v1.12.14

func EstimateOpenAIMessageTokens(msg openai.ChatCompletionMessage) int

EstimateOpenAIMessageTokens estimates tokens for an OpenAI chat message. This accounts for role tokens, content, and tool calls.

func EstimateOpenAIMessagesTokens added in v1.12.14

func EstimateOpenAIMessagesTokens(messages []openai.ChatCompletionMessage) int

EstimateOpenAIMessagesTokens estimates total tokens for a slice of OpenAI messages.

func EstimateOpenAIToolTokens added in v1.12.14

func EstimateOpenAIToolTokens(tools []openai.Tool) int

EstimateOpenAIToolTokens estimates tokens for a slice of OpenAI tools

func EstimateOpenChatMessageTokens added in v1.12.14

func EstimateOpenChatMessageTokens(msg *openchat.ChatCompletionMessage) int

EstimateOpenChatMessageTokens estimates tokens for an OpenChat (Volcengine) message.

func EstimateOpenChatMessagesTokens added in v1.12.14

func EstimateOpenChatMessagesTokens(messages []*openchat.ChatCompletionMessage) int

EstimateOpenChatMessagesTokens estimates total tokens for a slice of OpenChat messages.

func EstimateOpenChatToolTokens added in v1.12.14

func EstimateOpenChatToolTokens(tools []*openchat.Tool) int

EstimateOpenChatToolTokens estimates tokens for a slice of OpenChat tools

func EstimateTokens added in v1.12.14

func EstimateTokens(text string) int

EstimateTokens provides fast character-based estimation for text. This is approximately 90% accurate compared to tiktoken.

func ExtractTextFromURL added in v1.6.14

func ExtractTextFromURL(url string, config *ExtractorConfig) ([]string, error)

ExtractTextFromURL fetches a URL and extracts the main text content Automatically detects content type and routes to appropriate handler: - text/plain, text/markdown: returns content directly - application/pdf: extracts text using PDF reader - text/html: parses and extracts text with boilerplate removal

func ExtractThinkTags added in v1.12.11

func ExtractThinkTags(content string) (thinking, cleaned string)

ExtractThinkTags extracts thinking content from <think>...</think> tags. Some providers (like MiniMax, some Qwen endpoints) embed reasoning content in <think> tags within the regular content field instead of using a separate reasoning_content field.

Returns:

  • thinking: the extracted thinking content (empty if no tags found)
  • cleaned: the content with <think> tags removed

func FetchProcess added in v1.6.14

func FetchProcess(urls []string) []string

func FilterToolArguments added in v1.12.19

func FilterToolArguments(argsMap map[string]interface{}, ignoreKeys []string) map[string]interface{}

func FindConvosByIndex added in v1.10.6

func FindConvosByIndex(idx string) (string, error)

func FormatMinutesSeconds added in v1.10.1

func FormatMinutesSeconds(d time.Duration) string

func GenerateTempFileName added in v1.10.9

func GenerateTempFileName() string

func GetAllEmbeddingTools added in v1.9.2

func GetAllEmbeddingTools() []string

func GetAllSearchTools added in v1.9.12

func GetAllSearchTools() []string

func GetAnthropicMessageKey added in v1.13.5

func GetAnthropicMessageKey(msg anthropic.MessageParam) string

GetAnthropicMessageKey generates a cache key for an Anthropic message.

func GetConvoDir added in v1.6.10

func GetConvoDir() string

func GetCurrentTokenCount added in v1.12.14

func GetCurrentTokenCount(messages []openai.ChatCompletionMessage) int

GetCurrentTokenCount returns the current token count for OpenAI messages

func GetCurrentTokenCountGemini added in v1.12.14

func GetCurrentTokenCountGemini(messages []*genai.Content) int

GetCurrentTokenCountGemini returns the current token count for Gemini messages

func GetCurrentTokenCountOpenChat added in v1.12.14

func GetCurrentTokenCountOpenChat(messages []*model.ChatCompletionMessage) int

GetCurrentTokenCountOpenChat returns the current token count for OpenChat messages

func GetDefaultSearchEngineName added in v1.6.0

func GetDefaultSearchEngineName() string

func GetFileContent added in v1.10.0

func GetFileContent(filePath string) (string, error)

func GetFilePath added in v1.5.1

func GetFilePath(dir string, filename string) string

func GetGeminiMessageKey added in v1.12.14

func GetGeminiMessageKey(msg *genai.Content) string

GetGeminiMessageKey generates a cache key for a Gemini message.

func GetLogger added in v1.2.0

func GetLogger() *log.Logger

func GetMIMEType added in v1.4.0

func GetMIMEType(filePath string) string

func GetMIMETypeByContent added in v1.4.0

func GetMIMETypeByContent(data []byte) string

func GetNoneSearchEngineName added in v1.6.2

func GetNoneSearchEngineName() string

func GetOpenAIMessageKey added in v1.12.14

func GetOpenAIMessageKey(msg openai.ChatCompletionMessage) string

GetOpenAIMessageKey generates a cache key for an OpenAI message by JSON marshaling. This captures ALL fields (Content, ReasoningContent, ToolCalls, MultiContent, etc.) ensuring different messages never produce the same key.

func GetOpenChatMessageKey added in v1.12.14

func GetOpenChatMessageKey(msg *model.ChatCompletionMessage) string

GetOpenChatMessageKey generates a cache key for an OpenChat (Volcengine) message.

func GetSanitizeTitle added in v1.5.1

func GetSanitizeTitle(title string) string

func GetStringValue added in v1.6.2

func GetStringValue(data map[string]interface{}, key string) string

Helper function to safely extract string values

func GetTerminalWidth added in v1.12.1

func GetTerminalWidth() int

GetTerminalWidth returns the width of the terminal using a robust fallback chain: 1. Direct TTY query via golang.org/x/term (most reliable) 2. Tmux pane width (if inside tmux) 3. COLUMNS environment variable 4. tput cols command 5. Default fallback of 80

func GetUserConfigDir added in v1.5.1

func GetUserConfigDir() string

func HasContent added in v1.8.1

func HasContent(s *string) bool

func Infof added in v1.5.1

func Infof(format string, args ...interface{})

func InitLogger added in v1.2.0

func InitLogger()

func IsAudioMIMEType added in v1.7.1

func IsAudioMIMEType(mimeType string) bool

func IsCodeExecutionEnabled added in v1.7.1

func IsCodeExecutionEnabled() bool

func IsExcelMIMEType added in v1.4.0

func IsExcelMIMEType(mimeType string) bool

func IsImageMIMEType added in v1.4.0

func IsImageMIMEType(mimeType string) bool

func IsModelGemini3 added in v1.13.5

func IsModelGemini3(modelName string) bool

IsModelGemini3 checks if the model name is a Gemini 3 model

func IsPDFMIMEType added in v1.4.0

func IsPDFMIMEType(mimeType string) bool

func IsStdinPipe added in v1.4.0

func IsStdinPipe(source string) bool

func IsSwitchAgentError added in v1.13.10

func IsSwitchAgentError(err error) bool

func IsTextMIMEType added in v1.4.0

func IsTextMIMEType(mimeType string) bool

func IsUnknownMIMEType added in v1.4.0

func IsUnknownMIMEType(mimeType string) bool

func IsValidEmbeddingTool added in v1.13.1

func IsValidEmbeddingTool(toolName string) bool

IsValidEmbeddingTool checks if a tool name is a valid embedding tool

func IsVideoMIMEType added in v1.13.1

func IsVideoMIMEType(mimeType string) bool

func MakeUserSubDir added in v1.5.1

func MakeUserSubDir(subparts ...string) string

func NeedUserConfirm added in v1.11.10

func NeedUserConfirm(info string, prompt string) (bool, error)

NeedUserConfirm prompts the user for confirmation using charmbracelet/huh.

func NewLogger added in v1.2.0

func NewLogger() *log.Logger

func Ptr added in v1.7.4

func Ptr[T any](t T) *T

func RenderAnthropicConversationLog added in v1.13.6

func RenderAnthropicConversationLog(data []byte) string

RenderAnthropicConversationLog returns a string summary of Anthropic conversation

func RenderGeminiConversationLog added in v1.13.6

func RenderGeminiConversationLog(data []byte) string

RenderGeminiConversationLog returns a string summary of Gemini conversation

func RenderOpenAIConversationLog added in v1.13.6

func RenderOpenAIConversationLog(data []byte) string

RenderOpenAIConversationLog returns a string summary of OpenAI conversation

func RunWorkflow added in v1.10.0

func RunWorkflow(config *WorkflowConfig, prompt string) error

RunWorkflow executes a workflow using the provided WorkflowConfig and initial prompt. It processes a series of agents in order, handling agent roles, interactive mode, and prompt modifications for the first agent as needed. Returns an error if the workflow encounters issues such as missing agent roles or agent execution errors.

func Successf added in v1.10.0

func Successf(format string, args ...interface{})

func TerminalSupportsTrueColor added in v1.12.19

func TerminalSupportsTrueColor() bool

TerminalSupportsTrueColor detects if the terminal supports true color (24-bit) Returns true if COLORTERM is set to "truecolor" or "24bit"

func TruncateString added in v1.5.1

func TruncateString(s string, maxLen int) string

Helper function to truncate strings with ellipsis

func Warnf added in v1.4.0

func Warnf(format string, args ...interface{})

func Warnln added in v1.13.2

func Warnln(args ...interface{})

Types

type Agent added in v1.9.4

type Agent struct {
	Model           *ModelInfo
	SystemPrompt    string
	UserPrompt      string
	Files           []*FileData         // Attachment files
	NotifyChan      chan<- StreamNotify // Sub Channel to send notifications
	DataChan        chan<- StreamData   // Sub Channel to receive streamed text data
	ProceedChan     <-chan bool         // Sub Channel to receive proceed signal
	SearchEngine    *SearchEngine       // Search engine name
	ToolsUse        ToolsUse            // Use tools
	EnabledTools    []string            // List of enabled embedding tools
	UseCodeTool     bool                // Use code tool
	ThinkingLevel   ThinkingLevel       // Thinking level: off, low, medium, high
	MCPClient       *MCPClient          // MCP client for MCP tools
	MaxRecursions   int                 // Maximum number of recursions for model calls
	Markdown        *Markdown           // Markdown renderer
	TokenUsage      *TokenUsage         // Token usage metainfo
	Std             *StdRenderer        // Standard renderer
	OutputFile      *FileRenderer       // File renderer
	Status          StatusStack         // Stack to manage streaming status
	Convo           ConversationManager // Conversation manager
	Indicator       *Indicator          // Indicator Spinner
	LastWrittenData string              // Last written data
}

func (*Agent) CompleteReasoning added in v1.9.7

func (ag *Agent) CompleteReasoning()

func (*Agent) Error added in v1.9.7

func (ag *Agent) Error(text string)

func (*Agent) Gemini2CopyToolCall added in v1.10.4

func (ag *Agent) Gemini2CopyToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2CreateDirectoryToolCall added in v1.10.4

func (ag *Agent) Gemini2CreateDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2DeleteDirectoryToolCall added in v1.10.4

func (ag *Agent) Gemini2DeleteDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2DeleteFileToolCall added in v1.10.4

func (ag *Agent) Gemini2DeleteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2EditFileToolCall added in v1.10.4

func (ag *Agent) Gemini2EditFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2ListDirectoryToolCall added in v1.10.4

func (ag *Agent) Gemini2ListDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2ListMemoryToolCall added in v1.12.22

func (ag *Agent) Gemini2ListMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2MCPToolCall added in v1.11.4

func (ag *Agent) Gemini2MCPToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2MoveToolCall added in v1.10.4

func (ag *Agent) Gemini2MoveToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2ReadFileToolCall added in v1.10.4

func (ag *Agent) Gemini2ReadFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2ReadMultipleFilesToolCall added in v1.10.4

func (ag *Agent) Gemini2ReadMultipleFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2SaveMemoryToolCall added in v1.12.22

func (ag *Agent) Gemini2SaveMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2SearchFilesToolCall added in v1.10.4

func (ag *Agent) Gemini2SearchFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2SearchTextInFileToolCall added in v1.10.4

func (ag *Agent) Gemini2SearchTextInFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2ShellToolCall added in v1.10.4

func (ag *Agent) Gemini2ShellToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2SwitchAgentToolCall added in v1.13.10

func (ag *Agent) Gemini2SwitchAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2WebFetchToolCall added in v1.10.4

func (ag *Agent) Gemini2WebFetchToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) Gemini2WriteFileToolCall added in v1.10.4

func (ag *Agent) Gemini2WriteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)

func (*Agent) GenerateAnthropicStream added in v1.13.5

func (ag *Agent) GenerateAnthropicStream() error

GenerateAnthropicStream generates a streaming response using Anthropic API

func (*Agent) GenerateGemini2Stream added in v1.9.4

func (ag *Agent) GenerateGemini2Stream() error

func (*Agent) GenerateOpenAIStream added in v1.10.4

func (ag *Agent) GenerateOpenAIStream() error

GenerateOpenAIStream generates a streaming response using OpenAI API

func (*Agent) GenerateOpenChatStream added in v1.9.4

func (ag *Agent) GenerateOpenChatStream() error

In current openchat api, we can't use cached tokens The context api and response api are not available for current golang lib

func (*Agent) SortAnthropicMessagesByOrder added in v1.13.5

func (ag *Agent) SortAnthropicMessagesByOrder() error

func (*Agent) SortOpenAIMessagesByOrder added in v1.12.20

func (ag *Agent) SortOpenAIMessagesByOrder() error

* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt

func (*Agent) SortOpenChatMessagesByOrder added in v1.12.20

func (ag *Agent) SortOpenChatMessagesByOrder() error

* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt

func (*Agent) StartIndicator added in v1.9.7

func (ag *Agent) StartIndicator(text string)

func (*Agent) StartReasoning added in v1.9.7

func (ag *Agent) StartReasoning()

StartReasoning notifies the user and logs to file that the agent has started thinking. It writes a status message to both Std and OutputFile if they are available.

func (*Agent) StopIndicator added in v1.9.7

func (ag *Agent) StopIndicator()

func (*Agent) Warn added in v1.9.7

func (ag *Agent) Warn(text string)

func (*Agent) WriteDiffConfirm added in v1.11.10

func (ag *Agent) WriteDiffConfirm(text string)

func (*Agent) WriteEnd added in v1.9.7

func (ag *Agent) WriteEnd()

func (*Agent) WriteFunctionCall added in v1.9.7

func (ag *Agent) WriteFunctionCall(text string)

func (*Agent) WriteMarkdown added in v1.9.7

func (ag *Agent) WriteMarkdown()

func (*Agent) WriteReasoning added in v1.9.7

func (ag *Agent) WriteReasoning(text string)

WriteReasoning writes the provided reasoning text to both the standard output and an output file, applying specific formatting to each if they are available.

func (*Agent) WriteText added in v1.9.7

func (ag *Agent) WriteText(text string)

WriteText writes the given text to the Agent's Std, Markdown, and OutputFile writers if they are set.

func (*Agent) WriteUsage added in v1.9.7

func (ag *Agent) WriteUsage()

type AgentOptions added in v1.9.7

type AgentOptions struct {
	Prompt         string
	SysPrompt      string
	Files          []*FileData
	ModelInfo      *data.Model
	SearchEngine   *data.SearchEngine
	MaxRecursions  int
	ThinkingLevel  string
	EnabledTools   []string // List of enabled embedding tools
	UseMCP         bool
	YoloMode       bool // Whether to automatically approve tools
	AppendMarkdown bool
	AppendUsage    bool
	OutputFile     string
	QuietMode      bool
	ConvoName      string
	MCPConfig      map[string]*data.MCPServer
}

type Anthropic added in v1.13.5

type Anthropic struct {
	// contains filtered or unexported fields
}

type AnthropicConversation added in v1.13.5

type AnthropicConversation struct {
	BaseConversation
	Messages []anthropic.MessageParam
}

AnthropicConversation represents a conversation using Anthropic format

func (*AnthropicConversation) Clear added in v1.13.5

func (c *AnthropicConversation) Clear() error

Clear removes all messages from the conversation

func (*AnthropicConversation) GetMessages added in v1.13.5

func (c *AnthropicConversation) GetMessages() interface{}

func (*AnthropicConversation) Load added in v1.13.5

func (c *AnthropicConversation) Load() error

Load retrieves the conversation from disk

func (*AnthropicConversation) Push added in v1.13.5

func (c *AnthropicConversation) Push(messages ...interface{})

Push adds multiple messages to the conversation

func (*AnthropicConversation) Save added in v1.13.5

func (c *AnthropicConversation) Save() error

Save persists the conversation to disk

func (*AnthropicConversation) SetMessages added in v1.13.5

func (c *AnthropicConversation) SetMessages(messages interface{})

type AtRefProcessor added in v1.12.9

type AtRefProcessor struct {
	// contains filtered or unexported fields
}

AtRefProcessor handles @ reference processing

func NewAtRefProcessor added in v1.12.9

func NewAtRefProcessor() *AtRefProcessor

NewAtRefProcessor creates a new @ reference processor

func (*AtRefProcessor) AddExcludePattern added in v1.12.9

func (p *AtRefProcessor) AddExcludePattern(pattern string)

AddExcludePattern adds a pattern to exclude from directory listings

func (*AtRefProcessor) ParseAtReferences added in v1.12.9

func (p *AtRefProcessor) ParseAtReferences(text string) []AtReference

ParseAtReferences finds all @ references in the given text

func (*AtRefProcessor) ProcessReferences added in v1.12.9

func (p *AtRefProcessor) ProcessReferences(text string, references []AtReference) (string, error)

ProcessReferences processes all @ references and returns augmented text

func (*AtRefProcessor) ProcessText added in v1.12.9

func (p *AtRefProcessor) ProcessText(text string) (string, error)

ProcessText processes text containing @ references and returns augmented text

func (*AtRefProcessor) SetMaxDirItems added in v1.12.9

func (p *AtRefProcessor) SetMaxDirItems(count int)

SetMaxDirItems sets the maximum number of directory items to list

func (*AtRefProcessor) SetMaxFileSize added in v1.12.9

func (p *AtRefProcessor) SetMaxFileSize(size int64)

SetMaxFileSize sets the maximum file size to include

type AtReference added in v1.12.9

type AtReference struct {
	Original string // Original @ reference text (e.g., "@main.go")
	Path     string // Resolved file/directory path
}

AtReference represents a single @ reference found in text

type BaseConversation added in v1.6.0

type BaseConversation struct {
	Name string
	Path string
}

BaseConversation holds common fields and methods for all conversation types

func (*BaseConversation) Clear added in v1.6.0

func (c *BaseConversation) Clear() error

func (*BaseConversation) GetMessages added in v1.10.6

func (c *BaseConversation) GetMessages() interface{}

func (*BaseConversation) GetPath added in v1.6.2

func (c *BaseConversation) GetPath() string

func (*BaseConversation) Load added in v1.10.6

func (c *BaseConversation) Load() error

func (*BaseConversation) Open added in v1.10.6

func (c *BaseConversation) Open(title string) error

Open initializes a BaseConversation with the provided title, resolving an index to the actual conversation name if necessary. It resets the messages, sanitizes the conversation name for the path, and sets the internal path accordingly. Returns an error if the title cannot be resolved.

func (*BaseConversation) Push added in v1.10.6

func (c *BaseConversation) Push(messages ...interface{})

func (*BaseConversation) Save added in v1.10.6

func (c *BaseConversation) Save() error

func (*BaseConversation) SetMessages added in v1.10.6

func (c *BaseConversation) SetMessages(messages interface{})

func (*BaseConversation) SetPath added in v1.6.0

func (c *BaseConversation) SetPath(title string)

SetPath sets the file path for saving the conversation

type ContextManager added in v1.12.14

type ContextManager struct {
	MaxInputTokens  int                // Maximum input tokens allowed
	MaxOutputTokens int                // Maximum output tokens allowed (new field for Anthropic)
	Strategy        TruncationStrategy // Strategy for handling overflow
	BufferPercent   float64            // Safety buffer (0.0-1.0)
}

ContextManager handles context window limits for LLM conversations

func NewContextManager added in v1.12.14

func NewContextManager(limits ModelLimits, strategy TruncationStrategy) *ContextManager

NewContextManager creates a context manager with the given model limits

func NewContextManagerForModel added in v1.12.14

func NewContextManagerForModel(modelName string, strategy TruncationStrategy) *ContextManager

NewContextManagerForModel creates a context manager by looking up the model name

func (*ContextManager) PrepareAnthropicMessages added in v1.13.5

func (cm *ContextManager) PrepareAnthropicMessages(messages []anthropic.MessageParam, systemPrompt string, tools []anthropic.ToolUnionParam) ([]anthropic.MessageParam, bool)

PrepareAnthropicMessages processes messages to fit within context window limits.

func (*ContextManager) PrepareGeminiMessages added in v1.12.14

func (cm *ContextManager) PrepareGeminiMessages(messages []*genai.Content, systemPrompt string, tools []*genai.Tool) ([]*genai.Content, bool)

PrepareGeminiMessages processes messages to fit within context window limits.

func (*ContextManager) PrepareOpenAIMessages added in v1.12.14

func (cm *ContextManager) PrepareOpenAIMessages(messages []openai.ChatCompletionMessage, tools []openai.Tool) ([]openai.ChatCompletionMessage, bool)

PrepareOpenAIMessages processes messages to fit within context window limits. Returns the processed messages and a boolean indicating if truncation occurred.

func (*ContextManager) PrepareOpenChatMessages added in v1.12.14

func (cm *ContextManager) PrepareOpenChatMessages(messages []*model.ChatCompletionMessage, tools []*model.Tool) ([]*model.ChatCompletionMessage, bool)

PrepareOpenChatMessages processes messages to fit within context window limits for OpenChat format.

type ConversationManager added in v1.6.0

type ConversationManager interface {
	SetPath(title string)
	GetPath() string
	Load() error
	Save() error
	Open(title string) error
	Clear() error
	Push(messages ...interface{})
	GetMessages() interface{}
	SetMessages(messages interface{})
}

ConversationManager is an interface for handling conversation history

func ConstructConversationManager added in v1.10.6

func ConstructConversationManager(convoName string, provider string) (ConversationManager, error)

type ConvoMeta added in v1.6.10

type ConvoMeta struct {
	Name     string
	Provider string
	ModTime  int64
}

func ListSortedConvos added in v1.6.10

func ListSortedConvos(convoDir string) ([]ConvoMeta, error)

ListSortedConvos returns a slice of ConvoMeta sorted by ModTime descending.

type ExtractorConfig added in v1.6.14

type ExtractorConfig struct {
	UserAgent          string
	HeaderAccept       string
	Timeout            time.Duration
	MinTextLength      int
	BoilerplateIDs     []string
	BoilerplateClasses []string
}

Configuration options for the text extractor

type FileData added in v1.4.0

type FileData struct {
	// contains filtered or unexported fields
}

func NewFileData added in v1.4.0

func NewFileData(format string, data []byte, path string) *FileData

func (*FileData) Data added in v1.4.0

func (i *FileData) Data() []byte

func (*FileData) Format added in v1.4.0

func (i *FileData) Format() string

func (*FileData) Path added in v1.6.0

func (i *FileData) Path() string

type FileRenderer added in v1.9.7

type FileRenderer struct {
	// contains filtered or unexported fields
}

FileRenderer is a renderer that writes output to a file

func NewFileRenderer added in v1.9.7

func NewFileRenderer(filename string) (*FileRenderer, error)

NewFileRenderer creates a new instance of FileRenderer

func (*FileRenderer) Close added in v1.9.7

func (fr *FileRenderer) Close() error

Close closes the file renderer and its underlying file

func (*FileRenderer) GetFilename added in v1.9.7

func (fr *FileRenderer) GetFilename() string

GetFilename returns the name of the file being written to

func (*FileRenderer) Write added in v1.9.7

func (fr *FileRenderer) Write(args ...interface{})

func (*FileRenderer) Writef added in v1.9.7

func (fr *FileRenderer) Writef(format string, args ...interface{})

Writef writes formatted output to the file

func (*FileRenderer) Writeln added in v1.9.7

func (fr *FileRenderer) Writeln(args ...interface{})

type Gemini2Agent added in v1.9.10

type Gemini2Agent struct {
	// contains filtered or unexported fields
}

type Gemini2Conversation added in v1.7.1

type Gemini2Conversation struct {
	BaseConversation
	Messages []*genai.Content
}
  • Google Gemini2.0 Conversation

Gemini2Conversation manages conversations for Google's Gemini model.

func (*Gemini2Conversation) Clear added in v1.10.6

func (g *Gemini2Conversation) Clear() error

Clear removes all messages from the conversation

func (*Gemini2Conversation) GetMessages added in v1.10.6

func (g *Gemini2Conversation) GetMessages() interface{}

func (*Gemini2Conversation) Load added in v1.7.1

func (g *Gemini2Conversation) Load() error

Load retrieves the Gemini conversation from disk

func (*Gemini2Conversation) Push added in v1.10.6

func (g *Gemini2Conversation) Push(messages ...interface{})

Push adds multiple content items to the conversation history.

func (*Gemini2Conversation) Save added in v1.7.1

func (g *Gemini2Conversation) Save() error

Save persists the Gemini conversation to disk

func (*Gemini2Conversation) SetMessages added in v1.10.6

func (g *Gemini2Conversation) SetMessages(messages interface{})

type Indicator added in v1.9.7

type Indicator struct {
	// contains filtered or unexported fields
}

func NewIndicator added in v1.9.7

func NewIndicator(text string) *Indicator

func (*Indicator) Start added in v1.9.7

func (i *Indicator) Start(text string)

func (*Indicator) Stop added in v1.9.7

func (i *Indicator) Stop()

type MCPClient added in v1.11.4

type MCPClient struct {
	// contains filtered or unexported fields
}

func GetMCPClient added in v1.11.4

func GetMCPClient() *MCPClient

func (*MCPClient) AddHttpServer added in v1.11.4

func (mc *MCPClient) AddHttpServer(name string, url string, headers map[string]string) error

func (*MCPClient) AddSseServer added in v1.11.4

func (mc *MCPClient) AddSseServer(name string, url string, headers map[string]string) error

func (*MCPClient) AddStdServer added in v1.11.4

func (mc *MCPClient) AddStdServer(name string, cmd string, env map[string]string, cwd string, args ...string) error

func (*MCPClient) CallTool added in v1.11.4

func (mc *MCPClient) CallTool(toolName string, args map[string]any) (*MCPToolResponse, error)

func (*MCPClient) Close added in v1.11.4

func (mc *MCPClient) Close()

func (*MCPClient) FindTool added in v1.11.4

func (mc *MCPClient) FindTool(toolName string) *MCPSession

func (*MCPClient) GetAllServers added in v1.11.4

func (mc *MCPClient) GetAllServers() []*MCPServer

GetAllServers returns a slice of all MCP servers, each containing its available tools, resources, and prompts.

func (*MCPClient) GetPrompts added in v1.11.8

func (mc *MCPClient) GetPrompts(session *MCPSession) (*[]MCPPrompt, error)

func (*MCPClient) GetResources added in v1.11.8

func (mc *MCPClient) GetResources(session *MCPSession) (*[]MCPResource, error)

func (*MCPClient) GetTools added in v1.11.4

func (mc *MCPClient) GetTools(session *MCPSession) (*[]MCPTool, error)

func (*MCPClient) Init added in v1.11.4

func (mc *MCPClient) Init(servers map[string]*data.MCPServer, option MCPLoadOption) error

Init initializes the client from the given server configurations. Three transport types are supported: httpUrl → StreamableHTTPClientTransport, url → SSEClientTransport, command → StdioClientTransport. If LoadAll is true, all servers are listed; otherwise only allowed servers are loaded.

type MCPLoadOption added in v1.11.8

type MCPLoadOption struct {
	LoadAll       bool // load all tools(allowed|blocked)
	LoadTools     bool // load tools (tools/list)
	LoadResources bool // load resources (resources/list)
	LoadPrompts   bool // load prompts (prompts/list)
}

type MCPPrompt added in v1.11.8

type MCPPrompt struct {
	Name        string
	Description string
	Parameters  map[string]string
}

type MCPResource added in v1.11.8

type MCPResource struct {
	Name        string
	Description string
	URI         string
	MIMEType    string
}

type MCPServer added in v1.11.4

type MCPServer struct {
	Name      string
	Allowed   bool
	Tools     *[]MCPTool
	Resources *[]MCPResource
	Prompts   *[]MCPPrompt
}

type MCPSession added in v1.11.4

type MCPSession struct {
	// contains filtered or unexported fields
}

type MCPTool added in v1.11.8

type MCPTool struct {
	Name        string
	Description string
	Parameters  map[string]string
	Properties  map[string]*jsonschema.Schema // Keep origin JSON Schema
}

type MCPToolResponse added in v1.11.6

type MCPToolResponse struct {
	Types    []MCPToolResponseType
	Contents []string
}

type MCPToolResponseType added in v1.11.6

type MCPToolResponseType string
const (
	MCPResponseText  MCPToolResponseType = "text"
	MCPResponseImage MCPToolResponseType = "image"
	MCPResponseAudio MCPToolResponseType = "audio"
)

type Markdown added in v1.9.7

type Markdown struct {
	// contains filtered or unexported fields
}

func NewMarkdown added in v1.9.7

func NewMarkdown() *Markdown

NewMarkdown creates a new instance of Markdown

func (*Markdown) Render added in v1.9.7

func (mr *Markdown) Render(r Render)

Render clears the streaming output and re-renders the entire Markdown.

func (*Markdown) Write added in v1.9.7

func (mr *Markdown) Write(args ...interface{})

func (*Markdown) Writef added in v1.9.7

func (mr *Markdown) Writef(format string, args ...interface{})

Writef streams formatted output incrementally and tracks the number of lines.

type ModelInfo added in v1.13.2

type ModelInfo struct {
	ApiKey      string
	EndPoint    string
	ModelName   string
	Provider    string
	Temperature float32
	TopP        float32 // Top-p sampling parameter
	Seed        *int32  // Seed for deterministic generation
}

type ModelLimits added in v1.12.14

type ModelLimits struct {
	ContextWindow   int // Total context window in tokens
	MaxOutputTokens int // Maximum output tokens allowed
}

ModelLimits contains context window configuration for a model

func GetModelLimits added in v1.12.14

func GetModelLimits(modelName string) ModelLimits

GetModelLimits retrieves the limits for a given model name. It performs exact match first, then pattern matching, then returns defaults.

func (ModelLimits) MaxInputTokens added in v1.12.14

func (ml ModelLimits) MaxInputTokens(bufferPercent float64) int

MaxInputTokens calculates the maximum input tokens with a safety buffer. The buffer ensures there's always room for the model's response.

type OpenAI added in v1.10.4

type OpenAI struct {
	// contains filtered or unexported fields
}

OpenAI manages the state of an ongoing conversation with an AI assistant

type OpenAIConversation added in v1.10.4

type OpenAIConversation struct {
	BaseConversation
	Messages []openai.ChatCompletionMessage
}

OpenAIConversation represents a conversation using OpenAI format

func (*OpenAIConversation) Clear added in v1.10.4

func (c *OpenAIConversation) Clear() error

Clear removes all messages from the conversation

func (*OpenAIConversation) GetMessages added in v1.10.6

func (c *OpenAIConversation) GetMessages() interface{}

func (*OpenAIConversation) Load added in v1.10.4

func (c *OpenAIConversation) Load() error

Load retrieves the conversation from disk

func (*OpenAIConversation) Push added in v1.10.6

func (c *OpenAIConversation) Push(messages ...interface{})

Push adds multiple messages to the conversation.

func (*OpenAIConversation) Save added in v1.10.4

func (c *OpenAIConversation) Save() error

Save persists the conversation to disk

func (*OpenAIConversation) SetMessages added in v1.10.6

func (c *OpenAIConversation) SetMessages(messages interface{})

type OpenChat added in v1.5.1

type OpenChat struct {
	// contains filtered or unexported fields
}

OpenChat manages the state of an ongoing conversation with an AI assistant.

type OpenChatConversation added in v1.6.0

type OpenChatConversation struct {
	BaseConversation
	Messages []*model.ChatCompletionMessage
}

OpenChatConversation manages conversations for Volcengine model

func (*OpenChatConversation) Clear added in v1.10.6

func (c *OpenChatConversation) Clear() error

Clear removes all messages from the conversation

func (*OpenChatConversation) GetMessages added in v1.10.6

func (c *OpenChatConversation) GetMessages() interface{}

func (*OpenChatConversation) Load added in v1.6.0

func (c *OpenChatConversation) Load() error

Load retrieves the conversation from disk

func (*OpenChatConversation) Push added in v1.10.6

func (c *OpenChatConversation) Push(messages ...interface{})

Push adds multiple messages to the conversation.

func (*OpenChatConversation) Save added in v1.6.0

func (c *OpenChatConversation) Save() error

Save persists the conversation to disk

func (*OpenChatConversation) SetMessages added in v1.10.6

func (c *OpenChatConversation) SetMessages(messages interface{})

type OpenFunctionDefinition added in v1.10.4

type OpenFunctionDefinition struct {
	Name        string
	Description string
	Parameters  map[string]interface{}
}

OpenFunctionDefinition is a generic function definition that is not tied to any specific model.

type OpenProcessor added in v1.10.4

type OpenProcessor struct {
	// contains filtered or unexported fields
}

OpenProcessor is the main processor for OpenAI-like models For tools implementation - It manages the context, notifications, data streaming, and tool usage - It handles queries and references, and maintains the status stack

func (*OpenProcessor) AnthropicCopyToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicCopyToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicCreateDirectoryToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicCreateDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicDeleteDirectoryToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicDeleteDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicDeleteFileToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicDeleteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicEditFileToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicEditFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicListDirectoryToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicListDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicListMemoryToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicListMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicMCPToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicMCPToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicMoveToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicMoveToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicReadFileToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicReadFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicReadMultipleFilesToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicReadMultipleFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicSaveMemoryToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicSaveMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicSearchFilesToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicSearchFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicSearchTextInFileToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicSearchTextInFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicShellToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicShellToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

Anthropic tool implementations (wrapper functions)

func (*OpenProcessor) AnthropicSwitchAgentToolCall added in v1.13.10

func (op *OpenProcessor) AnthropicSwitchAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicWebFetchToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicWebFetchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicWebSearchToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicWebSearchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) AnthropicWriteFileToolCall added in v1.13.5

func (op *OpenProcessor) AnthropicWriteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)

func (*OpenProcessor) OpenAICopyToolCall added in v1.10.4

func (op *OpenProcessor) OpenAICopyToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAICreateDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenAICreateDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIDeleteDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIDeleteDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIDeleteFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIDeleteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIEditFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIEditFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIListDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIListDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIListMemoryToolCall added in v1.12.22

func (op *OpenProcessor) OpenAIListMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIMCPToolCall added in v1.11.4

func (op *OpenProcessor) OpenAIMCPToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIMoveToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIMoveToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIReadFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIReadFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIReadMultipleFilesToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIReadMultipleFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAISaveMemoryToolCall added in v1.12.22

func (op *OpenProcessor) OpenAISaveMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAISearchFilesToolCall added in v1.10.4

func (op *OpenProcessor) OpenAISearchFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAISearchTextInFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenAISearchTextInFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIShellToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIShellToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

OpenAI tool implementations (wrapper functions)

func (*OpenProcessor) OpenAISwitchAgentToolCall added in v1.13.10

func (op *OpenProcessor) OpenAISwitchAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIWebFetchToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIWebFetchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIWebSearchToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIWebSearchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenAIWriteFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenAIWriteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatCopyToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatCopyToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatCreateDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatCreateDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatDeleteDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatDeleteDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatDeleteFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatDeleteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatEditFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatEditFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatListDirectoryToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatListDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatListMemoryToolCall added in v1.12.22

func (op *OpenProcessor) OpenChatListMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatMCPToolCall added in v1.11.4

func (op *OpenProcessor) OpenChatMCPToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatMoveToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatMoveToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatReadFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatReadFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

OpenChat tool implementations (wrapper functions)

func (*OpenProcessor) OpenChatReadMultipleFilesToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatReadMultipleFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatSaveMemoryToolCall added in v1.12.22

func (op *OpenProcessor) OpenChatSaveMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatSearchFilesToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatSearchFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatSearchTextInFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatSearchTextInFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatShellToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatShellToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatSwitchAgentToolCall added in v1.13.10

func (op *OpenProcessor) OpenChatSwitchAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatWebFetchToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatWebFetchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatWebSearchToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatWebSearchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

func (*OpenProcessor) OpenChatWriteFileToolCall added in v1.10.4

func (op *OpenProcessor) OpenChatWriteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)

type OpenTool added in v1.10.4

type OpenTool struct {
	Type     ToolType
	Function *OpenFunctionDefinition
}

OpenTool is a generic tool definition that is not tied to any specific model.

func GetOpenEmbeddingToolsFiltered added in v1.13.1

func GetOpenEmbeddingToolsFiltered(allowedTools []string) []*OpenTool

GetOpenEmbeddingToolsFiltered returns embedding tools filtered by the allowed list. If allowedTools is nil or empty, returns all embedding tools. Unknown tool names are gracefully ignored.

func MCPToolsToOpenTool added in v1.11.4

func MCPToolsToOpenTool(mcpTool MCPTool) *OpenTool

MCPToolsToOpenTool converts an MCPTools struct to an OpenTool with proper JSON schema

func (*OpenTool) ToAnthropicTool added in v1.13.5

func (ot *OpenTool) ToAnthropicTool() anthropic.ToolUnionParam

ToAnthropicTool converts an OpenTool to an anthropic.ToolUnionParam.

func (*OpenTool) ToGeminiFunctions added in v1.10.4

func (ot *OpenTool) ToGeminiFunctions() *genai.FunctionDeclaration

ToGeminiFunctions converts an OpenTool to a *genai.FunctionDeclaration.

func (*OpenTool) ToOpenAITool added in v1.10.4

func (ot *OpenTool) ToOpenAITool() openai.Tool

ToOpenAITool converts an OpenTool to an openai.Tool.

func (*OpenTool) ToOpenChatTool added in v1.10.4

func (ot *OpenTool) ToOpenChatTool() *model.Tool

ToOpenChatTool converts an OpenTool to a *model.Tool.

type Render added in v1.9.7

type Render interface {
	Writeln(args ...interface{})
	Writef(format string, args ...interface{})
	Write(args ...interface{})
}

type SearchEngine added in v1.9.4

type SearchEngine struct {
	UseSearch     bool
	Name          string
	ApiKey        string
	CxKey         string
	MaxReferences int

	// DeepDive indicates how many links to fetch content from
	// If 0, it defaults to a small number (e.g. 3) for efficiency.
	DeepDive int
}

func (*SearchEngine) BingSearch added in v1.9.4

func (s *SearchEngine) BingSearch(query string) (map[string]any, error)

--- Simulation of Bing Search ---

func (*SearchEngine) GoogleSearch added in v1.9.4

func (s *SearchEngine) GoogleSearch(query string) (map[string]any, error)

Alternative approach with explicit conversions for protocol buffer compatibility

func (*SearchEngine) NoneSearch added in v1.9.4

func (s *SearchEngine) NoneSearch(query string) (map[string]any, error)

func (*SearchEngine) RetrieveQueries added in v1.9.4

func (s *SearchEngine) RetrieveQueries(queries []string) string

func (*SearchEngine) RetrieveReferences added in v1.9.4

func (s *SearchEngine) RetrieveReferences(references []map[string]any) string

func (*SearchEngine) SerpAPISearch added in v1.9.4

func (s *SearchEngine) SerpAPISearch(query string, engine string) (map[string]any, error)

func (*SearchEngine) TavilySearch added in v1.9.4

func (s *SearchEngine) TavilySearch(query string) (map[string]any, error)

type StatusStack added in v1.9.2

type StatusStack struct {
	// contains filtered or unexported fields
}

StatusStack is a stack data structure for managing stream statuses.

func (*StatusStack) ChangeTo added in v1.9.2

func (s *StatusStack) ChangeTo(
	proc chan<- StreamNotify,
	notify StreamNotify,
	proceed <-chan bool)

func (*StatusStack) Clear added in v1.9.2

func (s *StatusStack) Clear()

func (*StatusStack) Debug added in v1.9.2

func (s *StatusStack) Debug()

func (*StatusStack) IsEmpty added in v1.9.2

func (s *StatusStack) IsEmpty() bool

func (*StatusStack) IsTop added in v1.9.2

func (s *StatusStack) IsTop(status StreamStatus) bool

func (*StatusStack) Peek added in v1.9.2

func (s *StatusStack) Peek() StreamStatus

Peek returns the state from the top of the stack without removing it. If the stack is empty, it returns StateNormal.

func (*StatusStack) Pop added in v1.9.2

func (s *StatusStack) Pop() StreamStatus

Pop removes and returns the state from the top of the stack. If the stack is empty, it returns StateNormal.

func (*StatusStack) Push added in v1.9.2

func (s *StatusStack) Push(status StreamStatus)

Push adds a state to the top of the stack.

func (*StatusStack) Size added in v1.9.2

func (s *StatusStack) Size() int

type StdRenderer added in v1.9.5

type StdRenderer struct {
}

func NewStdRenderer added in v1.9.5

func NewStdRenderer() *StdRenderer

func (*StdRenderer) Write added in v1.9.7

func (r *StdRenderer) Write(args ...interface{})

func (*StdRenderer) Writef added in v1.9.7

func (r *StdRenderer) Writef(format string, args ...interface{})

func (*StdRenderer) Writeln added in v1.9.7

func (r *StdRenderer) Writeln(args ...interface{})

type StreamData added in v1.9.2

type StreamData struct {
	Text string
	Type StreamDataType
}

type StreamDataType added in v1.9.2

type StreamDataType int
const (
	DataTypeUnknown   StreamDataType = iota
	DataTypeNormal                   // 1
	DataTypeReasoning                // 2
	DataTypeFinished                 // 3
)

type StreamNotify

type StreamNotify struct {
	Status StreamStatus
	Data   string      // For text content or error messages
	Extra  interface{} // For additional metadata (e.g., switch instruction)
}

type StreamStatus

type StreamStatus int
const (
	StatusUnknown             StreamStatus = iota
	StatusProcessing                       // 1
	StatusStarted                          // 2
	StatusFinished                         // 3
	StatusWarn                             // 4
	StatusError                            // 5
	StatusReasoning                        // 6
	StatusReasoningOver                    // 7
	StatusFunctionCalling                  // 8
	StatusFunctionCallingOver              // 9
	StatusDiffConfirm                      // 10
	StatusDiffConfirmOver                  // 11
	StatusSwitchAgent                      // 12
)

type SwitchAgentError added in v1.13.10

type SwitchAgentError struct {
	TargetAgent string
	Instruction string
}

func (*SwitchAgentError) Error added in v1.13.10

func (e *SwitchAgentError) Error() string

type TavilyError added in v1.2.0

type TavilyError struct {
	Detail TavilyErrorDetail `json:"detail"`
}

type TavilyErrorDetail added in v1.2.0

type TavilyErrorDetail struct {
	Error string `json:"error"`
}

type TavilyResponse added in v1.2.0

type TavilyResponse struct {
	Query        string         `json:"query"`
	Answer       string         `json:"answer"`
	Images       []string       `json:"images"`
	Results      []TavilyResult `json:"results"`
	ResponseTime float32        `json:"response_time"` // e.g., 1.67 (seconds)
}

TavilyResponse represents the overall response returned by the Tavily search API.

type TavilyResult added in v1.2.0

type TavilyResult struct {
	Title      string  `json:"title"`
	URL        string  `json:"url"`
	Content    string  `json:"content"`
	Score      float64 `json:"score"`
	RawContent *string `json:"raw_content"`
}

TavilyResult represents a single search result entry in the Tavily API response.

type ThinkingLevel added in v1.13.7

type ThinkingLevel string

ThinkingLevel represents the unified thinking/reasoning level across providers. Maps to provider-specific configurations: - OpenAI: reasoning_effort ("low"/"medium"/"high") - OpenChat: model.Thinking + ReasoningEffort - Gemini 2.5: ThinkingBudget (token count, -1 for dynamic) - Gemini 3: ThinkingLevel ("LOW"/"MEDIUM"/"HIGH") - Anthropic: thinking.budget_tokens

const (
	ThinkingLevelOff    ThinkingLevel = "off"
	ThinkingLevelLow    ThinkingLevel = "low"
	ThinkingLevelMedium ThinkingLevel = "medium"
	ThinkingLevelHigh   ThinkingLevel = "high"
)

func AllThinkingLevels added in v1.13.7

func AllThinkingLevels() []ThinkingLevel

AllThinkingLevels returns all valid thinking levels in order

func ParseThinkingLevel added in v1.13.7

func ParseThinkingLevel(s string) ThinkingLevel

ParseThinkingLevel normalizes user input to a valid ThinkingLevel. Supports backward compatibility with boolean values.

func (ThinkingLevel) Display added in v1.13.7

func (t ThinkingLevel) Display() string

Display returns a colorized display string for CLI output

func (ThinkingLevel) IsEnabled added in v1.13.7

func (t ThinkingLevel) IsEnabled() bool

IsEnabled returns true if thinking is enabled (not off)

func (ThinkingLevel) String added in v1.13.7

func (t ThinkingLevel) String() string

String returns the string representation

func (ThinkingLevel) ToAnthropicParams added in v1.13.7

func (t ThinkingLevel) ToAnthropicParams() anthropic.ThinkingConfigParamUnion

ToAnthropicParams returns the thinking budget tokens for Anthropic. Returns 0 for ThinkingLevelOff.

func (ThinkingLevel) ToGeminiConfig added in v1.13.7

func (t ThinkingLevel) ToGeminiConfig(modelName string) *genai.ThinkingConfig

ToGeminiConfig returns the Gemini ThinkingConfig based on model version. Gemini 3 uses ThinkingLevel, Gemini 2.5 uses ThinkingBudget.

func (ThinkingLevel) ToOpenAIReasoningEffort added in v1.13.7

func (t ThinkingLevel) ToOpenAIReasoningEffort() string

ToOpenAIReasoningEffort returns the OpenAI reasoning_effort parameter value. Returns empty string for ThinkingLevelOff (no param should be set).

func (ThinkingLevel) ToOpenChatParams added in v1.13.7

func (t ThinkingLevel) ToOpenChatParams() (*model.Thinking, *model.ReasoningEffort)

ToOpenChatParams returns the OpenChat model.Thinking and ReasoningEffort params.

type TokenCache added in v1.12.14

type TokenCache struct {
	// contains filtered or unexported fields
}

TokenCache provides a thread-safe cache for storing token counts of LLM messages. It uses JSON-marshaled message content as keys to ensure correct uniqueness.

func GetGlobalTokenCache added in v1.12.14

func GetGlobalTokenCache() *TokenCache

GetGlobalTokenCache returns the global token cache instance

func NewTokenCache added in v1.12.14

func NewTokenCache(maxSize int) *TokenCache

NewTokenCache creates a new TokenCache with the specified maximum size

func (*TokenCache) Clear added in v1.12.14

func (tc *TokenCache) Clear()

Clear removes all entries from the cache

func (*TokenCache) Get added in v1.12.14

func (tc *TokenCache) Get(key string) (int, bool)

Get retrieves a cached token count for the given key. Returns the count and true if found, or 0 and false if not found.

func (*TokenCache) GetOrComputeAnthropicTokens added in v1.13.5

func (tc *TokenCache) GetOrComputeAnthropicTokens(msg anthropic.MessageParam) int

GetOrComputeAnthropicTokens retrieves cached tokens or computes and caches them.

func (*TokenCache) GetOrComputeGeminiTokens added in v1.12.14

func (tc *TokenCache) GetOrComputeGeminiTokens(msg *genai.Content) int

GetOrComputeGeminiTokens retrieves cached tokens or computes and caches them for Gemini.

func (*TokenCache) GetOrComputeOpenAITokens added in v1.12.14

func (tc *TokenCache) GetOrComputeOpenAITokens(msg openai.ChatCompletionMessage) int

GetOrComputeOpenAITokens retrieves cached tokens or computes and caches them.

func (*TokenCache) GetOrComputeOpenChatTokens added in v1.12.14

func (tc *TokenCache) GetOrComputeOpenChatTokens(msg *model.ChatCompletionMessage) int

GetOrComputeOpenChatTokens retrieves cached tokens or computes and caches them.

func (*TokenCache) Set added in v1.12.14

func (tc *TokenCache) Set(key string, count int)

Set stores a token count for the given key. If the cache is full, it evicts approximately half of the entries.

func (*TokenCache) Size added in v1.12.14

func (tc *TokenCache) Size() int

Size returns the current number of entries in the cache

func (*TokenCache) Stats added in v1.12.14

func (tc *TokenCache) Stats() (hits, misses int64, size int)

Stats returns cache statistics (hits, misses, size)

type TokenUsage added in v1.9.5

type TokenUsage struct {
	InputTokens   int
	OutputTokens  int
	CachedTokens  int
	ThoughtTokens int
	TotalTokens   int
	// For providers like Anthropic, cached tokens are not included in the prompt tokens
	// OpenAI, OpenChat and Gemini all include cached tokens in the prompt tokens
	CachedTokensInPrompt bool
}

func NewTokenUsage added in v1.9.7

func NewTokenUsage() *TokenUsage

func (*TokenUsage) RecordTokenUsage added in v1.9.5

func (tu *TokenUsage) RecordTokenUsage(input, output, cached, thought, total int)

func (*TokenUsage) Render added in v1.9.7

func (tu *TokenUsage) Render(render Render)

type ToolType added in v1.10.4

type ToolType string
const (
	ToolTypeFunction ToolType = "function"
)

type ToolsUse added in v1.9.6

type ToolsUse struct {
	AutoApprove bool // Whether tools can be used without user confirmation
}

type TruncationStrategy added in v1.12.14

type TruncationStrategy string

TruncationStrategy defines how to handle context overflow

const (
	// StrategyTruncateOldest removes oldest messages first, preserving system prompt
	StrategyTruncateOldest TruncationStrategy = "truncate_oldest"

	// StrategySummarize replaces old context with a summary (future implementation)
	StrategySummarize TruncationStrategy = "summarize"

	// StrategyNone disables truncation - will fail if context exceeds limit
	StrategyNone TruncationStrategy = "none"

	// DefaultBufferPercent is the default safety buffer (80% of available space)
	DefaultBufferPercent = 0.80
)

type UniversalMessage added in v1.13.10

type UniversalMessage struct {
	Role      UniversalRole // "system", "user", "assistant"
	Content   string        // Main text content
	Reasoning string        // Thinking/reasoning content (if any)
}

UniversalMessage is a provider-agnostic representation of a chat message. It extracts only the essential semantic content for cross-provider conversion.

Key design decisions: 1. Only text content and reasoning are preserved. 2. Tool calls, tool responses, images, and other multimodal content are discarded. 3. Role normalization: "model" (Gemini) → "assistant".

func ParseAnthropicMessages added in v1.13.10

func ParseAnthropicMessages(messages []anthropic.MessageParam) []UniversalMessage

ParseAnthropicMessages converts Anthropic messages to universal format. Extracts: OfText blocks, OfThinking/OfRedactedThinking blocks Ignores: OfToolUse, OfToolResult, OfImage, OfDocument

func ParseGeminiMessages added in v1.13.10

func ParseGeminiMessages(messages []*gemini.Content) []UniversalMessage

ParseGeminiMessages converts Gemini messages to universal format. Extracts: Parts.Text, Parts.Thought Ignores: FunctionCall, FunctionResponse, InlineData Maps: "model" → "assistant"

func ParseOpenAIMessages added in v1.13.10

func ParseOpenAIMessages(messages []openai.ChatCompletionMessage) []UniversalMessage

ParseOpenAIMessages converts OpenAI messages to universal format. Extracts: Content, MultiContent[].Text, ReasoningContent Ignores: ToolCalls, FunctionCall, ImageURL

func ParseOpenChatMessages added in v1.13.10

func ParseOpenChatMessages(messages []*model.ChatCompletionMessage) []UniversalMessage

ParseOpenChatMessages converts OpenChat (Volcengine) messages to universal format.

type UniversalRole added in v1.13.10

type UniversalRole string
const (
	UniversalRoleSystem    UniversalRole = "system"
	UniversalRoleUser      UniversalRole = "user"
	UniversalRoleAssistant UniversalRole = "assistant"
)

func ConvertToUniversalRole added in v1.13.10

func ConvertToUniversalRole(role string) UniversalRole

func (UniversalRole) ConvertToAnthropic added in v1.13.10

func (r UniversalRole) ConvertToAnthropic() anthropic.MessageParamRole

func (UniversalRole) ConvertToGemini added in v1.13.10

func (r UniversalRole) ConvertToGemini() string

func (UniversalRole) ConvertToOpenAI added in v1.13.10

func (r UniversalRole) ConvertToOpenAI() string

func (UniversalRole) ConvertToOpenChat added in v1.13.10

func (r UniversalRole) ConvertToOpenChat() string

func (UniversalRole) String added in v1.13.10

func (r UniversalRole) String() string

type WorkflowAgent added in v1.10.0

type WorkflowAgent struct {
	Name          string
	Role          WorkflowAgentType
	Model         *data.Model
	Search        *data.SearchEngine
	Template      string
	SystemPrompt  string
	EnabledTools  []string
	Think         string
	MCP           bool
	Usage         bool
	Markdown      bool
	InputDir      string
	OutputDir     string
	MaxRecursions int
	OutputFile    string
	PassThrough   bool   // pass through current agent, only for debugging
	ConvoName     string // conversation name, for iterate prompt
}

WorkflowAgent defines the structure for a single agent in the workflow.

type WorkflowAgentType added in v1.10.0

type WorkflowAgentType string
const (
	WorkflowAgentTypeMaster WorkflowAgentType = "master"
	WorkflowAgentTypeWorker WorkflowAgentType = "worker"
)

type WorkflowConfig added in v1.10.0

type WorkflowConfig struct {
	Agents          []WorkflowAgent
	InterActiveMode bool // Allow the user to confirm before each agent runs
}

WorkflowConfig defines the structure for the entire workflow.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL