Documentation
¶
Index ¶
- Variables
- func DefaultIsRetryableError(err error) bool
- func RegisterProvider(id string, factoryFunc FactoryFunc) error
- func Retry[T any](ctx context.Context, config RetryConfig, isRetryable IsRetryableFunc, ...) (T, error)
- type APIError
- type AzureOpenAICandidate
- type AzureOpenAIChat
- func (c *AzureOpenAIChat) Initialize(messages []*api.Message) error
- func (c *AzureOpenAIChat) IsRetryableError(err error) bool
- func (c *AzureOpenAIChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
- func (c *AzureOpenAIChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
- func (c *AzureOpenAIChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
- type AzureOpenAIChatResponse
- type AzureOpenAIClient
- func (c *AzureOpenAIClient) Close() error
- func (c *AzureOpenAIClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
- func (c *AzureOpenAIClient) ListModels(ctx context.Context) ([]string, error)
- func (c *AzureOpenAIClient) SetResponseSchema(schema *Schema) error
- func (c *AzureOpenAIClient) StartChat(systemPrompt string, model string) Chat
- type AzureOpenAICompletionResponse
- type AzureOpenAIPart
- type BedrockClient
- func (c *BedrockClient) Close() error
- func (c *BedrockClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
- func (c *BedrockClient) ListModels(ctx context.Context) ([]string, error)
- func (c *BedrockClient) SetResponseSchema(schema *Schema) error
- func (c *BedrockClient) StartChat(systemPrompt, model string) Chat
- type Candidate
- type Chat
- type ChatResponse
- type ChatResponseIterator
- type Client
- type ClientOptions
- type CompletionRequest
- type CompletionResponse
- type FactoryFunc
- type FunctionCall
- type FunctionCallResult
- type FunctionDefinition
- type GeminiAPIClientOptions
- type GeminiCandidate
- type GeminiChat
- func (c *GeminiChat) Initialize(messages []*api.Message) error
- func (c *GeminiChat) IsRetryableError(err error) bool
- func (c *GeminiChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
- func (c *GeminiChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
- func (c *GeminiChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
- type GeminiChatResponse
- type GeminiCompletionResponse
- type GeminiPart
- type GoogleAIClient
- func (c *GoogleAIClient) Close() error
- func (c *GoogleAIClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
- func (c *GoogleAIClient) ListModels(ctx context.Context) (modelNames []string, err error)
- func (c *GoogleAIClient) SetResponseSchema(responseSchema *Schema) error
- func (c *GoogleAIClient) StartChat(systemPrompt string, model string) Chat
- type GrokClient
- func (c *GrokClient) Close() error
- func (c *GrokClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
- func (c *GrokClient) ListModels(ctx context.Context) ([]string, error)
- func (c *GrokClient) SetResponseSchema(schema *Schema) error
- func (c *GrokClient) StartChat(systemPrompt, model string) Chat
- type IsRetryableFunc
- type LlamaCppCandidate
- type LlamaCppChat
- func (c *LlamaCppChat) Initialize(messages []*api.Message) error
- func (c *LlamaCppChat) IsRetryableError(err error) bool
- func (c *LlamaCppChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
- func (c *LlamaCppChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
- func (c *LlamaCppChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
- type LlamaCppChatResponse
- type LlamaCppClient
- func (c *LlamaCppClient) Close() error
- func (c *LlamaCppClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
- func (c *LlamaCppClient) ListModels(ctx context.Context) ([]string, error)
- func (c *LlamaCppClient) SetResponseSchema(responseSchema *Schema) error
- func (c *LlamaCppClient) StartChat(systemPrompt, model string) Chat
- type LlamaCppCompletionResponse
- type LlamaCppPart
- type OllamaCandidate
- type OllamaChat
- func (c *OllamaChat) Initialize(messages []*kctlApi.Message) error
- func (c *OllamaChat) IsRetryableError(err error) bool
- func (c *OllamaChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
- func (c *OllamaChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
- func (c *OllamaChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
- type OllamaChatResponse
- type OllamaClient
- func (c *OllamaClient) Close() error
- func (c *OllamaClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
- func (c *OllamaClient) ListModels(ctx context.Context) ([]string, error)
- func (c *OllamaClient) SetResponseSchema(schema *Schema) error
- func (c *OllamaClient) StartChat(systemPrompt, model string) Chat
- type OllamaCompletionResponse
- type OllamaPart
- type OpenAIClient
- func (c *OpenAIClient) Close() error
- func (c *OpenAIClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
- func (c *OpenAIClient) ListModels(ctx context.Context) ([]string, error)
- func (c *OpenAIClient) SetResponseSchema(schema *Schema) error
- func (c *OpenAIClient) StartChat(systemPrompt, model string) Chat
- type Option
- type Part
- type RecordChatResponse
- type RecordCompletionResponse
- type RetryConfig
- type Schema
- type SchemaType
- type VertexAIClientOptions
Constants ¶
This section is empty.
Variables ¶
var DefaultRetryConfig = RetryConfig{ MaxAttempts: 5, InitialBackoff: 200 * time.Millisecond, MaxBackoff: 10 * time.Second, BackoffFactor: 2.0, Jitter: true, }
DefaultRetryConfig provides sensible default values for the retry mechanism.
Functions ¶
func DefaultIsRetryableError ¶
DefaultIsRetryableError provides a default implementation based on common HTTP codes and network errors.
func RegisterProvider ¶
func RegisterProvider(id string, factoryFunc FactoryFunc) error
Types ¶
type AzureOpenAICandidate ¶
type AzureOpenAICandidate struct {
// contains filtered or unexported fields
}
func (*AzureOpenAICandidate) Parts ¶
func (r *AzureOpenAICandidate) Parts() []Part
func (*AzureOpenAICandidate) String ¶
func (r *AzureOpenAICandidate) String() string
type AzureOpenAIChat ¶
type AzureOpenAIChat struct {
// contains filtered or unexported fields
}
func (*AzureOpenAIChat) Initialize ¶
func (c *AzureOpenAIChat) Initialize(messages []*api.Message) error
func (*AzureOpenAIChat) IsRetryableError ¶
func (c *AzureOpenAIChat) IsRetryableError(err error) bool
func (*AzureOpenAIChat) Send ¶
func (c *AzureOpenAIChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
func (*AzureOpenAIChat) SendStreaming ¶
func (c *AzureOpenAIChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
func (*AzureOpenAIChat) SetFunctionDefinitions ¶
func (c *AzureOpenAIChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
type AzureOpenAIChatResponse ¶
type AzureOpenAIChatResponse struct {
// contains filtered or unexported fields
}
func (*AzureOpenAIChatResponse) Candidates ¶
func (r *AzureOpenAIChatResponse) Candidates() []Candidate
func (*AzureOpenAIChatResponse) MarshalJSON ¶
func (r *AzureOpenAIChatResponse) MarshalJSON() ([]byte, error)
func (*AzureOpenAIChatResponse) String ¶
func (r *AzureOpenAIChatResponse) String() string
func (*AzureOpenAIChatResponse) UsageMetadata ¶
func (r *AzureOpenAIChatResponse) UsageMetadata() any
type AzureOpenAIClient ¶
type AzureOpenAIClient struct {
// contains filtered or unexported fields
}
func NewAzureOpenAIClient ¶
func NewAzureOpenAIClient(ctx context.Context, opts ClientOptions) (*AzureOpenAIClient, error)
NewAzureOpenAIClient creates a new Azure OpenAI client. Supports ClientOptions and SkipVerifySSL for custom HTTP transport.
func (*AzureOpenAIClient) Close ¶
func (c *AzureOpenAIClient) Close() error
func (*AzureOpenAIClient) GenerateCompletion ¶
func (c *AzureOpenAIClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
func (*AzureOpenAIClient) ListModels ¶
func (c *AzureOpenAIClient) ListModels(ctx context.Context) ([]string, error)
func (*AzureOpenAIClient) SetResponseSchema ¶
func (c *AzureOpenAIClient) SetResponseSchema(schema *Schema) error
type AzureOpenAICompletionResponse ¶
type AzureOpenAICompletionResponse struct {
// contains filtered or unexported fields
}
func (*AzureOpenAICompletionResponse) Response ¶
func (r *AzureOpenAICompletionResponse) Response() string
func (*AzureOpenAICompletionResponse) UsageMetadata ¶
func (r *AzureOpenAICompletionResponse) UsageMetadata() any
type AzureOpenAIPart ¶
type AzureOpenAIPart struct {
// contains filtered or unexported fields
}
func (*AzureOpenAIPart) AsFunctionCalls ¶
func (p *AzureOpenAIPart) AsFunctionCalls() ([]FunctionCall, bool)
func (*AzureOpenAIPart) AsText ¶
func (p *AzureOpenAIPart) AsText() (string, bool)
type BedrockClient ¶
type BedrockClient struct {
// contains filtered or unexported fields
}
BedrockClient implements the gollm.Client interface for AWS Bedrock models
func NewBedrockClient ¶
func NewBedrockClient(ctx context.Context, opts ClientOptions) (*BedrockClient, error)
NewBedrockClient creates a new client for interacting with AWS Bedrock models
func (*BedrockClient) Close ¶
func (c *BedrockClient) Close() error
Close cleans up any resources used by the client
func (*BedrockClient) GenerateCompletion ¶
func (c *BedrockClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
GenerateCompletion generates a single completion for the given request
func (*BedrockClient) ListModels ¶
func (c *BedrockClient) ListModels(ctx context.Context) ([]string, error)
ListModels returns the list of supported Bedrock models
func (*BedrockClient) SetResponseSchema ¶
func (c *BedrockClient) SetResponseSchema(schema *Schema) error
SetResponseSchema sets the response schema for the client (not supported by Bedrock)
func (*BedrockClient) StartChat ¶
func (c *BedrockClient) StartChat(systemPrompt, model string) Chat
StartChat starts a new chat session with the specified system prompt and model
type Candidate ¶
type Candidate interface {
// String returns a string representation of the candidate.
fmt.Stringer
// Parts returns the parts of the candidate.
Parts() []Part
}
Candidate is one of a set of candidate response from the LLM.
type Chat ¶
type Chat interface {
// Send adds a user message to the chat, and gets the response from the LLM.
// Note that this method automatically updates the state of the Chat,
// you do not need to "replay" any messages from the LLM.
Send(ctx context.Context, contents ...any) (ChatResponse, error)
// SendStreaming is the streaming version of Send.
SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
// SetFunctionDefinitions configures the set of tools (functions) available to the LLM
// for function calling.
SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
// IsRetryableError returns true if the error is retryable.
IsRetryableError(error) bool
// Initialize initializes the chat with a previous conversation history.
Initialize(messages []*api.Message) error
}
Chat is an active conversation with a language model. Messages are sent and received, and add to a conversation history.
func NewRetryChat ¶
func NewRetryChat[C Chat]( underlying C, config RetryConfig, ) Chat
NewRetryChat creates a new Chat that wraps the given underlying client with retry logic using the provided configuration. It returns the Chat interface type, hiding the generic implementation detail.
type ChatResponse ¶
type ChatResponse interface {
UsageMetadata() any
// Candidates are a set of candidate responses from the LLM.
// The LLM may return multiple candidates, and we can choose the best one.
Candidates() []Candidate
}
ChatResponse is a generic chat response from the LLM.
type ChatResponseIterator ¶
type ChatResponseIterator iter.Seq2[ChatResponse, error]
ChatResponseIterator is a streaming chat response from the LLM.
type Client ¶
type Client interface {
io.Closer
// StartChat starts a new multi-turn chat with a language model.
StartChat(systemPrompt, model string) Chat
// GenerateCompletion generates a single completion for a given prompt.
GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
// SetResponseSchema constrains LLM responses to match the provided schema.
// Calling with nil will clear the current schema.
SetResponseSchema(schema *Schema) error
// ListModels lists the models available in the LLM.
ListModels(ctx context.Context) ([]string, error)
}
Client is a client for a language model.
type ClientOptions ¶
type CompletionRequest ¶
type CompletionRequest struct {
Model string `json:"model,omitempty"`
Prompt string `json:"prompt,omitempty"`
}
CompletionRequest is a request to generate a completion for a given prompt.
type CompletionResponse ¶
CompletionResponse is a response from the GenerateCompletion method.
type FactoryFunc ¶
type FactoryFunc func(ctx context.Context, opts ClientOptions) (Client, error)
type FunctionCall ¶
type FunctionCall struct {
ID string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Arguments map[string]any `json:"arguments,omitempty"`
}
FunctionCall is a function call to a language model. The LLM will reply with a FunctionCall to a user-defined function, and we will send the results back.
type FunctionCallResult ¶
type FunctionCallResult struct {
ID string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Result map[string]any `json:"result,omitempty"`
}
FunctionCallResult is the result of a function call. We use this to send the results back to the LLM.
type FunctionDefinition ¶
type FunctionDefinition struct {
Name string `json:"name,omitempty"`
Description string `json:"description,omitempty"`
Parameters *Schema `json:"parameters,omitempty"`
}
FunctionDefinition is a user-defined function that can be called by the LLM. If the LLM determines the function should be called, it will reply with a FunctionCall object; we will invoke the function and send the results back.
type GeminiAPIClientOptions ¶
type GeminiAPIClientOptions struct {
// API Key for GenAI. Required for BackendGeminiAPI.
APIKey string
}
GeminiAPIClientOptions are the options for the Gemini API client.
type GeminiCandidate ¶
type GeminiCandidate struct {
// contains filtered or unexported fields
}
GeminiCandidate is a candidate for the response. It implements the Candidate interface.
func (*GeminiCandidate) Parts ¶
func (r *GeminiCandidate) Parts() []Part
Parts returns the parts of the candidate.
func (*GeminiCandidate) String ¶
func (r *GeminiCandidate) String() string
String returns a string representation of the response.
type GeminiChat ¶
type GeminiChat struct {
// contains filtered or unexported fields
}
GeminiChat is a chat with the model. It implements the Chat interface.
func (*GeminiChat) Initialize ¶
func (c *GeminiChat) Initialize(messages []*api.Message) error
func (*GeminiChat) IsRetryableError ¶
func (c *GeminiChat) IsRetryableError(err error) bool
func (*GeminiChat) Send ¶
func (c *GeminiChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
Send sends a message to the model. It returns a ChatResponse object containing the response from the model.
func (*GeminiChat) SendStreaming ¶
func (c *GeminiChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
func (*GeminiChat) SetFunctionDefinitions ¶
func (c *GeminiChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
SetFunctionDefinitions sets the function definitions for the chat. This allows the LLM to call user-defined functions.
type GeminiChatResponse ¶
type GeminiChatResponse struct {
// contains filtered or unexported fields
}
GeminiChatResponse is a response from the Gemini API. It implements the ChatResponse interface.
func (*GeminiChatResponse) Candidates ¶
func (r *GeminiChatResponse) Candidates() []Candidate
Candidates returns the candidates for the response.
func (*GeminiChatResponse) MarshalJSON ¶
func (r *GeminiChatResponse) MarshalJSON() ([]byte, error)
func (*GeminiChatResponse) String ¶
func (r *GeminiChatResponse) String() string
String returns a string representation of the response.
func (*GeminiChatResponse) UsageMetadata ¶
func (r *GeminiChatResponse) UsageMetadata() any
UsageMetadata returns the usage metadata for the response.
type GeminiCompletionResponse ¶
type GeminiCompletionResponse struct {
// contains filtered or unexported fields
}
func (*GeminiCompletionResponse) MarshalJSON ¶
func (r *GeminiCompletionResponse) MarshalJSON() ([]byte, error)
func (*GeminiCompletionResponse) Response ¶
func (r *GeminiCompletionResponse) Response() string
func (*GeminiCompletionResponse) String ¶
func (r *GeminiCompletionResponse) String() string
func (*GeminiCompletionResponse) UsageMetadata ¶
func (r *GeminiCompletionResponse) UsageMetadata() any
type GeminiPart ¶
type GeminiPart struct {
// contains filtered or unexported fields
}
GeminiPart is a part of a candidate. It implements the Part interface.
func (*GeminiPart) AsFunctionCalls ¶
func (p *GeminiPart) AsFunctionCalls() ([]FunctionCall, bool)
AsFunctionCalls returns the function calls of the part.
func (*GeminiPart) AsText ¶
func (p *GeminiPart) AsText() (string, bool)
AsText returns the text of the part.
type GoogleAIClient ¶
type GoogleAIClient struct {
// contains filtered or unexported fields
}
GoogleAIClient is a client for the google AI APIs. It implements the Client interface.
func NewGeminiAPIClient ¶
func NewGeminiAPIClient(ctx context.Context, opt GeminiAPIClientOptions) (*GoogleAIClient, error)
NewGeminiAPIClient builds a client for the Gemini API.
func NewVertexAIClient ¶
func NewVertexAIClient(ctx context.Context, opt VertexAIClientOptions) (*GoogleAIClient, error)
NewVertexAIClient builds a client for the Vertex AI API.
func (*GoogleAIClient) Close ¶
func (c *GoogleAIClient) Close() error
Close frees the resources used by the client.
func (*GoogleAIClient) GenerateCompletion ¶
func (c *GoogleAIClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
func (*GoogleAIClient) ListModels ¶
func (c *GoogleAIClient) ListModels(ctx context.Context) (modelNames []string, err error)
ListModels lists the models available in the Gemini API.
func (*GoogleAIClient) SetResponseSchema ¶
func (c *GoogleAIClient) SetResponseSchema(responseSchema *Schema) error
SetResponseSchema constrains LLM responses to match the provided schema. Calling with nil will clear the current schema.
type GrokClient ¶
type GrokClient struct {
// contains filtered or unexported fields
}
GrokClient implements the gollm.Client interface for X.AI's Grok model.
func NewGrokClient ¶
func NewGrokClient(ctx context.Context, opts ClientOptions) (*GrokClient, error)
NewGrokClient creates a new client for interacting with X.AI's Grok model. Supports custom HTTP client and skipVerifySSL via ClientOptions.
func (*GrokClient) Close ¶
func (c *GrokClient) Close() error
Close cleans up any resources used by the client.
func (*GrokClient) GenerateCompletion ¶
func (c *GrokClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
GenerateCompletion sends a completion request to the Grok API.
func (*GrokClient) ListModels ¶
func (c *GrokClient) ListModels(ctx context.Context) ([]string, error)
ListModels returns a list of available Grok models.
func (*GrokClient) SetResponseSchema ¶
func (c *GrokClient) SetResponseSchema(schema *Schema) error
SetResponseSchema is not implemented yet for Grok.
func (*GrokClient) StartChat ¶
func (c *GrokClient) StartChat(systemPrompt, model string) Chat
StartChat starts a new chat session.
type IsRetryableFunc ¶
IsRetryableFunc defines the signature for functions that check if an error is retryable. TODO (droot): Adjust the signature to allow underlying client to relay the backoff delay etc. for example, Gemini's error codes contain retryDelay information.
type LlamaCppCandidate ¶
type LlamaCppCandidate struct {
// contains filtered or unexported fields
}
func (*LlamaCppCandidate) Parts ¶
func (r *LlamaCppCandidate) Parts() []Part
func (*LlamaCppCandidate) String ¶
func (r *LlamaCppCandidate) String() string
type LlamaCppChat ¶
type LlamaCppChat struct {
// contains filtered or unexported fields
}
func (*LlamaCppChat) Initialize ¶
func (c *LlamaCppChat) Initialize(messages []*api.Message) error
func (*LlamaCppChat) IsRetryableError ¶
func (c *LlamaCppChat) IsRetryableError(err error) bool
func (*LlamaCppChat) Send ¶
func (c *LlamaCppChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
func (*LlamaCppChat) SendStreaming ¶
func (c *LlamaCppChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
func (*LlamaCppChat) SetFunctionDefinitions ¶
func (c *LlamaCppChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
type LlamaCppChatResponse ¶
type LlamaCppChatResponse struct {
LlamaCppResponse llamacppChatResponse
// contains filtered or unexported fields
}
func (*LlamaCppChatResponse) Candidates ¶
func (r *LlamaCppChatResponse) Candidates() []Candidate
func (*LlamaCppChatResponse) MarshalJSON ¶
func (r *LlamaCppChatResponse) MarshalJSON() ([]byte, error)
func (*LlamaCppChatResponse) String ¶
func (r *LlamaCppChatResponse) String() string
func (*LlamaCppChatResponse) UsageMetadata ¶
func (r *LlamaCppChatResponse) UsageMetadata() any
type LlamaCppClient ¶
type LlamaCppClient struct {
// contains filtered or unexported fields
}
func NewLlamaCppClient ¶
func NewLlamaCppClient(ctx context.Context, opts ClientOptions) (*LlamaCppClient, error)
NewLlamaCppClient creates a new client for llama.cpp. Supports custom HTTP client and skipVerifySSL via ClientOptions.
func (*LlamaCppClient) Close ¶
func (c *LlamaCppClient) Close() error
func (*LlamaCppClient) GenerateCompletion ¶
func (c *LlamaCppClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
func (*LlamaCppClient) ListModels ¶
func (c *LlamaCppClient) ListModels(ctx context.Context) ([]string, error)
func (*LlamaCppClient) SetResponseSchema ¶
func (c *LlamaCppClient) SetResponseSchema(responseSchema *Schema) error
func (*LlamaCppClient) StartChat ¶
func (c *LlamaCppClient) StartChat(systemPrompt, model string) Chat
type LlamaCppCompletionResponse ¶
type LlamaCppCompletionResponse struct {
// contains filtered or unexported fields
}
func (*LlamaCppCompletionResponse) Response ¶
func (r *LlamaCppCompletionResponse) Response() string
func (*LlamaCppCompletionResponse) UsageMetadata ¶
func (r *LlamaCppCompletionResponse) UsageMetadata() any
type LlamaCppPart ¶
type LlamaCppPart struct {
// contains filtered or unexported fields
}
func (*LlamaCppPart) AsFunctionCalls ¶
func (p *LlamaCppPart) AsFunctionCalls() ([]FunctionCall, bool)
func (*LlamaCppPart) AsText ¶
func (p *LlamaCppPart) AsText() (string, bool)
type OllamaCandidate ¶
type OllamaCandidate struct {
// contains filtered or unexported fields
}
func (*OllamaCandidate) Parts ¶
func (r *OllamaCandidate) Parts() []Part
func (*OllamaCandidate) String ¶
func (r *OllamaCandidate) String() string
type OllamaChat ¶
type OllamaChat struct {
// contains filtered or unexported fields
}
func (*OllamaChat) Initialize ¶
func (c *OllamaChat) Initialize(messages []*kctlApi.Message) error
func (*OllamaChat) IsRetryableError ¶
func (c *OllamaChat) IsRetryableError(err error) bool
func (*OllamaChat) Send ¶
func (c *OllamaChat) Send(ctx context.Context, contents ...any) (ChatResponse, error)
func (*OllamaChat) SendStreaming ¶
func (c *OllamaChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error)
func (*OllamaChat) SetFunctionDefinitions ¶
func (c *OllamaChat) SetFunctionDefinitions(functionDefinitions []*FunctionDefinition) error
type OllamaChatResponse ¶
type OllamaChatResponse struct {
// contains filtered or unexported fields
}
func (*OllamaChatResponse) Candidates ¶
func (r *OllamaChatResponse) Candidates() []Candidate
func (*OllamaChatResponse) MarshalJSON ¶
func (r *OllamaChatResponse) MarshalJSON() ([]byte, error)
func (*OllamaChatResponse) String ¶
func (r *OllamaChatResponse) String() string
func (*OllamaChatResponse) UsageMetadata ¶
func (r *OllamaChatResponse) UsageMetadata() any
type OllamaClient ¶
type OllamaClient struct {
// contains filtered or unexported fields
}
func NewOllamaClient ¶
func NewOllamaClient(ctx context.Context, opts ClientOptions) (*OllamaClient, error)
NewOllamaClient creates a new client for Ollama. Supports custom HTTP client and skipVerifySSL via ClientOptions if the SDK supports it.
func (*OllamaClient) Close ¶
func (c *OllamaClient) Close() error
func (*OllamaClient) GenerateCompletion ¶
func (c *OllamaClient) GenerateCompletion(ctx context.Context, request *CompletionRequest) (CompletionResponse, error)
func (*OllamaClient) ListModels ¶
func (c *OllamaClient) ListModels(ctx context.Context) ([]string, error)
func (*OllamaClient) SetResponseSchema ¶
func (c *OllamaClient) SetResponseSchema(schema *Schema) error
func (*OllamaClient) StartChat ¶
func (c *OllamaClient) StartChat(systemPrompt, model string) Chat
type OllamaCompletionResponse ¶
type OllamaCompletionResponse struct {
// contains filtered or unexported fields
}
func (*OllamaCompletionResponse) Response ¶
func (r *OllamaCompletionResponse) Response() string
func (*OllamaCompletionResponse) UsageMetadata ¶
func (r *OllamaCompletionResponse) UsageMetadata() any
type OllamaPart ¶
type OllamaPart struct {
// contains filtered or unexported fields
}
func (*OllamaPart) AsFunctionCalls ¶
func (p *OllamaPart) AsFunctionCalls() ([]FunctionCall, bool)
func (*OllamaPart) AsText ¶
func (p *OllamaPart) AsText() (string, bool)
type OpenAIClient ¶
type OpenAIClient struct {
// contains filtered or unexported fields
}
OpenAIClient implements the gollm.Client interface for OpenAI models.
func NewOpenAIClient ¶
func NewOpenAIClient(ctx context.Context, opts ClientOptions) (*OpenAIClient, error)
NewOpenAIClient creates a new client for interacting with OpenAI. Supports custom HTTP client (e.g., for skipping SSL verification).
func (*OpenAIClient) Close ¶
func (c *OpenAIClient) Close() error
Close cleans up any resources used by the client.
func (*OpenAIClient) GenerateCompletion ¶
func (c *OpenAIClient) GenerateCompletion(ctx context.Context, req *CompletionRequest) (CompletionResponse, error)
GenerateCompletion sends a completion request to the OpenAI API.
func (*OpenAIClient) ListModels ¶
func (c *OpenAIClient) ListModels(ctx context.Context) ([]string, error)
ListModels returns a slice of strings with model IDs. Note: This may not work with all OpenAI-compatible providers if they don't fully implement the Models.List endpoint or return data in a different format.
func (*OpenAIClient) SetResponseSchema ¶
func (c *OpenAIClient) SetResponseSchema(schema *Schema) error
SetResponseSchema is not implemented yet.
func (*OpenAIClient) StartChat ¶
func (c *OpenAIClient) StartChat(systemPrompt, model string) Chat
StartChat starts a new chat session.
type Option ¶
type Option func(*ClientOptions)
Option is a functional option for configuring ClientOptions.
func WithSkipVerifySSL ¶
func WithSkipVerifySSL() Option
WithSkipVerifySSL enables skipping SSL certificate verification for HTTP clients.
type Part ¶
type Part interface {
// AsText returns the text of the part.
// if the part is not text, it returns ("", false)
AsText() (string, bool)
// AsFunctionCalls returns the function calls of the part.
// if the part is not a function call, it returns (nil, false)
AsFunctionCalls() ([]FunctionCall, bool)
}
Part is a part of a candidate response from the LLM. It can be a text response, or a function call. A response may comprise multiple parts, for example a text response and a function call where the text response is "I need to do the necessary" and then the function call is "do_necessary".
type RecordChatResponse ¶
type RecordChatResponse struct {
// TODO: Structured data?
Raw any `json:"raw"`
}
type RetryConfig ¶
type RetryConfig struct {
MaxAttempts int
InitialBackoff time.Duration
MaxBackoff time.Duration
BackoffFactor float64
Jitter bool
}
RetryConfig holds the configuration for the retry mechanism (attempt limit, backoff growth, and jitter).
type Schema ¶
type Schema struct {
Type SchemaType `json:"type,omitempty"`
Properties map[string]*Schema `json:"properties,omitempty"`
Items *Schema `json:"items,omitempty"`
Description string `json:"description,omitempty"`
Required []string `json:"required,omitempty"`
}
Schema is a schema for a function definition.
func BuildSchemaFor ¶
BuildSchemaFor will build a schema for the given Go type. Because the resulting schema has no descriptions populated, it is more useful for the response schema than for tools/functions.
func (*Schema) ToRawSchema ¶
func (s *Schema) ToRawSchema() (json.RawMessage, error)
ToRawSchema converts a Schema to a json.RawMessage.
type SchemaType ¶
type SchemaType string
SchemaType is the type of a field in a Schema.
const ( TypeObject SchemaType = "object" TypeArray SchemaType = "array" TypeString SchemaType = "string" TypeBoolean SchemaType = "boolean" TypeNumber SchemaType = "number" TypeInteger SchemaType = "integer" )
type VertexAIClientOptions ¶
type VertexAIClientOptions struct {
// GCP Project ID for Vertex AI. Required for BackendVertexAI.
Project string
// GCP Location/Region for Vertex AI. Required for BackendVertexAI. See https://cloud.google.com/vertex-ai/docs/general/locations
Location string
}
VertexAIClientOptions are the options for using the VertexAPI.