gollm API Reference
1. Core Types
1.1 LLM
1.2 Prompt
1.3 Config
1.4 PromptOptimizer
1.5 Example
1.6 ComparisonResult
1.7 PromptTemplate
2. Core Functions
2.1 LLM Creation and Management
func NewLLM(opts ...ConfigOption) (LLM, error)
func CompareModels[T any](ctx context.Context, prompt string, validateFunc ValidateFunc[T], configs ...*Config) ([]ComparisonResult[T], error)
func AnalyzeComparisonResults[T any](results []ComparisonResult[T]) string
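A minimal sketch of creating a client with NewLLM. The import path, provider and model strings, environment variable name, and the Generate call (part of the LLM interface in section 1.1, not listed among the signatures above) are illustrative assumptions:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/teilomillet/gollm" // assumed import path for the gollm package
)

func main() {
	// Provider, model, and environment variable names are illustrative.
	llm, err := gollm.NewLLM(
		gollm.SetProvider("openai"),
		gollm.SetModel("gpt-4o-mini"),
		gollm.SetAPIKey(os.Getenv("OPENAI_API_KEY")),
		gollm.SetMaxTokens(256),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Generate is assumed from the LLM interface (section 1.1); it is not
	// one of the signatures listed in this section.
	resp, err := llm.Generate(context.Background(),
		gollm.NewPrompt("Summarize Go's error handling in one sentence."))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp)
}
```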
2.2 Prompt Handling
func NewPrompt(input string, opts ...PromptOption) *Prompt
func NewPromptTemplate(name, description, template string, opts ...PromptTemplateOption) *PromptTemplate
func NewPromptOptimizer(l LLM, initialPrompt string, taskDesc string, opts ...OptimizerOption) *PromptOptimizer
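An illustrative fragment showing the three constructors together. It assumes the imports and the `llm` client from the section 2.1 sketch, and assumes template placeholders follow Go's text/template syntax:

```go
// A prompt with options applied at construction (see section 5.2).
prompt := gollm.NewPrompt(
	"Explain the difference between a slice and an array in Go.",
	gollm.WithDirectives("Be concise", "Include one short code snippet"),
	gollm.WithMaxLength(200),
)

// A reusable template; {{.Topic}} is assumed to use text/template syntax.
tmpl := gollm.NewPromptTemplate(
	"explainer",
	"Explains a Go topic briefly",
	"Explain {{.Topic}} to a developer new to Go.",
	gollm.WithPromptOptions(gollm.WithMaxLength(150)),
)

// An optimizer bound to an existing client and a task description.
optimizer := gollm.NewPromptOptimizer(llm, prompt.String(), "teach Go fundamentals",
	gollm.WithIterations(3),
)
_, _ = tmpl, optimizer
```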
2.3 Data Extraction and Manipulation
func ExtractStructuredData[T any](ctx context.Context, l LLM, text string, opts ...PromptOption) (*T, error)
func readExamplesFromFile(filePath string) ([]string, error)
func selectExamples(examples []string, n int, order string) []string
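A small end-to-end sketch of ExtractStructuredData. The import path, provider/model values, and the validator-style struct tags are assumptions for illustration:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/teilomillet/gollm" // assumed import path
)

// Target struct; validator-style tags are assumed to work with Validate (section 3).
type Contact struct {
	Name  string `json:"name" validate:"required"`
	Email string `json:"email" validate:"required,email"`
}

func main() {
	llm, err := gollm.NewLLM(
		gollm.SetProvider("openai"), // illustrative provider/model
		gollm.SetModel("gpt-4o-mini"),
		gollm.SetAPIKey(os.Getenv("OPENAI_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	text := "Reach out to Jane Doe at jane@example.com about the launch."
	contact, err := gollm.ExtractStructuredData[Contact](context.Background(), llm, text)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", *contact)
}
```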
3. Utility Functions
func Validate(s interface{}) error
func GenerateJSONSchema(v interface{}, opts ...SchemaOption) ([]byte, error)
func CleanResponse(response string) string
func cleanResponse(response string) string
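An illustrative fragment for the utility functions (imports and the surrounding function elided). The validator-style tags and the exact cleaning behaviour of CleanResponse are assumptions inferred from the signatures:

```go
type Recipe struct {
	Name        string   `json:"name" validate:"required"`
	Ingredients []string `json:"ingredients" validate:"min=1"`
}

// Validate checks struct-level constraints; validator-style tags are assumed.
if err := gollm.Validate(&Recipe{Name: "Toast", Ingredients: []string{"bread"}}); err != nil {
	log.Fatal(err)
}

// GenerateJSONSchema emits a JSON Schema describing the struct.
schema, err := gollm.GenerateJSONSchema(&Recipe{})
if err != nil {
	log.Fatal(err)
}
fmt.Println(string(schema))

// CleanResponse strips wrappers (e.g. prose or code fences) around a JSON
// payload in raw model output; exact behaviour assumed from the signature.
raw := "Sure! Here is the JSON:\n{\"name\":\"Toast\",\"ingredients\":[\"bread\"]}"
fmt.Println(gollm.CleanResponse(raw))
```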
4. Methods
4.1 Prompt Methods
func (p *Prompt) GenerateJSONSchema(opts ...SchemaOption) ([]byte, error)
func (p *Prompt) Validate() error
func (p *Prompt) Apply(opts ...PromptOption)
func (p *Prompt) String() string
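A fragment showing the Prompt methods in sequence (imports elided); the comments describe behaviour inferred from the signatures rather than documented guarantees:

```go
p := gollm.NewPrompt("List three practical uses of Go generics.")

// Apply adds further options to an existing prompt in place.
p.Apply(
	gollm.WithDirectives("Answer as a numbered list"),
	gollm.WithMaxLength(120),
)

// Validate checks the prompt before it is sent to a provider.
if err := p.Validate(); err != nil {
	log.Fatal(err)
}

// String renders the assembled prompt text.
fmt.Println(p.String())

// GenerateJSONSchema returns a JSON Schema for the prompt structure
// (behaviour inferred from the signature).
schema, err := p.GenerateJSONSchema()
if err != nil {
	log.Fatal(err)
}
fmt.Println(string(schema))
```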
4.2 PromptOptimizer Methods
func (po *PromptOptimizer) OptimizePrompt(ctx context.Context) (string, error)
func (po *PromptOptimizer) GetOptimizationHistory() []OptimizationEntry
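A sketch of an optimization run, assuming the `llm` client and imports from the section 2.1 example; OptimizationEntry's fields are not listed here, so the history is printed generically:

```go
optimizer := gollm.NewPromptOptimizer(
	llm, // client created as in section 2.1
	"Write a product description for a mechanical keyboard.",
	"produce accurate, persuasive product copy",
	gollm.WithIterations(3),
	gollm.WithOptimizationGoal("Maximize clarity and persuasiveness"),
)

improved, err := optimizer.OptimizePrompt(context.Background())
if err != nil {
	log.Fatal(err)
}
fmt.Println("optimized prompt:", improved)

// Inspect what happened at each iteration.
for _, entry := range optimizer.GetOptimizationHistory() {
	fmt.Printf("%+v\n", entry)
}
```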
4.3 PromptTemplate Methods
func (pt *PromptTemplate) Execute(data map[string]interface{}) (*Prompt, error)
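A fragment showing Execute filling a template's placeholders (text/template-style syntax assumed) and returning a ready-to-use *Prompt:

```go
tmpl := gollm.NewPromptTemplate(
	"code-review",
	"Reviews a code snippet",
	"Review the following {{.Language}} code and list potential bugs:\n{{.Code}}",
)

// Execute substitutes the data map into the template.
prompt, err := tmpl.Execute(map[string]interface{}{
	"Language": "Go",
	"Code":     "func div(a, b int) int { return a / b }",
})
if err != nil {
	log.Fatal(err)
}
fmt.Println(prompt.String())
```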
5. Configuration Options
5.1 LLM Configuration
SetProvider(provider string) ConfigOption
SetModel(model string) ConfigOption
SetOllamaEndpoint(endpoint string) ConfigOption
SetTemperature(temperature float64) ConfigOption
SetMaxTokens(maxTokens int) ConfigOption
SetTimeout(timeout time.Duration) ConfigOption
SetAPIKey(apiKey string) ConfigOption
SetMaxRetries(maxRetries int) ConfigOption
SetRetryDelay(retryDelay time.Duration) ConfigOption
SetDebugLevel(level LogLevel) ConfigOption
SetMemory(maxTokens int) ConfigOption
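A fragment combining the configuration options above (imports elided). The provider/model/endpoint values and the LogLevel constant name are illustrative assumptions, not values taken from this reference:

```go
llm, err := gollm.NewLLM(
	gollm.SetProvider("ollama"),                       // illustrative local provider
	gollm.SetModel("llama3"),                          // illustrative model name
	gollm.SetOllamaEndpoint("http://localhost:11434"), // Ollama's default address
	gollm.SetTemperature(0.2),
	gollm.SetMaxTokens(512),
	gollm.SetTimeout(30*time.Second),
	gollm.SetMaxRetries(3),
	gollm.SetRetryDelay(2*time.Second),
	gollm.SetDebugLevel(gollm.LogLevelInfo), // constant name assumed
	gollm.SetMemory(4096),                   // keep up to ~4096 tokens of conversation memory
)
if err != nil {
	log.Fatal(err)
}
_ = llm
```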
5.2 Prompt Configuration
WithDirectives(directives ...string) PromptOption
WithOutput(output string) PromptOption
WithContext(context string) PromptOption
WithMaxLength(length int) PromptOption
WithExamples(examples ...string) PromptOption
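A short fragment exercising every prompt option listed above; the prompt text and example values are illustrative:

```go
prompt := gollm.NewPrompt(
	"Suggest a name for a CLI tool that tails JSON logs.",
	gollm.WithContext("The tool is written in Go and aimed at SREs."),
	gollm.WithDirectives("Offer exactly three options", "Avoid trademarked names"),
	gollm.WithExamples("logtap", "jsontail"),
	gollm.WithOutput("A numbered list of names, each with a one-line rationale"),
	gollm.WithMaxLength(100),
)
fmt.Println(prompt.String())
```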
5.3 Optimizer Configuration
WithVerbose() OptimizerOption
WithIterationCallback(callback IterationCallback) OptimizerOption
WithMaxRetries(maxRetries int) OptimizerOption
WithRetryDelay(delay time.Duration) OptimizerOption
WithMemorySize(size int) OptimizerOption
WithIterations(iterations int) OptimizerOption
WithCustomMetrics(metrics ...Metric) OptimizerOption
WithOptimizationGoal(goal string) OptimizerOption
WithRatingSystem(system string) OptimizerOption
WithThreshold(threshold float64) OptimizerOption
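A fragment combining most optimizer options, assuming the `llm` client from section 2.1. WithIterationCallback and WithCustomMetrics are omitted because the IterationCallback and Metric types are not spelled out here; the rating-system string is illustrative:

```go
optimizer := gollm.NewPromptOptimizer(
	llm, // client created as in section 2.1
	"Explain the CAP theorem.",
	"produce a precise, interview-ready explanation",
	gollm.WithVerbose(),
	gollm.WithIterations(5),
	gollm.WithMemorySize(3),
	gollm.WithMaxRetries(2),
	gollm.WithRetryDelay(time.Second),
	gollm.WithRatingSystem("numerical"), // rating-system value is illustrative
	gollm.WithThreshold(0.8),
	gollm.WithOptimizationGoal("Maximize technical accuracy"),
)
_ = optimizer
```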
5.4 PromptTemplate Configuration
WithPromptOptions(options ...PromptOption) PromptTemplateOption
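A fragment showing how WithPromptOptions layers prompt-level options (section 5.2) onto a template so that prompts produced by Execute inherit them:

```go
tmpl := gollm.NewPromptTemplate(
	"summarizer",
	"Summarizes arbitrary text",
	"Summarize the following text:\n{{.Text}}",
	gollm.WithPromptOptions(
		gollm.WithDirectives("Keep it under three sentences"),
		gollm.WithMaxLength(80),
	),
)
_ = tmpl
```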
6. Type Aliases and Custom Types
type ValidateFunc[T any] func(T) error
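An illustrative ValidateFunc instantiation (imports elided); the Sentiment struct and its rules are hypothetical. A function like this is what CompareModels (section 2.1) uses to decide whether a model's structured output is acceptable:

```go
type Sentiment struct {
	Label string  `json:"label"`
	Score float64 `json:"score"`
}

// Domain rules: a label must be present and the score must lie in [0, 1].
var validateSentiment gollm.ValidateFunc[Sentiment] = func(s Sentiment) error {
	if s.Label == "" {
		return fmt.Errorf("missing label")
	}
	if s.Score < 0 || s.Score > 1 {
		return fmt.Errorf("score %f outside [0, 1]", s.Score)
	}
	return nil
}
```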
7. Best Practices and Usage Tips
- Structured Data Extraction: Use ExtractStructuredData when you need to parse unstructured text into a specific Go struct.
- Prompt Templates: Utilize PromptTemplate for creating reusable prompt structures.
- Model Comparison: Use the CompareModels function for benchmarking and selecting the best model for a specific task.
- Working with Examples: Use readExamplesFromFile and selectExamples to manage and use example data in your prompts.
- Response Cleaning: Always use cleanResponse or similar cleaning methods on raw LLM outputs.
- Custom Validation: Implement custom ValidateFunc functions to ensure that the data extracted or generated by the LLM meets your specific requirements.
For more detailed information on each type, function, and method, including parameters and return values, please refer to the package documentation and source code.