[Go] ollama.Init takes Config (#608)
Pass a Config struct to Init instead of just the server address,
to allow for additional configuration options.

By passing a struct, adding options is not a breaking change.

Also make other minor stylistic changes.
jba authored Jul 13, 2024
1 parent cf47f3f commit d17884e
Showing 2 changed files with 23 additions and 18 deletions.
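
The change is easiest to see from the caller's side. Below is a minimal sketch of a call site after this commit, assuming the plugin's import path github.com/firebase/genkit/go/plugins/ollama; the comment explains why the struct keeps future options from breaking callers, and any future Config field it mentions is hypothetical.

package main

import (
    "context"
    "log"

    "github.com/firebase/genkit/go/plugins/ollama"
)

func main() {
    ctx := context.Background()
    // Config currently has only ServerAddress. If a later release adds more
    // fields (say, a hypothetical request timeout), this call keeps compiling
    // unchanged, whereas adding a parameter to Init(ctx, serverAddress) would
    // have broken every existing caller.
    if err := ollama.Init(ctx, &ollama.Config{
        ServerAddress: "http://127.0.0.1:11434",
    }); err != nil {
        log.Fatal(err)
    }
}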
go/internal/doc-snippets/ollama.go (3 additions, 1 deletion)
@@ -26,7 +26,9 @@ func ollamaEx(ctx context.Context) error {
 
     //!+init
     // Init with Ollama's default local address.
-    if err := ollama.Init(ctx, "http://127.0.0.1:11434"); err != nil {
+    if err := ollama.Init(ctx, &ollama.Config{
+        ServerAddress: "http://127.0.0.1:11434",
+    }); err != nil {
         return err
     }
     //!-init
go/plugins/ollama/ollama.go (20 additions, 17 deletions)
@@ -84,14 +84,6 @@ type ModelDefinition struct {
     Type string
 }
 
-// Config provides configuration options for the Init function.
-type Config struct {
-    // Server Address of oLLama.
-    ServerAddress string
-    // Generative models to provide.
-    Models []ModelDefinition
-}
-
 type generator struct {
     model ModelDefinition
     serverAddress string
@@ -147,15 +139,25 @@ type ollamaGenerateResponse struct {
     Response string `json:"response"`
 }
 
-// Note: Since Ollama models are locally hosted, the plugin doesn't initialize any default models.
-// The user has to explicitly decide which model to pull down.
-func Init(ctx context.Context, serverAddress string) (err error) {
+// Config provides configuration options for the Init function.
+type Config struct {
+    // Server Address of oLLama.
+    ServerAddress string
+}
+
+// Init initializes the plugin.
+// Since Ollama models are locally hosted, the plugin doesn't initialize any default models.
+// After downloading a model, call [DefineModel] to use it.
+func Init(ctx context.Context, cfg *Config) (err error) {
     state.mu.Lock()
     defer state.mu.Unlock()
     if state.initted {
         panic("ollama.Init already called")
     }
-    state.serverAddress = serverAddress
+    if cfg == nil || cfg.ServerAddress == "" {
+        return errors.New("ollama: need ServerAddress")
+    }
+    state.serverAddress = cfg.ServerAddress
     state.initted = true
     return nil
 }
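
As a usage note on the validation added above: Init now reports a missing address as an error instead of proceeding, so callers should check its return value before defining models. A minimal sketch under the same import-path assumption as before; the OLLAMA_ADDR environment variable here is purely illustrative and is not something the plugin reads.

package main

import (
    "context"
    "fmt"
    "log"
    "os"

    "github.com/firebase/genkit/go/plugins/ollama"
)

func initOllama(ctx context.Context) error {
    // Illustrative fallback: use an address from the environment if set,
    // otherwise Ollama's default local address.
    addr := os.Getenv("OLLAMA_ADDR")
    if addr == "" {
        addr = "http://127.0.0.1:11434"
    }
    // With a nil Config or an empty ServerAddress, Init returns
    // "ollama: need ServerAddress" rather than leaving the plugin half-configured.
    if err := ollama.Init(ctx, &ollama.Config{ServerAddress: addr}); err != nil {
        return fmt.Errorf("configuring ollama plugin: %w", err)
    }
    return nil
}

func main() {
    if err := initOllama(context.Background()); err != nil {
        log.Fatal(err)
    }
}

After Init succeeds, a model still has to be pulled locally (for example with the ollama CLI) and registered through the plugin's DefineModel before it can be used for generation, as the new doc comment notes.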
@@ -194,16 +196,17 @@ func (g *generator) generate(ctx context.Context, input *ai.GenerateRequest, cb
             Stream: stream,
         }
     }
-    client := &http.Client{
-        Timeout: time.Second * 30,
-    }
+    client := &http.Client{Timeout: 30 * time.Second}
     payloadBytes, err := json.Marshal(payload)
     if err != nil {
         return nil, err
     }
     // Determine the correct endpoint
     endpoint := g.serverAddress + "/api/chat"
     if !isChatModel {
         endpoint = g.serverAddress + "/api/generate"
     }
-    req, err := http.NewRequest("POST", endpoint, bytes.NewBuffer(payloadBytes))
+    req, err := http.NewRequest("POST", endpoint, bytes.NewReader(payloadBytes))
     if err != nil {
         return nil, fmt.Errorf("failed to create request: %v", err)
     }
@@ -253,7 +256,7 @@ func (g *generator) generate(ctx context.Context, input *ai.GenerateRequest, cb
             cb(ctx, chunk)
         }
         if err := scanner.Err(); err != nil {
-            return nil, fmt.Errorf("failed to read stream: %v", err)
+            return nil, fmt.Errorf("reading response stream: %v", err)
         }
         // Create a final response with the merged chunks
         finalResponse := &ai.GenerateResponse{
