How to Use Ollama API from Go for Local LLMs

Send a JSON POST request to localhost:11434/api/generate using Go's net/http package to query a local Ollama model.

Note that by default the `/api/generate` endpoint streams its reply as a sequence of JSON objects, one per token chunk; either set `"stream": false` in the request body to get a single JSON object, or decode the stream object-by-object with `json.Decoder`.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"
)

type Request struct {
	Model string `json:"model"`
	Prompt string `json:"prompt"`
}

type Response struct {
	Response string `json:"response"`
}

// main sends a single prompt to a local Ollama server and prints the
// model's complete reply to stdout.
func main() {
	req := Request{Model: "llama3", Prompt: "Hello"}
	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}

	// Always bound the request: generation can take a long time, but it
	// should not hang forever if the server wedges.
	client := &http.Client{Timeout: 2 * time.Minute}
	resp, err := client.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// A non-200 body is an error message, not a generation — surface it.
	if resp.StatusCode != http.StatusOK {
		msg, _ := io.ReadAll(resp.Body) // best effort: include server detail
		panic(fmt.Sprintf("ollama: %s: %s", resp.Status, msg))
	}

	// /api/generate streams one JSON object per token chunk by default.
	// Decoding object-by-object and concatenating the fragments handles
	// both the streaming case and a single "stream": false response.
	dec := json.NewDecoder(resp.Body)
	var full strings.Builder
	for {
		var chunk Response
		if err := dec.Decode(&chunk); err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
		full.WriteString(chunk.Response)
	}
	fmt.Println(full.String())
}