diff --git a/examples/golang-streamchat/README.md b/examples/golang-streamchat/README.md
new file mode 100644
index 00000000..a1ec1312
--- /dev/null
+++ b/examples/golang-streamchat/README.md
@@ -0,0 +1,31 @@
+# golang-streamchat
+
+A simple Go program that streams chat responses from llama2.
+
+## Get Started
+
+1. Run the Ollama Docker container:
+
+```shell
+sudo docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+```
+
+For more detailed information, refer to the [Ollama Quickstart Docker](https://hub.docker.com/r/ollama/ollama). Note that this runs on CPU only, so responses will be slow; if you have a GPU, follow the instructions there to run the container with GPU support for better performance.
+
+2. Pull the llama2 model:
+
+```shell
+curl --location 'http://localhost:11434/api/pull' \
+--header 'Content-Type: application/json' \
+--data '{
+    "name": "llama2:7b"
+}'
+```
+
+3. Run the Go code:
+
+```shell
+go run main.go
+```
+
+![get-started-gif](./assets/get-started.gif)
diff --git a/examples/golang-streamchat/assets/get-started.gif b/examples/golang-streamchat/assets/get-started.gif
new file mode 100644
index 00000000..0d3bc8cf
Binary files /dev/null and b/examples/golang-streamchat/assets/get-started.gif differ
diff --git a/examples/golang-streamchat/go.mod b/examples/golang-streamchat/go.mod
new file mode 100644
index 00000000..36008f76
--- /dev/null
+++ b/examples/golang-streamchat/go.mod
@@ -0,0 +1,3 @@
+module github.com/jmorganca/ollama/examples/golang-streamchat
+
+go 1.21.4
diff --git a/examples/golang-streamchat/main.go b/examples/golang-streamchat/main.go
new file mode 100644
index 00000000..05916e43
--- /dev/null
+++ b/examples/golang-streamchat/main.go
@@ -0,0 +1,100 @@
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"io"
+	"net/http"
+)
+
+func main() {
+	// Stream a single chat completion from a locally running Ollama server.
+	err := SendChatRequest(ReqStreamChat{
+		Model: ModelLlama27b,
+		Messages: []ReqStreamChatMessage{
+			{
+				Role:    RoleUser,
+				Content: "Show me how to use golang channel.",
+			},
+		},
+	})
+	if err != nil {
+		panic(err)
+	}
+}
+
+// SendChatRequest posts payload to the Ollama /api/chat endpoint and prints
+// each streamed message fragment to stdout as it arrives.
+func SendChatRequest(payload ReqStreamChat) error {
+	jsonBytes, err := json.Marshal(payload)
+	if err != nil {
+		return fmt.Errorf("marshaling chat request: %w", err)
+	}
+
+	url := "http://localhost:11434/api/chat"
+	res, err := http.Post(url, "application/json", bytes.NewBuffer(jsonBytes))
+	if err != nil {
+		return fmt.Errorf("posting chat request: %w", err)
+	}
+	defer res.Body.Close()
+
+	// The response is a stream of newline-delimited JSON objects; decode
+	// until the stream ends, surfacing any mid-stream decode failure.
+	dec := json.NewDecoder(res.Body)
+	for {
+		var r ResStreamChat
+		if err := dec.Decode(&r); err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return fmt.Errorf("decoding stream response: %w", err)
+		}
+		fmt.Print(r.Message.Content)
+	}
+
+	return nil
+}
+
+// Model identifies an Ollama model name.
+type Model string
+
+const (
+	ModelLlama27b Model = "llama2:7b"
+)
+
+// Role identifies the author of a chat message.
+type Role string
+
+const (
+	RoleUser      Role = "user"
+	RoleAssistant Role = "assistant"
+)
+
+// ReqStreamChat is the request body for the /api/chat endpoint.
+type ReqStreamChat struct {
+	Model    Model                  `json:"model"`
+	Messages []ReqStreamChatMessage `json:"messages"`
+}
+
+// ReqStreamChatMessage is a single message in the chat history.
+type ReqStreamChatMessage struct {
+	Role    Role   `json:"role"`
+	Content string `json:"content"`
+}
+
+// ResStreamChat is one streamed response chunk from /api/chat.
+type ResStreamChat struct {
+	Model     Model                `json:"model"`
+	CreatedAt string               `json:"created_at"`
+	Message   ResStreamChatMessage `json:"message"`
+	Done      bool                 `json:"done"`
+}
+
+// ResStreamChatMessage carries the incremental assistant output.
+type ResStreamChatMessage struct {
+	Role    Role        `json:"role"`
+	Content string      `json:"content"`
+	Images  interface{} `json:"images"`
+}