From 1ade380bd73424f1a11639daa0c338b1b9092dea Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 11:48:25 -0800 Subject: [PATCH 1/7] Simple chat example for typescript Signed-off-by: Matt Williams --- examples/typescript-simplechat/client.ts | 78 +++++++++++++++++++++ examples/typescript-simplechat/package.json | 1 + examples/typescript-simplechat/readme.md | 31 ++++++++ 3 files changed, 110 insertions(+) create mode 100644 examples/typescript-simplechat/client.ts create mode 100644 examples/typescript-simplechat/package.json create mode 100644 examples/typescript-simplechat/readme.md diff --git a/examples/typescript-simplechat/client.ts b/examples/typescript-simplechat/client.ts new file mode 100644 index 00000000..7e37fe30 --- /dev/null +++ b/examples/typescript-simplechat/client.ts @@ -0,0 +1,78 @@ +import * as readline from "readline"; + +const model = "llama2"; +type Message = { + role: "assistant" | "user" | "system"; + content: string; +} +const messages: Message[] = [{ + role: "system", + content: "You are a helpful AI agent." 
+}] + +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout +}) + +async function chat(messages: Message[]): Promise { + const body = { + model: model, + messages: messages + } + + const response = await fetch("http://localhost:11434/api/chat", { + method: "POST", + body: JSON.stringify(body) + }) + + const reader = response.body?.getReader() + if (!reader) { + throw new Error("Failed to read response body") + } + const content: string[] = [] + while (true) { + const { done, value } = await reader.read() + if (done) { + break; + } + const rawjson = new TextDecoder().decode(value); + const json = JSON.parse(rawjson) + + if (json.done === false) { + process.stdout.write(json.message.content); + content.push(json.message.content) + // messages.push({role: "system", content: text}) + } + + } + return { role: "assistant", content: content.join("") }; +} + +async function askQuestion(): Promise { + return new Promise((resolve) => { + rl.question("\n\nAsk a question: (press enter alone to quit)\n\n", async (user_input) => { + if (user_input.trim() === "") { + rl.close(); + console.log("Thankyou. 
Goodbye.\n") + console.log("=======\nHere is the message history that was used in this conversation.\n=======\n") + messages.forEach(message => { + console.log(message) + }) + resolve(); + } else { + console.log(); + messages.push({ role: "user", content: user_input }); + messages.push(await chat(messages)); + await askQuestion(); // Ask the next question + } + }); + }); +} + +async function main() { + await askQuestion(); + +} + +main(); \ No newline at end of file diff --git a/examples/typescript-simplechat/package.json b/examples/typescript-simplechat/package.json new file mode 100644 index 00000000..4ee1647d --- /dev/null +++ b/examples/typescript-simplechat/package.json @@ -0,0 +1 @@ +{ "dependencies": { "@types/node": "^20.10.4", "prompt-sync": "^4.2.0", "readline": "^1.3.0" } } \ No newline at end of file diff --git a/examples/typescript-simplechat/readme.md b/examples/typescript-simplechat/readme.md new file mode 100644 index 00000000..ea61bd8a --- /dev/null +++ b/examples/typescript-simplechat/readme.md @@ -0,0 +1,31 @@ +# Simple Chat Example + +The **chat** endpoint is one of two ways to generate text from an LLM with Ollama. At a high level you provide the endpoint an array of objects with a role and content specified. Then with each output and prompt, you add more of those role/content objects, which builds up the history. + +## Review the Code + +You can see in the **chat** function that actually calling the endpoint is done simply with: + +```typescript +const body = { + model: model, + messages: messages +} + +const response = await fetch("http://localhost:11434/api/chat", { + method: "POST", + body: JSON.stringify(body) +}) +``` + +With the **generate** endpoint, you need to provide a `prompt`. But with **chat**, you provide `messages`. And the resulting stream of responses includes a `message` object with a `content` field. + +The final JSON object doesn't provide the full content, so you will need to build the content yourself. 
In this example, **chat** takes the full array of messages and outputs the resulting message from this call of the chat endpoint. + +In the **askQuestion** function, we collect `user_input` and add it as a message to our messages and that is passed to the chat function. When the LLM is done responding the output is added as another message to the messages array. + +At the end, you will see a printout of all the messages. + +## Next Steps + +In this example, all generations are kept. You might want to experiment with summarizing everything older than 10 conversations to enable longer history with less context being used. From 30229a913cad1bec8b78a5ca349c14856261a71c Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:39:24 -0800 Subject: [PATCH 2/7] Update examples/typescript-simplechat/client.ts Co-authored-by: Bruce MacDonald --- examples/typescript-simplechat/client.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/typescript-simplechat/client.ts b/examples/typescript-simplechat/client.ts index 7e37fe30..5d703a75 100644 --- a/examples/typescript-simplechat/client.ts +++ b/examples/typescript-simplechat/client.ts @@ -42,7 +42,6 @@ async function chat(messages: Message[]): Promise { if (json.done === false) { process.stdout.write(json.message.content); content.push(json.message.content) - // messages.push({role: "system", content: text}) } } From b84d34e632a8636c837d0c901a879467b51f2c7b Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:39:33 -0800 Subject: [PATCH 3/7] Update examples/typescript-simplechat/readme.md Co-authored-by: Bruce MacDonald --- examples/typescript-simplechat/readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/typescript-simplechat/readme.md b/examples/typescript-simplechat/readme.md index ea61bd8a..da15b13a 100644 --- a/examples/typescript-simplechat/readme.md +++ b/examples/typescript-simplechat/readme.md @@ -4,7 +4,7 @@ The **chat** endpoint is one of two ways 
to generate text from an LLM with Ollam ## Review the Code -You can see in the **chat** function that actually calling the endpoint is done simply with: +You can see in the **chat** function that is actually calling the endpoint is simply done with: ```typescript const body = { From 6cc823c9b5725a38d15c3bbda2389b8b7d0fc013 Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:39:59 -0800 Subject: [PATCH 4/7] Update examples/typescript-simplechat/client.ts Co-authored-by: Bruce MacDonald --- examples/typescript-simplechat/client.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/typescript-simplechat/client.ts b/examples/typescript-simplechat/client.ts index 5d703a75..9e7a93b9 100644 --- a/examples/typescript-simplechat/client.ts +++ b/examples/typescript-simplechat/client.ts @@ -30,7 +30,7 @@ async function chat(messages: Message[]): Promise { if (!reader) { throw new Error("Failed to read response body") } - const content: string[] = [] + let content = "" while (true) { const { done, value } = await reader.read() if (done) { @@ -41,7 +41,7 @@ async function chat(messages: Message[]): Promise { if (json.done === false) { process.stdout.write(json.message.content); - content.push(json.message.content) + content += json.message.content } } From 5344f886c8e4800df1ac23a207921acd37cb5300 Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:40:37 -0800 Subject: [PATCH 5/7] Update examples/typescript-simplechat/client.ts Co-authored-by: Bruce MacDonald --- examples/typescript-simplechat/client.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/typescript-simplechat/client.ts b/examples/typescript-simplechat/client.ts index 9e7a93b9..3e571ab6 100644 --- a/examples/typescript-simplechat/client.ts +++ b/examples/typescript-simplechat/client.ts @@ -45,7 +45,7 @@ async function chat(messages: Message[]): Promise { } } - return { role: "assistant", content: content.join("") }; + return { 
role: "assistant", content: content }; } async function askQuestion(): Promise { From fa75998c0db9f993346456549100ecba77d1381c Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:40:54 -0800 Subject: [PATCH 6/7] Update examples/typescript-simplechat/readme.md Co-authored-by: Bruce MacDonald --- examples/typescript-simplechat/readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/typescript-simplechat/readme.md b/examples/typescript-simplechat/readme.md index da15b13a..6c0a1f4b 100644 --- a/examples/typescript-simplechat/readme.md +++ b/examples/typescript-simplechat/readme.md @@ -1,6 +1,6 @@ # Simple Chat Example -The **chat** endpoint is one of two ways to generate text from an LLM with Ollama. At a high level you provide the endpoint an array of objects with a role and content specified. Then with each output and prompt, you add more of those role/content objects, which builds up the history. +The **chat** endpoint is one of two ways to generate text from an LLM with Ollama. At a high level you provide the endpoint an array of message objects with a role and content specified. Then with each output and prompt, you add more messages, which builds up the history. ## Review the Code From 02fe26c44b8c96744c7467c631c81af1bdf00921 Mon Sep 17 00:00:00 2001 From: Matt Williams Date: Thu, 7 Dec 2023 13:46:30 -0800 Subject: [PATCH 7/7] update the readme as per bruce Signed-off-by: Matt Williams --- examples/typescript-simplechat/readme.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/examples/typescript-simplechat/readme.md b/examples/typescript-simplechat/readme.md index 6c0a1f4b..ccd4aaf6 100644 --- a/examples/typescript-simplechat/readme.md +++ b/examples/typescript-simplechat/readme.md @@ -2,6 +2,14 @@ The **chat** endpoint is one of two ways to generate text from an LLM with Ollama. At a high level you provide the endpoint an array of message objects with a role and content specified. 
Then with each output and prompt, you add more messages, which builds up the history. +## Run the Example + +There are a few ways to run this, just like any TypeScript code: + +1. Compile with `tsc` and then run it with `node client.js`. +2. Install `tsx` and run it with `tsx client.ts`. +3. Install `bun` and run it with `bun client.ts`. + ## Review the Code You can see in the **chat** function that is actually calling the endpoint is simply done with: