From 36c87c433b7d880ef8b3a2b05ef93b0cd1675520 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Fri, 12 Jul 2024 11:48:06 -0700 Subject: [PATCH 1/6] template: preprocess message and collect system --- template/template.go | 37 +++++++++++---------------- template/template_test.go | 53 ++++++--------------------------------- 2 files changed, 23 insertions(+), 67 deletions(-) diff --git a/template/template.go b/template/template.go index 21e1614d..9b351666 100644 --- a/template/template.go +++ b/template/template.go @@ -102,22 +102,8 @@ var response = parse.ActionNode{ }, } -var funcs = template.FuncMap{ - // contents returns the contents of messages with an optional role filter - "contents": func(v []*api.Message, role ...string) string { - var parts []string - for _, m := range v { - if len(role) == 0 || role[0] == "" || m.Role == role[0] { - parts = append(parts, m.Content) - } - } - - return strings.Join(parts, "\n\n") - }, -} - func Parse(s string) (*Template, error) { - tmpl := template.New("").Option("missingkey=zero").Funcs(funcs) + tmpl := template.New("").Option("missingkey=zero") tmpl, err := tmpl.Parse(s) if err != nil { @@ -163,15 +149,16 @@ type Values struct { } func (t *Template) Execute(w io.Writer, v Values) error { - collated := collate(v.Messages) + system, collated := collate(v.Messages) if !v.forceLegacy && slices.Contains(t.Vars(), "messages") { return t.Template.Execute(w, map[string]any{ + "System": system, "Messages": collated, }) } var b bytes.Buffer - var system, prompt, response string + var prompt, response string for i, m := range collated { switch m.Role { case "system": @@ -223,11 +210,13 @@ func (t *Template) Execute(w io.Writer, v Values) error { } // collate messages based on role. consecutive messages of the same role are merged -// into a single message. collate also pulls out and merges messages with Role == "system" -// which are templated separately. As a side effect, it mangles message content adding image -// tags ([img-%d]) as needed -func collate(msgs []api.Message) (collated []*api.Message) { +// into a single message. collate also collects and returns all system messages. 
+// collate mutates message content adding image tags ([img-%d]) as needed +func collate(msgs []api.Message) (string, []*api.Message) { var n int + + var system []string + var collated []*api.Message for i := range msgs { msg := msgs[i] for range msg.Images { @@ -240,6 +229,10 @@ func collate(msgs []api.Message) (collated []*api.Message) { n++ } + if msg.Role == "system" { + system = append(system, msg.Content) + } + if len(collated) > 0 && collated[len(collated)-1].Role == msg.Role { collated[len(collated)-1].Content += "\n\n" + msg.Content } else { @@ -247,7 +240,7 @@ func collate(msgs []api.Message) (collated []*api.Message) { } } - return + return strings.Join(system, "\n\n"), collated } func parseNode(n parse.Node) []string { diff --git a/template/template_test.go b/template/template_test.go index 5e5f4257..c678f1b1 100644 --- a/template/template_test.go +++ b/template/template_test.go @@ -216,13 +216,11 @@ func TestExecuteWithMessages(t *testing.T) { {"response", `[INST] {{ if .System }}{{ .System }} {{ end }}{{ .Prompt }}[/INST] {{ .Response }}`}, - {"messages", `{{- $system := contents .Messages "system" -}} -{{- range $index, $_ := .Messages }} -{{- if eq .Role "user" }}[INST] {{ if $system }}{{ $system }} -{{- $system = "" }} + {"messages", `[INST] {{ if .System }}{{ .System }} -{{ end }}{{ .Content }}[/INST] {{ else if eq .Role "assistant" }}{{ .Content }} -{{- end }} +{{ end }} +{{- range .Messages }} +{{- if eq .Role "user" }}{{ .Content }}[/INST] {{ else if eq .Role "assistant" }}{{ .Content }}[INST] {{ end }} {{- end }}`}, }, Values{ @@ -243,13 +241,11 @@ func TestExecuteWithMessages(t *testing.T) { {"response", `[INST] {{ if .System }}{{ .System }} {{ end }}{{ .Prompt }}[/INST] {{ .Response }}`}, - {"messages", `{{- $system := contents .Messages "system" -}} -{{- range $index, $_ := .Messages }} -{{- if eq .Role "user" }}[INST] {{ if $system }}{{ $system }} -{{- $system = "" }} + {"messages", `[INST] {{ if .System }}{{ .System }} -{{ end }}{{ .Content }}[/INST] {{ else if eq .Role "assistant" }}{{ .Content }} -{{- end }} +{{ end }} +{{- range .Messages }} +{{- if eq .Role "user" }}{{ .Content }}[/INST] {{ else if eq .Role "assistant" }}{{ .Content }}[INST] {{ end }} {{- end }}`}, }, Values{ @@ -363,36 +359,3 @@ Answer: `, }) } } - -func TestFuncs(t *testing.T) { - t.Run("contents", func(t *testing.T) { - cases := map[string]string{ - "": "A\n\nB\n\nC\n\nD\n\nE\n\nF", - "system": "A\n\nF", - "user": "B\n\nE", - "assistant": "C\n\nD", - } - - s := []*api.Message{ - {Role: "system", Content: "A"}, - {Role: "user", Content: "B"}, - {Role: "assistant", Content: "C"}, - {Role: "assistant", Content: "D"}, - {Role: "user", Content: "E"}, - {Role: "system", Content: "F"}, - } - - fn, ok := funcs["contents"].(func([]*api.Message, ...string) string) - if !ok { - t.Fatal("contents is not a function") - } - - for k, v := range cases { - t.Run(k, func(t *testing.T) { - if diff := cmp.Diff(fn(s, k), v); diff != "" { - t.Errorf("mismatch (-got +want):\n%s", diff) - } - }) - } - }) -} From 33627331a370755ff5033c0fcd71d1c9210c9d96 Mon Sep 17 00:00:00 2001 From: Jeffrey Morgan Date: Fri, 12 Jul 2024 12:29:23 -0700 Subject: [PATCH 2/6] app: also clean up tempdir runners on install (#5646) --- app/ollama.iss | 1 + 1 file changed, 1 insertion(+) diff --git a/app/ollama.iss b/app/ollama.iss index fef4a7b2..6bedb9ff 100644 --- a/app/ollama.iss +++ b/app/ollama.iss @@ -128,6 +128,7 @@ Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\history" ; NOTE: if the user has a custom OLLAMA_MODELS it will 
be preserved [InstallDelete] +Type: filesandordirs; Name: "{%TEMP}\ollama*" Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama" [Messages] From 9ac0a7a50b8d7a0f0627b037c7632181bfbcca97 Mon Sep 17 00:00:00 2001 From: Patrick Devine Date: Fri, 12 Jul 2024 15:41:31 -0700 Subject: [PATCH 3/6] remove template from tests --- cmd/interactive_test.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/cmd/interactive_test.go b/cmd/interactive_test.go index d9af01eb..711f3860 100644 --- a/cmd/interactive_test.go +++ b/cmd/interactive_test.go @@ -59,7 +59,6 @@ func TestModelfileBuilder(t *testing.T) { opts := runOptions{ Model: "hork", System: "You are part horse and part shark, but all hork. Do horklike things", - Template: "This is a template.", Messages: []api.Message{ {Role: "user", Content: "Hey there hork!"}, {Role: "assistant", Content: "Yes it is true, I am half horse, half shark."}, @@ -75,7 +74,6 @@ func TestModelfileBuilder(t *testing.T) { mf := buildModelfile(opts) expectedModelfile := `FROM {{.Model}} SYSTEM """{{.System}}""" -TEMPLATE """{{.Template}}""" PARAMETER penalize_newline false PARAMETER seed 42 PARAMETER stop [hi there] @@ -97,7 +95,6 @@ MESSAGE assistant """Yes it is true, I am half horse, half shark.""" mf = buildModelfile(opts) expectedModelfile = `FROM {{.ParentModel}} SYSTEM """{{.System}}""" -TEMPLATE """{{.Template}}""" PARAMETER penalize_newline false PARAMETER seed 42 PARAMETER stop [hi there] From 23ebbaa46ead40c44c20b707b0e53d954ea51dc5 Mon Sep 17 00:00:00 2001 From: Patrick Devine Date: Fri, 12 Jul 2024 15:47:17 -0700 Subject: [PATCH 4/6] Revert "remove template from tests" This reverts commit 9ac0a7a50b8d7a0f0627b037c7632181bfbcca97. --- cmd/interactive_test.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cmd/interactive_test.go b/cmd/interactive_test.go index 711f3860..d9af01eb 100644 --- a/cmd/interactive_test.go +++ b/cmd/interactive_test.go @@ -59,6 +59,7 @@ func TestModelfileBuilder(t *testing.T) { opts := runOptions{ Model: "hork", System: "You are part horse and part shark, but all hork. 
Do horklike things", + Template: "This is a template.", Messages: []api.Message{ {Role: "user", Content: "Hey there hork!"}, {Role: "assistant", Content: "Yes it is true, I am half horse, half shark."}, @@ -74,6 +75,7 @@ func TestModelfileBuilder(t *testing.T) { mf := buildModelfile(opts) expectedModelfile := `FROM {{.Model}} SYSTEM """{{.System}}""" +TEMPLATE """{{.Template}}""" PARAMETER penalize_newline false PARAMETER seed 42 PARAMETER stop [hi there] @@ -95,6 +97,7 @@ MESSAGE assistant """Yes it is true, I am half horse, half shark.""" mf = buildModelfile(opts) expectedModelfile = `FROM {{.ParentModel}} SYSTEM """{{.System}}""" +TEMPLATE """{{.Template}}""" PARAMETER penalize_newline false PARAMETER seed 42 PARAMETER stop [hi there] From 22c5451fc28b20dd83a389c49d9caf6a1e50a9e3 Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Fri, 12 Jul 2024 21:04:44 -0700 Subject: [PATCH 5/6] fix system prompt (#5662) * fix system prompt * execute template when hitting previous roles * fix tests --------- Co-authored-by: jmorganca --- server/prompt.go | 23 +++++++---------------- server/prompt_test.go | 18 ++++++++++++++++++ template/template.go | 40 ++++++++++++++++++++++++++-------------- 3 files changed, 51 insertions(+), 30 deletions(-) diff --git a/server/prompt.go b/server/prompt.go index 51d691a9..abc5e61e 100644 --- a/server/prompt.go +++ b/server/prompt.go @@ -4,7 +4,6 @@ import ( "bytes" "context" "log/slog" - "slices" "github.com/ollama/ollama/api" "github.com/ollama/ollama/llm" @@ -17,26 +16,18 @@ type tokenizeFunc func(context.Context, string) ([]int, error) // chatPrompt truncates any messages that exceed the context window of the model, making sure to always include 1) the // latest message and 2) system messages func chatPrompt(ctx context.Context, m *Model, tokenize tokenizeFunc, opts *api.Options, msgs []api.Message) (prompt string, images []llm.ImageData, _ error) { - // pull out any system messages which should always be included in the prompt var system []api.Message - msgs = slices.DeleteFunc(msgs, func(m api.Message) bool { - if m.Role == "system" { - system = append(system, m) - return true - } - - return false - }) - - if len(system) == 0 && m.System != "" { - // add model system prompt since it wasn't provided - system = append(system, api.Message{Role: "system", Content: m.System}) - } - // always include the last message n := len(msgs) - 1 // in reverse, find all messages that fit into context window for i := n - 1; i >= 0; i-- { + system = make([]api.Message, 0) + for j := range i { + if msgs[j].Role == "system" { + system = append(system, msgs[j]) + } + } + var b bytes.Buffer if err := m.Template.Execute(&b, template.Values{Messages: append(system, msgs[i:]...)}); err != nil { return "", nil, err diff --git a/server/prompt_test.go b/server/prompt_test.go index 1435b143..d8caf3ed 100644 --- a/server/prompt_test.go +++ b/server/prompt_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/google/go-cmp/cmp" "github.com/ollama/ollama/api" "github.com/ollama/ollama/template" ) @@ -164,6 +165,19 @@ func TestChatPrompt(t *testing.T) { prompt: "You are the Test Who Lived. You're a test, Harry! I-I'm a what? A test. And a thumping good one at that, I'd wager. ", }, }, + { + name: "out of order system", + limit: 2048, + msgs: []api.Message{ + {Role: "user", Content: "You're a test, Harry!"}, + {Role: "assistant", Content: "I-I'm a what?"}, + {Role: "system", Content: "You are the Test Who Lived."}, + {Role: "user", Content: "A test. 
And a thumping good one at that, I'd wager."}, + }, + expect: expect{ + prompt: "You're a test, Harry! I-I'm a what? You are the Test Who Lived. A test. And a thumping good one at that, I'd wager. ", + }, + }, } tmpl, err := template.Parse(` @@ -187,6 +201,10 @@ func TestChatPrompt(t *testing.T) { t.Errorf("expected %q, got %q", tt.prompt, prompt) } + if diff := cmp.Diff(prompt, tt.prompt); diff != "" { + t.Errorf("mismatch (-got +want):\n%s", diff) + } + if len(images) != len(tt.images) { t.Fatalf("expected %d images, got %d", len(tt.images), len(images)) } diff --git a/template/template.go b/template/template.go index 9b351666..90014ec1 100644 --- a/template/template.go +++ b/template/template.go @@ -149,27 +149,19 @@ type Values struct { } func (t *Template) Execute(w io.Writer, v Values) error { - system, collated := collate(v.Messages) + system, messages := collate(v.Messages) if !v.forceLegacy && slices.Contains(t.Vars(), "messages") { return t.Template.Execute(w, map[string]any{ "System": system, - "Messages": collated, + "Messages": messages, }) } + system = "" var b bytes.Buffer var prompt, response string - for i, m := range collated { - switch m.Role { - case "system": - system = m.Content - case "user": - prompt = m.Content - case "assistant": - response = m.Content - } - - if i != len(collated)-1 && prompt != "" && response != "" { + for _, m := range messages { + execute := func () error { if err := t.Template.Execute(&b, map[string]any{ "System": system, "Prompt": prompt, @@ -181,6 +173,26 @@ func (t *Template) Execute(w io.Writer, v Values) error { system = "" prompt = "" response = "" + return nil + } + + switch m.Role { + case "system": + if prompt != "" || response != "" { + if err := execute(); err != nil { + return err + } + } + system = m.Content + case "user": + if response != "" { + if err := execute(); err != nil { + return err + } + } + prompt = m.Content + case "assistant": + response = m.Content } } @@ -199,7 +211,7 @@ func (t *Template) Execute(w io.Writer, v Values) error { tree := parse.Tree{Root: nodes.(*parse.ListNode)} if err := template.Must(template.New("").AddParseTree("", &tree)).Execute(&b, map[string]any{ - "System": "", + "System": system, "Prompt": prompt, }); err != nil { return err From 02fea420e5a0042d5e4cfbb5024a6d7e092dc789 Mon Sep 17 00:00:00 2001 From: Jarek Date: Sat, 13 Jul 2024 17:33:46 +0200 Subject: [PATCH 6/6] Add Kerlig AI, an app for macOS (#5675) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 62f5cd65..eb5e8532 100644 --- a/README.md +++ b/README.md @@ -293,6 +293,7 @@ See the [API documentation](./docs/api.md) for all endpoints. - [OllamaSpring](https://github.com/CrazyNeil/OllamaSpring) (Ollama Client for macOS) - [LLocal.in](https://github.com/kartikm7/llocal) (Easy to use Electron Desktop Client for Ollama) - [Ollama with Google Mesop](https://github.com/rapidarchitect/ollama_mesop/) (Mesop Chat Client implementation with Ollama) +- [Kerlig AI](https://www.kerlig.com/) (AI writing assistant for macOS) ### Terminal
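
For reference, the following is a minimal, standalone sketch of the message-collation behavior introduced by patches 1 and 5: consecutive messages with the same role are merged, and all system content is gathered into a single string that templates can reference as `.System`. It is not the ollama code itself — it assumes a simplified `Message` type in place of `api.Message` and omits the `[img-%d]` tagging side effect that the real `collate` performs.

```go
package main

import (
	"fmt"
	"strings"
)

// Message mirrors only the role/content fields of api.Message that the
// patched collate logic depends on; the real type lives in
// github.com/ollama/ollama/api.
type Message struct {
	Role    string
	Content string
}

// collate merges consecutive messages that share a role and collects all
// system content into one string, in the spirit of the patched
// template.collate (image tagging is intentionally left out here).
func collate(msgs []Message) (string, []*Message) {
	var system []string
	var collated []*Message
	for i := range msgs {
		msg := msgs[i]
		if msg.Role == "system" {
			system = append(system, msg.Content)
		}
		if len(collated) > 0 && collated[len(collated)-1].Role == msg.Role {
			// Same role as the previous message: fold the content together.
			collated[len(collated)-1].Content += "\n\n" + msg.Content
		} else {
			collated = append(collated, &msg)
		}
	}
	return strings.Join(system, "\n\n"), collated
}

func main() {
	system, msgs := collate([]Message{
		{Role: "system", Content: "You are the Test Who Lived."},
		{Role: "user", Content: "You're a test, Harry!"},
		{Role: "user", Content: "A test?"},
		{Role: "assistant", Content: "A thumping good one."},
	})
	fmt.Println("system:", system)
	for _, m := range msgs {
		fmt.Printf("%s: %s\n", m.Role, m.Content)
	}
}
```

In this sketch the two consecutive user messages are merged into one entry, and the system text is returned separately — which is why, after patch 5, templates that iterate over `.Messages` can also see an out-of-order system message reflected in `.System` without the old `contents` template function.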