diff --git a/model/parsers/olmo3_think.go b/model/parsers/olmo3_think.go
new file mode 100644
index 00000000..eddb9ff9
--- /dev/null
+++ b/model/parsers/olmo3_think.go
@@ -0,0 +1,170 @@
+package parsers
+
+import (
+ "context"
+ "log/slog"
+ "strings"
+ "unicode"
+
+ "github.com/ollama/ollama/api"
+ "github.com/ollama/ollama/logutil"
+)
+
+type olmo3ThinkParserState int
+
+const (
+ olmo3CollectingThink olmo3ThinkParserState = iota
+ olmo3CollectingContent
+)
+
+const (
+ olmo3ThinkCloseTag = "</think>"
+)
+
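+// Olmo3ThinkParser incrementally splits model output into thinking and
+// content: everything before the first </think> tag is treated as thinking,
+// and everything after it as content. It does not support tool calls.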
+type Olmo3ThinkParser struct {
+ state olmo3ThinkParserState
+ buffer strings.Builder
+}
+
+func (p *Olmo3ThinkParser) HasToolSupport() bool {
+ return false
+}
+
+func (p *Olmo3ThinkParser) HasThinkingSupport() bool {
+ return true
+}
+
+func (p *Olmo3ThinkParser) setInitialState(lastMessage *api.Message) {
+ prefill := lastMessage != nil && lastMessage.Role == "assistant"
+
+ // If prefilling with content, skip to content collection
+ if prefill && lastMessage.Content != "" {
+ p.state = olmo3CollectingContent
+ return
+ }
+
+ // Model always thinks first (the <think> tag is injected in the prompt)
+ p.state = olmo3CollectingThink
+}
+
+func (p *Olmo3ThinkParser) Init(tools []api.Tool, lastMessage *api.Message, thinkValue *api.ThinkValue) []api.Tool {
+ p.setInitialState(lastMessage)
+ return tools
+}
+
+// Event types for internal parser communication
+type olmo3Event interface {
+ isOlmo3Event()
+}
+
+type olmo3EventThinkContent struct {
+ content string
+}
+
+type olmo3EventContent struct {
+ content string
+}
+
+func (olmo3EventThinkContent) isOlmo3Event() {}
+func (olmo3EventContent) isOlmo3Event() {}
+
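+// Add feeds a chunk of raw model output to the parser and returns the
+// unambiguous content and thinking extracted so far. Ambiguous trailing text
+// (a possible partial </think> tag or trailing whitespace) stays buffered
+// until a later chunk resolves it.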
+func (p *Olmo3ThinkParser) Add(s string, done bool) (content string, thinking string, calls []api.ToolCall, err error) {
+ p.buffer.WriteString(s)
+ events := p.parseEvents()
+
+ var contentSb strings.Builder
+ var thinkingSb strings.Builder
+ for _, event := range events {
+ switch event := event.(type) {
+ case olmo3EventThinkContent:
+ thinkingSb.WriteString(event.content)
+ case olmo3EventContent:
+ contentSb.WriteString(event.content)
+ }
+ }
+
+ return contentSb.String(), thinkingSb.String(), nil, nil
+}
+
+func (p *Olmo3ThinkParser) parseEvents() []olmo3Event {
+ var all []olmo3Event
+
+ keepLooping := true
+ for keepLooping {
+ var events []olmo3Event
+ events, keepLooping = p.eat()
+ if len(events) > 0 {
+ all = append(all, events...)
+ }
+ }
+
+ if len(all) > 0 {
+ slog.Log(context.TODO(), logutil.LevelTrace, "olmo3 events parsed", "events", all, "state", p.state, "buffer", p.buffer.String())
+ }
+
+ return all
+}
+
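+// eat consumes as much of the buffer as can be interpreted unambiguously and
+// returns the resulting events, plus whether it should be called again
+// (true only after the </think> tag is consumed and the state changes).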
+func (p *Olmo3ThinkParser) eat() ([]olmo3Event, bool) {
+ var events []olmo3Event
+ bufStr := p.buffer.String()
+ if bufStr == "" {
+ return events, false
+ }
+
+ switch p.state {
+ case olmo3CollectingThink:
+ if strings.Contains(bufStr, olmo3ThinkCloseTag) {
+ // Found complete tag
+ split := strings.SplitN(bufStr, olmo3ThinkCloseTag, 2)
+ thinking := strings.TrimRightFunc(split[0], unicode.IsSpace)
+ remaining := strings.TrimLeftFunc(split[1], unicode.IsSpace)
+
+ p.buffer.Reset()
+ p.buffer.WriteString(remaining)
+ p.state = olmo3CollectingContent
+
+ if len(thinking) > 0 {
+ events = append(events, olmo3EventThinkContent{content: thinking})
+ }
+ return events, true
+ } else if overlapLen := overlap(bufStr, olmo3ThinkCloseTag); overlapLen > 0 {
+ // Partial tag - withhold ambiguous content
+ beforePartialTag := bufStr[:len(bufStr)-overlapLen]
+ trailingLen := trailingWhitespaceLen(beforePartialTag)
+ ambiguousStart := len(beforePartialTag) - trailingLen
+
+ unambiguous := bufStr[:ambiguousStart]
+ ambiguous := bufStr[ambiguousStart:]
+ p.buffer.Reset()
+ p.buffer.WriteString(ambiguous)
+ if len(unambiguous) > 0 {
+ events = append(events, olmo3EventThinkContent{content: unambiguous})
+ }
+ return events, false
+ } else {
+ // Regular thinking content - withhold trailing whitespace in case </think> follows
+ whitespaceLen := trailingWhitespaceLen(bufStr)
+ ambiguousStart := len(bufStr) - whitespaceLen
+
+ unambiguous := bufStr[:ambiguousStart]
+ ambiguous := bufStr[ambiguousStart:]
+ p.buffer.Reset()
+ p.buffer.WriteString(ambiguous)
+ if len(unambiguous) > 0 {
+ events = append(events, olmo3EventThinkContent{content: unambiguous})
+ }
+ return events, false
+ }
+
+ case olmo3CollectingContent:
+ // Emit all content directly
+ p.buffer.Reset()
+ if len(bufStr) > 0 {
+ events = append(events, olmo3EventContent{content: bufStr})
+ }
+ return events, false
+ }
+
+ return events, false
+}
diff --git a/model/parsers/olmo3_think_test.go b/model/parsers/olmo3_think_test.go
new file mode 100644
index 00000000..9479cef8
--- /dev/null
+++ b/model/parsers/olmo3_think_test.go
@@ -0,0 +1,390 @@
+package parsers
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+
+ "github.com/ollama/ollama/api"
+)
+
+func TestOlmo3ThinkParser(t *testing.T) {
+ tests := []struct {
+ name string
+ input string
+ expectedContent string
+ expectedThinking string
+ lastMessage *api.Message
+ }{
+ {
+ name: "thinking_only",
+ input: "I need to think about this.</think>Here is my response.",
+ expectedContent: "Here is my response.",
+ expectedThinking: "I need to think about this.",
+ },
+ {
+ name: "thinking_with_newlines",
+ input: "Let me think step by step.\n\n1. First point\n2. Second point</think>The answer is 42.",
+ expectedContent: "The answer is 42.",
+ expectedThinking: "Let me think step by step.\n\n1. First point\n2. Second point",
+ },
+ {
+ name: "thinking_then_content",
+ input: "Deep thinking here.</think>Here is my detailed response with multiple sentences. I have thought carefully.",
+ expectedContent: "Here is my detailed response with multiple sentences. I have thought carefully.",
+ expectedThinking: "Deep thinking here.",
+ },
+ {
+ name: "empty_thinking",
+ input: "</think>Just content here.",
+ expectedContent: "Just content here.",
+ expectedThinking: "",
+ },
+ {
+ name: "prefill_skips_thinking",
+ input: "Continuing from previous content.",
+ expectedContent: "Continuing from previous content.",
+ lastMessage: &api.Message{
+ Role: "assistant",
+ Content: "Previous content",
+ },
+ },
+ {
+ name: "thinking_with_whitespace",
+ input: " Some thinking </think>Content here ",
+ expectedContent: "Content here ",
+ expectedThinking: " Some thinking",
+ },
+ {
+ name: "real_model_output_with_newlines",
+ input: "Yes, that should work. Let me go with that response.\n\n</think>\n\nHi! I'm all set and ready to assist. How about you? How are you today? 😊",
+ expectedThinking: "Yes, that should work. Let me go with that response.",
+ expectedContent: "Hi! I'm all set and ready to assist. How about you? How are you today? 😊",
+ },
+ // Edge cases
+ {
+ name: "nested_think_tags_in_thinking",
+ input: "I'm thinking <think>nested</think> more thinking</think>Final content.",
+ expectedContent: "more thinking</think>Final content.",
+ expectedThinking: "I'm thinking <think>nested",
+ },
+ {
+ name: "multiple_think_close_tags",
+ input: "First thinking</think>Content</think>More content.",
+ expectedContent: "Content</think>More content.",
+ expectedThinking: "First thinking",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ parser.Init(nil, tt.lastMessage, nil)
+
+ content, thinking, toolCalls, err := parser.Add(tt.input, true)
+ if err != nil {
+ t.Fatalf("Add() error = %v", err)
+ }
+
+ if diff := cmp.Diff(tt.expectedContent, content); diff != "" {
+ t.Errorf("content mismatch (-want +got):\n%s", diff)
+ }
+
+ if diff := cmp.Diff(tt.expectedThinking, thinking); diff != "" {
+ t.Errorf("thinking mismatch (-want +got):\n%s", diff)
+ }
+
+ // No tool calls expected
+ if len(toolCalls) > 0 {
+ t.Errorf("expected no tool calls, got %d", len(toolCalls))
+ }
+ })
+ }
+}
+
+func TestOlmo3ThinkParser_Streaming(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ parser.Init(nil, nil, nil)
+
+ chunks := []string{
+ "I am ",
+ "thinking about",
+ " this.</think>Here ",
+ "is the response.",
+ }
+
+ var finalContent, finalThinking strings.Builder
+
+ for i, chunk := range chunks {
+ done := i == len(chunks)-1
+ content, thinking, _, err := parser.Add(chunk, done)
+ if err != nil {
+ t.Fatalf("Add() error on chunk %d: %v", i, err)
+ }
+
+ finalContent.WriteString(content)
+ finalThinking.WriteString(thinking)
+ }
+
+ expectedContent := "Here is the response."
+ expectedThinking := "I am thinking about this."
+
+ if finalContent.String() != expectedContent {
+ t.Errorf("expected content %q, got %q", expectedContent, finalContent.String())
+ }
+
+ if finalThinking.String() != expectedThinking {
+ t.Errorf("expected thinking %q, got %q", expectedThinking, finalThinking.String())
+ }
+}
+
+func TestOlmo3ThinkParser_StreamingEdgeCases(t *testing.T) {
+ tests := []struct {
+ name string
+ chunks []string
+ expectedContent string
+ expectedThinking string
+ }{
+ {
+ name: "thinking_tag_split_across_chunks",
+ chunks: []string{
+ "This is thinking content",
+ "</think>",
+ "This is content.",
+ },
+ expectedContent: "This is content.",
+ expectedThinking: "This is thinking content",
+ },
+ {
+ name: "thinking_tag_split_mid_token",
+ chunks: []string{
+ "Thinking?</",
+ "think>",
+ "Content here.",
+ },
+ expectedContent: "Content here.",
+ expectedThinking: "Thinking?",
+ },
+ {
+ name: "thinking_tag_split_at_angle_bracket",
+ chunks: []string{
+ "Thinking<",
+ "/think>",
+ "Content.",
+ },
+ expectedContent: "Content.",
+ expectedThinking: "Thinking",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ parser.Init(nil, nil, nil)
+
+ var finalContent, finalThinking strings.Builder
+
+ for i, chunk := range tt.chunks {
+ done := i == len(tt.chunks)-1
+ content, thinking, _, err := parser.Add(chunk, done)
+ if err != nil {
+ t.Fatalf("Add() error on chunk %d: %v", i, err)
+ }
+
+ finalContent.WriteString(content)
+ finalThinking.WriteString(thinking)
+ }
+
+ if finalContent.String() != tt.expectedContent {
+ t.Errorf("expected content %q, got %q", tt.expectedContent, finalContent.String())
+ }
+
+ if finalThinking.String() != tt.expectedThinking {
+ t.Errorf("expected thinking %q, got %q", tt.expectedThinking, finalThinking.String())
+ }
+ })
+ }
+}
+
+// TestOlmo3ThinkParser_ThinkBoundary tests streaming thinking content
+// where thinking chunks come in succession before the </think> tag
+func TestOlmo3ThinkParser_ThinkBoundary(t *testing.T) {
+ tests := []struct {
+ name string
+ chunks []string
+ expectedThinking string
+ expectedContent string
+ }{
+ {
+ name: "multiple_thinking_chunks",
+ chunks: []string{
+ "First part of thinking. ",
+ "Second part of thinking. ",
+ "Third part.",
+ "</think>Content here.",
+ },
+ expectedThinking: "First part of thinking. Second part of thinking. Third part.",
+ expectedContent: "Content here.",
+ },
+ {
+ name: "thinking_chunks_with_newlines",
+ chunks: []string{
+ "Step 1: Analyze the problem.\n",
+ "Step 2: Consider options.\n",
+ "Step 3: Make decision.",
+ "</think>Here is my answer.",
+ },
+ expectedThinking: "Step 1: Analyze the problem.\nStep 2: Consider options.\nStep 3: Make decision.",
+ expectedContent: "Here is my answer.",
+ },
+ {
+ name: "single_char_thinking_chunks",
+ chunks: []string{
+ "H", "e", "l", "l", "o", "</think>", "World",
+ },
+ expectedThinking: "Hello",
+ expectedContent: "World",
+ },
+ {
+ name: "thinking_with_special_chars",
+ chunks: []string{
+ "Let me think... ",
+ "Option A: $100 ",
+ "Option B: €200",
+ "</think>I recommend Option A.",
+ },
+ expectedThinking: "Let me think... Option A: $100 Option B: €200",
+ expectedContent: "I recommend Option A.",
+ },
+ {
+ name: "long_thinking_multiple_chunks",
+ chunks: []string{
+ "This is a very long thinking process. ",
+ "I need to consider many factors. ",
+ "First, let me look at the data. ",
+ "The numbers show interesting patterns. ",
+ "Based on my analysis, ",
+ "I can conclude that...",
+ "</think>The answer is 42.",
+ },
+ expectedThinking: "This is a very long thinking process. I need to consider many factors. First, let me look at the data. The numbers show interesting patterns. Based on my analysis, I can conclude that...",
+ expectedContent: "The answer is 42.",
+ },
+ {
+ name: "thinking_ends_exactly_at_chunk_boundary",
+ chunks: []string{
+ "Thinking content",
+ "</think>",
+ "Content",
+ },
+ expectedThinking: "Thinking content",
+ expectedContent: "Content",
+ },
+ {
+ name: "empty_chunks_between_thinking",
+ chunks: []string{
+ "Start thinking",
+ "",
+ " middle ",
+ "",
+ "end",
+ "</think>Content",
+ },
+ expectedThinking: "Start thinking middle end",
+ expectedContent: "Content",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ parser.Init(nil, nil, nil)
+
+ var finalContent, finalThinking strings.Builder
+
+ for i, chunk := range tt.chunks {
+ done := i == len(tt.chunks)-1
+ content, thinking, _, err := parser.Add(chunk, done)
+ if err != nil {
+ t.Fatalf("Add() error on chunk %d: %v", i, err)
+ }
+
+ finalContent.WriteString(content)
+ finalThinking.WriteString(thinking)
+ }
+
+ if finalThinking.String() != tt.expectedThinking {
+ t.Errorf("thinking mismatch:\nexpected: %q\ngot: %q", tt.expectedThinking, finalThinking.String())
+ }
+
+ if finalContent.String() != tt.expectedContent {
+ t.Errorf("content mismatch:\nexpected: %q\ngot: %q", tt.expectedContent, finalContent.String())
+ }
+ })
+ }
+}
+
+// TestOlmo3ThinkParser_StateTransitions tests that state transitions work correctly
+func TestOlmo3ThinkParser_StateTransitions(t *testing.T) {
+ t.Run("thinking_to_content", func(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ parser.Init(nil, nil, nil)
+
+ if parser.state != olmo3CollectingThink {
+ t.Errorf("initial state should be olmo3CollectingThink, got %v", parser.state)
+ }
+
+ parser.Add("thinking</think>content", true)
+
+ if parser.state != olmo3CollectingContent {
+ t.Errorf("state after should be olmo3CollectingContent, got %v", parser.state)
+ }
+ })
+}
+
+func TestOlmo3ThinkParser_HasToolSupport(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ if parser.HasToolSupport() {
+ t.Error("Olmo3ThinkParser should NOT support tools")
+ }
+}
+
+func TestOlmo3ThinkParser_HasThinkingSupport(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+ if !parser.HasThinkingSupport() {
+ t.Error("Olmo3ThinkParser should support thinking")
+ }
+}
+
+func TestOlmo3ThinkParser_Init(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+
+ tools := []api.Tool{
+ {Function: api.ToolFunction{Name: "test_tool"}},
+ }
+
+ lastMessage := &api.Message{Role: "assistant", Content: "previous"}
+
+ returnedTools := parser.Init(tools, lastMessage, nil)
+
+ if len(returnedTools) != len(tools) {
+ t.Errorf("expected %d tools returned, got %d", len(tools), len(returnedTools))
+ }
+
+ // Should be in content collection mode due to prefill
+ if parser.state != olmo3CollectingContent {
+ t.Errorf("expected state olmo3CollectingContent, got %v", parser.state)
+ }
+}
+
+func TestOlmo3ThinkParser_InitWithoutPrefill(t *testing.T) {
+ parser := &Olmo3ThinkParser{}
+
+ parser.Init(nil, nil, nil)
+
+ // Should be in thinking collection mode (model always thinks first)
+ if parser.state != olmo3CollectingThink {
+ t.Errorf("expected state olmo3CollectingThink, got %v", parser.state)
+ }
+}
diff --git a/model/parsers/parsers.go b/model/parsers/parsers.go
index 24ab07fb..4e15dc93 100644
--- a/model/parsers/parsers.go
+++ b/model/parsers/parsers.go
@@ -58,6 +58,8 @@ func ParserForName(name string) Parser {
return harmony.NewHarmonyMessageHandler()
case "cogito":
return &CogitoParser{}
+ case "olmo3-think":
+ return &Olmo3ThinkParser{}
default:
return nil
}
diff --git a/model/renderers/json.go b/model/renderers/json.go
new file mode 100644
index 00000000..76d46a90
--- /dev/null
+++ b/model/renderers/json.go
@@ -0,0 +1,45 @@
+package renderers
+
+import "encoding/json"
+
+// marshalWithSpaces marshals v to JSON and adds a space after each ':' and ','
+// that appears outside of string values, matching the spacing of Python's
+// default json.dumps output used by the reference chat templates.
+func marshalWithSpaces(v any) ([]byte, error) {
+ b, err := json.Marshal(v)
+ if err != nil {
+ return nil, err
+ }
+
+ out := make([]byte, 0, len(b)+len(b)/8)
+ inStr, esc := false, false
+ for _, c := range b {
+ if inStr {
+ out = append(out, c)
+ if esc {
+ esc = false
+ continue
+ }
+ if c == '\\' {
+ esc = true
+ continue
+ }
+ if c == '"' {
+ inStr = false
+ }
+ continue
+ }
+ switch c {
+ case '"':
+ inStr = true
+ out = append(out, c)
+ case ':':
+ out = append(out, ':', ' ')
+ case ',':
+ out = append(out, ',', ' ')
+ default:
+ out = append(out, c)
+ }
+ }
+ return out, nil
+}
diff --git a/model/renderers/qwen3vl_test.go b/model/renderers/json_test.go
similarity index 99%
rename from model/renderers/qwen3vl_test.go
rename to model/renderers/json_test.go
index 6810a7c9..c1ed05b9 100644
--- a/model/renderers/qwen3vl_test.go
+++ b/model/renderers/json_test.go
@@ -6,7 +6,6 @@ import (
"github.com/google/go-cmp/cmp"
)
-// TODO(drifkin): this will be moved to utils in the near future and used by other renderers as well
func TestMarshalWithSpaces(t *testing.T) {
tests := []struct {
name string
diff --git a/model/renderers/olmo3_think.go b/model/renderers/olmo3_think.go
new file mode 100644
index 00000000..b327d044
--- /dev/null
+++ b/model/renderers/olmo3_think.go
@@ -0,0 +1,130 @@
+package renderers
+
+import (
+ "encoding/json"
+ "strings"
+
+ "github.com/ollama/ollama/api"
+)
+
+const (
+ olmo3ThinkDefaultSystemMessage = "You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai."
+ olmo3ThinkNoFunctionsMessage = " You do not currently have access to any functions."
+)
+
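+// Olmo3ThinkRenderer renders messages into the OLMo 3 Think chat format:
+// ChatML-style <|im_start|>/<|im_end|> turns, function definitions and calls
+// carried inline in the message text, tool results under the "environment"
+// role, and a <think> tag appended to the generation prompt.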
+type Olmo3ThinkRenderer struct{}
+
+type olmo3ThinkToolCall struct {
+ ID string `json:"id,omitempty"`
+ Type string `json:"type,omitempty"`
+ Function olmo3ThinkToolCallFunc `json:"function"`
+}
+
+type olmo3ThinkToolCallFunc struct {
+ Name string `json:"name"`
+ Arguments string `json:"arguments"`
+}
+
+func (r *Olmo3ThinkRenderer) Render(messages []api.Message, tools []api.Tool, _ *api.ThinkValue) (string, error) {
+ var sb strings.Builder
+
+ var systemMessage *api.Message
+ filteredMessages := make([]api.Message, 0, len(messages))
+ for i, message := range messages {
+ if message.Role == "system" {
+ if systemMessage == nil {
+ systemMessage = &messages[i]
+ }
+ continue
+ }
+ filteredMessages = append(filteredMessages, message)
+ }
+
+ systemContent := olmo3ThinkDefaultSystemMessage
+ if systemMessage != nil {
+ systemContent = systemMessage.Content
+ }
+
+ sb.WriteString("<|im_start|>system\n")
+ sb.WriteString(systemContent)
+
+ if len(tools) > 0 {
+ functionsJSON, err := marshalWithSpaces(tools)
+ if err != nil {
+ return "", err
+ }
+ sb.WriteString(" <functions>")
+ sb.WriteString(string(functionsJSON))
+ sb.WriteString("</functions>")
+ } else {
+ sb.WriteString(olmo3ThinkNoFunctionsMessage)
+ sb.WriteString(" <functions></functions>")
+ }
+ sb.WriteString("<|im_end|>\n")
+
+ for i, message := range filteredMessages {
+ lastMessage := i == len(filteredMessages)-1
+
+ switch message.Role {
+ case "user":
+ sb.WriteString("<|im_start|>user\n")
+ sb.WriteString(message.Content)
+ sb.WriteString("<|im_end|>\n")
+
+ case "assistant":
+ sb.WriteString("<|im_start|>assistant\n")
+
+ if message.Content != "" {
+ sb.WriteString(message.Content)
+ }
+
+ if len(message.ToolCalls) > 0 {
+ toolCalls := make([]olmo3ThinkToolCall, len(message.ToolCalls))
+ for j, tc := range message.ToolCalls {
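+ // Arguments are serialized as a JSON-encoded string nested inside the call object.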
+ argsJSON, err := json.Marshal(tc.Function.Arguments)
+ if err != nil {
+ return "", err
+ }
+ toolCalls[j] = olmo3ThinkToolCall{
+ ID: tc.ID,
+ Type: "function",
+ Function: olmo3ThinkToolCallFunc{
+ Name: tc.Function.Name,
+ Arguments: string(argsJSON),
+ },
+ }
+ }
+ toolCallsJSON, err := marshalWithSpaces(toolCalls)
+ if err != nil {
+ return "", err
+ }
+ sb.WriteString("<function_calls>")
+ sb.WriteString(string(toolCallsJSON))
+ sb.WriteString("</function_calls>")
+ }
+
+ if !lastMessage {
+ sb.WriteString("<|im_end|>\n")
+ }
+
+ case "tool":
+ sb.WriteString("<|im_start|>environment\n")
+ sb.WriteString(message.Content)
+ sb.WriteString("<|im_end|>\n")
+ }
+ }
+
+ needsGenerationPrompt := true
+ if len(filteredMessages) > 0 {
+ lastMsg := filteredMessages[len(filteredMessages)-1]
+ if lastMsg.Role == "assistant" && len(lastMsg.ToolCalls) == 0 && lastMsg.Content != "" {
+ needsGenerationPrompt = false
+ }
+ }
+
+ if needsGenerationPrompt {
+ sb.WriteString("<|im_start|>assistant\n<think>")
+ }
+
+ return sb.String(), nil
+}
diff --git a/model/renderers/olmo3_think_test.go b/model/renderers/olmo3_think_test.go
new file mode 100644
index 00000000..21e333e3
--- /dev/null
+++ b/model/renderers/olmo3_think_test.go
@@ -0,0 +1,224 @@
+package renderers
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+
+ "github.com/ollama/ollama/api"
+)
+
+func TestOlmo3ThinkRenderer(t *testing.T) {
+ tests := []struct {
+ name string
+ msgs []api.Message
+ tools []api.Tool
+ expected string
+ }{
+ {
+ name: "basic without system - adds default system",
+ msgs: []api.Message{
+ {Role: "user", Content: "Hello!"},
+ },
+ expected: "<|im_start|>system\n" +
+ "You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Hello!<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "with system message no tools",
+ msgs: []api.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "Hello!"},
+ },
+ expected: "<|im_start|>system\n" +
+ "You are a helpful assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Hello!<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "with system message and tools",
+ msgs: []api.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "What is the weather?"},
+ },
+ tools: []api.Tool{
+ {
+ Type: "function",
+ Function: api.ToolFunction{
+ Name: "get_weather",
+ Description: "Get the current weather",
+ Parameters: api.ToolFunctionParameters{
+ Type: "object",
+ Required: []string{"location"},
+ Properties: map[string]api.ToolProperty{
+ "location": {Type: api.PropertyType{"string"}, Description: "The city"},
+ },
+ },
+ },
+ },
+ },
+ expected: "<|im_start|>system\n" +
+ `You are a helpful assistant. <functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
+ "<|im_start|>user\n" +
+ "What is the weather?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "assistant with tool calls",
+ msgs: []api.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "What is the weather in SF?"},
+ {
+ Role: "assistant",
+ Content: "Let me check the weather.",
+ ToolCalls: []api.ToolCall{
+ {
+ ID: "call_1",
+ Function: api.ToolCallFunction{
+ Name: "get_weather",
+ Arguments: map[string]any{
+ "location": "San Francisco",
+ },
+ },
+ },
+ },
+ },
+ {Role: "tool", Content: `{"temperature": 68}`, ToolName: "get_weather"},
+ },
+ tools: []api.Tool{
+ {
+ Type: "function",
+ Function: api.ToolFunction{
+ Name: "get_weather",
+ Description: "Get the current weather",
+ Parameters: api.ToolFunctionParameters{
+ Type: "object",
+ Required: []string{"location"},
+ Properties: map[string]api.ToolProperty{
+ "location": {Type: api.PropertyType{"string"}, Description: "The city"},
+ },
+ },
+ },
+ },
+ },
+ expected: "<|im_start|>system\n" +
+ `You are a helpful assistant. <functions>[{"type": "function", "function": {"name": "get_weather", "description": "Get the current weather", "parameters": {"type": "object", "required": ["location"], "properties": {"location": {"type": "string", "description": "The city"}}}}}]</functions><|im_end|>` + "\n" +
+ "<|im_start|>user\n" +
+ "What is the weather in SF?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ `Let me check the weather.<function_calls>[{"id": "call_1", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"San Francisco\"}"}}]</function_calls><|im_end|>` + "\n" +
+ "<|im_start|>environment\n" +
+ `{"temperature": 68}<|im_end|>` + "\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "multi-turn conversation",
+ msgs: []api.Message{
+ {Role: "system", Content: "You are a helpful assistant."},
+ {Role: "user", Content: "Hello"},
+ {Role: "assistant", Content: "Hi there!"},
+ {Role: "user", Content: "How are you?"},
+ },
+ expected: "<|im_start|>system\n" +
+ "You are a helpful assistant. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Hello<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "Hi there!<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "How are you?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "parallel tool calls",
+ msgs: []api.Message{
+ {Role: "user", Content: "Get weather in SF and NYC"},
+ {
+ Role: "assistant",
+ ToolCalls: []api.ToolCall{
+ {
+ ID: "call_1",
+ Function: api.ToolCallFunction{
+ Name: "get_weather",
+ Arguments: map[string]any{"location": "San Francisco"},
+ },
+ },
+ {
+ ID: "call_2",
+ Function: api.ToolCallFunction{
+ Name: "get_weather",
+ Arguments: map[string]any{"location": "New York"},
+ },
+ },
+ },
+ },
+ {Role: "tool", Content: `{"temperature": 68}`, ToolName: "get_weather"},
+ {Role: "tool", Content: `{"temperature": 55}`, ToolName: "get_weather"},
+ },
+ tools: []api.Tool{
+ {
+ Type: "function",
+ Function: api.ToolFunction{
+ Name: "get_weather",
+ Parameters: api.ToolFunctionParameters{
+ Type: "object",
+ Properties: map[string]api.ToolProperty{
+ "location": {Type: api.PropertyType{"string"}},
+ },
+ },
+ },
+ },
+ },
+ expected: "<|im_start|>system\n" +
+ `You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. <functions>[{"type": "function", "function": {"name": "get_weather", "parameters": {"type": "object", "properties": {"location": {"type": "string"}}}}}]</functions><|im_end|>` + "\n" +
+ "<|im_start|>user\n" +
+ "Get weather in SF and NYC<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ `<function_calls>[{"id": "call_1", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"San Francisco\"}"}}, {"id": "call_2", "type": "function", "function": {"name": "get_weather", "arguments": "{\"location\":\"New York\"}"}}]</function_calls><|im_end|>` + "\n" +
+ "<|im_start|>environment\n" +
+ `{"temperature": 68}<|im_end|>` + "\n" +
+ "<|im_start|>environment\n" +
+ `{"temperature": 55}<|im_end|>` + "\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ {
+ name: "assistant message only content no tool calls",
+ msgs: []api.Message{
+ {Role: "user", Content: "Tell me a joke"},
+ {Role: "assistant", Content: "Why did the chicken cross the road?"},
+ {Role: "user", Content: "I don't know, why?"},
+ },
+ expected: "<|im_start|>system\n" +
+ "You are OLMo, a helpful function-calling AI assistant built by Ai2. Your date cutoff is November 2024, and your model weights are available at https://huggingface.co/allenai. You do not currently have access to any functions. <functions></functions><|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "Tell me a joke<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "Why did the chicken cross the road?<|im_end|>\n" +
+ "<|im_start|>user\n" +
+ "I don't know, why?<|im_end|>\n" +
+ "<|im_start|>assistant\n" +
+ "<think>",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ rendered, err := (&Olmo3ThinkRenderer{}).Render(tt.msgs, tt.tools, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if diff := cmp.Diff(rendered, tt.expected); diff != "" {
+ t.Errorf("mismatch (-got +want):\n%s", diff)
+ }
+ })
+ }
+}
diff --git a/model/renderers/qwen3vl.go b/model/renderers/qwen3vl.go
index 8ea4abbb..50879d29 100644
--- a/model/renderers/qwen3vl.go
+++ b/model/renderers/qwen3vl.go
@@ -1,51 +1,11 @@
package renderers
import (
- "encoding/json"
"strings"
"github.com/ollama/ollama/api"
)
-func marshalWithSpaces(v any) ([]byte, error) {
- b, err := json.Marshal(v)
- if err != nil {
- return nil, err
- }
-
- out := make([]byte, 0, len(b)+len(b)/8)
- inStr, esc := false, false
- for _, c := range b {
- if inStr {
- out = append(out, c)
- if esc {
- esc = false
- continue
- }
- if c == '\\' {
- esc = true
- continue
- }
- if c == '"' {
- inStr = false
- }
- continue
- }
- switch c {
- case '"':
- inStr = true
- out = append(out, c)
- case ':':
- out = append(out, ':', ' ')
- case ',':
- out = append(out, ',', ' ')
- default:
- out = append(out, c)
- }
- }
- return out, nil
-}
-
type Qwen3VLRenderer struct {
isThinking bool
diff --git a/model/renderers/renderer.go b/model/renderers/renderer.go
index 84df1b8a..098b16a8 100644
--- a/model/renderers/renderer.go
+++ b/model/renderers/renderer.go
@@ -59,6 +59,9 @@ func rendererForName(name string) Renderer {
case "cogito":
renderer := &CogitoRenderer{isThinking: true}
return renderer
+ case "olmo3-think":
+ renderer := &Olmo3ThinkRenderer{}
+ return renderer
default:
return nil
}