Revert "add truncate and shift parameters (#12519)" (#12545)

This reverts commit 6a62b894c7.
Author: Jeffrey Morgan (committed by GitHub)
Date: 2025-10-08 17:57:57 -07:00
Parent: 6a62b894c7
Commit: 7d965258ce
8 changed files with 67 additions and 272 deletions
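For context, the net effect of this revert on the chat prompt test is that the case table drops its truncate field and chatPrompt is again called without a trailing truncate argument. A minimal sketch of the post-revert shape, assuming the test file's own identifiers (Model, visionModel, mockRunner, chatPrompt, and the api types) and trimmed to a single illustrative case inside the existing TestChatPrompt; it is not the full test:

	cases := []struct {
		name  string
		model Model
		limit int
		msgs  []api.Message
	}{
		{
			name:  "messages",
			model: visionModel,
			limit: 64,
			msgs:  []api.Message{{Role: "user", Content: "You're a test, Harry!"}},
		},
	}
	for _, tt := range cases {
		model := tt.model
		opts := api.Options{Runner: api.Runner{NumCtx: tt.limit}}
		// The explicit truncate flag is gone; truncation can no longer be toggled per call.
		prompt, images, err := chatPrompt(t.Context(), &model, mockRunner{}.Tokenize, &opts, tt.msgs, nil, &api.ThinkValue{Value: false})
		_, _, _ = prompt, images, err
	}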


@@ -27,18 +27,16 @@ func TestChatPrompt(t *testing.T) {
 	visionModel := Model{Template: tmpl, ProjectorPaths: []string{"vision"}}
 
 	cases := []struct {
-		name     string
-		model    Model
-		limit    int
-		truncate bool
-		msgs     []api.Message
+		name  string
+		model Model
+		limit int
+		msgs  []api.Message
 		expect
 	}{
 		{
-			name:     "messages",
-			model:    visionModel,
-			limit:    64,
-			truncate: true,
+			name:  "messages",
+			model: visionModel,
+			limit: 64,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -49,10 +47,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "truncate messages",
-			model:    visionModel,
-			limit:    1,
-			truncate: true,
+			name:  "truncate messages",
+			model: visionModel,
+			limit: 1,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -63,10 +60,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "truncate messages with image",
-			model:    visionModel,
-			limit:    64,
-			truncate: true,
+			name:  "truncate messages with image",
+			model: visionModel,
+			limit: 64,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -80,10 +76,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "truncate messages with images",
-			model:    visionModel,
-			limit:    64,
-			truncate: true,
+			name:  "truncate messages with images",
+			model: visionModel,
+			limit: 64,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!", Images: []api.ImageData{[]byte("something")}},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -97,10 +92,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "messages with images",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "messages with images",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!", Images: []api.ImageData{[]byte("something")}},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -115,10 +109,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "message with image tag",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "message with image tag",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry! [img]", Images: []api.ImageData{[]byte("something")}},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -133,10 +126,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "messages with interleaved images",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "messages with interleaved images",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "user", Images: []api.ImageData{[]byte("something")}},
@@ -153,10 +145,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "truncate message with interleaved images",
-			model:    visionModel,
-			limit:    1024,
-			truncate: true,
+			name:  "truncate message with interleaved images",
+			model: visionModel,
+			limit: 1024,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "user", Images: []api.ImageData{[]byte("something")}},
@@ -172,10 +163,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "message with system prompt",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "message with system prompt",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "system", Content: "You are the Test Who Lived."},
 				{Role: "user", Content: "You're a test, Harry!"},
@@ -187,10 +177,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "out of order system",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "out of order system",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "user", Content: "You're a test, Harry!"},
 				{Role: "assistant", Content: "I-I'm a what?"},
@@ -202,10 +191,9 @@ func TestChatPrompt(t *testing.T) {
 			},
 		},
 		{
-			name:     "multiple images same prompt",
-			model:    visionModel,
-			limit:    2048,
-			truncate: true,
+			name:  "multiple images same prompt",
+			model: visionModel,
+			limit: 2048,
 			msgs: []api.Message{
 				{Role: "user", Content: "Compare these two pictures of hotdogs", Images: []api.ImageData{[]byte("one hotdog"), []byte("two hotdogs")}},
 			},
@@ -214,20 +202,6 @@ func TestChatPrompt(t *testing.T) {
 				images: [][]byte{[]byte("one hotdog"), []byte("two hotdogs")},
 			},
 		},
-		{
-			name:     "no truncate with limit exceeded",
-			model:    visionModel,
-			limit:    10,
-			truncate: false,
-			msgs: []api.Message{
-				{Role: "user", Content: "You're a test, Harry!"},
-				{Role: "assistant", Content: "I-I'm a what?"},
-				{Role: "user", Content: "A test. And a thumping good one at that, I'd wager."},
-			},
-			expect: expect{
-				prompt: "You're a test, Harry! I-I'm a what? A test. And a thumping good one at that, I'd wager. ",
-			},
-		},
 	}
 
 	for _, tt := range cases {
@@ -235,7 +209,7 @@ func TestChatPrompt(t *testing.T) {
 			model := tt.model
 			opts := api.Options{Runner: api.Runner{NumCtx: tt.limit}}
 			think := false
-			prompt, images, err := chatPrompt(t.Context(), &model, mockRunner{}.Tokenize, &opts, tt.msgs, nil, &api.ThinkValue{Value: think}, tt.truncate)
+			prompt, images, err := chatPrompt(t.Context(), &model, mockRunner{}.Tokenize, &opts, tt.msgs, nil, &api.ThinkValue{Value: think})
			if tt.error == nil && err != nil {
 				t.Fatal(err)
 			} else if tt.error != nil && err != tt.error {