Fix embed integration test assumption
The token eval count has changed with recent llama.cpp bumps (0.3.5+)
dhiltgen committed Aug 22, 2024
1 parent c8e03d2 commit 80dc0d2
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions integration/embed_test.go
@@ -70,8 +70,8 @@ func TestAllMiniLMEmbed(t *testing.T) {
 		t.Fatalf("expected 0.010071031, got %.8f", res.Embeddings[0][0])
 	}
 
-	if res.PromptEvalCount != 8 {
-		t.Fatalf("expected 8 prompt tokens, got %d", res.PromptEvalCount)
+	if res.PromptEvalCount != 6 {
+		t.Fatalf("expected 6 prompt tokens, got %d", res.PromptEvalCount)
 	}
 }

@@ -102,8 +102,8 @@ func TestAllMiniLMBatchEmbed(t *testing.T) {
 		t.Fatalf("expected 0.010071031 and -0.009802706, got %.8f and %.8f", res.Embeddings[0][0], res.Embeddings[1][0])
 	}
 
-	if res.PromptEvalCount != 16 {
-		t.Fatalf("expected 16 prompt tokens, got %d", res.PromptEvalCount)
+	if res.PromptEvalCount != 12 {
+		t.Fatalf("expected 12 prompt tokens, got %d", res.PromptEvalCount)
 	}
 }

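For context, below is a minimal sketch of the kind of check these hunks adjust, assuming the ollama Go api client (api.ClientFromEnvironment, Client.Embed) and a locally running server; the helper name, model, prompt, and expected count are illustrative placeholders rather than the repository's exact test harness.

```go
package integration

import (
	"context"
	"testing"

	"github.com/ollama/ollama/api"
)

// checkEmbedPromptTokens is a hypothetical helper: it embeds a single input
// and asserts the prompt token count reported by the server. The expected
// value depends on the tokenizer shipped with the bundled llama.cpp build,
// which is exactly the assumption this commit updates.
func checkEmbedPromptTokens(t *testing.T, want int) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		t.Fatal(err)
	}

	res, err := client.Embed(context.Background(), &api.EmbedRequest{
		Model: "all-minilm",           // model assumed from the test names above
		Input: "why is the sky blue?", // placeholder prompt
	})
	if err != nil {
		t.Fatal(err)
	}

	if res.PromptEvalCount != want {
		t.Fatalf("expected %d prompt tokens, got %d", want, res.PromptEvalCount)
	}
}
```

TestAllMiniLMBatchEmbed makes the same assertion over a multi-string Input, which is presumably why its expected count (12) is exactly double the single-prompt count (6).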