mirror of https://github.com/likelovewant/ollama-for-amd.git (synced 2025-12-21 22:33:56 +00:00)
ml: add more rope options (#10775)
ml/backend.go
@@ -115,21 +115,6 @@ type Context interface {
 	Layer(int) Context
 }
 
-// RopeOptions contains optional parameters for RoPE function
-type RopeOptions struct {
-	OriginalContextLen uint32
-}
-
-// RopeOption defines a function that modifies RopeOpts
-type RopeOption func(*RopeOptions)
-
-// WithContextLen sets a custom context length
-func WithContextLen(len uint32) RopeOption {
-	return func(opts *RopeOptions) {
-		opts.OriginalContextLen = len
-	}
-}
-
 type Tensor interface {
 	Dim(n int) int
 	Stride(n int) int
@@ -155,7 +140,6 @@ type Tensor interface {
 	AvgPool2D(ctx Context, k, s int, p float32) Tensor
 	Conv2D(ctx Context, weight Tensor, s0, s1, p0, p1, d0, d1 int) Tensor
 
-	RoPE(ctx Context, positionIDs, ropeFactors Tensor, dim, ropeType uint32, base, scale float32, options ...RopeOption) Tensor
 	IM2Col(ctx Context, weight Tensor, s0, s1, p0, p1, d0, d1 int) Tensor
 
 	Sin(ctx Context) Tensor
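These two hunks drop RoPE and its option types from the generic ml interfaces; callers now reach RoPE through the new ml/nn/fast and ml/nn/rope packages added later in this commit. A hedged call-site migration sketch (the names q, positions, and ropeFactors are hypothetical, not from this commit):

	// Before: RoPE was a method on ml.Tensor, taking the factors tensor and
	// the rope type (2 = NeoX) positionally.
	q = q.RoPE(ctx, positions, ropeFactors, 128, 2, 10000.0, 1.0, ml.WithContextLen(4096))

	// After: callers go through the fast.RoPE helper, with everything optional
	// expressed as rope package options.
	q = fast.RoPE(ctx, q, positions, 128, 10000.0, 1.0,
		rope.WithTypeNeoX(),
		rope.WithFactors(ropeFactors),
		rope.WithOriginalContextLength(4096),
	)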
ml/backend/ggml/ggml.go
@@ -30,6 +30,7 @@ import (
 	"github.com/ollama/ollama/logutil"
 	"github.com/ollama/ollama/ml"
 	ggml "github.com/ollama/ollama/ml/backend/ggml/ggml/src"
+	"github.com/ollama/ollama/ml/nn/rope"
 	"golang.org/x/sync/errgroup"
 )
 
@@ -1074,28 +1075,15 @@ func (t *Tensor) View(ctx ml.Context, offset int, shape ...int) ml.Tensor {
 	}
 }
 
-const (
-	ropeTypeNorm   C.int = 0
-	ropeTypeNeox   C.int = 2
-	ropeTypeMrope  C.int = 8
-	ropeTypeVision C.int = 24
-)
-
-func (t *Tensor) RoPE(ctx ml.Context, positionIDs, ropeFactors ml.Tensor, ropeDim, ropeType uint32, ropeBase, ropeScale float32, options ...ml.RopeOption) ml.Tensor {
+func (t *Tensor) RoPE(ctx ml.Context, positions ml.Tensor, ropeDim int, ropeBase, ropeScale float32, options ...func(*rope.Options)) ml.Tensor {
 	// Default options
-	opts := &ml.RopeOptions{
-		OriginalContextLen: 131072,
-	}
+	opts := &rope.Options{OriginalContextLength: 131072, Factors: &Tensor{}}
 
 	// Apply any provided options
 	for _, option := range options {
 		option(opts)
 	}
 
-	if ropeFactors == nil {
-		ropeFactors = &Tensor{b: t.b}
-	}
-
 	dequant := t.t
 	if C.ggml_is_quantized(t.t._type) {
 		dequant = C.ggml_cast(ctx.(*Context).ctx, t.t, C.GGML_TYPE_F32)
@@ -1106,11 +1094,11 @@ func (t *Tensor) RoPE(ctx ml.Context, positionIDs, ropeFactors ml.Tensor, ropeDi
 		t: C.ggml_rope_ext(
 			ctx.(*Context).ctx,
 			dequant,
-			positionIDs.(*Tensor).t,
-			ropeFactors.(*Tensor).t,
+			positions.(*Tensor).t,
+			opts.Factors.(*Tensor).t,
 			C.int(ropeDim),
-			C.int(ropeType),
-			C.int(opts.OriginalContextLen),
+			C.int(opts.Type),
+			C.int(opts.OriginalContextLength),
 			C.float(ropeBase),
 			C.float(ropeScale),
 			C.float(0.0),
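With no options supplied, the rewritten method therefore passes ggml_rope_ext a rope type of 0 (standard RoPE, the removed ropeTypeNorm), an original context length of 131072, and an empty factors tensor. A minimal sketch of the defaults, assuming hypothetical tensors q and positions and the fast.RoPE helper added below:

	// Defaults: Type 0, OriginalContextLength 131072, Factors = empty &Tensor{}.
	q = fast.RoPE(ctx, q, positions, ropeDim, ropeBase, ropeScale)

	// A nil factors tensor is safe: rope.WithFactors ignores nil, so opts.Factors
	// keeps its empty default and the opts.Factors.(*Tensor) assertion above
	// never sees a nil interface.
	q = fast.RoPE(ctx, q, positions, ropeDim, ropeBase, ropeScale, rope.WithFactors(nil))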
ml/nn/fast/rope.go (new file, 21 lines)
@@ -0,0 +1,21 @@
+// fast provides implementations of fast (fused) operations for increased performance.
+package fast
+
+import (
+	"github.com/ollama/ollama/ml"
+	"github.com/ollama/ollama/ml/nn/rope"
+)
+
+// fastRoPE is an interface for tensors that support fast rotary positional embedding.
+type fastRoPE interface {
+	RoPE(ctx ml.Context, positionIDs ml.Tensor, dim int, base, scale float32, options ...func(*rope.Options)) ml.Tensor
+}
+
+// RoPE applies rotary positional embedding to tensor `t`.
+func RoPE(ctx ml.Context, t, positions ml.Tensor, dim int, base, scale float32, options ...func(*rope.Options)) ml.Tensor {
+	if t, ok := t.(fastRoPE); ok {
+		return t.RoPE(ctx, positions, dim, base, scale, options...)
+	}
+
+	panic("RoPE not implemented for this tensor type")
+}
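A usage sketch for the helper, assuming a hypothetical model-side attention block (none of these names or hyperparameter values are from this commit):

	import (
		"github.com/ollama/ollama/ml"
		"github.com/ollama/ollama/ml/nn/fast"
		"github.com/ollama/ollama/ml/nn/rope"
	)

	// applyRoPE rotates query and key states with NeoX-style RoPE.
	func applyRoPE(ctx ml.Context, q, k, positions ml.Tensor) (ml.Tensor, ml.Tensor) {
		const ropeDim, ropeBase, ropeScale = 64, 500000.0, 1.0
		q = fast.RoPE(ctx, q, positions, ropeDim, ropeBase, ropeScale, rope.WithTypeNeoX())
		k = fast.RoPE(ctx, k, positions, ropeDim, ropeBase, ropeScale, rope.WithTypeNeoX())
		return q, k
	}

A backend whose tensor type does not implement fastRoPE panics here rather than silently skipping the embedding, which keeps a missing implementation loud during bring-up.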
ml/nn/rope/rope.go (new file, 33 lines)
@@ -0,0 +1,33 @@
+package rope
+
+import "github.com/ollama/ollama/ml"
+
+// Options contains optional parameters for the RoPE function
+type Options struct {
+	OriginalContextLength int
+	Type                  int
+	Factors               ml.Tensor
+}
+
+// WithOriginalContextLength sets a custom context length
+func WithOriginalContextLength(n int) func(*Options) {
+	return func(opts *Options) {
+		opts.OriginalContextLength = n
+	}
+}
+
+// WithTypeNeoX sets RoPE type to NeoX
+func WithTypeNeoX() func(*Options) {
+	return func(opts *Options) {
+		opts.Type = 2
+	}
+}
+
+// WithFactors sets custom rope factors
+func WithFactors(factors ml.Tensor) func(*Options) {
+	return func(opts *Options) {
+		if factors != nil {
+			opts.Factors = factors
+		}
+	}
+}
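The option set is deliberately open-ended; the constants deleted from the ggml backend above (ropeTypeMrope = 8, ropeTypeVision = 24) suggest how further types could be expressed in the same pattern. A hypothetical sketch, not part of this commit:

	// WithTypeMRoPE would set the RoPE type to M-RoPE; the value 8 mirrors the
	// ropeTypeMrope constant removed from the ggml backend. Hypothetical only.
	func WithTypeMRoPE() func(*Options) {
		return func(opts *Options) {
			opts.Type = 8
		}
	}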