Compare commits
1 Commits
v0.13.2
...
v0.1.47-Al
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4f1a191422 |
@@ -3,9 +3,7 @@ ollama
|
||||
app
|
||||
macapp
|
||||
dist
|
||||
build
|
||||
llm/llama.cpp
|
||||
.env
|
||||
.cache
|
||||
test_data
|
||||
.git
|
||||
|
||||
|
||||
29
.gitattributes
vendored
@@ -1,28 +1 @@
|
||||
llama/**/*.cpp linguist-vendored
|
||||
llama/**/*.hpp linguist-vendored
|
||||
llama/**/*.h linguist-vendored
|
||||
llama/**/*.c linguist-vendored
|
||||
llama/**/*.cu linguist-vendored
|
||||
llama/**/*.cuh linguist-vendored
|
||||
llama/**/*.m linguist-vendored
|
||||
llama/**/*.metal linguist-vendored
|
||||
|
||||
ml/backend/**/*.c linguist-vendored
|
||||
ml/backend/**/*.h linguist-vendored
|
||||
ml/backend/**/*.cpp linguist-vendored
|
||||
ml/backend/**/*.hpp linguist-vendored
|
||||
ml/backend/**/*.cu linguist-vendored
|
||||
ml/backend/**/*.cuh linguist-vendored
|
||||
ml/backend/**/*.m linguist-vendored
|
||||
ml/backend/**/*.metal linguist-vendored
|
||||
ml/backend/**/*.comp linguist-vendored
|
||||
ml/backend/**/*.glsl linguist-vendored
|
||||
ml/backend/**/CMakeLists.txt linguist-vendored
|
||||
|
||||
app/webview linguist-vendored
|
||||
|
||||
llama/build-info.cpp linguist-generated
|
||||
ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.s linguist-generated
|
||||
|
||||
* text=auto
|
||||
*.go text eol=lf
|
||||
llm/ext_server/* linguist-vendored
|
||||
|
||||
8
.github/ISSUE_TEMPLATE/10_bug_report.yml
vendored
@@ -9,14 +9,6 @@ body:
|
||||
description: What happened? What did you expect to happen?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. See [Troubleshooting Guide](https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md#how-to-troubleshoot-issues) for details.
|
||||
render: shell
|
||||
validations:
|
||||
required: false
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
|
||||
853
.github/workflows/release.yaml
vendored
@@ -5,44 +5,23 @@ on:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
env:
|
||||
CGO_CFLAGS: '-O3'
|
||||
CGO_CXXFLAGS: '-O3'
|
||||
|
||||
jobs:
|
||||
setup-environment:
|
||||
runs-on: ubuntu-latest
|
||||
# Full build of the Mac assets
|
||||
build-darwin:
|
||||
runs-on: macos-12
|
||||
environment: release
|
||||
outputs:
|
||||
GOFLAGS: ${{ steps.goflags.outputs.GOFLAGS }}
|
||||
VERSION: ${{ steps.goflags.outputs.VERSION }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set environment
|
||||
id: goflags
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: |
|
||||
echo GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=${GITHUB_REF_NAME#v}\" \"-X=github.com/ollama/ollama/server.mode=release\"'" >>$GITHUB_OUTPUT
|
||||
echo VERSION="${GITHUB_REF_NAME#v}" >>$GITHUB_OUTPUT
|
||||
|
||||
darwin-build:
|
||||
runs-on: macos-14-xlarge
|
||||
environment: release
|
||||
needs: setup-environment
|
||||
echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV
|
||||
- name: key
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
|
||||
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
|
||||
APPLE_ID: ${{ vars.APPLE_ID }}
|
||||
MACOS_SIGNING_KEY: ${{ secrets.MACOS_SIGNING_KEY }}
|
||||
MACOS_SIGNING_KEY_PASSWORD: ${{ secrets.MACOS_SIGNING_KEY_PASSWORD }}
|
||||
CGO_CFLAGS: '-mmacosx-version-min=14.0 -O3'
|
||||
CGO_CXXFLAGS: '-mmacosx-version-min=14.0 -O3'
|
||||
CGO_LDFLAGS: '-mmacosx-version-min=14.0 -O3'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
run: |
|
||||
echo $MACOS_SIGNING_KEY | base64 --decode > certificate.p12
|
||||
security create-keychain -p password build.keychain
|
||||
security default-keychain -s build.keychain
|
||||
@@ -53,481 +32,449 @@ jobs:
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
- run: |
|
||||
cache: true
|
||||
- name: Build Darwin
|
||||
env:
|
||||
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
|
||||
APPLE_ID: ${{ vars.APPLE_ID }}
|
||||
SDKROOT: /Applications/Xcode_13.4.1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
|
||||
DEVELOPER_DIR: /Applications/Xcode_13.4.1.app/Contents/Developer
|
||||
run: |
|
||||
./scripts/build_darwin.sh
|
||||
- name: Log build results
|
||||
run: |
|
||||
ls -l dist/
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bundles-darwin
|
||||
name: dist-darwin
|
||||
path: |
|
||||
dist/*.tgz
|
||||
dist/*.zip
|
||||
dist/*.dmg
|
||||
dist/*arwin*
|
||||
!dist/*-cov
|
||||
|
||||
windows-depends:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows]
|
||||
arch: [amd64]
|
||||
preset: ['CPU']
|
||||
include:
|
||||
- os: windows
|
||||
arch: amd64
|
||||
preset: 'CUDA 12'
|
||||
install: https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_571.96_windows.exe
|
||||
cuda-components:
|
||||
- '"cudart"'
|
||||
- '"nvcc"'
|
||||
- '"cublas"'
|
||||
- '"cublas_dev"'
|
||||
cuda-version: '12.8'
|
||||
flags: ''
|
||||
- os: windows
|
||||
arch: amd64
|
||||
preset: 'CUDA 13'
|
||||
install: https://developer.download.nvidia.com/compute/cuda/13.0.0/local_installers/cuda_13.0.0_windows.exe
|
||||
cuda-components:
|
||||
- '"cudart"'
|
||||
- '"nvcc"'
|
||||
- '"cublas"'
|
||||
- '"cublas_dev"'
|
||||
- '"crt"'
|
||||
- '"nvvm"'
|
||||
- '"nvptxcompiler"'
|
||||
cuda-version: '13.0'
|
||||
flags: ''
|
||||
- os: windows
|
||||
arch: amd64
|
||||
preset: 'ROCm 6'
|
||||
install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe
|
||||
rocm-version: '6.2'
|
||||
flags: '-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma" -DCMAKE_CXX_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma"'
|
||||
runner_dir: 'rocm'
|
||||
- os: windows
|
||||
arch: amd64
|
||||
preset: Vulkan
|
||||
install: https://sdk.lunarg.com/sdk/download/1.4.321.1/windows/vulkansdk-windows-X64-1.4.321.1.exe
|
||||
flags: ''
|
||||
runner_dir: 'vulkan'
|
||||
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
|
||||
# Windows builds take a long time to both install the dependencies and build, so parallelize
|
||||
# CPU generation step
|
||||
generate-windows-cpu:
|
||||
environment: release
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
steps:
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
choco install -y --no-progress ccache ninja
|
||||
ccache -o cache_dir=${{ github.workspace }}\.ccache
|
||||
- if: startsWith(matrix.preset, 'CUDA ') || startsWith(matrix.preset, 'ROCm ') || startsWith(matrix.preset, 'Vulkan')
|
||||
id: cache-install
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
|
||||
C:\Program Files\AMD\ROCm
|
||||
C:\VulkanSDK
|
||||
key: ${{ matrix.install }}
|
||||
- if: startsWith(matrix.preset, 'CUDA ')
|
||||
name: Install CUDA ${{ matrix.cuda-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
$subpackages = @(${{ join(matrix.cuda-components, ', ') }}) | Foreach-Object {"${_}_${{ matrix.cuda-version }}"}
|
||||
Start-Process -FilePath .\install.exe -ArgumentList (@("-s") + $subpackages) -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path
|
||||
echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
- if: startsWith(matrix.preset, 'ROCm')
|
||||
name: Install ROCm ${{ matrix.rocm-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path
|
||||
echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "HIPCXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "HIP_PLATFORM=amd" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "CMAKE_PREFIX_PATH=$hipPath" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
- if: matrix.preset == 'Vulkan'
|
||||
name: Install Vulkan ${{ matrix.rocm-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
Start-Process -FilePath .\install.exe -ArgumentList "-c","--am","--al","in" -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$vulkanPath = (Resolve-Path "C:\VulkanSDK\*").path
|
||||
echo "$vulkanPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
echo "VULKAN_SDK=$vulkanPath" >> $env:GITHUB_ENV
|
||||
- if: matrix.preset == 'CPU'
|
||||
run: |
|
||||
echo "CC=clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "CXX=clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
- if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
|
||||
C:\Program Files\AMD\ROCm
|
||||
C:\VulkanSDK
|
||||
key: ${{ matrix.install }}
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}\.ccache
|
||||
key: ccache-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }}
|
||||
- name: Build target "${{ matrix.preset }}"
|
||||
run: |
|
||||
Import-Module 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll'
|
||||
Enter-VsDevShell -VsInstallPath 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo'
|
||||
cmake --preset "${{ matrix.preset }}" ${{ matrix.flags }} --install-prefix "$((pwd).Path)\dist\${{ matrix.os }}-${{ matrix.arch }}"
|
||||
cmake --build --parallel ([Environment]::ProcessorCount) --preset "${{ matrix.preset }}"
|
||||
cmake --install build --component "${{ startsWith(matrix.preset, 'CUDA ') && 'CUDA' || startsWith(matrix.preset, 'ROCm ') && 'HIP' || startsWith(matrix.preset, 'Vulkan') && 'Vulkan' || 'CPU' }}" --strip
|
||||
Remove-Item -Path dist\lib\ollama\rocm\rocblas\library\*gfx906* -ErrorAction SilentlyContinue
|
||||
env:
|
||||
CMAKE_GENERATOR: Ninja
|
||||
- name: Log build results
|
||||
run: |
|
||||
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: depends-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }}
|
||||
path: dist\*
|
||||
|
||||
windows-build:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows]
|
||||
arch: [amd64, arm64]
|
||||
include:
|
||||
- os: windows
|
||||
arch: amd64
|
||||
llvmarch: x86_64
|
||||
- os: windows
|
||||
arch: arm64
|
||||
llvmarch: aarch64
|
||||
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
|
||||
environment: release
|
||||
needs: [setup-environment]
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
|
||||
steps:
|
||||
- name: Install ARM64 system dependencies
|
||||
if: matrix.arch == 'arm64'
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
Set-ExecutionPolicy Bypass -Scope Process -Force
|
||||
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072
|
||||
iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
|
||||
echo "C:\ProgramData\chocolatey\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
|
||||
Invoke-WebRequest -Uri https://aka.ms/vs/17/release/vc_redist.arm64.exe -OutFile "${{ runner.temp }}\vc_redist.arm64.exe"
|
||||
Start-Process -FilePath "${{ runner.temp }}\vc_redist.arm64.exe" -ArgumentList @("/install", "/quiet", "/norestart") -NoNewWindow -Wait
|
||||
|
||||
choco install -y --no-progress git gzip
|
||||
echo "C:\Program Files\Git\cmd" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
- name: Install clang and gcc-compat
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
Set-ExecutionPolicy Bypass -Scope Process -Force
|
||||
Invoke-WebRequest -Uri "https://github.com/mstorsjo/llvm-mingw/releases/download/20240619/llvm-mingw-20240619-ucrt-${{ matrix.llvmarch }}.zip" -OutFile "${{ runner.temp }}\llvm-mingw-ucrt.zip"
|
||||
Expand-Archive -Path ${{ runner.temp }}\llvm-mingw-ucrt.zip -DestinationPath "C:\Program Files\"
|
||||
$installPath=(Resolve-Path -Path "C:\Program Files\llvm-mingw-*-ucrt*").path
|
||||
echo "$installPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
- name: Verify gcc is actually clang
|
||||
run: |
|
||||
$ErrorActionPreference='Continue'
|
||||
$version=& gcc -v 2>&1
|
||||
$version=$version -join "`n"
|
||||
echo "gcc is $version"
|
||||
if ($version -notmatch 'clang') {
|
||||
echo "ERROR: GCC must be clang for proper utf16 handling"
|
||||
exit 1
|
||||
}
|
||||
$ErrorActionPreference='Stop'
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- run: |
|
||||
./scripts/build_windows ollama app
|
||||
- name: Log build results
|
||||
run: |
|
||||
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-${{ matrix.os }}-${{ matrix.arch }}
|
||||
path: |
|
||||
dist\*
|
||||
|
||||
windows-app:
|
||||
runs-on: windows
|
||||
environment: release
|
||||
needs: [windows-build, windows-depends]
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
|
||||
KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: google-github-actions/auth@v2
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
project_id: ollama
|
||||
credentials_json: ${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}
|
||||
- run: |
|
||||
project_id: 'ollama'
|
||||
credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
|
||||
- run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
|
||||
- name: install Windows SDK 8.1 to get signtool
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${{ runner.temp }}\sdksetup.exe"
|
||||
Start-Process "${{ runner.temp }}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
|
||||
|
||||
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${{ runner.temp }}\plugin.zip"
|
||||
Expand-Archive -Path "${{ runner.temp }}\plugin.zip" -DestinationPath "${{ runner.temp }}\plugin\"
|
||||
& "${{ runner.temp }}\plugin\*\kmscng.msi" /quiet
|
||||
|
||||
echo "${{ vars.OLLAMA_CERT }}" >ollama_inc.crt
|
||||
write-host "downloading SDK"
|
||||
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
|
||||
Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
|
||||
write-host "Win SDK 8.1 installed"
|
||||
gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
|
||||
- name: install signing plugin
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading plugin"
|
||||
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
|
||||
Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
|
||||
write-host "Installing plugin"
|
||||
& "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
|
||||
write-host "plugin installed"
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: depends-windows*
|
||||
path: dist
|
||||
merge-multiple: true
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: build-windows*
|
||||
path: dist
|
||||
merge-multiple: true
|
||||
- name: Log dist contents after download
|
||||
run: |
|
||||
gci -path .\dist -recurse
|
||||
cache: true
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
./scripts/build_windows.ps1 deps sign installer zip
|
||||
- name: Log contents after build
|
||||
run: |
|
||||
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$env:PATH"
|
||||
go generate -x ./...
|
||||
name: go generate
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bundles-windows
|
||||
name: generate-windows-cpu
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
llm/build/**/*.a
|
||||
dist/windows-amd64/**
|
||||
|
||||
# ROCm generation step
|
||||
generate-windows-rocm:
|
||||
environment: release
|
||||
runs-on: windows
|
||||
env:
|
||||
KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
project_id: 'ollama'
|
||||
credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
|
||||
- run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
|
||||
- name: install Windows SDK 8.1 to get signtool
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading SDK"
|
||||
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
|
||||
Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
|
||||
write-host "Win SDK 8.1 installed"
|
||||
gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
|
||||
- name: install signing plugin
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading plugin"
|
||||
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
|
||||
Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
|
||||
write-host "Installing plugin"
|
||||
& "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
|
||||
write-host "plugin installed"
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- name: 'Install ROCm'
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading AMD HIP Installer"
|
||||
Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-23.Q4-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
|
||||
write-host "Installing AMD HIP"
|
||||
Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait
|
||||
write-host "Completed AMD HIP"
|
||||
- name: 'Verify ROCm'
|
||||
run: |
|
||||
& 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$env:PATH"
|
||||
$env:OLLAMA_SKIP_CPU_GENERATE="1"
|
||||
$env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
|
||||
go generate -x ./...
|
||||
name: go generate
|
||||
- name: 'gather rocm dependencies'
|
||||
run: |
|
||||
$HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
|
||||
md "dist\deps\bin\rocblas\library"
|
||||
cp "${HIP_PATH}\bin\hipblas.dll" "dist\deps\bin\"
|
||||
cp "${HIP_PATH}\bin\rocblas.dll" "dist\deps\bin\"
|
||||
cp "${HIP_PATH}\bin\rocblas\library\*" "dist\deps\bin\rocblas\library\"
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: generate-windows-rocm
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
dist/windows-amd64/**
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: windows-rocm-deps
|
||||
path: dist/deps/*
|
||||
|
||||
# CUDA generation step
|
||||
generate-windows-cuda:
|
||||
environment: release
|
||||
runs-on: windows
|
||||
env:
|
||||
KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
project_id: 'ollama'
|
||||
credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
|
||||
- run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
|
||||
- name: install Windows SDK 8.1 to get signtool
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading SDK"
|
||||
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
|
||||
Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
|
||||
write-host "Win SDK 8.1 installed"
|
||||
gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
|
||||
- name: install signing plugin
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading plugin"
|
||||
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
|
||||
Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
|
||||
write-host "Installing plugin"
|
||||
& "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
|
||||
write-host "plugin installed"
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- name: 'Install CUDA'
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading CUDA Installer"
|
||||
Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe"
|
||||
write-host "Installing CUDA"
|
||||
Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait
|
||||
write-host "Completed CUDA"
|
||||
$cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path)
|
||||
$cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2'
|
||||
echo "$cudaPath\bin" >> $env:GITHUB_PATH
|
||||
echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV
|
||||
echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV
|
||||
echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV
|
||||
- name: 'Verify CUDA'
|
||||
run: nvcc -V
|
||||
- run: go get ./...
|
||||
- name: go generate
|
||||
run: |
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
$cudabin=(get-command nvcc).source | split-path
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$cudabin;$env:PATH"
|
||||
$env:OLLAMA_SKIP_CPU_GENERATE="1"
|
||||
go generate -x ./...
|
||||
- name: 'gather cuda dependencies'
|
||||
run: |
|
||||
$NVIDIA_DIR=(resolve-path 'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*\bin\')[0]
|
||||
md "dist\deps"
|
||||
cp "${NVIDIA_DIR}\cudart64_*.dll" "dist\deps\"
|
||||
cp "${NVIDIA_DIR}\cublas64_*.dll" "dist\deps\"
|
||||
cp "${NVIDIA_DIR}\cublasLt64_*.dll" "dist\deps\"
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: generate-windows-cuda
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
dist/windows-amd64/**
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: windows-cuda-deps
|
||||
path: dist/deps/*
|
||||
|
||||
# Import the prior generation steps and build the final windows assets
|
||||
build-windows:
|
||||
environment: release
|
||||
runs-on: windows
|
||||
needs:
|
||||
- generate-windows-cuda
|
||||
- generate-windows-rocm
|
||||
- generate-windows-cpu
|
||||
env:
|
||||
KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
project_id: 'ollama'
|
||||
credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
|
||||
- run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
|
||||
- name: install Windows SDK 8.1 to get signtool
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading SDK"
|
||||
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
|
||||
Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
|
||||
write-host "Win SDK 8.1 installed"
|
||||
gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
|
||||
- name: install signing plugin
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading plugin"
|
||||
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
|
||||
Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
|
||||
write-host "Installing plugin"
|
||||
& "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
|
||||
write-host "plugin installed"
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- run: go get
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: generate-windows-cpu
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: generate-windows-cuda
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: windows-cuda-deps
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: windows-rocm-deps
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: generate-windows-rocm
|
||||
- run: dir llm/build
|
||||
- run: |
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$env:PATH"
|
||||
$env:OLLAMA_SKIP_GENERATE="1"
|
||||
& .\scripts\build_windows.ps1
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: dist-windows
|
||||
path: |
|
||||
dist/*.zip
|
||||
dist/OllamaSetup.exe
|
||||
dist/ollama-windows-*.zip
|
||||
|
||||
linux-build:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
arch: amd64
|
||||
target: archive
|
||||
- os: linux
|
||||
arch: amd64
|
||||
target: rocm
|
||||
- os: linux
|
||||
arch: arm64
|
||||
target: archive
|
||||
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
|
||||
# Linux x86 assets built using the container based build
|
||||
build-linux-amd64:
|
||||
environment: release
|
||||
needs: setup-environment
|
||||
runs-on: linux
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
OLLAMA_SKIP_MANIFEST_CREATE: '1'
|
||||
BUILD_ARCH: amd64
|
||||
PUSH: '1'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: ${{ matrix.os }}/${{ matrix.arch }}
|
||||
target: ${{ matrix.target }}
|
||||
build-args: |
|
||||
GOFLAGS=${{ env.GOFLAGS }}
|
||||
CGO_CFLAGS=${{ env.CGO_CFLAGS }}
|
||||
CGO_CXXFLAGS=${{ env.CGO_CXXFLAGS }}
|
||||
outputs: type=local,dest=dist/${{ matrix.os }}-${{ matrix.arch }}
|
||||
cache-from: type=registry,ref=${{ vars.DOCKER_REPO }}:latest
|
||||
cache-to: type=inline
|
||||
- run: |
|
||||
for COMPONENT in bin/* lib/ollama/*; do
|
||||
case "$COMPONENT" in
|
||||
bin/ollama) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
|
||||
lib/ollama/*.so*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
|
||||
lib/ollama/cuda_v*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
|
||||
lib/ollama/vulkan*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
|
||||
lib/ollama/cuda_jetpack5) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack5.tar.in ;;
|
||||
lib/ollama/cuda_jetpack6) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack6.tar.in ;;
|
||||
lib/ollama/rocm) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-rocm.tar.in ;;
|
||||
esac
|
||||
done
|
||||
working-directory: dist/${{ matrix.os }}-${{ matrix.arch }}
|
||||
- run: |
|
||||
echo "Manifests"
|
||||
for ARCHIVE in dist/${{ matrix.os }}-${{ matrix.arch }}/*.tar.in ; do
|
||||
echo $ARCHIVE
|
||||
cat $ARCHIVE
|
||||
done
|
||||
- run: |
|
||||
for ARCHIVE in dist/${{ matrix.os }}-${{ matrix.arch }}/*.tar.in; do
|
||||
tar c -C dist/${{ matrix.os }}-${{ matrix.arch }} -T $ARCHIVE --owner 0 --group 0 | pigz -9vc >$(basename ${ARCHIVE//.*/}.tgz);
|
||||
done
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bundles-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.target }}
|
||||
path: |
|
||||
*.tgz
|
||||
|
||||
# Build each Docker variant (OS, arch, and flavor) separately. Using QEMU is unreliable and slower.
|
||||
docker-build-push:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
arch: arm64
|
||||
build-args: |
|
||||
CGO_CFLAGS
|
||||
CGO_CXXFLAGS
|
||||
GOFLAGS
|
||||
- os: linux
|
||||
arch: amd64
|
||||
build-args: |
|
||||
CGO_CFLAGS
|
||||
CGO_CXXFLAGS
|
||||
GOFLAGS
|
||||
- os: linux
|
||||
arch: amd64
|
||||
suffix: '-rocm'
|
||||
build-args: |
|
||||
CGO_CFLAGS
|
||||
CGO_CXXFLAGS
|
||||
GOFLAGS
|
||||
FLAVOR=rocm
|
||||
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
|
||||
environment: release
|
||||
needs: setup-environment
|
||||
env:
|
||||
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- uses: docker/login-action@v3
|
||||
submodules: recursive
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
- id: build-push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: ${{ matrix.os }}/${{ matrix.arch }}
|
||||
build-args: ${{ matrix.build-args }}
|
||||
outputs: type=image,name=${{ vars.DOCKER_REPO }},push-by-digest=true,name-canonical=true,push=true
|
||||
cache-from: type=registry,ref=${{ vars.DOCKER_REPO }}:latest
|
||||
cache-to: type=inline
|
||||
- run: |
|
||||
mkdir -p ${{ matrix.os }}-${{ matrix.arch }}
|
||||
echo "${{ steps.build-push.outputs.digest }}" >${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt
|
||||
working-directory: ${{ runner.temp }}
|
||||
./scripts/build_linux.sh
|
||||
./scripts/build_docker.sh
|
||||
mv dist/deps/* dist/
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digest-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}
|
||||
name: dist-linux-amd64
|
||||
path: |
|
||||
${{ runner.temp }}/${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt
|
||||
dist/*linux*
|
||||
!dist/*-cov
|
||||
|
||||
# Merge Docker images for the same flavor into a single multi-arch manifest
|
||||
docker-merge-push:
|
||||
strategy:
|
||||
matrix:
|
||||
suffix: ['', '-rocm']
|
||||
# Linux ARM assets built using the container based build
|
||||
# (at present, docker isn't pre-installed on arm ubunutu images)
|
||||
build-linux-arm64:
|
||||
environment: release
|
||||
runs-on: linux-arm64
|
||||
env:
|
||||
OLLAMA_SKIP_MANIFEST_CREATE: '1'
|
||||
BUILD_ARCH: arm64
|
||||
PUSH: '1'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
- name: 'Install Docker'
|
||||
run: |
|
||||
# Add Docker's official GPG key:
|
||||
env
|
||||
uname -a
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y ca-certificates curl
|
||||
sudo install -m 0755 -d /etc/apt/keyrings
|
||||
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
|
||||
sudo chmod a+r /etc/apt/keyrings/docker.asc
|
||||
|
||||
# Add the repository to Apt sources:
|
||||
echo \
|
||||
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
|
||||
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
|
||||
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y docker-ce docker-ce-cli containerd.io
|
||||
sudo usermod -aG docker $USER
|
||||
sudo apt-get install acl
|
||||
sudo setfacl --modify user:$USER:rw /var/run/docker.sock
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
- run: |
|
||||
./scripts/build_linux.sh
|
||||
./scripts/build_docker.sh
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: dist-linux-arm64
|
||||
path: |
|
||||
dist/*linux*
|
||||
!dist/*-cov
|
||||
|
||||
# Aggregate all the assets and ship a release
|
||||
release:
|
||||
needs:
|
||||
- build-darwin
|
||||
- build-windows
|
||||
- build-linux-amd64
|
||||
- build-linux-arm64
|
||||
runs-on: linux
|
||||
environment: release
|
||||
needs: [docker-build-push]
|
||||
steps:
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
- id: metadata
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
flavor: |
|
||||
latest=false
|
||||
suffix=${{ matrix.suffix }}
|
||||
images: |
|
||||
${{ vars.DOCKER_REPO }}
|
||||
tags: |
|
||||
type=ref,enable=true,priority=600,prefix=pr-,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: digest-*
|
||||
path: ${{ runner.temp }}
|
||||
merge-multiple: true
|
||||
- run: |
|
||||
docker buildx imagetools create $(echo '${{ steps.metadata.outputs.json }}' | jq -cr '.tags | map("-t", .) | join(" ")') $(cat *-${{ matrix.suffix }}.txt | xargs printf '${{ vars.DOCKER_REPO }}@%s ')
|
||||
docker buildx imagetools inspect ${{ vars.DOCKER_REPO }}:${{ steps.metadata.outputs.version }}
|
||||
working-directory: ${{ runner.temp }}
|
||||
|
||||
# Final release process
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
environment: release
|
||||
needs: [darwin-build, windows-app, linux-build]
|
||||
permissions:
|
||||
contents: write
|
||||
env:
|
||||
OLLAMA_SKIP_IMAGE_BUILD: '1'
|
||||
PUSH: '1'
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
- name: Set Version
|
||||
shell: bash
|
||||
run: |
|
||||
echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
|
||||
echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
- run: ./scripts/build_docker.sh
|
||||
- name: Retrieve built artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: bundles-*
|
||||
path: dist
|
||||
pattern: dist-*
|
||||
merge-multiple: true
|
||||
- name: Log dist contents
|
||||
- run: |
|
||||
ls -lh dist/
|
||||
(cd dist; sha256sum * > sha256sum.txt)
|
||||
cat dist/sha256sum.txt
|
||||
- name: Create or update Release
|
||||
run: |
|
||||
ls -l dist/
|
||||
- name: Generate checksum file
|
||||
run: find . -type f -not -name 'sha256sum.txt' | xargs sha256sum | tee sha256sum.txt
|
||||
working-directory: dist
|
||||
- name: Create or update Release for tag
|
||||
run: |
|
||||
RELEASE_VERSION="$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)"
|
||||
echo "Looking for existing release for ${RELEASE_VERSION}"
|
||||
OLD_TAG=$(gh release ls --json name,tagName | jq -r ".[] | select(.name == \"${RELEASE_VERSION}\") | .tagName")
|
||||
echo "Looking for existing release for ${{ env.RELEASE_VERSION }}"
|
||||
OLD_TAG=$(gh release ls --json name,tagName | jq -r ".[] | select(.name == \"${{ env.RELEASE_VERSION }}\") | .tagName")
|
||||
if [ -n "$OLD_TAG" ]; then
|
||||
echo "Updating release ${RELEASE_VERSION} to point to new tag ${GITHUB_REF_NAME}"
|
||||
echo "Updating release ${{ env.RELEASE_VERSION }} to point to new tag ${GITHUB_REF_NAME}"
|
||||
gh release edit ${OLD_TAG} --tag ${GITHUB_REF_NAME}
|
||||
else
|
||||
echo "Creating new release ${RELEASE_VERSION} pointing to tag ${GITHUB_REF_NAME}"
|
||||
echo "Creating new release ${{ env.RELEASE_VERSION }} pointing to tag ${GITHUB_REF_NAME}"
|
||||
gh release create ${GITHUB_REF_NAME} \
|
||||
--title ${RELEASE_VERSION} \
|
||||
--title ${{ env.RELEASE_VERSION }} \
|
||||
--draft \
|
||||
--generate-notes \
|
||||
--prerelease
|
||||
fi
|
||||
- name: Upload release artifacts
|
||||
run: |
|
||||
pids=()
|
||||
for payload in dist/*.txt dist/*.zip dist/*.tgz dist/*.exe dist/*.dmg ; do
|
||||
echo "Uploading $payload"
|
||||
gh release upload ${GITHUB_REF_NAME} $payload --clobber &
|
||||
pids[$!]=$!
|
||||
sleep 1
|
||||
done
|
||||
echo "Waiting for uploads to complete"
|
||||
for pid in "${pids[*]}"; do
|
||||
wait $pid
|
||||
done
|
||||
echo "done"
|
||||
echo "Uploading artifacts for tag ${GITHUB_REF_NAME}"
|
||||
gh release upload ${GITHUB_REF_NAME} dist/* --clobber
|
||||
|
||||
463
.github/workflows/test.yaml
vendored
@@ -21,7 +21,9 @@ jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
changed: ${{ steps.changes.outputs.changed }}
|
||||
GENERATE: ${{ steps.changes.outputs.GENERATE }}
|
||||
GENERATE_CUDA: ${{ steps.changes.outputs.GENERATE_CUDA }}
|
||||
GENERATE_ROCM: ${{ steps.changes.outputs.GENERATE_ROCM }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -29,212 +31,291 @@ jobs:
|
||||
- id: changes
|
||||
run: |
|
||||
changed() {
|
||||
local BASE=${{ github.event.pull_request.base.sha }}
|
||||
local HEAD=${{ github.event.pull_request.head.sha }}
|
||||
local MERGE_BASE=$(git merge-base $BASE $HEAD)
|
||||
git diff-tree -r --no-commit-id --name-only "$MERGE_BASE" "$HEAD" \
|
||||
git diff-tree -r --no-commit-id --name-only \
|
||||
$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) \
|
||||
${{ github.event.pull_request.head.sha }} \
|
||||
| xargs python3 -c "import sys; from pathlib import Path; print(any(Path(x).match(glob) for x in sys.argv[1:] for glob in '$*'.split(' ')))"
|
||||
}
|
||||
|
||||
echo changed=$(changed 'llama/llama.cpp/**/*' 'ml/backend/ggml/ggml/**/*') | tee -a $GITHUB_OUTPUT
|
||||
{
|
||||
echo GENERATE=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**')
|
||||
echo GENERATE_CUDA=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**')
|
||||
echo GENERATE_ROCM=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**')
|
||||
} >>$GITHUB_OUTPUT
|
||||
|
||||
linux:
|
||||
generate:
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.changed == 'True'
|
||||
if: ${{ needs.changes.outputs.GENERATE == 'True' }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- preset: CPU
|
||||
- preset: CUDA
|
||||
container: nvidia/cuda:13.0.0-devel-ubuntu22.04
|
||||
flags: '-DCMAKE_CUDA_ARCHITECTURES=87'
|
||||
- preset: ROCm
|
||||
container: rocm/dev-ubuntu-22.04:6.1.2
|
||||
extra-packages: rocm-libs
|
||||
flags: '-DAMDGPU_TARGETS=gfx1010 -DCMAKE_PREFIX_PATH=/opt/rocm'
|
||||
- preset: Vulkan
|
||||
container: ubuntu:22.04
|
||||
extra-packages: >
|
||||
mesa-vulkan-drivers vulkan-tools
|
||||
libvulkan1 libvulkan-dev
|
||||
vulkan-sdk cmake ccache g++ make
|
||||
runs-on: linux
|
||||
container: ${{ matrix.container }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
[ -n "${{ matrix.container }}" ] || sudo=sudo
|
||||
$sudo apt-get update
|
||||
# Add LunarG Vulkan SDK apt repo for Ubuntu 22.04
|
||||
if [ "${{ matrix.preset }}" = "Vulkan" ]; then
|
||||
$sudo apt-get install -y --no-install-recommends wget gnupg ca-certificates software-properties-common
|
||||
wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | $sudo gpg --dearmor -o /usr/share/keyrings/lunarg-archive-keyring.gpg
|
||||
# Use signed-by to bind the repo to the installed keyring to avoid NO_PUBKEY
|
||||
echo "deb [signed-by=/usr/share/keyrings/lunarg-archive-keyring.gpg] https://packages.lunarg.com/vulkan/1.4.313 jammy main" | $sudo tee /etc/apt/sources.list.d/lunarg-vulkan-1.4.313-jammy.list > /dev/null
|
||||
$sudo apt-get update
|
||||
fi
|
||||
$sudo apt-get install -y cmake ccache ${{ matrix.extra-packages }}
|
||||
# Export VULKAN_SDK if provided by LunarG package (defensive)
|
||||
if [ -d "/usr/lib/x86_64-linux-gnu/vulkan" ] && [ "${{ matrix.preset }}" = "Vulkan" ]; then
|
||||
echo "VULKAN_SDK=/usr" >> $GITHUB_ENV
|
||||
fi
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: /github/home/.cache/ccache
|
||||
key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }}
|
||||
- run: |
|
||||
cmake --preset ${{ matrix.preset }} ${{ matrix.flags }}
|
||||
cmake --build --preset ${{ matrix.preset }} --parallel
|
||||
|
||||
windows:
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.changed == 'True'
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- preset: CPU
|
||||
- preset: CUDA
|
||||
install: https://developer.download.nvidia.com/compute/cuda/13.0.0/local_installers/cuda_13.0.0_windows.exe
|
||||
flags: '-DCMAKE_CUDA_ARCHITECTURES=80'
|
||||
cuda-components:
|
||||
- '"cudart"'
|
||||
- '"nvcc"'
|
||||
- '"cublas"'
|
||||
- '"cublas_dev"'
|
||||
- '"crt"'
|
||||
- '"nvvm"'
|
||||
- '"nvptxcompiler"'
|
||||
cuda-version: '13.0'
|
||||
- preset: ROCm
|
||||
install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe
|
||||
flags: '-DAMDGPU_TARGETS=gfx1010 -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma" -DCMAKE_CXX_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma"'
|
||||
- preset: Vulkan
|
||||
install: https://sdk.lunarg.com/sdk/download/1.4.321.1/windows/vulkansdk-windows-X64-1.4.321.1.exe
|
||||
runs-on: windows
|
||||
steps:
|
||||
- run: |
|
||||
choco install -y --no-progress ccache ninja
|
||||
ccache -o cache_dir=${{ github.workspace }}\.ccache
|
||||
- if: matrix.preset == 'CUDA' || matrix.preset == 'ROCm' || matrix.preset == 'Vulkan'
|
||||
id: cache-install
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
|
||||
C:\Program Files\AMD\ROCm
|
||||
C:\VulkanSDK
|
||||
key: ${{ matrix.install }}
|
||||
- if: matrix.preset == 'CUDA'
|
||||
name: Install CUDA ${{ matrix.cuda-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
$subpackages = @(${{ join(matrix.cuda-components, ', ') }}) | Foreach-Object {"${_}_${{ matrix.cuda-version }}"}
|
||||
Start-Process -FilePath .\install.exe -ArgumentList (@("-s") + $subpackages) -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path
|
||||
echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
- if: matrix.preset == 'ROCm'
|
||||
name: Install ROCm ${{ matrix.rocm-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path
|
||||
echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "HIPCXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "HIP_PLATFORM=amd" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
echo "CMAKE_PREFIX_PATH=$hipPath" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
- if: matrix.preset == 'Vulkan'
|
||||
name: Install Vulkan ${{ matrix.rocm-version }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
|
||||
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
|
||||
Start-Process -FilePath .\install.exe -ArgumentList "-c","--am","--al","in" -NoNewWindow -Wait
|
||||
}
|
||||
|
||||
$vulkanPath = (Resolve-Path "C:\VulkanSDK\*").path
|
||||
echo "$vulkanPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
echo "VULKAN_SDK=$vulkanPath" >> $env:GITHUB_ENV
|
||||
- if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: |
|
||||
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
|
||||
C:\Program Files\AMD\ROCm
|
||||
C:\VulkanSDK
|
||||
key: ${{ matrix.install }}
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}\.ccache
|
||||
key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }}
|
||||
- run: |
|
||||
Import-Module 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll'
|
||||
Enter-VsDevShell -VsInstallPath 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo'
|
||||
cmake --preset "${{ matrix.preset }}" ${{ matrix.flags }}
|
||||
cmake --build --parallel --preset "${{ matrix.preset }}"
|
||||
env:
|
||||
CMAKE_GENERATOR: Ninja
|
||||
|
||||
go_mod_tidy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: check that 'go mod tidy' is clean
|
||||
run: go mod tidy --diff || (echo "Please run 'go mod tidy'." && exit 1)
|
||||
|
||||
test:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
arch: [amd64, arm64]
|
||||
exclude:
|
||||
- os: ubuntu-latest
|
||||
arch: arm64
|
||||
- os: windows-2019
|
||||
arch: arm64
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
CGO_ENABLED: '1'
|
||||
GOARCH: ${{ matrix.arch }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: 'go.mod'
|
||||
- uses: actions/setup-node@v4
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
$gccpath=(get-command gcc).source | split-path -parent
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$gccpath;$env:PATH"
|
||||
echo $env:PATH
|
||||
go generate -x ./...
|
||||
if: ${{ startsWith(matrix.os, 'windows-') }}
|
||||
name: 'Windows Go Generate'
|
||||
- run: go generate -x ./...
|
||||
if: ${{ ! startsWith(matrix.os, 'windows-') }}
|
||||
name: 'Unix Go Generate'
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: Install UI dependencies
|
||||
working-directory: ./app/ui/app
|
||||
run: npm ci
|
||||
- name: Install tscriptify
|
||||
run: |
|
||||
go install github.com/tkrajina/typescriptify-golang-structs/tscriptify@latest
|
||||
- name: Run UI tests
|
||||
if: ${{ startsWith(matrix.os, 'ubuntu') }}
|
||||
working-directory: ./app/ui/app
|
||||
run: npm test
|
||||
- name: Run go generate
|
||||
run: go generate ./...
|
||||
|
||||
- name: go test
|
||||
if: always()
|
||||
run: go test -count=1 -benchtime=1x ./...
|
||||
|
||||
- uses: golangci/golangci-lint-action@v9
|
||||
name: ${{ matrix.os }}-${{ matrix.arch }}-libraries
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
llm/build/**/*.a
|
||||
generate-cuda:
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }}
|
||||
strategy:
|
||||
matrix:
|
||||
cuda-version:
|
||||
- '11.8.0'
|
||||
runs-on: linux
|
||||
container: nvidia/cuda:${{ matrix.cuda-version }}-devel-ubuntu20.04
|
||||
steps:
|
||||
- run: |
|
||||
apt-get update && apt-get install -y git build-essential curl
|
||||
curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \
|
||||
| tar -zx -C /usr --strip-components 1
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
with:
|
||||
only-new-issues: true
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
git config --global --add safe.directory /__w/ollama/ollama
|
||||
go generate -x ./...
|
||||
env:
|
||||
OLLAMA_SKIP_CPU_GENERATE: '1'
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cuda-${{ matrix.cuda-version }}-libraries
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
dist/windows-amd64/**
|
||||
generate-rocm:
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }}
|
||||
strategy:
|
||||
matrix:
|
||||
rocm-version:
|
||||
- '6.1.1'
|
||||
runs-on: linux
|
||||
container: rocm/dev-ubuntu-20.04:${{ matrix.rocm-version }}
|
||||
steps:
|
||||
- run: |
|
||||
apt-get update && apt-get install -y git build-essential curl rocm-libs
|
||||
curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \
|
||||
| tar -zx -C /usr --strip-components 1
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
git config --global --add safe.directory /__w/ollama/ollama
|
||||
go generate -x ./...
|
||||
env:
|
||||
OLLAMA_SKIP_CPU_GENERATE: '1'
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: rocm-${{ matrix.rocm-version }}-libraries
|
||||
path: |
|
||||
llm/build/**/bin/*
|
||||
dist/windows-amd64/**
|
||||
|
||||
patches:
|
||||
runs-on: ubuntu-latest
|
||||
# ROCm generation step
|
||||
generate-windows-rocm:
|
||||
needs: [changes]
|
||||
if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }}
|
||||
runs-on: windows
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Verify patches apply cleanly and do not change files
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- name: 'Install ROCm'
|
||||
run: |
|
||||
make -f Makefile.sync clean checkout apply-patches sync
|
||||
git diff --compact-summary --exit-code
|
||||
$ErrorActionPreference = "Stop"
|
||||
write-host "downloading AMD HIP Installer"
|
||||
Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-23.Q4-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
|
||||
write-host "Installing AMD HIP"
|
||||
Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait
|
||||
write-host "Completed AMD HIP"
|
||||
- name: 'Verify ROCm'
|
||||
run: |
|
||||
& 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version
|
||||
- run: go get ./...
|
||||
- run: |
|
||||
$gopath=(get-command go).source | split-path -parent
|
||||
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
|
||||
cd $env:GITHUB_WORKSPACE
|
||||
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
|
||||
$env:PATH="$gopath;$env:PATH"
|
||||
$env:OLLAMA_SKIP_CPU_GENERATE="1"
|
||||
$env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
|
||||
go generate -x ./...
|
||||
name: go generate
|
||||
env:
|
||||
OLLAMA_SKIP_CPU_GENERATE: '1'
|
||||
# TODO - do we need any artifacts?
|
||||
|
||||
# CUDA generation step
generate-windows-cuda:
needs: [changes]
if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }}
runs-on: windows
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version-file: go.mod
cache: true
- name: 'Install CUDA'
run: |
$ErrorActionPreference = "Stop"
write-host "downloading CUDA Installer"
Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe"
write-host "Installing CUDA"
Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait
write-host "Completed CUDA"
$cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path)
$cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2'
echo "$cudaPath\bin" >> $env:GITHUB_PATH
echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV
echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV
echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV
- name: 'Verify CUDA'
run: nvcc -V
- run: go get ./...
- name: go generate
run: |
$gopath=(get-command go).source | split-path -parent
$cudabin=(get-command nvcc).source | split-path
& "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
cd $env:GITHUB_WORKSPACE
$env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
$env:PATH="$gopath;$cudabin;$env:PATH"
$env:OLLAMA_SKIP_CPU_GENERATE="1"
go generate -x ./...
env:
OLLAMA_SKIP_CPU_GENERATE: '1'
# TODO - do we need any artifacts?

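The `Install CUDA` step above derives the versioned environment variable name from the toolkit directory. A small Go sketch of that rewrite (illustrative, not part of the workflow):

```go
package main

import (
	"fmt"
	"regexp"
)

// cudaEnvVersion mirrors the PowerShell `-replace 'v(\d+).(\d+)', '$1_$2'` above:
// it turns a toolkit directory name such as "v11.3" into "11_3", which the
// workflow uses to export CUDA_PATH_V11_3 and CUDA_PATH_VX_Y.
func cudaEnvVersion(verDir string) string {
	return regexp.MustCompile(`^v(\d+)\.(\d+)$`).ReplaceAllString(verDir, "${1}_${2}")
}

func main() {
	ver := cudaEnvVersion("v11.3")
	fmt.Println("CUDA_PATH_V" + ver) // CUDA_PATH_V11_3
}
```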
lint:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
arch: [amd64, arm64]
|
||||
exclude:
|
||||
- os: ubuntu-latest
|
||||
arch: arm64
|
||||
- os: windows-2019
|
||||
arch: arm64
|
||||
- os: macos-latest
|
||||
arch: amd64
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
GOARCH: ${{ matrix.arch }}
|
||||
CGO_ENABLED: '1'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: false
|
||||
- run: |
|
||||
case ${{ matrix.arch }} in
|
||||
amd64) echo ARCH=x86_64 ;;
|
||||
arm64) echo ARCH=arm64 ;;
|
||||
esac >>$GITHUB_ENV
|
||||
shell: bash
|
||||
- run: |
|
||||
mkdir -p llm/build/linux/$ARCH/stub/bin
|
||||
touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server
|
||||
if: ${{ startsWith(matrix.os, 'ubuntu-') }}
|
||||
- run: |
|
||||
mkdir -p llm/build/darwin/$ARCH/stub/bin
|
||||
touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server
|
||||
if: ${{ startsWith(matrix.os, 'macos-') }}
|
||||
- uses: golangci/golangci-lint-action@v6
|
||||
with:
|
||||
args: --timeout 8m0s -v ${{ startsWith(matrix.os, 'windows-') && '' || '--disable gofmt --disable goimports' }}
|
||||
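The lint job above and the test job that follows both translate `GOARCH` into the directory name used under `llm/build` and touch an empty `ollama_llama_server` stub so the tools can run without a full generate. A rough Go equivalent of those two shell steps (a sketch, not workflow code):

```go
package main

import (
	"log"
	"os"
	"path/filepath"
	"runtime"
)

func main() {
	// Mirror the workflow's `case` statement: amd64 -> x86_64, arm64 -> arm64.
	arch := map[string]string{"amd64": "x86_64", "arm64": "arm64"}[runtime.GOARCH]

	// Equivalent of `mkdir -p llm/build/linux/$ARCH/stub/bin` followed by
	// `touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server`.
	stub := filepath.Join("llm", "build", "linux", arch, "stub", "bin", "ollama_llama_server")
	if err := os.MkdirAll(filepath.Dir(stub), 0o755); err != nil {
		log.Fatal(err)
	}
	f, err := os.Create(stub)
	if err != nil {
		log.Fatal(err)
	}
	f.Close()
}
```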
test:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
arch: [amd64]
|
||||
exclude:
|
||||
- os: ubuntu-latest
|
||||
arch: arm64
|
||||
- os: windows-2019
|
||||
arch: arm64
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
GOARCH: ${{ matrix.arch }}
|
||||
CGO_ENABLED: '1'
|
||||
OLLAMA_CPU_TARGET: 'static'
|
||||
OLLAMA_SKIP_CPU_GENERATE: '1'
|
||||
OLLAMA_SKIP_METAL_GENERATE: '1'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: go.mod
|
||||
cache: true
|
||||
- run: |
|
||||
case ${{ matrix.arch }} in
|
||||
amd64) echo ARCH=x86_64 ;;
|
||||
arm64) echo ARCH=arm64 ;;
|
||||
esac >>$GITHUB_ENV
|
||||
shell: bash
|
||||
- run: |
|
||||
mkdir -p llm/build/linux/$ARCH/stub/bin
|
||||
touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server
|
||||
if: ${{ startsWith(matrix.os, 'ubuntu-') }}
|
||||
- run: |
|
||||
mkdir -p llm/build/darwin/$ARCH/stub/bin
|
||||
touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server
|
||||
if: ${{ startsWith(matrix.os, 'macos-') }}
|
||||
shell: bash
|
||||
- run: go generate ./...
|
||||
- run: go build
|
||||
- run: go test -v ./...
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}-binaries
|
||||
path: ollama
|
||||
|
||||
11
.gitignore
vendored
@@ -1,19 +1,16 @@
.DS_Store
.vscode
.vs/
.vs
.env
.venv
.swp
0
dist
build
ollama
ggml-metal.metal
.cache
.gocache
*.exe
.idea
test_data
*.crt
llm/build
__debug_bin*
llama/build
llama/vendor
/ollama

4
.gitmodules
vendored
Normal file
@@ -0,0 +1,4 @@
[submodule "llama.cpp"]
path = llm/llama.cpp
url = https://github.com/ggerganov/llama.cpp.git
shallow = true
@@ -1,51 +1,34 @@
|
||||
version: "2"
|
||||
run:
|
||||
timeout: 5m
|
||||
linters:
|
||||
enable:
|
||||
- asasalint
|
||||
- bidichk
|
||||
- bodyclose
|
||||
- containedctx
|
||||
- contextcheck
|
||||
- exportloopref
|
||||
- gocheckcompilerdirectives
|
||||
# conditionally enable this on linux/macos
|
||||
# - gofmt
|
||||
# - goimports
|
||||
- intrange
|
||||
- makezero
|
||||
- misspell
|
||||
- nilerr
|
||||
- nolintlint
|
||||
- nosprintfhostport
|
||||
- testifylint
|
||||
- unconvert
|
||||
- usetesting
|
||||
- unused
|
||||
- wastedassign
|
||||
- whitespace
|
||||
disable:
|
||||
- errcheck
|
||||
- usestdlibvars
|
||||
settings:
|
||||
govet:
|
||||
disable:
|
||||
- unusedresult
|
||||
staticcheck:
|
||||
checks:
|
||||
- all
|
||||
- -QF* # disable quick fix suggestions
|
||||
- -SA1019
|
||||
- -ST1000 # package comment format
|
||||
- -ST1003 # underscores in package names
|
||||
- -ST1005 # error strings should not be capitalized
|
||||
- -ST1012 # error var naming (ErrFoo)
|
||||
- -ST1016 # receiver name consistency
|
||||
- -ST1020 # comment on exported function format
|
||||
- -ST1021 # comment on exported type format
|
||||
- -ST1022 # comment on exported var format
|
||||
- -ST1023 # omit type from declaration
|
||||
severity:
|
||||
default: error
|
||||
default-severity: error
|
||||
rules:
|
||||
- linters:
|
||||
- gofmt
|
||||
- goimports
|
||||
- intrange
|
||||
- usestdlibvars
|
||||
severity: info
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
- gofumpt
|
||||
|
||||
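As a quick illustration of two of the linters enabled in the config above: `intrange` prefers ranging over an integer (Go 1.22+) and `unconvert` flags redundant conversions. The functions below are hypothetical examples, not code from this repository:

```go
package lintexample

// intrange (Go 1.22+): flags counter loops that could range over an integer.
func sumBefore(n int) (total int) {
	for i := 0; i < n; i++ { // intrange suggests `for i := range n`
		total += i
	}
	return total
}

func sumAfter(n int) (total int) {
	for i := range n {
		total += i
	}
	return total
}

// unconvert: flags conversions of a value to its own type.
func id(x int64) int64 {
	return int64(x) // unconvert: redundant conversion, `return x` suffices
}
```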
10
.prettierrc.json
Normal file
@@ -0,0 +1,10 @@
{
"trailingComma": "es5",
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"jsxSingleQuote": true,
"printWidth": 120,
"arrowParens": "avoid"
}

3
.vs/CMakeWorkspaceSettings.json
Normal file
@@ -0,0 +1,3 @@
{
"enableCMake": false
}

3
.vs/ProjectSettings.json
Normal file
@@ -0,0 +1,3 @@
{
"CurrentProjectSetting": "\u65E0\u914D\u7F6E"
}

6
.vs/VSWorkspaceState.json
Normal file
@@ -0,0 +1,6 @@
{
"ExpandedNodes": [
""
],
"PreviewInSolutionExplorer": false
}
BIN
.vs/ollama-for-amd/v17/.wsuo
Normal file
BIN
.vs/ollama-for-amd/v17/Browse.VC.db
Normal file
73
.vs/ollama-for-amd/v17/DocumentLayout.json
Normal file
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"Version": 1,
|
||||
"WorkspaceRootPath": "D:\\ollama-for-amd\\",
|
||||
"Documents": [
|
||||
{
|
||||
"AbsoluteMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|D:\\ollama-for-amd\\version\\version.go||{3B902123-F8A7-4915-9F01-361F908088D0}",
|
||||
"RelativeMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|solutionrelative:version\\version.go||{3B902123-F8A7-4915-9F01-361F908088D0}"
|
||||
},
|
||||
{
|
||||
"AbsoluteMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|D:\\ollama-for-amd\\llm\\generate\\gen_windows.ps1||{3B902123-F8A7-4915-9F01-361F908088D0}",
|
||||
"RelativeMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|solutionrelative:llm\\generate\\gen_windows.ps1||{3B902123-F8A7-4915-9F01-361F908088D0}"
|
||||
},
|
||||
{
|
||||
"AbsoluteMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|D:\\ollama-for-amd\\llm\\llm_windows.go||{3B902123-F8A7-4915-9F01-361F908088D0}",
|
||||
"RelativeMoniker": "D:0:0:{A2FE74E1-B743-11D0-AE1A-00A0C90FFFC3}|\u003CMiscFiles\u003E|solutionrelative:llm\\llm_windows.go||{3B902123-F8A7-4915-9F01-361F908088D0}"
|
||||
}
|
||||
],
|
||||
"DocumentGroupContainers": [
|
||||
{
|
||||
"Orientation": 0,
|
||||
"VerticalTabListWidth": 256,
|
||||
"DocumentGroups": [
|
||||
{
|
||||
"DockedWidth": 200,
|
||||
"SelectedChildIndex": 0,
|
||||
"Children": [
|
||||
{
|
||||
"$type": "Document",
|
||||
"DocumentIndex": 0,
|
||||
"Title": "version.go",
|
||||
"DocumentMoniker": "D:\\ollama-for-amd\\version\\version.go",
|
||||
"RelativeDocumentMoniker": "version\\version.go",
|
||||
"ToolTip": "D:\\ollama-for-amd\\version\\version.go",
|
||||
"RelativeToolTip": "version\\version.go",
|
||||
"ViewState": "AQIAAAAAAAAAAAAAAAAAAAIAAAAaAAAA",
|
||||
"Icon": "ae27a6b0-e345-4288-96df-5eaf394ee369.001001|",
|
||||
"WhenOpened": "2024-05-08T07:03:06.844Z",
|
||||
"EditorCaption": ""
|
||||
},
|
||||
{
|
||||
"$type": "Document",
|
||||
"DocumentIndex": 1,
|
||||
"Title": "gen_windows.ps1",
|
||||
"DocumentMoniker": "D:\\ollama-for-amd\\llm\\generate\\gen_windows.ps1",
|
||||
"RelativeDocumentMoniker": "llm\\generate\\gen_windows.ps1",
|
||||
"ToolTip": "D:\\ollama-for-amd\\llm\\generate\\gen_windows.ps1",
|
||||
"RelativeToolTip": "llm\\generate\\gen_windows.ps1",
|
||||
"ViewState": "AQIAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"Icon": "ae27a6b0-e345-4288-96df-5eaf394ee369.001001|",
|
||||
"WhenOpened": "2024-05-08T07:02:18.423Z"
|
||||
},
|
||||
{
|
||||
"$type": "Document",
|
||||
"DocumentIndex": 2,
|
||||
"Title": "llm_windows.go",
|
||||
"DocumentMoniker": "D:\\ollama-for-amd\\llm\\llm_windows.go",
|
||||
"RelativeDocumentMoniker": "llm\\llm_windows.go",
|
||||
"ToolTip": "D:\\ollama-for-amd\\llm\\llm_windows.go",
|
||||
"RelativeToolTip": "llm\\llm_windows.go",
|
||||
"ViewState": "AQIAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"Icon": "ae27a6b0-e345-4288-96df-5eaf394ee369.001001|",
|
||||
"WhenOpened": "2024-05-08T07:01:58.602Z"
|
||||
},
|
||||
{
|
||||
"$type": "Bookmark",
|
||||
"Name": "ST:0:0:{cce594b6-0c39-4442-ba28-10c64ac7e89f}"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
BIN
.vs/ollama-for-amd/v17/workspaceFileList.bin
Normal file
BIN
.vs/slnx.sqlite
Normal file
154
CMakeLists.txt
@@ -1,154 +0,0 @@
|
||||
cmake_minimum_required(VERSION 3.21)
|
||||
|
||||
project(Ollama C CXX)
|
||||
|
||||
include(CheckLanguage)
|
||||
include(GNUInstallDirs)
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
set(BUILD_SHARED_LIBS ON)
|
||||
|
||||
set(CMAKE_CXX_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
|
||||
set(GGML_BUILD ON)
|
||||
set(GGML_SHARED ON)
|
||||
set(GGML_CCACHE ON)
|
||||
set(GGML_BACKEND_DL ON)
|
||||
set(GGML_BACKEND_SHARED ON)
|
||||
set(GGML_SCHED_MAX_COPIES 4)
|
||||
|
||||
set(GGML_LLAMAFILE ON)
|
||||
set(GGML_CUDA_PEER_MAX_BATCH_SIZE 128)
|
||||
set(GGML_CUDA_GRAPHS ON)
|
||||
set(GGML_CUDA_FA ON)
|
||||
set(GGML_CUDA_COMPRESSION_MODE default)
|
||||
|
||||
if((CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_OSX_ARCHITECTURES MATCHES "arm64")
|
||||
OR (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm|aarch64|ARM64|ARMv[0-9]+"))
|
||||
set(GGML_CPU_ALL_VARIANTS ON)
|
||||
endif()
|
||||
|
||||
if (CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
|
||||
set(CMAKE_BUILD_RPATH "@loader_path")
|
||||
set(CMAKE_INSTALL_RPATH "@loader_path")
|
||||
endif()
|
||||
|
||||
set(OLLAMA_BUILD_DIR ${CMAKE_BINARY_DIR}/lib/ollama)
|
||||
set(OLLAMA_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib/ollama/${OLLAMA_RUNNER_DIR})
|
||||
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR})
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR})
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR})
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR})
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR})
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR})
|
||||
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/include)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu/amx)
|
||||
|
||||
add_compile_definitions(NDEBUG GGML_VERSION=0x0 GGML_COMMIT=0x0)
|
||||
|
||||
set(GGML_CPU ON)
|
||||
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src)
|
||||
set_property(TARGET ggml PROPERTY EXCLUDE_FROM_ALL TRUE)
|
||||
|
||||
get_target_property(CPU_VARIANTS ggml-cpu MANUALLY_ADDED_DEPENDENCIES)
|
||||
if(NOT CPU_VARIANTS)
|
||||
set(CPU_VARIANTS "ggml-cpu")
|
||||
endif()
|
||||
|
||||
install(TARGETS ggml-base ${CPU_VARIANTS}
|
||||
RUNTIME_DEPENDENCIES
|
||||
PRE_EXCLUDE_REGEXES ".*"
|
||||
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
|
||||
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
|
||||
FRAMEWORK DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
|
||||
)
|
||||
|
||||
check_language(CUDA)
|
||||
if(CMAKE_CUDA_COMPILER)
|
||||
if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.24" AND NOT CMAKE_CUDA_ARCHITECTURES)
|
||||
set(CMAKE_CUDA_ARCHITECTURES "native")
|
||||
endif()
|
||||
|
||||
find_package(CUDAToolkit)
|
||||
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cuda)
|
||||
install(TARGETS ggml-cuda
|
||||
RUNTIME_DEPENDENCIES
|
||||
DIRECTORIES ${CUDAToolkit_BIN_DIR} ${CUDAToolkit_BIN_DIR}/x64 ${CUDAToolkit_LIBRARY_DIR}
|
||||
PRE_INCLUDE_REGEXES cublas cublasLt cudart
|
||||
PRE_EXCLUDE_REGEXES ".*"
|
||||
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CUDA
|
||||
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CUDA
|
||||
)
|
||||
endif()
|
||||
|
||||
|
||||
set(WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX ""
|
||||
CACHE STRING
|
||||
"Regular expression describing AMDGPU_TARGETS not supported on Windows. Override to force building these targets. Default \"^gfx(908|90a):xnack[+-]$\"."
|
||||
)
|
||||
|
||||
check_language(HIP)
|
||||
if(CMAKE_HIP_COMPILER)
|
||||
set(HIP_PLATFORM "amd")
|
||||
|
||||
if(NOT AMDGPU_TARGETS)
|
||||
find_package(hip REQUIRED)
|
||||
list(FILTER AMDGPU_TARGETS INCLUDE REGEX "^gfx(803|90[012]|906(:xnack-)|90c(:xnack-)|1010(:xnack-)|1011(:xnack-)|1012(:xnack-)|103[0-6]|110[0-3]|115[0123]|120[01])$")
|
||||
endif()
|
||||
|
||||
if(WIN32 AND WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX)
|
||||
list(FILTER AMDGPU_TARGETS EXCLUDE REGEX ${WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX})
|
||||
endif()
|
||||
|
||||
if(AMDGPU_TARGETS)
|
||||
find_package(hip REQUIRED)
|
||||
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-hip)
|
||||
|
||||
if (WIN32)
|
||||
target_compile_definitions(ggml-hip PRIVATE GGML_CUDA_NO_PEER_COPY)
|
||||
endif()
|
||||
|
||||
target_compile_definitions(ggml-hip PRIVATE GGML_HIP_NO_VMM)
|
||||
|
||||
install(TARGETS ggml-hip
|
||||
RUNTIME_DEPENDENCY_SET rocm
|
||||
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
|
||||
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
|
||||
)
|
||||
install(RUNTIME_DEPENDENCY_SET rocm
|
||||
DIRECTORIES ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR}
|
||||
PRE_INCLUDE_REGEXES hipblas rocblas amdhip64 rocsolver amd_comgr hsa-runtime64 rocsparse tinfo rocprofiler-register drm drm_amdgpu numa elf
|
||||
PRE_EXCLUDE_REGEXES ".*"
|
||||
POST_EXCLUDE_REGEXES "system32"
|
||||
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
|
||||
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
|
||||
)
|
||||
|
||||
foreach(HIP_LIB_BIN_INSTALL_DIR IN ITEMS ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR})
|
||||
if(EXISTS ${HIP_LIB_BIN_INSTALL_DIR}/rocblas)
|
||||
install(DIRECTORY ${HIP_LIB_BIN_INSTALL_DIR}/rocblas DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP)
|
||||
break()
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
find_package(Vulkan)
|
||||
if(Vulkan_FOUND)
|
||||
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-vulkan)
|
||||
install(TARGETS ggml-vulkan
|
||||
RUNTIME_DEPENDENCIES
|
||||
PRE_INCLUDE_REGEXES vulkan
|
||||
PRE_EXCLUDE_REGEXES ".*"
|
||||
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT Vulkan
|
||||
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT Vulkan
|
||||
)
|
||||
endif()
|
||||
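The `list(FILTER AMDGPU_TARGETS INCLUDE REGEX ...)` call in the CMakeLists.txt above is a plain regular expression over gfx target names. A small Go sketch to check which targets it would keep (the pattern is copied from the file; the sample targets are arbitrary, not an official support list):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Include-filter copied from the CMakeLists.txt shown above.
	keep := regexp.MustCompile(`^gfx(803|90[012]|906(:xnack-)|90c(:xnack-)|1010(:xnack-)|1011(:xnack-)|1012(:xnack-)|103[0-6]|110[0-3]|115[0123]|120[01])$`)

	// Arbitrary sample targets for illustration.
	for _, t := range []string{"gfx906:xnack-", "gfx90a:xnack-", "gfx1030", "gfx1103", "gfx942"} {
		fmt.Printf("%-16s kept=%v\n", t, keep.MatchString(t))
	}
}
```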
@@ -1,145 +0,0 @@
|
||||
{
|
||||
"version": 3,
|
||||
"configurePresets": [
|
||||
{
|
||||
"name": "Default",
|
||||
"binaryDir": "${sourceDir}/build",
|
||||
"installDir": "${sourceDir}/dist",
|
||||
"cacheVariables": {
|
||||
"CMAKE_BUILD_TYPE": "Release",
|
||||
"CMAKE_MSVC_RUNTIME_LIBRARY": "MultiThreaded"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CPU",
|
||||
"inherits": [ "Default" ]
|
||||
},
|
||||
{
|
||||
"name": "CUDA",
|
||||
"inherits": [ "Default" ]
|
||||
},
|
||||
{
|
||||
"name": "CUDA 11",
|
||||
"inherits": [ "CUDA" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_CUDA_ARCHITECTURES": "50-virtual;60-virtual;61-virtual;70-virtual;75-virtual;80-virtual;86-virtual;87-virtual;89-virtual;90-virtual",
|
||||
"CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets -t 2",
|
||||
"OLLAMA_RUNNER_DIR": "cuda_v11"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CUDA 12",
|
||||
"inherits": [ "CUDA" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_CUDA_ARCHITECTURES": "50;52;60;61;70;75;80;86;89;90;90a;120",
|
||||
"CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets -t 2",
|
||||
"OLLAMA_RUNNER_DIR": "cuda_v12"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CUDA 13",
|
||||
"inherits": [ "CUDA" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_CUDA_ARCHITECTURES": "75-virtual;80-virtual;86-virtual;87-virtual;89-virtual;90-virtual;90a-virtual;100-virtual;103-virtual;110-virtual;120-virtual;121-virtual",
|
||||
"CMAKE_CUDA_FLAGS": "-t 2",
|
||||
"OLLAMA_RUNNER_DIR": "cuda_v13"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "JetPack 5",
|
||||
"inherits": [ "CUDA" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_CUDA_ARCHITECTURES": "72;87",
|
||||
"OLLAMA_RUNNER_DIR": "cuda_jetpack5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "JetPack 6",
|
||||
"inherits": [ "CUDA" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_CUDA_ARCHITECTURES": "87",
|
||||
"OLLAMA_RUNNER_DIR": "cuda_jetpack6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "ROCm",
|
||||
"inherits": [ "Default" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_HIP_PLATFORM": "amd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "ROCm 6",
|
||||
"inherits": [ "ROCm" ],
|
||||
"cacheVariables": {
|
||||
"CMAKE_HIP_FLAGS": "-parallel-jobs=4",
|
||||
"AMDGPU_TARGETS": "gfx1030;gfx1031;gfx1032;gfx1034;gfx1035;gfx1036;gfx1100;gfx1101;gfx1102;gfx1103;gfx1150;gfx1151;gfx1152;gfx1153;gfx1200;gfx1201;gfx900:xnack-;gfx906:xnack-;gfx90c:xnack-;gfx1010:xnack-;gfx1011:xnack-;gfx1012:xnack-",
|
||||
"OLLAMA_RUNNER_DIR": "rocm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Vulkan",
|
||||
"inherits": [ "Default" ],
|
||||
"cacheVariables": {
|
||||
"OLLAMA_RUNNER_DIR": "vulkan"
|
||||
}
|
||||
}
|
||||
],
|
||||
"buildPresets": [
|
||||
{
|
||||
"name": "Default",
|
||||
"configurePreset": "Default",
|
||||
"configuration": "Release"
|
||||
},
|
||||
{
|
||||
"name": "CPU",
|
||||
"configurePreset": "Default",
|
||||
"targets": [ "ggml-cpu" ]
|
||||
},
|
||||
{
|
||||
"name": "CUDA",
|
||||
"configurePreset": "CUDA",
|
||||
"targets": [ "ggml-cuda" ]
|
||||
},
|
||||
{
|
||||
"name": "CUDA 11",
|
||||
"inherits": [ "CUDA" ],
|
||||
"configurePreset": "CUDA 11"
|
||||
},
|
||||
{
|
||||
"name": "CUDA 12",
|
||||
"inherits": [ "CUDA" ],
|
||||
"configurePreset": "CUDA 12"
|
||||
},
|
||||
{
|
||||
"name": "CUDA 13",
|
||||
"inherits": [ "CUDA" ],
|
||||
"configurePreset": "CUDA 13"
|
||||
},
|
||||
{
|
||||
"name": "JetPack 5",
|
||||
"inherits": [ "CUDA" ],
|
||||
"configurePreset": "JetPack 5"
|
||||
},
|
||||
{
|
||||
"name": "JetPack 6",
|
||||
"inherits": [ "CUDA" ],
|
||||
"configurePreset": "JetPack 6"
|
||||
},
|
||||
{
|
||||
"name": "ROCm",
|
||||
"configurePreset": "ROCm",
|
||||
"targets": [ "ggml-hip" ]
|
||||
},
|
||||
{
|
||||
"name": "ROCm 6",
|
||||
"inherits": [ "ROCm" ],
|
||||
"configurePreset": "ROCm 6"
|
||||
},
|
||||
{
|
||||
"name": "Vulkan",
|
||||
"targets": [ "ggml-vulkan" ],
|
||||
"configurePreset": "Vulkan"
|
||||
}
|
||||
]
|
||||
}
|
||||
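For anyone scripting against the preset file above, its shape is simple. A minimal Go sketch (assuming the layout shown above) that lists each configure preset with its parents and runner directory:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

// Minimal view of the CMakePresets.json structure shown above.
type presets struct {
	ConfigurePresets []struct {
		Name           string            `json:"name"`
		Inherits       []string          `json:"inherits"`
		CacheVariables map[string]string `json:"cacheVariables"`
	} `json:"configurePresets"`
}

func main() {
	data, err := os.ReadFile("CMakePresets.json")
	if err != nil {
		log.Fatal(err)
	}
	var p presets
	if err := json.Unmarshal(data, &p); err != nil {
		log.Fatal(err)
	}
	for _, cp := range p.ConfigurePresets {
		fmt.Printf("%-10s inherits=%v runner_dir=%s\n",
			cp.Name, cp.Inherits, cp.CacheVariables["OLLAMA_RUNNER_DIR"])
	}
}
```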
@@ -1,88 +0,0 @@
|
||||
# Contributing to Ollama
|
||||
|
||||
Thank you for your interest in contributing to Ollama! Here are a few guidelines to help get you started.
|
||||
|
||||
## Set up
|
||||
|
||||
See the [development documentation](./docs/development.md) for instructions on how to build and run Ollama locally.
|
||||
|
||||
### Ideal issues
|
||||
|
||||
* [Bugs](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Abug): issues where Ollama stops working or where it results in an unexpected error.
|
||||
* [Performance](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Aperformance): issues to make Ollama faster at model inference, downloading or uploading.
|
||||
* [Security](https://github.com/ollama/ollama/blob/main/SECURITY.md): issues that could lead to a security vulnerability. As mentioned in [SECURITY.md](https://github.com/ollama/ollama/blob/main/SECURITY.md), please do not disclose security vulnerabilities publicly.
|
||||
|
||||
### Issues that are harder to review
|
||||
|
||||
* New features: new features (e.g. API fields, environment variables) add surface area to Ollama and make it harder to maintain in the long run as they cannot be removed without potentially breaking users in the future.
|
||||
* Refactoring: large code improvements are important, but can be harder or take longer to review and merge.
|
||||
* Documentation: small updates to fill in or correct missing documentation are helpful, however large documentation additions can be hard to maintain over time.
|
||||
|
||||
### Issues that may not be accepted
|
||||
|
||||
* Changes that break backwards compatibility in Ollama's API (including the OpenAI-compatible API)
|
||||
* Changes that add significant friction to the user experience
|
||||
* Changes that create a large future maintenance burden for maintainers and contributors
|
||||
|
||||
## Proposing a (non-trivial) change
|
||||
|
||||
> By "non-trivial", we mean a change that is not a bug fix or small
|
||||
> documentation update. If you are unsure, please ask us on our [Discord
|
||||
> server](https://discord.gg/ollama).
|
||||
|
||||
Before opening a non-trivial Pull Request, please open an issue to discuss the change and
|
||||
get feedback from the maintainers. This helps us understand the context of the
|
||||
change and how it fits into Ollama's roadmap and prevents us from duplicating
|
||||
work or you from spending time on a change that we may not be able to accept.
|
||||
|
||||
Tips for proposals:
|
||||
|
||||
* Explain the problem you are trying to solve, not what you are trying to do.
|
||||
* Explain why the change is important.
|
||||
* Explain how the change will be used.
|
||||
* Explain how the change will be tested.
|
||||
|
||||
Additionally, for bonus points: Provide draft documentation you would expect to
|
||||
see if the changes were accepted.
|
||||
|
||||
## Pull requests
|
||||
|
||||
**Commit messages**
|
||||
|
||||
The title should look like:
|
||||
|
||||
<package>: <short description>
|
||||
|
||||
The package is the most affected Go package. If the change does not affect Go
|
||||
code, then use the directory name instead. Changes to a single well-known
|
||||
file in the root directory may use the file name.
|
||||
|
||||
The short description should start with a lowercase letter and be a
|
||||
continuation of the sentence:
|
||||
|
||||
"This changes Ollama to..."
|
||||
|
||||
Examples:
|
||||
|
||||
llm/backend/mlx: support the llama architecture
|
||||
CONTRIBUTING: provide clarity on good commit messages, and bad
|
||||
|
||||
Bad Examples:
|
||||
|
||||
feat: add more emoji
|
||||
fix: was not using famous web framework
|
||||
chore: generify code
|
||||
|
||||
**Tests**
|
||||
|
||||
Please include tests. Strive to test behavior, not implementation.
|
||||
|
||||
**New dependencies**
|
||||
|
||||
Dependencies should be added sparingly. If you are adding a new dependency,
|
||||
please explain why it is necessary and what other ways you attempted that
|
||||
did not work without it.
|
||||
|
||||
## Need help?
|
||||
|
||||
If you need help with anything, feel free to reach out to us on our [Discord server](https://discord.gg/ollama).
|
||||
302
Dockerfile
@@ -1,186 +1,144 @@
|
||||
# vim: filetype=dockerfile
|
||||
ARG GOLANG_VERSION=1.22.1
|
||||
ARG CMAKE_VERSION=3.22.1
|
||||
# this CUDA_VERSION corresponds with the one specified in docs/gpu.md
|
||||
ARG CUDA_VERSION=11.3.1
|
||||
ARG ROCM_VERSION=6.1.1
|
||||
|
||||
ARG FLAVOR=${TARGETARCH}
|
||||
ARG PARALLEL=8
|
||||
# Copy the minimal context we need to run the generate scripts
|
||||
FROM scratch AS llm-code
|
||||
COPY .git .git
|
||||
COPY .gitmodules .gitmodules
|
||||
COPY llm llm
|
||||
|
||||
ARG ROCMVERSION=6.3.3
|
||||
ARG JETPACK5VERSION=r35.4.1
|
||||
ARG JETPACK6VERSION=r36.4.0
|
||||
ARG CMAKEVERSION=3.31.2
|
||||
ARG VULKANVERSION=1.4.321.1
|
||||
|
||||
# We require gcc v10 minimum. v10.3 has regressions, so the rockylinux 8.5 AppStream has the latest compatible version
|
||||
FROM --platform=linux/amd64 rocm/dev-almalinux-8:${ROCMVERSION}-complete AS base-amd64
|
||||
RUN yum install -y yum-utils \
|
||||
&& yum-config-manager --add-repo https://dl.rockylinux.org/vault/rocky/8.5/AppStream/\$basearch/os/ \
|
||||
&& rpm --import https://dl.rockylinux.org/pub/rocky/RPM-GPG-KEY-Rocky-8 \
|
||||
&& dnf install -y yum-utils ccache gcc-toolset-10-gcc-10.2.1-8.2.el8 gcc-toolset-10-gcc-c++-10.2.1-8.2.el8 gcc-toolset-10-binutils-2.35-11.el8 \
|
||||
&& dnf install -y ccache \
|
||||
&& yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/x86_64/cuda-rhel8.repo
|
||||
ENV PATH=/opt/rh/gcc-toolset-10/root/usr/bin:$PATH
|
||||
ARG VULKANVERSION
|
||||
RUN wget https://sdk.lunarg.com/sdk/download/${VULKANVERSION}/linux/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz -O /tmp/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz \
|
||||
&& tar xvf /tmp/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz \
|
||||
&& dnf -y install ninja-build \
|
||||
&& ln -s /usr/bin/python3 /usr/bin/python \
|
||||
&& /${VULKANVERSION}/vulkansdk -j 8 vulkan-headers \
|
||||
&& /${VULKANVERSION}/vulkansdk -j 8 shaderc
|
||||
RUN cp -r /${VULKANVERSION}/x86_64/include/* /usr/local/include/ \
|
||||
&& cp -r /${VULKANVERSION}/x86_64/lib/* /usr/local/lib
|
||||
ENV PATH=/${VULKANVERSION}/x86_64/bin:$PATH
|
||||
|
||||
FROM --platform=linux/arm64 almalinux:8 AS base-arm64
|
||||
# install epel-release for ccache
|
||||
RUN yum install -y yum-utils epel-release \
|
||||
&& dnf install -y clang ccache \
|
||||
&& yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo
|
||||
ENV CC=clang CXX=clang++
|
||||
|
||||
FROM base-${TARGETARCH} AS base
|
||||
ARG CMAKEVERSION
|
||||
RUN curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
|
||||
ENV LDFLAGS=-s
|
||||
|
||||
FROM base AS cpu
|
||||
RUN dnf install -y gcc-toolset-11-gcc gcc-toolset-11-gcc-c++
|
||||
ENV PATH=/opt/rh/gcc-toolset-11/root/usr/bin:$PATH
|
||||
ARG PARALLEL
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'CPU' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'CPU' \
|
||||
&& cmake --install build --component CPU --strip --parallel ${PARALLEL}
|
||||
|
||||
FROM base AS cuda-11
|
||||
ARG CUDA11VERSION=11.8
|
||||
RUN dnf install -y cuda-toolkit-${CUDA11VERSION//./-}
|
||||
ENV PATH=/usr/local/cuda-11/bin:$PATH
|
||||
ARG PARALLEL
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'CUDA 11' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 11' \
|
||||
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
|
||||
|
||||
FROM base AS cuda-12
|
||||
ARG CUDA12VERSION=12.8
|
||||
RUN dnf install -y cuda-toolkit-${CUDA12VERSION//./-}
|
||||
ENV PATH=/usr/local/cuda-12/bin:$PATH
|
||||
ARG PARALLEL
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'CUDA 12' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 12' \
|
||||
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
|
||||
|
||||
|
||||
FROM base AS cuda-13
|
||||
ARG CUDA13VERSION=13.0
|
||||
RUN dnf install -y cuda-toolkit-${CUDA13VERSION//./-}
|
||||
ENV PATH=/usr/local/cuda-13/bin:$PATH
|
||||
ARG PARALLEL
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'CUDA 13' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 13' \
|
||||
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
|
||||
|
||||
|
||||
FROM base AS rocm-6
|
||||
ENV PATH=/opt/rocm/hcc/bin:/opt/rocm/hip/bin:/opt/rocm/bin:/opt/rocm/hcc/bin:$PATH
|
||||
ARG PARALLEL
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'ROCm 6' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'ROCm 6' \
|
||||
&& cmake --install build --component HIP --strip --parallel ${PARALLEL}
|
||||
RUN rm -f dist/lib/ollama/rocm/rocblas/library/*gfx90[06]*
|
||||
|
||||
FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK5VERSION} AS jetpack-5
|
||||
ARG CMAKEVERSION
|
||||
RUN apt-get update && apt-get install -y curl ccache \
|
||||
&& curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
ARG PARALLEL
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'JetPack 5' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'JetPack 5' \
|
||||
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
|
||||
|
||||
FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK6VERSION} AS jetpack-6
|
||||
ARG CMAKEVERSION
|
||||
RUN apt-get update && apt-get install -y curl ccache \
|
||||
&& curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
ARG PARALLEL
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'JetPack 6' \
|
||||
&& cmake --build --parallel ${PARALLEL} --preset 'JetPack 6' \
|
||||
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
|
||||
|
||||
FROM base AS vulkan
|
||||
COPY CMakeLists.txt CMakePresets.json .
|
||||
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
|
||||
RUN --mount=type=cache,target=/root/.ccache \
|
||||
cmake --preset 'Vulkan' \
|
||||
&& cmake --build --parallel --preset 'Vulkan' \
|
||||
&& cmake --install build --component Vulkan --strip --parallel 8
|
||||
|
||||
|
||||
FROM base AS build
|
||||
WORKDIR /go/src/github.com/ollama/ollama
|
||||
COPY go.mod go.sum .
|
||||
RUN curl -fsSL https://golang.org/dl/go$(awk '/^go/ { print $2 }' go.mod).linux-$(case $(uname -m) in x86_64) echo amd64 ;; aarch64) echo arm64 ;; esac).tar.gz | tar xz -C /usr/local
|
||||
ENV PATH=/usr/local/go/bin:$PATH
|
||||
RUN go mod download
|
||||
COPY . .
|
||||
ARG GOFLAGS="'-ldflags=-w -s'"
|
||||
ENV CGO_ENABLED=1
|
||||
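The `curl ... go$(awk '/^go/ { print $2 }' go.mod)` line above pins the Go toolchain to whatever the `go` directive in go.mod says. A small Go sketch of that extraction (illustrative only):

```go
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
)

// goVersionFromMod mirrors the awk one-liner in the build stage above:
// it returns the value of the `go` directive in go.mod.
func goVersionFromMod(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	s := bufio.NewScanner(f)
	for s.Scan() {
		fields := strings.Fields(s.Text())
		if len(fields) == 2 && fields[0] == "go" {
			return fields[1], nil
		}
	}
	return "", fmt.Errorf("no go directive in %s", path)
}

func main() {
	v, err := goVersionFromMod("go.mod")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("toolchain to download: go" + v)
}
```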
FROM --platform=linux/amd64 nvidia/cuda:$CUDA_VERSION-devel-centos7 AS cuda-build-amd64
|
||||
ARG CMAKE_VERSION
|
||||
COPY ./scripts/rh_linux_deps.sh /
|
||||
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
|
||||
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
|
||||
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
|
||||
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
|
||||
ARG CGO_CFLAGS
|
||||
ARG CGO_CXXFLAGS
|
||||
RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
go build -trimpath -buildmode=pie -o /bin/ollama .
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh
|
||||
|
||||
FROM --platform=linux/amd64 scratch AS amd64
|
||||
# COPY --from=cuda-11 dist/lib/ollama/ /lib/ollama/
|
||||
COPY --from=cuda-12 dist/lib/ollama /lib/ollama/
|
||||
COPY --from=cuda-13 dist/lib/ollama /lib/ollama/
|
||||
COPY --from=vulkan dist/lib/ollama /lib/ollama/
|
||||
FROM --platform=linux/arm64 nvidia/cuda:$CUDA_VERSION-devel-rockylinux8 AS cuda-build-arm64
|
||||
ARG CMAKE_VERSION
|
||||
COPY ./scripts/rh_linux_deps.sh /
|
||||
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
|
||||
ENV PATH /opt/rh/gcc-toolset-10/root/usr/bin:$PATH
|
||||
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
|
||||
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
|
||||
ARG CGO_CFLAGS
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh
|
||||
|
||||
FROM --platform=linux/arm64 scratch AS arm64
|
||||
# COPY --from=cuda-11 dist/lib/ollama/ /lib/ollama/
|
||||
COPY --from=cuda-12 dist/lib/ollama /lib/ollama/
|
||||
COPY --from=cuda-13 dist/lib/ollama/ /lib/ollama/
|
||||
COPY --from=jetpack-5 dist/lib/ollama/ /lib/ollama/
|
||||
COPY --from=jetpack-6 dist/lib/ollama/ /lib/ollama/
|
||||
FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete AS rocm-build-amd64
|
||||
ARG CMAKE_VERSION
|
||||
COPY ./scripts/rh_linux_deps.sh /
|
||||
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
|
||||
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
|
||||
ENV LIBRARY_PATH /opt/amdgpu/lib64
|
||||
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
|
||||
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
|
||||
ARG CGO_CFLAGS
|
||||
ARG AMDGPU_TARGETS
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh
|
||||
RUN mkdir /tmp/scratch && \
|
||||
for dep in $(zcat /go/src/github.com/ollama/ollama/llm/build/linux/x86_64/rocm*/bin/deps.txt.gz) ; do \
|
||||
cp ${dep} /tmp/scratch/ || exit 1 ; \
|
||||
done && \
|
||||
(cd /opt/rocm/lib && tar cf - rocblas/library) | (cd /tmp/scratch/ && tar xf - ) && \
|
||||
mkdir -p /go/src/github.com/ollama/ollama/dist/deps/ && \
|
||||
(cd /tmp/scratch/ && tar czvf /go/src/github.com/ollama/ollama/dist/deps/ollama-linux-amd64-rocm.tgz . )
|
||||
|
||||
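The loop above copies every path listed in the gzipped `deps.txt.gz` into a scratch directory before tarring it up. A rough Go sketch of reading such a list (the file name is taken from the Dockerfile; everything else is illustrative):

```go
package main

import (
	"bufio"
	"compress/gzip"
	"fmt"
	"log"
	"os"
)

// listDeps reads a gzip-compressed dependency list such as deps.txt.gz
// and returns the paths it names.
func listDeps(path string) ([]string, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	zr, err := gzip.NewReader(f)
	if err != nil {
		return nil, err
	}
	defer zr.Close()

	s := bufio.NewScanner(zr)
	s.Split(bufio.ScanWords) // the shell for-loop word-splits the zcat output the same way
	var deps []string
	for s.Scan() {
		deps = append(deps, s.Text())
	}
	return deps, s.Err()
}

func main() {
	deps, err := listDeps("deps.txt.gz")
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range deps {
		fmt.Println(d)
	}
}
```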
FROM scratch AS rocm
|
||||
COPY --from=rocm-6 dist/lib/ollama /lib/ollama
|
||||
|
||||
FROM ${FLAVOR} AS archive
|
||||
ARG VULKANVERSION
|
||||
COPY --from=cpu dist/lib/ollama /lib/ollama
|
||||
COPY --from=build /bin/ollama /bin/ollama
|
||||
FROM --platform=linux/amd64 centos:7 AS cpu-builder-amd64
|
||||
ARG CMAKE_VERSION
|
||||
ARG GOLANG_VERSION
|
||||
COPY ./scripts/rh_linux_deps.sh /
|
||||
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
|
||||
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
|
||||
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
|
||||
ARG OLLAMA_CUSTOM_CPU_DEFS
|
||||
ARG CGO_CFLAGS
|
||||
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
|
||||
|
||||
FROM ubuntu:24.04
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y ca-certificates libvulkan1 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
COPY --from=archive /bin /usr/bin
|
||||
FROM --platform=linux/amd64 cpu-builder-amd64 AS static-build-amd64
|
||||
RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh
|
||||
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu-build-amd64
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh
|
||||
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx-build-amd64
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx" sh gen_linux.sh
|
||||
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx2-build-amd64
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx2" sh gen_linux.sh
|
||||
|
||||
FROM --platform=linux/arm64 centos:7 AS cpu-builder-arm64
|
||||
ARG CMAKE_VERSION
|
||||
ARG GOLANG_VERSION
|
||||
COPY ./scripts/rh_linux_deps.sh /
|
||||
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
|
||||
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
|
||||
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
|
||||
ARG OLLAMA_CUSTOM_CPU_DEFS
|
||||
ARG CGO_CFLAGS
|
||||
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
|
||||
|
||||
FROM --platform=linux/arm64 cpu-builder-arm64 AS static-build-arm64
|
||||
RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh
|
||||
FROM --platform=linux/arm64 cpu-builder-arm64 AS cpu-build-arm64
|
||||
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh
|
||||
|
||||
|
||||
# Intermediate stage used for ./scripts/build_linux.sh
|
||||
FROM --platform=linux/amd64 cpu-build-amd64 AS build-amd64
|
||||
ENV CGO_ENABLED 1
|
||||
WORKDIR /go/src/github.com/ollama/ollama
|
||||
COPY . .
|
||||
COPY --from=static-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=cpu_avx-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=cpu_avx2-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=cuda-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/dist/deps/ ./dist/deps/
|
||||
ARG GOFLAGS
|
||||
ARG CGO_CFLAGS
|
||||
RUN go build -trimpath .
|
||||
|
||||
# Intermediate stage used for ./scripts/build_linux.sh
|
||||
FROM --platform=linux/arm64 cpu-build-arm64 AS build-arm64
|
||||
ENV CGO_ENABLED 1
|
||||
ARG GOLANG_VERSION
|
||||
WORKDIR /go/src/github.com/ollama/ollama
|
||||
COPY . .
|
||||
COPY --from=static-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
COPY --from=cuda-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
|
||||
ARG GOFLAGS
|
||||
ARG CGO_CFLAGS
|
||||
RUN go build -trimpath .
|
||||
|
||||
# Runtime stages
|
||||
FROM --platform=linux/amd64 ubuntu:22.04 as runtime-amd64
|
||||
RUN apt-get update && apt-get install -y ca-certificates
|
||||
COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama
|
||||
FROM --platform=linux/arm64 ubuntu:22.04 as runtime-arm64
|
||||
RUN apt-get update && apt-get install -y ca-certificates
|
||||
COPY --from=build-arm64 /go/src/github.com/ollama/ollama/ollama /bin/ollama
|
||||
|
||||
# Radeon images are much larger so we keep it distinct from the CPU/CUDA image
|
||||
FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete as runtime-rocm
|
||||
RUN update-pciids
|
||||
COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama
|
||||
EXPOSE 11434
|
||||
ENV OLLAMA_HOST 0.0.0.0
|
||||
|
||||
ENTRYPOINT ["/bin/ollama"]
|
||||
CMD ["serve"]
|
||||
|
||||
FROM runtime-$TARGETARCH
|
||||
EXPOSE 11434
|
||||
ENV OLLAMA_HOST 0.0.0.0
|
||||
ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
COPY --from=archive /lib/ollama /usr/lib/ollama
|
||||
ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64
|
||||
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
||||
ENV NVIDIA_VISIBLE_DEVICES=all
|
||||
ENV OLLAMA_HOST=0.0.0.0:11434
|
||||
EXPOSE 11434
|
||||
|
||||
ENTRYPOINT ["/bin/ollama"]
|
||||
CMD ["serve"]
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
UPSTREAM=https://github.com/ggml-org/llama.cpp.git
|
||||
WORKDIR=llama/vendor
|
||||
FETCH_HEAD=7f8ef50cce40e3e7e4526a3696cb45658190e69a
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "Available targets:"
|
||||
@echo " sync Sync with upstream repositories"
|
||||
@echo " checkout Checkout upstream repository"
|
||||
@echo " apply-patches Apply patches to local repository"
|
||||
@echo " format-patches Format patches from local repository"
|
||||
@echo " clean Clean local repository"
|
||||
@echo
|
||||
@echo "Example:"
|
||||
@echo " make -f $(lastword $(MAKEFILE_LIST)) clean apply-patches sync"
|
||||
|
||||
.PHONY: sync
|
||||
sync: llama/build-info.cpp ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal
|
||||
|
||||
llama/build-info.cpp: llama/build-info.cpp.in llama/llama.cpp
|
||||
sed -e 's|@FETCH_HEAD@|$(FETCH_HEAD)|' <$< >$@
|
||||
|
||||
ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal: ml/backend/ggml/ggml
|
||||
go generate ./$(@D)
|
||||
|
||||
.PHONY: llama/llama.cpp
|
||||
llama/llama.cpp: llama/vendor
|
||||
rsync -arvzc --delete -f "include LICENSE" -f "merge $@/.rsync-filter" $(addprefix $<,/LICENSE /) $@
|
||||
|
||||
.PHONY: ml/backend/ggml/ggml
|
||||
ml/backend/ggml/ggml: llama/vendor
|
||||
rsync -arvzc --delete -f "include LICENSE" -f "merge $@/.rsync-filter" $(addprefix $<,/LICENSE /ggml/) $@
|
||||
|
||||
PATCHES=$(wildcard llama/patches/*.patch)
|
||||
PATCHED=$(join $(dir $(PATCHES)), $(addsuffix ed, $(addprefix ., $(notdir $(PATCHES)))))
|
||||
|
||||
.PHONY: apply-patches
|
||||
.NOTPARALLEL:
|
||||
apply-patches: $(PATCHED)
|
||||
|
||||
llama/patches/.%.patched: llama/patches/%.patch
|
||||
@if git -c user.name=nobody -c 'user.email=<>' -C $(WORKDIR) am -3 $(realpath $<); then \
|
||||
touch $@; \
|
||||
else \
|
||||
echo "Patch failed. Resolve any conflicts then continue."; \
|
||||
echo "1. Run 'git -C $(WORKDIR) am --continue'"; \
|
||||
echo "2. Run 'make -f $(lastword $(MAKEFILE_LIST)) format-patches'"; \
|
||||
echo "3. Run 'make -f $(lastword $(MAKEFILE_LIST)) clean apply-patches'"; \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
.PHONY: checkout
|
||||
checkout: $(WORKDIR)
|
||||
git -C $(WORKDIR) fetch
|
||||
git -C $(WORKDIR) checkout -f $(FETCH_HEAD)
|
||||
|
||||
$(WORKDIR):
|
||||
git clone $(UPSTREAM) $(WORKDIR)
|
||||
|
||||
.PHONE: format-patches
|
||||
format-patches: llama/patches
|
||||
git -C $(WORKDIR) format-patch \
|
||||
--no-signature \
|
||||
--no-numbered \
|
||||
--zero-commit \
|
||||
-o $(realpath $<) \
|
||||
$(FETCH_HEAD)
|
||||
|
||||
.PHONE: clean
|
||||
clean: checkout
|
||||
@git -C $(WORKDIR) am --abort || true
|
||||
$(RM) llama/patches/.*.patched
|
||||
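The `PATCHED` variable above turns each `llama/patches/NAME.patch` into a hidden stamp file `llama/patches/.NAME.patched` that records the patch has been applied. A small Go sketch of that naming rule (the patch file name is hypothetical):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// stampFor reproduces the Make functions above: llama/patches/NAME.patch
// maps to the stamp file llama/patches/.NAME.patched.
func stampFor(patch string) string {
	dir, name := filepath.Split(patch)
	return filepath.Join(dir, "."+strings.TrimSuffix(name, ".patch")+".patched")
}

func main() {
	fmt.Println(stampFor("llama/patches/0001-example.patch"))
	// llama/patches/.0001-example.patched
}
```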
382
README.md
@@ -1,18 +1,18 @@
|
||||
<div align="center">
|
||||
<a href="https://ollama.com">
|
||||
<img alt="ollama" width="240" src="https://github.com/ollama/ollama/assets/3325447/0d0b44e2-8f4a-4e99-9b52-a5c1c741c8f7">
|
||||
</a>
|
||||
<img alt="ollama" height="200px" src="https://github.com/ollama/ollama/assets/3325447/0d0b44e2-8f4a-4e99-9b52-a5c1c741c8f7">
|
||||
</div>
|
||||
|
||||
# Ollama
|
||||
|
||||
[](https://discord.gg/ollama)
|
||||
|
||||
Get up and running with large language models.
|
||||
|
||||
### macOS
|
||||
|
||||
[Download](https://ollama.com/download/Ollama.dmg)
|
||||
[Download](https://ollama.com/download/Ollama-darwin.zip)
|
||||
|
||||
### Windows
|
||||
### Windows preview
|
||||
|
||||
[Download](https://github.com/likelovewant/ollama-for-amd/releases)
|
||||
|
||||
@@ -20,31 +20,27 @@ For AMD use or build, please follow the guide on the [wiki](https://github.com/like
|
||||
|
||||
official support list
|
||||
```
|
||||
"gfx900" "gfx940" "gfx941" "gfx942" "gfx1010""gfx1012" "gfx1030" "gfx1100""gfx1101" "gfx1102"
|
||||
"gfx900" "gfx906:xnack-" "gfx908:xnack-" "gfx90a:xnack+" "gfx90a:xnack-" "gfx940" "gfx941" "gfx942" "gfx1010""gfx1012" "gfx1030" "gfx1100""gfx1101" "gfx1102"
|
||||
```
|
||||
Please download from ollama [official](https://ollama.com/download/OllamaSetup.exe)
|
||||
|
||||
Example of the extra list added in this repo.
|
||||
```
|
||||
(ROCm5) "gfx803" "gfx900:xnack-" "gfx902" (ROCm6) gfx906:xnack- "gfx1010:xnack-" "gfx1011" "gfx1012:xnack-" "gfx1031" "gfx1032" "gfx1034" "gfx1035" "gfx1036" "gfx1103" "gfx1150" "gfx1201" (experimental)"...
|
||||
"gfx803" "gfx902" "gfx904""gfx940" "gfx941" "gfx942" "gfx1010" "gfx1011" "gfx1012" "gfx1031" "gfx1032""gfx1034" "gfx1035" "gfx1036" "gfx1103"
|
||||
```
|
||||
Please follow the [wiki](https://github.com/likelovewant/ollama-for-amd/wiki) guide to build or use the pre-release version.
|
||||
|
||||
Note: **gfx803:** Reported as partially functional in HIP SDK 5.7 using the wiki method, but disabled in HIP SDK 6.1.2.
|
||||
Note: `gfx803` is reported as partially working via the wiki method; future support is expected
|
||||
|
||||
Note: **gfx90c (with xnack-):** Reported as partially functional in HIP SDK 5.7; some testers had partial success while others hit issues after a recent update, so it has been removed from the
support lists. Explore it via a self-build as guided on the wiki.
|
||||
|
||||
|
||||
### Linux
|
||||
|
||||
```shell
|
||||
```
|
||||
curl -fsSL https://ollama.com/install.sh | sh
|
||||
```
|
||||
|
||||
[Manual install instructions](https://docs.ollama.com/linux#manual-install)
|
||||
|
||||
[Tip: configuring environment variables for unsupported GPUs](https://github.com/likelovewant/ollama-for-amd/wiki#troubleshooting-amd-gpu-support-in-linux)
|
||||
[Manual install instructions](https://github.com/ollama/ollama/blob/main/docs/linux.md)
|
||||
|
||||
### Docker
|
||||
|
||||
@@ -55,17 +51,12 @@ The official [Ollama Docker image](https://hub.docker.com/r/ollama/ollama) `olla
|
||||
- [ollama-python](https://github.com/ollama/ollama-python)
|
||||
- [ollama-js](https://github.com/ollama/ollama-js)
|
||||
|
||||
### Community
|
||||
|
||||
- [Discord](https://discord.gg/ollama)
|
||||
- [Reddit](https://reddit.com/r/ollama)
|
||||
|
||||
## Quickstart
|
||||
|
||||
To run and chat with [Gemma 3](https://ollama.com/library/gemma3):
|
||||
To run and chat with [Llama 3](https://ollama.com/library/llama3):
|
||||
|
||||
```shell
|
||||
ollama run gemma3
|
||||
```
|
||||
ollama run llama3
|
||||
```
|
||||
|
||||
## Model library
|
||||
@@ -75,25 +66,13 @@ Ollama supports a list of models available on [ollama.com/library](https://ollam
|
||||
Here are some example models that can be downloaded:
|
||||
|
||||
| Model | Parameters | Size | Download |
|
||||
| ------------------ | ---------- | ----- | -------------------------------- |
|
||||
| Gemma 3 | 1B | 815MB | `ollama run gemma3:1b` |
|
||||
| Gemma 3 | 4B | 3.3GB | `ollama run gemma3` |
|
||||
| Gemma 3 | 12B | 8.1GB | `ollama run gemma3:12b` |
|
||||
| Gemma 3 | 27B | 17GB | `ollama run gemma3:27b` |
|
||||
| QwQ | 32B | 20GB | `ollama run qwq` |
|
||||
| DeepSeek-R1 | 7B | 4.7GB | `ollama run deepseek-r1` |
|
||||
| DeepSeek-R1 | 671B | 404GB | `ollama run deepseek-r1:671b` |
|
||||
| Llama 4 | 109B | 67GB | `ollama run llama4:scout` |
|
||||
| Llama 4 | 400B | 245GB | `ollama run llama4:maverick` |
|
||||
| Llama 3.3 | 70B | 43GB | `ollama run llama3.3` |
|
||||
| Llama 3.2 | 3B | 2.0GB | `ollama run llama3.2` |
|
||||
| Llama 3.2 | 1B | 1.3GB | `ollama run llama3.2:1b` |
|
||||
| Llama 3.2 Vision | 11B | 7.9GB | `ollama run llama3.2-vision` |
|
||||
| Llama 3.2 Vision | 90B | 55GB | `ollama run llama3.2-vision:90b` |
|
||||
| Llama 3.1 | 8B | 4.7GB | `ollama run llama3.1` |
|
||||
| Llama 3.1 | 405B | 231GB | `ollama run llama3.1:405b` |
|
||||
| Phi 4 | 14B | 9.1GB | `ollama run phi4` |
|
||||
| Phi 4 Mini | 3.8B | 2.5GB | `ollama run phi4-mini` |
|
||||
| ------------------ | ---------- | ----- | ------------------------------ |
|
||||
| Llama 3 | 8B | 4.7GB | `ollama run llama3` |
|
||||
| Llama 3 | 70B | 40GB | `ollama run llama3:70b` |
|
||||
| Phi 3 Mini | 3.8B | 2.3GB | `ollama run phi3` |
|
||||
| Phi 3 Medium | 14B | 7.9GB | `ollama run phi3:medium` |
|
||||
| Gemma | 2B | 1.4GB | `ollama run gemma:2b` |
|
||||
| Gemma | 7B | 4.8GB | `ollama run gemma:7b` |
|
||||
| Mistral | 7B | 4.1GB | `ollama run mistral` |
|
||||
| Moondream 2 | 1.4B | 829MB | `ollama run moondream` |
|
||||
| Neural Chat | 7B | 4.1GB | `ollama run neural-chat` |
|
||||
@@ -101,10 +80,9 @@ Here are some example models that can be downloaded:
|
||||
| Code Llama | 7B | 3.8GB | `ollama run codellama` |
|
||||
| Llama 2 Uncensored | 7B | 3.8GB | `ollama run llama2-uncensored` |
|
||||
| LLaVA | 7B | 4.5GB | `ollama run llava` |
|
||||
| Granite-3.3 | 8B | 4.9GB | `ollama run granite3.3` |
|
||||
| Solar | 10.7B | 6.1GB | `ollama run solar` |
|
||||
|
||||
> [!NOTE]
|
||||
> You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.
|
||||
> Note: You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.
|
||||
|
||||
## Customize a model
|
||||
|
||||
@@ -120,32 +98,32 @@ Ollama supports importing GGUF models in the Modelfile:
|
||||
|
||||
2. Create the model in Ollama
|
||||
|
||||
```shell
|
||||
```
|
||||
ollama create example -f Modelfile
|
||||
```
|
||||
|
||||
3. Run the model
|
||||
|
||||
```shell
|
||||
```
|
||||
ollama run example
|
||||
```
|
||||
|
||||
### Import from Safetensors
|
||||
### Import from PyTorch or Safetensors
|
||||
|
||||
See the [guide](https://docs.ollama.com/import) on importing models for more information.
|
||||
See the [guide](docs/import.md) on importing models for more information.
|
||||
|
||||
### Customize a prompt
|
||||
|
||||
Models from the Ollama library can be customized with a prompt. For example, to customize the `llama3.2` model:
|
||||
Models from the Ollama library can be customized with a prompt. For example, to customize the `llama3` model:
|
||||
|
||||
```shell
|
||||
ollama pull llama3.2
|
||||
```
|
||||
ollama pull llama3
|
||||
```
|
||||
|
||||
Create a `Modelfile`:
|
||||
|
||||
```
|
||||
FROM llama3.2
|
||||
FROM llama3
|
||||
|
||||
# set the temperature to 1 [higher is more creative, lower is more coherent]
|
||||
PARAMETER temperature 1
|
||||
@@ -165,7 +143,7 @@ ollama run mario
|
||||
Hello! It's your friend Mario.
|
||||
```
|
||||
|
||||
For more information on working with a Modelfile, see the [Modelfile](https://docs.ollama.com/modelfile) documentation.
|
||||
For more examples, see the [examples](examples) directory. For more information on working with a Modelfile, see the [Modelfile](docs/modelfile.md) documentation.
|
||||
|
||||
## CLI Reference
|
||||
|
||||
@@ -173,28 +151,28 @@ For more information on working with a Modelfile, see the [Modelfile](https://do
|
||||
|
||||
`ollama create` is used to create a model from a Modelfile.
|
||||
|
||||
```shell
|
||||
```
|
||||
ollama create mymodel -f ./Modelfile
|
||||
```
|
||||
|
||||
### Pull a model
|
||||
|
||||
```shell
|
||||
ollama pull llama3.2
|
||||
```
|
||||
ollama pull llama3
|
||||
```
|
||||
|
||||
> This command can also be used to update a local model. Only the diff will be pulled.
|
||||
|
||||
### Remove a model
|
||||
|
||||
```shell
|
||||
ollama rm llama3.2
|
||||
```
|
||||
ollama rm llama3
|
||||
```
|
||||
|
||||
### Copy a model
|
||||
|
||||
```shell
|
||||
ollama cp llama3.2 my-model
|
||||
```
|
||||
ollama cp llama3 my-model
|
||||
```
|
||||
|
||||
### Multiline input
|
||||
@@ -211,55 +189,29 @@ I'm a basic program that prints the famous "Hello, world!" message to the consol
|
||||
### Multimodal models
|
||||
|
||||
```
|
||||
ollama run llava "What's in this image? /Users/jmorgan/Desktop/smile.png"
|
||||
>>> What's in this image? /Users/jmorgan/Desktop/smile.png
|
||||
The image features a yellow smiley face, which is likely the central focus of the picture.
|
||||
```
|
||||
|
||||
> **Output**: The image features a yellow smiley face, which is likely the central focus of the picture.
|
||||
|
||||
### Pass the prompt as an argument
|
||||
|
||||
```shell
|
||||
ollama run llama3.2 "Summarize this file: $(cat README.md)"
|
||||
```
|
||||
|
||||
> **Output**: Ollama is a lightweight, extensible framework for building and running language models on the local machine. It provides a simple API for creating, running, and managing models, as well as a library of pre-built models that can be easily used in a variety of applications.
|
||||
$ ollama run llama3 "Summarize this file: $(cat README.md)"
|
||||
Ollama is a lightweight, extensible framework for building and running language models on the local machine. It provides a simple API for creating, running, and managing models, as well as a library of pre-built models that can be easily used in a variety of applications.
|
||||
```
|
||||
|
||||
### Show model information
|
||||
|
||||
```shell
|
||||
ollama show llama3.2
|
||||
```
|
||||
ollama show llama3
|
||||
```
|
||||
|
||||
### List models on your computer
|
||||
|
||||
```shell
|
||||
```
|
||||
ollama list
|
||||
```
|
||||
|
||||
### List which models are currently loaded
|
||||
|
||||
```shell
|
||||
ollama ps
|
||||
```
|
||||
|
||||
### Stop a model which is currently running
|
||||
|
||||
```shell
|
||||
ollama stop llama3.2
|
||||
```
|
||||
|
||||
### Generate embeddings from the CLI
|
||||
|
||||
```shell
|
||||
ollama run embeddinggemma "Your text to embed"
|
||||
```
|
||||
|
||||
You can also pipe text for scripted workflows:
|
||||
|
||||
```shell
|
||||
echo "Your text to embed" | ollama run embeddinggemma
|
||||
```
|
||||
|
||||
### Start Ollama
|
||||
|
||||
`ollama serve` is used when you want to start ollama without running the desktop application.
|
||||
@@ -272,14 +224,14 @@ See the [developer guide](https://github.com/ollama/ollama/blob/main/docs/develo
|
||||
|
||||
Next, start the server:
|
||||
|
||||
```shell
|
||||
```
|
||||
./ollama serve
|
||||
```
|
||||
|
||||
Finally, in a separate shell, run a model:
|
||||
|
||||
```shell
|
||||
./ollama run llama3.2
|
||||
```
|
||||
./ollama run llama3
|
||||
```
|
||||
|
||||
## REST API
|
||||
@@ -288,18 +240,18 @@ Ollama has a REST API for running and managing models.
|
||||
|
||||
### Generate a response
|
||||
|
||||
```shell
|
||||
```
|
||||
curl http://localhost:11434/api/generate -d '{
|
||||
"model": "llama3.2",
|
||||
"model": "llama3",
|
||||
"prompt":"Why is the sky blue?"
|
||||
}'
|
||||
```
|
||||
|
||||
### Chat with a model
|
||||
|
||||
```shell
|
||||
```
|
||||
curl http://localhost:11434/api/chat -d '{
|
||||
"model": "llama3.2",
|
||||
"model": "llama3",
|
||||
"messages": [
|
||||
{ "role": "user", "content": "why is the sky blue?" }
|
||||
]
|
||||
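For programmatic use, the same chat endpoint shown above can be called from Go. A minimal sketch that sends one message and prints the reply (it sets `"stream": false` so the server returns a single JSON object instead of the default stream of JSON lines):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type chatRequest struct {
	Model    string    `json:"model"`
	Messages []message `json:"messages"`
	Stream   bool      `json:"stream"`
}

type chatResponse struct {
	Message message `json:"message"`
}

func main() {
	body, _ := json.Marshal(chatRequest{
		Model:    "llama3",
		Messages: []message{{Role: "user", Content: "why is the sky blue?"}},
		Stream:   false, // ask for a single JSON object instead of a stream
	})

	resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var out chatResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Message.Content)
}
```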
@@ -313,23 +265,19 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
### Web & Desktop
|
||||
|
||||
- [Open WebUI](https://github.com/open-webui/open-webui)
|
||||
- [SwiftChat (macOS with ReactNative)](https://github.com/aws-samples/swift-chat)
|
||||
- [Enchanted (macOS native)](https://github.com/AugustDev/enchanted)
|
||||
- [Hollama](https://github.com/fmaclen/hollama)
|
||||
- [Lollms WebUI (Single user)](https://github.com/ParisNeo/lollms-webui)
|
||||
- [Lollms (Multi users)](https://github.com/ParisNeo/lollms)
|
||||
- [Lollms-Webui](https://github.com/ParisNeo/lollms-webui)
|
||||
- [LibreChat](https://github.com/danny-avila/LibreChat)
|
||||
- [Bionic GPT](https://github.com/bionic-gpt/bionic-gpt)
|
||||
- [HTML UI](https://github.com/rtcfirefly/ollama-ui)
|
||||
- [AI-UI](https://github.com/bajahaw/ai-ui)
|
||||
- [Saddle](https://github.com/jikkuatwork/saddle)
|
||||
- [TagSpaces](https://www.tagspaces.org) (A platform for file-based apps, [utilizing Ollama](https://docs.tagspaces.org/ai/) for the generation of tags and descriptions)
|
||||
- [Chatbot UI](https://github.com/ivanfioravanti/chatbot-ollama)
|
||||
- [Chatbot UI v2](https://github.com/mckaywrigley/chatbot-ui)
|
||||
- [Typescript UI](https://github.com/ollama-interface/Ollama-Gui?tab=readme-ov-file)
|
||||
- [Minimalistic React UI for Ollama Models](https://github.com/richawo/minimal-llm-ui)
|
||||
- [Ollamac](https://github.com/kevinhermawan/Ollamac)
|
||||
- [big-AGI](https://github.com/enricoros/big-AGI)
|
||||
- [big-AGI](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-ollama.md)
|
||||
- [Cheshire Cat assistant framework](https://github.com/cheshire-cat-ai/core)
|
||||
- [Amica](https://github.com/semperai/amica)
|
||||
- [chatd](https://github.com/BruceMacD/chatd)
|
||||
@@ -349,10 +297,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [AnythingLLM (Docker + MacOs/Windows/Linux native app)](https://github.com/Mintplex-Labs/anything-llm)
|
||||
- [Ollama Basic Chat: Uses HyperDiv Reactive UI](https://github.com/rapidarchitect/ollama_basic_chat)
|
||||
- [Ollama-chats RPG](https://github.com/drazdra/ollama-chats)
|
||||
- [IntelliBar](https://intellibar.app/) (AI-powered assistant for macOS)
|
||||
- [Jirapt](https://github.com/AliAhmedNada/jirapt) (Jira Integration to generate issues, tasks, epics)
|
||||
- [ojira](https://github.com/AliAhmedNada/ojira) (Jira chrome plugin to easily generate descriptions for tasks)
|
||||
- [QA-Pilot](https://github.com/reid41/QA-Pilot) (Interactive chat tool that can leverage Ollama models for rapid understanding and navigation of GitHub code repositories)
|
||||
- [QA-Pilot](https://github.com/reid41/QA-Pilot) (Chat with Code Repository)
|
||||
- [ChatOllama](https://github.com/sugarforever/chat-ollama) (Open Source Chatbot based on Ollama with Knowledge Bases)
|
||||
- [CRAG Ollama Chat](https://github.com/Nagi-ovo/CRAG-Ollama-Chat) (Simple Web Search with Corrective RAG)
|
||||
- [RAGFlow](https://github.com/infiniflow/ragflow) (Open-source Retrieval-Augmented Generation engine based on deep document understanding)
|
||||
@@ -362,115 +307,15 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [Ollama RAG Chatbot](https://github.com/datvodinh/rag-chatbot.git) (Local Chat with multiple PDFs using Ollama and RAG)
|
||||
- [BrainSoup](https://www.nurgo-software.com/products/brainsoup) (Flexible native client with RAG & multi-agent automation)
|
||||
- [macai](https://github.com/Renset/macai) (macOS client for Ollama, ChatGPT, and other compatible API back-ends)
|
||||
- [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) (RWKV offline LLM deployment tool, also usable as a client for ChatGPT and Ollama)
|
||||
- [Ollama Grid Search](https://github.com/dezoito/ollama-grid-search) (app to evaluate and compare models)
|
||||
- [Olpaka](https://github.com/Otacon/olpaka) (User-friendly Flutter Web App for Ollama)
|
||||
- [Casibase](https://casibase.org) (An open source AI knowledge base and dialogue system combining the latest RAG, SSO, ollama support, and multiple large language models.)
|
||||
- [OllamaSpring](https://github.com/CrazyNeil/OllamaSpring) (Ollama Client for macOS)
|
||||
- [LLocal.in](https://github.com/kartikm7/llocal) (Easy to use Electron Desktop Client for Ollama)
|
||||
- [Shinkai Desktop](https://github.com/dcSpark/shinkai-apps) (Two click install Local AI using Ollama + Files + RAG)
|
||||
- [AiLama](https://github.com/zeyoyt/ailama) (A Discord User App that allows you to interact with Ollama anywhere in Discord)
|
||||
- [Ollama with Google Mesop](https://github.com/rapidarchitect/ollama_mesop/) (Mesop Chat Client implementation with Ollama)
|
||||
- [R2R](https://github.com/SciPhi-AI/R2R) (Open-source RAG engine)
|
||||
- [Ollama-Kis](https://github.com/elearningshow/ollama-kis) (A simple easy-to-use GUI with sample custom LLM for Drivers Education)
|
||||
- [OpenGPA](https://opengpa.org) (Open-source offline-first Enterprise Agentic Application)
|
||||
- [Painting Droid](https://github.com/mateuszmigas/painting-droid) (Painting app with AI integrations)
|
||||
- [Kerlig AI](https://www.kerlig.com/) (AI writing assistant for macOS)
|
||||
- [AI Studio](https://github.com/MindWorkAI/AI-Studio)
|
||||
- [Sidellama](https://github.com/gyopak/sidellama) (browser-based LLM client)
|
||||
- [LLMStack](https://github.com/trypromptly/LLMStack) (No-code multi-agent framework to build LLM agents and workflows)
|
||||
- [BoltAI for Mac](https://boltai.com) (AI Chat Client for Mac)
|
||||
- [Harbor](https://github.com/av/harbor) (Containerized LLM Toolkit with Ollama as default backend)
|
||||
- [PyGPT](https://github.com/szczyglis-dev/py-gpt) (AI desktop assistant for Linux, Windows, and Mac)
|
||||
- [Alpaca](https://github.com/Jeffser/Alpaca) (An Ollama client application for Linux and macOS made with GTK4 and Adwaita)
|
||||
- [AutoGPT](https://github.com/Significant-Gravitas/AutoGPT/blob/master/docs/content/platform/ollama.md) (AutoGPT Ollama integration)
|
||||
- [Go-CREW](https://www.jonathanhecl.com/go-crew/) (Powerful Offline RAG in Golang)
|
||||
- [PartCAD](https://github.com/openvmp/partcad/) (CAD model generation with OpenSCAD and CadQuery)
|
||||
- [Ollama4j Web UI](https://github.com/ollama4j/ollama4j-web-ui) - Java-based Web UI for Ollama built with Vaadin, Spring Boot, and Ollama4j
|
||||
- [PyOllaMx](https://github.com/kspviswa/pyOllaMx) - macOS application capable of chatting with both Ollama and Apple MLX models.
|
||||
- [Cline](https://github.com/cline/cline) - Formerly known as Claude Dev is a VS Code extension for multi-file/whole-repo coding
|
||||
- [Void](https://github.com/voideditor/void) (Open source AI code editor and Cursor alternative)
|
||||
- [Cherry Studio](https://github.com/kangfenmao/cherry-studio) (Desktop client with Ollama support)
|
||||
- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption)
|
||||
- [Archyve](https://github.com/nickthecook/archyve) (RAG-enabling document library)
|
||||
- [crewAI with Mesop](https://github.com/rapidarchitect/ollama-crew-mesop) (Mesop Web Interface to run crewAI with Ollama)
|
||||
- [Tkinter-based client](https://github.com/chyok/ollama-gui) (Python tkinter-based Client for Ollama)
|
||||
- [LLMChat](https://github.com/trendy-design/llmchat) (Privacy focused, 100% local, intuitive all-in-one chat interface)
|
||||
- [Local Multimodal AI Chat](https://github.com/Leon-Sander/Local-Multimodal-AI-Chat) (Ollama-based LLM Chat with support for multiple features, including PDF RAG, voice chat, image-based interactions, and integration with OpenAI.)
|
||||
- [ARGO](https://github.com/xark-argo/argo) (Locally download and run Ollama and Huggingface models with RAG and deep research on Mac/Windows/Linux)
|
||||
- [OrionChat](https://github.com/EliasPereirah/OrionChat) - OrionChat is a web interface for chatting with different AI providers
|
||||
- [G1](https://github.com/bklieger-groq/g1) (Prototype of using prompting strategies to improve the LLM's reasoning through o1-like reasoning chains.)
|
||||
- [Web management](https://github.com/lemonit-eric-mao/ollama-web-management) (Web management page)
|
||||
- [Promptery](https://github.com/promptery/promptery) (desktop client for Ollama.)
|
||||
- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
|
||||
- [chat-ollama](https://github.com/annilq/chat-ollama) (a React Native client for Ollama)
|
||||
- [SpaceLlama](https://github.com/tcsenpai/spacellama) (Firefox and Chrome extension to quickly summarize web pages with ollama in a sidebar)
|
||||
- [YouLama](https://github.com/tcsenpai/youlama) (Webapp to quickly summarize any YouTube video, supporting Invidious as well)
|
||||
- [DualMind](https://github.com/tcsenpai/dualmind) (Experimental app allowing two models to talk to each other in the terminal or in a web interface)
|
||||
- [ollamarama-matrix](https://github.com/h1ddenpr0cess20/ollamarama-matrix) (Ollama chatbot for the Matrix chat protocol)
|
||||
- [ollama-chat-app](https://github.com/anan1213095357/ollama-chat-app) (Flutter-based chat app)
|
||||
- [Perfect Memory AI](https://www.perfectmemory.ai/) (Productivity AI assists personalized by what you have seen on your screen, heard, and said in the meetings)
|
||||
- [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
|
||||
- [Reddit Rate](https://github.com/rapidarchitect/reddit_analyzer) (Search and Rate Reddit topics with a weighted summation)
|
||||
- [OpenTalkGpt](https://github.com/adarshM84/OpenTalkGpt) (Chrome Extension to manage open-source models supported by Ollama, create custom models, and chat with models from a user-friendly UI)
|
||||
- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app, with dynamic conversation routing. Supports local models via Ollama)
|
||||
- [Nosia](https://github.com/nosia-ai/nosia) (Easy to install and use RAG platform based on Ollama)
|
||||
- [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application available for Mac/Windows/Linux)
|
||||
- [Abbey](https://github.com/US-Artificial-Intelligence/abbey) (A configurable AI interface server with notebooks, document storage, and YouTube support)
|
||||
- [Minima](https://github.com/dmayboroda/minima) (RAG with on-premises or fully local workflow)
|
||||
- [aidful-ollama-model-delete](https://github.com/AidfulAI/aidful-ollama-model-delete) (User interface for simplified model cleanup)
|
||||
- [Perplexica](https://github.com/ItzCrazyKns/Perplexica) (An AI-powered search engine & an open-source alternative to Perplexity AI)
|
||||
- [Ollama Chat WebUI for Docker ](https://github.com/oslook/ollama-webui) (Support for local docker deployment, lightweight ollama webui)
|
||||
- [AI Toolkit for Visual Studio Code](https://aka.ms/ai-tooklit/ollama-docs) (Microsoft-official VS Code extension to chat, test, evaluate models with Ollama support, and use them in your AI applications.)
|
||||
- [MinimalNextOllamaChat](https://github.com/anilkay/MinimalNextOllamaChat) (Minimal Web UI for Chat and Model Control)
|
||||
- [Chipper](https://github.com/TilmanGriesel/chipper) AI interface for tinkerers (Ollama, Haystack RAG, Python)
|
||||
- [ChibiChat](https://github.com/CosmicEventHorizon/ChibiChat) (Kotlin-based Android app to chat with Ollama and Koboldcpp API endpoints)
|
||||
- [LocalLLM](https://github.com/qusaismael/localllm) (Minimal Web-App to run ollama models on it with a GUI)
|
||||
- [Ollamazing](https://github.com/buiducnhat/ollamazing) (Web extension to run Ollama models)
|
||||
- [OpenDeepResearcher-via-searxng](https://github.com/benhaotang/OpenDeepResearcher-via-searxng) (A Deep Research equivalent endpoint with Ollama support for running locally)
|
||||
- [AntSK](https://github.com/AIDotNet/AntSK) (Out-of-the-box & Adaptable RAG Chatbot)
|
||||
- [MaxKB](https://github.com/1Panel-dev/MaxKB/) (Ready-to-use & flexible RAG Chatbot)
|
||||
- [yla](https://github.com/danielekp/yla) (Web interface to freely interact with your customized models)
|
||||
- [LangBot](https://github.com/RockChinQ/LangBot) (LLM-based instant messaging bots platform, with Agents, RAG features, supports multiple platforms)
|
||||
- [1Panel](https://github.com/1Panel-dev/1Panel/) (Web-based Linux Server Management Tool)
|
||||
- [AstrBot](https://github.com/Soulter/AstrBot/) (User-friendly LLM-based multi-platform chatbot with a WebUI, supporting RAG, LLM agents, and plugins integration)
|
||||
- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.)
|
||||
- [Flufy](https://github.com/Aharon-Bensadoun/Flufy) (A beautiful chat interface for interacting with Ollama's API. Built with React, TypeScript, and Material-UI.)
|
||||
- [Ellama](https://github.com/zeozeozeo/ellama) (Friendly native app to chat with an Ollama instance)
|
||||
- [screenpipe](https://github.com/mediar-ai/screenpipe) Build agents powered by your screen history
|
||||
- [Ollamb](https://github.com/hengkysteen/ollamb) (Simple yet rich in features, cross-platform built with Flutter and designed for Ollama. Try the [web demo](https://hengkysteen.github.io/demo/ollamb/).)
|
||||
- [Writeopia](https://github.com/Writeopia/Writeopia) (Text editor with integration with Ollama)
|
||||
- [AppFlowy](https://github.com/AppFlowy-IO/AppFlowy) (AI collaborative workspace with Ollama, cross-platform and self-hostable)
|
||||
- [Lumina](https://github.com/cushydigit/lumina.git) (A lightweight, minimal React.js frontend for interacting with Ollama servers)
|
||||
- [Tiny Notepad](https://pypi.org/project/tiny-notepad) (A lightweight, notepad-like interface to chat with ollama available on PyPI)
|
||||
- [macLlama (macOS native)](https://github.com/hellotunamayo/macLlama) (A native macOS GUI application for interacting with Ollama models, featuring a chat interface.)
|
||||
- [GPTranslate](https://github.com/philberndt/GPTranslate) (A fast and lightweight, AI powered desktop translation application written with Rust and Tauri. Features real-time translation with OpenAI/Azure/Ollama.)
|
||||
- [ollama launcher](https://github.com/NGC13009/ollama-launcher) (A launcher for Ollama, aiming to provide users with convenient functions such as ollama server launching, management, or configuration.)
|
||||
- [ai-hub](https://github.com/Aj-Seven/ai-hub) (AI Hub supports multiple models via API keys and Chat support via Ollama API.)
|
||||
- [Mayan EDMS](https://gitlab.com/mayan-edms/mayan-edms) (Open source document management system to organize, tag, search, and automate your files with powerful Ollama driven workflows.)
|
||||
- [Serene Pub](https://github.com/doolijb/serene-pub) (Beginner friendly, open source AI Roleplaying App for Windows, Mac OS and Linux. Search, download and use models with Ollama all inside the app.)
|
||||
- [Andes](https://github.com/aqerd/andes) (A Visual Studio Code extension that provides a local UI interface for Ollama models)
|
||||
- [KDeps](https://github.com/kdeps/kdeps) (Kdeps is an offline-first AI framework for building Dockerized full-stack AI applications declaratively using Apple PKL and integrates APIs with Ollama on the backend.)
|
||||
- [Clueless](https://github.com/KashyapTan/clueless) (Open Source & Local Cluely: A desktop application LLM assistant to help you talk to anything on your screen using locally served Ollama models. Also undetectable to screenshare)
|
||||
- [ollama-co2](https://github.com/carbonatedWaterOrg/ollama-co2) (FastAPI web interface for monitoring and managing local and remote Ollama servers with real-time model monitoring and concurrent downloads)
|
||||
- [Hillnote](https://hillnote.com) (A Markdown-first workspace designed to supercharge your AI workflow. Create documents ready to integrate with Claude, ChatGPT, Gemini, Cursor, and more - all while keeping your work on your device.)
|
||||
|
||||
### Cloud

- [Google Cloud](https://cloud.google.com/run/docs/tutorials/gpu-gemma2-with-ollama)
- [Fly.io](https://fly.io/docs/python/do-more/add-ollama/)
- [Koyeb](https://www.koyeb.com/deploy/ollama)

### Tutorial

- [handy-ollama](https://github.com/datawhalechina/handy-ollama) (Chinese Tutorial for Ollama by [Datawhale ](https://github.com/datawhalechina) - China's Largest Open Source AI Learning Community)

### Terminal

- [oterm](https://github.com/ggozad/oterm)
- [Ellama Emacs client](https://github.com/s-kostyaev/ellama)
- [Emacs client](https://github.com/zweifisch/ollama)
- [neollama](https://github.com/paradoxical-dev/neollama) UI client for interacting with models from within Neovim
- [gen.nvim](https://github.com/David-Kunz/gen.nvim)
- [ollama.nvim](https://github.com/nomnivore/ollama.nvim)
- [ollero.nvim](https://github.com/marco-souza/ollero.nvim)
@@ -480,7 +325,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [Oatmeal](https://github.com/dustinblackman/oatmeal)
|
||||
- [cmdh](https://github.com/pgibler/cmdh)
|
||||
- [ooo](https://github.com/npahlfer/ooo)
|
||||
- [shell-pilot](https://github.com/reid41/shell-pilot)(Interact with models via pure shell scripts on Linux or macOS)
|
||||
- [shell-pilot](https://github.com/reid41/shell-pilot)
|
||||
- [tenere](https://github.com/pythops/tenere)
|
||||
- [llm-ollama](https://github.com/taketwo/llm-ollama) for [Datasette's LLM CLI](https://llm.datasette.io/en/stable/).
|
||||
- [typechat-cli](https://github.com/anaisbetts/typechat-cli)
|
||||
@@ -488,69 +333,31 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [tlm](https://github.com/yusufcanb/tlm)
|
||||
- [podman-ollama](https://github.com/ericcurtin/podman-ollama)
|
||||
- [gollama](https://github.com/sammcj/gollama)
|
||||
- [ParLlama](https://github.com/paulrobello/parllama)
|
||||
- [Ollama eBook Summary](https://github.com/cognitivetech/ollama-ebook-summary/)
|
||||
- [Ollama Mixture of Experts (MOE) in 50 lines of code](https://github.com/rapidarchitect/ollama_moe)
|
||||
- [vim-intelligence-bridge](https://github.com/pepo-ec/vim-intelligence-bridge) Simple interaction of "Ollama" with the Vim editor
|
||||
- [x-cmd ollama](https://x-cmd.com/mod/ollama)
|
||||
- [bb7](https://github.com/drunkwcodes/bb7)
|
||||
- [SwollamaCLI](https://github.com/marcusziade/Swollama) bundled with the Swollama Swift package. [Demo](https://github.com/marcusziade/Swollama?tab=readme-ov-file#cli-usage)
|
||||
- [aichat](https://github.com/sigoden/aichat) All-in-one LLM CLI tool featuring Shell Assistant, Chat-REPL, RAG, AI tools & agents, with access to OpenAI, Claude, Gemini, Ollama, Groq, and more.
|
||||
- [PowershAI](https://github.com/rrg92/powershai) PowerShell module that brings AI to terminal on Windows, including support for Ollama
|
||||
- [DeepShell](https://github.com/Abyss-c0re/deepshell) Your self-hosted AI assistant. Interactive Shell, Files and Folders analysis.
|
||||
- [orbiton](https://github.com/xyproto/orbiton) Configuration-free text editor and IDE with support for tab completion with Ollama.
|
||||
- [orca-cli](https://github.com/molbal/orca-cli) Ollama Registry CLI Application - Browse, pull, and download models from Ollama Registry in your terminal.
|
||||
- [GGUF-to-Ollama](https://github.com/jonathanhecl/gguf-to-ollama) - Importing GGUF to Ollama made easy (multiplatform)
|
||||
- [AWS-Strands-With-Ollama](https://github.com/rapidarchitect/ollama_strands) - AWS Strands Agents with Ollama Examples
|
||||
- [ollama-multirun](https://github.com/attogram/ollama-multirun) - A bash shell script to run a single prompt against any or all of your locally installed ollama models, saving the output and performance statistics as easily navigable web pages. ([Demo](https://attogram.github.io/ai_test_zone/))
|
||||
- [ollama-bash-toolshed](https://github.com/attogram/ollama-bash-toolshed) - Bash scripts to chat with tool using models. Add new tools to your shed with ease. Runs on Ollama.
|
||||
- [hle-eval-ollama](https://github.com/mags0ft/hle-eval-ollama) - Runs benchmarks like "Humanity's Last Exam" (HLE) on your favorite local Ollama models and evaluates the quality of their responses
|
||||
- [VT Code](https://github.com/vinhnx/vtcode) - VT Code is a Rust-based terminal coding agent with semantic code intelligence via Tree-sitter. Ollama integration for running local/cloud models with configurable endpoints.
|
||||
|
||||
### Apple Vision Pro
|
||||
|
||||
- [SwiftChat](https://github.com/aws-samples/swift-chat) (Cross-platform AI chat app supporting Apple Vision Pro via "Designed for iPad")
|
||||
- [Enchanted](https://github.com/AugustDev/enchanted)
|
||||
|
||||
### Database
|
||||
|
||||
- [pgai](https://github.com/timescale/pgai) - PostgreSQL as a vector database (Create and search embeddings from Ollama models using pgvector)
|
||||
- [Get started guide](https://github.com/timescale/pgai/blob/main/docs/vectorizer-quick-start.md)
|
||||
- [MindsDB](https://github.com/mindsdb/mindsdb/blob/staging/mindsdb/integrations/handlers/ollama_handler/README.md) (Connects Ollama models with nearly 200 data platforms and apps)
|
||||
- [chromem-go](https://github.com/philippgille/chromem-go/blob/v0.5.0/embed_ollama.go) with [example](https://github.com/philippgille/chromem-go/tree/v0.5.0/examples/rag-wikipedia-ollama)
|
||||
- [Kangaroo](https://github.com/dbkangaroo/kangaroo) (AI-powered SQL client and admin tool for popular databases)
|
||||
|
||||
### Package managers
|
||||
|
||||
- [Pacman](https://archlinux.org/packages/extra/x86_64/ollama/)
|
||||
- [Gentoo](https://github.com/gentoo/guru/tree/master/app-misc/ollama)
|
||||
- [Homebrew](https://formulae.brew.sh/formula/ollama)
|
||||
- [Helm Chart](https://artifacthub.io/packages/helm/ollama-helm/ollama)
|
||||
- [Guix channel](https://codeberg.org/tusharhero/ollama-guix)
|
||||
- [Nix package](https://search.nixos.org/packages?show=ollama&from=0&size=50&sort=relevance&type=packages&query=ollama)
|
||||
- [Flox](https://flox.dev/blog/ollama-part-one)
|
||||
|
||||
### Libraries
|
||||
|
||||
- [LangChain](https://python.langchain.com/docs/integrations/chat/ollama/) and [LangChain.js](https://js.langchain.com/docs/integrations/chat/ollama/) with [example](https://js.langchain.com/docs/tutorials/local_rag/)
|
||||
- [Firebase Genkit](https://firebase.google.com/docs/genkit/plugins/ollama)
|
||||
- [crewAI](https://github.com/crewAIInc/crewAI)
|
||||
- [Yacana](https://remembersoftwares.github.io/yacana/) (User-friendly multi-agent framework for brainstorming and executing predetermined flows with built-in tool integration)
|
||||
- [Strands Agents](https://github.com/strands-agents/sdk-python) (A model-driven approach to building AI agents in just a few lines of code)
|
||||
- [Spring AI](https://github.com/spring-projects/spring-ai) with [reference](https://docs.spring.io/spring-ai/reference/api/chat/ollama-chat.html) and [example](https://github.com/tzolov/ollama-tools)
|
||||
- [LangChain](https://python.langchain.com/docs/integrations/llms/ollama) and [LangChain.js](https://js.langchain.com/docs/modules/model_io/models/llms/integrations/ollama) with [example](https://js.langchain.com/docs/use_cases/question_answering/local_retrieval_qa)
|
||||
- [LangChainGo](https://github.com/tmc/langchaingo/) with [example](https://github.com/tmc/langchaingo/tree/main/examples/ollama-completion-example)
|
||||
- [LangChain4j](https://github.com/langchain4j/langchain4j) with [example](https://github.com/langchain4j/langchain4j-examples/tree/main/ollama-examples/src/main/java)
|
||||
- [LangChainRust](https://github.com/Abraxas-365/langchain-rust) with [example](https://github.com/Abraxas-365/langchain-rust/blob/main/examples/llm_ollama.rs)
|
||||
- [LangChain for .NET](https://github.com/tryAGI/LangChain) with [example](https://github.com/tryAGI/LangChain/blob/main/examples/LangChain.Samples.OpenAI/Program.cs)
|
||||
- [LLPhant](https://github.com/theodo-group/LLPhant?tab=readme-ov-file#ollama)
|
||||
- [LlamaIndex](https://docs.llamaindex.ai/en/stable/examples/llm/ollama/) and [LlamaIndexTS](https://ts.llamaindex.ai/modules/llms/available_llms/ollama)
|
||||
- [LlamaIndex](https://gpt-index.readthedocs.io/en/stable/examples/llm/ollama.html)
|
||||
- [LiteLLM](https://github.com/BerriAI/litellm)
|
||||
- [OllamaFarm for Go](https://github.com/presbrey/ollamafarm)
|
||||
- [OllamaSharp for .NET](https://github.com/awaescher/OllamaSharp)
|
||||
- [Ollama for Ruby](https://github.com/gbaptista/ollama-ai)
|
||||
- [Ollama-rs for Rust](https://github.com/pepperoni21/ollama-rs)
|
||||
- [Ollama-hpp for C++](https://github.com/jmont-dev/ollama-hpp)
|
||||
- [Ollama4j for Java](https://github.com/ollama4j/ollama4j)
|
||||
- [Ollama4j for Java](https://github.com/amithkoujalgi/ollama4j)
|
||||
- [ModelFusion Typescript Library](https://modelfusion.dev/integration/model-provider/ollama)
|
||||
- [OllamaKit for Swift](https://github.com/kevinhermawan/OllamaKit)
|
||||
- [Ollama for Dart](https://github.com/breitburg/dart-ollama)
|
||||
@@ -567,48 +374,17 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [Portkey](https://portkey.ai/docs/welcome/integration-guides/ollama)
|
||||
- [PromptingTools.jl](https://github.com/svilupp/PromptingTools.jl) with an [example](https://svilupp.github.io/PromptingTools.jl/dev/examples/working_with_ollama)
|
||||
- [LlamaScript](https://github.com/Project-Llama/llamascript)
|
||||
- [llm-axe](https://github.com/emirsahin1/llm-axe) (Python Toolkit for Building LLM Powered Apps)
|
||||
- [Gollm](https://docs.gollm.co/examples/ollama-example)
|
||||
- [Gollama for Golang](https://github.com/jonathanhecl/gollama)
|
||||
- [Ollamaclient for Golang](https://github.com/xyproto/ollamaclient)
|
||||
- [High-level function abstraction in Go](https://gitlab.com/tozd/go/fun)
|
||||
- [Ollama PHP](https://github.com/ArdaGnsrn/ollama-php)
|
||||
- [Agents-Flex for Java](https://github.com/agents-flex/agents-flex) with [example](https://github.com/agents-flex/agents-flex/tree/main/agents-flex-llm/agents-flex-llm-ollama/src/test/java/com/agentsflex/llm/ollama)
|
||||
- [Parakeet](https://github.com/parakeet-nest/parakeet) is a GoLang library, made to simplify the development of small generative AI applications with Ollama.
|
||||
- [Haverscript](https://github.com/andygill/haverscript) with [examples](https://github.com/andygill/haverscript/tree/main/examples)
|
||||
- [Ollama for Swift](https://github.com/mattt/ollama-swift)
|
||||
- [Swollama for Swift](https://github.com/marcusziade/Swollama) with [DocC](https://marcusziade.github.io/Swollama/documentation/swollama/)
|
||||
- [GoLamify](https://github.com/prasad89/golamify)
|
||||
- [Ollama for Haskell](https://github.com/tusharad/ollama-haskell)
|
||||
- [multi-llm-ts](https://github.com/nbonamy/multi-llm-ts) (A Typescript/JavaScript library allowing access to different LLM in a unified API)
|
||||
- [LlmTornado](https://github.com/lofcz/llmtornado) (C# library providing a unified interface for major FOSS & Commercial inference APIs)
|
||||
- [Ollama for Zig](https://github.com/dravenk/ollama-zig)
|
||||
- [Abso](https://github.com/lunary-ai/abso) (OpenAI-compatible TypeScript SDK for any LLM provider)
|
||||
- [Nichey](https://github.com/goodreasonai/nichey) is a Python package for generating custom wikis for your research topic
|
||||
- [Ollama for D](https://github.com/kassane/ollama-d)
|
||||
- [OllamaPlusPlus](https://github.com/HardCodeDev777/OllamaPlusPlus) (Very simple C++ library for Ollama)
|
||||
- [any-llm](https://github.com/mozilla-ai/any-llm) (A single interface to use different llm providers by [mozilla.ai](https://www.mozilla.ai/))
|
||||
- [any-agent](https://github.com/mozilla-ai/any-agent) (A single interface to use and evaluate different agent frameworks by [mozilla.ai](https://www.mozilla.ai/))
|
||||
- [Neuro SAN](https://github.com/cognizant-ai-lab/neuro-san-studio) (Data-driven multi-agent orchestration framework) with [example](https://github.com/cognizant-ai-lab/neuro-san-studio/blob/main/docs/user_guide.md#ollama)
|
||||
- [achatbot-go](https://github.com/ai-bot-pro/achatbot-go) a multimodal(text/audio/image) chatbot.
|
||||
- [Ollama Bash Lib](https://github.com/attogram/ollama-bash-lib) - A Bash Library for Ollama. Run LLM prompts straight from your shell, and more
|
||||
|
||||
### Mobile
|
||||
|
||||
- [SwiftChat](https://github.com/aws-samples/swift-chat) (Lightning-fast Cross-platform AI chat app with native UI for Android, iOS, and iPad)
|
||||
- [Enchanted](https://github.com/AugustDev/enchanted)
|
||||
- [Maid](https://github.com/Mobile-Artificial-Intelligence/maid)
|
||||
- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
|
||||
- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption)
|
||||
- [Ollama Android Chat](https://github.com/sunshine0523/OllamaServer) (No need for Termux, start the Ollama service with one click on an Android device)
|
||||
- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.)
|
||||
|
||||
### Extensions & Plugins
|
||||
|
||||
- [Raycast extension](https://github.com/MassimilianoPasquini97/raycast_ollama)
|
||||
- [Discollama](https://github.com/mxyng/discollama) (Discord bot inside the Ollama discord channel)
|
||||
- [Continue](https://github.com/continuedev/continue)
|
||||
- [Vibe](https://github.com/thewh1teagle/vibe) (Transcribe and analyze meetings with Ollama)
|
||||
- [Obsidian Ollama plugin](https://github.com/hinterdupfinger/obsidian-ollama)
|
||||
- [Logseq Ollama plugin](https://github.com/omagdy7/ollama-logseq)
|
||||
- [NotesOllama](https://github.com/andersrex/notesollama) (Apple Notes Ollama plugin)
|
||||
@@ -623,47 +399,17 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [Obsidian Local GPT plugin](https://github.com/pfrankov/obsidian-local-gpt)
|
||||
- [Open Interpreter](https://docs.openinterpreter.com/language-model-setup/local-models/ollama)
|
||||
- [Llama Coder](https://github.com/ex3ndr/llama-coder) (Copilot alternative using Ollama)
|
||||
- [Ollama Copilot](https://github.com/bernardo-bruning/ollama-copilot) (Proxy that allows you to use Ollama as a copilot like GitHub Copilot)
|
||||
- [Ollama Copilot](https://github.com/bernardo-bruning/ollama-copilot) (Proxy that allows you to use ollama as a copilot like Github copilot)
|
||||
- [twinny](https://github.com/rjmacarthy/twinny) (Copilot and Copilot chat alternative using Ollama)
|
||||
- [Wingman-AI](https://github.com/RussellCanfield/wingman-ai) (Copilot code and chat alternative using Ollama and Hugging Face)
|
||||
- [Wingman-AI](https://github.com/RussellCanfield/wingman-ai) (Copilot code and chat alternative using Ollama and HuggingFace)
|
||||
- [Page Assist](https://github.com/n4ze3m/page-assist) (Chrome Extension)
|
||||
- [Plasmoid Ollama Control](https://github.com/imoize/plasmoid-ollamacontrol) (KDE Plasma extension that allows you to quickly manage/control Ollama model)
|
||||
- [AI Telegram Bot](https://github.com/tusharhero/aitelegrambot) (Telegram bot using Ollama in backend)
|
||||
- [AI ST Completion](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) (Sublime Text 4 AI assistant plugin with Ollama support)
|
||||
- [Discord-Ollama Chat Bot](https://github.com/kevinthedang/discord-ollama) (Generalized TypeScript Discord Bot w/ Tuning Documentation)
|
||||
- [ChatGPTBox: All in one browser extension](https://github.com/josStorer/chatGPTBox) with [Integrating Tutorial](https://github.com/josStorer/chatGPTBox/issues/616#issuecomment-1975186467)
|
||||
- [Discord AI chat/moderation bot](https://github.com/rapmd73/Companion) Chat/moderation bot written in python. Uses Ollama to create personalities.
|
||||
- [Headless Ollama](https://github.com/nischalj10/headless-ollama) (Scripts to automatically install ollama client & models on any OS for apps that depend on ollama server)
|
||||
- [Terraform AWS Ollama & Open WebUI](https://github.com/xuyangbocn/terraform-aws-self-host-llm) (A Terraform module to deploy on AWS a ready-to-use Ollama service, together with its front-end Open WebUI service.)
|
||||
- [node-red-contrib-ollama](https://github.com/jakubburkiewicz/node-red-contrib-ollama)
|
||||
- [Local AI Helper](https://github.com/ivostoykov/localAI) (Chrome and Firefox extensions that enable interactions with the active tab and customisable API endpoints. Includes secure storage for user prompts.)
|
||||
- [LSP-AI](https://github.com/SilasMarvin/lsp-ai) (Open-source language server for AI-powered functionality)
|
||||
- [QodeAssist](https://github.com/Palm1r/QodeAssist) (AI-powered coding assistant plugin for Qt Creator)
|
||||
- [Obsidian Quiz Generator plugin](https://github.com/ECuiDev/obsidian-quiz-generator)
|
||||
- [AI Summary Helper plugin](https://github.com/philffm/ai-summary-helper)
|
||||
- [TextCraft](https://github.com/suncloudsmoon/TextCraft) (Copilot in Word alternative using Ollama)
|
||||
- [Alfred Ollama](https://github.com/zeitlings/alfred-ollama) (Alfred Workflow)
|
||||
- [TextLLaMA](https://github.com/adarshM84/TextLLaMA) A Chrome Extension that helps you write emails, correct grammar, and translate into any language
|
||||
- [Simple-Discord-AI](https://github.com/zyphixor/simple-discord-ai)
|
||||
- [LLM Telegram Bot](https://github.com/innightwolfsleep/llm_telegram_bot) (telegram bot, primary for RP. Oobabooga-like buttons, [A1111](https://github.com/AUTOMATIC1111/stable-diffusion-webui) API integration e.t.c)
|
||||
- [mcp-llm](https://github.com/sammcj/mcp-llm) (MCP Server to allow LLMs to call other LLMs)
|
||||
- [SimpleOllamaUnity](https://github.com/HardCodeDev777/SimpleOllamaUnity) (Unity Engine extension for communicating with Ollama in a few lines of code. Also works at runtime)
|
||||
- [UnityCodeLama](https://github.com/HardCodeDev777/UnityCodeLama) (Unity Editor tool to analyze scripts via Ollama)
|
||||
- [NativeMind](https://github.com/NativeMindBrowser/NativeMindExtension) (Private, on-device AI Assistant, no cloud dependencies)
|
||||
- [GMAI - Gradle Managed AI](https://gmai.premex.se/) (Gradle plugin for automated Ollama lifecycle management during build phases)
|
||||
- [NOMYO Router](https://github.com/nomyo-ai/nomyo-router) (A transparent Ollama proxy with model deployment aware routing which auto-manages multiple Ollama instances in a given network)
|
||||
- [Headless Ollama](https://github.com/nischalj10/headless-ollama) (Scripts to automatically install ollama client & models on any OS for apps that depends on ollama server)
|
||||
|
||||
### Supported backends

- [llama.cpp](https://github.com/ggml-org/llama.cpp) project founded by Georgi Gerganov.
- [llama.cpp](https://github.com/ggerganov/llama.cpp) project founded by Georgi Gerganov.

### Observability
- [Opik](https://www.comet.com/docs/opik/cookbook/ollama) is an open-source platform to debug, evaluate, and monitor your LLM applications, RAG systems, and agentic workflows with comprehensive tracing, automated evaluations, and production-ready dashboards. Opik supports native integration to Ollama.
- [Lunary](https://lunary.ai/docs/integrations/ollama) is the leading open-source LLM observability platform. It provides a variety of enterprise-grade features such as real-time analytics, prompt templates management, PII masking, and comprehensive agent tracing.
- [OpenLIT](https://github.com/openlit/openlit) is an OpenTelemetry-native tool for monitoring Ollama Applications & GPUs using traces and metrics.
- [HoneyHive](https://docs.honeyhive.ai/integrations/ollama) is an AI observability and evaluation platform for AI agents. Use HoneyHive to evaluate agent performance, interrogate failures, and monitor quality in production.
- [Langfuse](https://langfuse.com/docs/integrations/ollama) is an open source LLM observability platform that enables teams to collaboratively monitor, evaluate and debug AI applications.
- [MLflow Tracing](https://mlflow.org/docs/latest/llms/tracing/index.html#automatic-tracing) is an open source LLM observability tool with a convenient API to log and visualize traces, making it easy to debug and evaluate GenAI applications.

### Security
- [Ollama Fortress](https://github.com/ParisNeo/ollama_proxy_server)
25
SECURITY.md
@@ -1,25 +0,0 @@
# Security

The Ollama maintainer team takes security seriously and will actively work to resolve security issues.

## Reporting a vulnerability

If you discover a security vulnerability, please do not open a public issue. Instead, please report it by emailing hello@ollama.com. We ask that you give us sufficient time to investigate and address the vulnerability before disclosing it publicly.

Please include the following details in your report:
- A description of the vulnerability
- Steps to reproduce the issue
- Your assessment of the potential impact
- Any possible mitigations

## Security best practices

While the maintainer team does its best to secure Ollama, users are encouraged to implement their own security best practices, such as:

- Regularly updating to the latest version of Ollama
- Securing access to hosted instances of Ollama
- Monitoring systems for unusual activity

## Contact

For any other questions or concerns related to security, please contact us at hello@ollama.com
128
api/client.go
@@ -10,7 +10,7 @@
|
||||
// repository].
|
||||
//
|
||||
// [the API documentation]: https://github.com/ollama/ollama/blob/main/docs/api.md
|
||||
// [in the GitHub repository]: https://github.com/ollama/ollama/tree/main/api/examples
|
||||
// [in the GitHub repository]: https://github.com/ollama/ollama/tree/main/examples
|
||||
package api
|
||||
|
||||
import (
|
||||
@@ -18,16 +18,13 @@ import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/ollama/ollama/auth"
|
||||
"github.com/ollama/ollama/envconfig"
|
||||
"github.com/ollama/ollama/format"
|
||||
"github.com/ollama/ollama/version"
|
||||
@@ -45,12 +42,6 @@ func checkError(resp *http.Response, body []byte) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if resp.StatusCode == http.StatusUnauthorized {
|
||||
authError := AuthorizationError{StatusCode: resp.StatusCode}
|
||||
json.Unmarshal(body, &authError)
|
||||
return authError
|
||||
}
|
||||
|
||||
apiError := StatusError{StatusCode: resp.StatusCode}
|
||||
|
||||
err := json.Unmarshal(body, &apiError)
|
||||
@@ -64,7 +55,7 @@ func checkError(resp *http.Response, body []byte) error {

// ClientFromEnvironment creates a new [Client] using configuration from the
// environment variable OLLAMA_HOST, which points to the network host and
// port on which the ollama service is listening. The format of this variable
// port on which the ollama service is listenting. The format of this variable
// is:
//
// <scheme>://<host>:<port>
@@ -72,8 +63,13 @@ func checkError(resp *http.Response, body []byte) error {
// If the variable is not specified, a default ollama host and port will be
// used.
func ClientFromEnvironment() (*Client, error) {
ollamaHost := envconfig.Host

return &Client{
base: envconfig.Host(),
base: &url.URL{
Scheme: ollamaHost.Scheme,
Host: net.JoinHostPort(ollamaHost.Host, ollamaHost.Port),
},
http: http.DefaultClient,
}, nil
}
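For context, a minimal sketch (not part of this diff) of how ClientFromEnvironment is typically used: set OLLAMA_HOST in the `<scheme>://<host>:<port>` form documented above, build a client, and ping the server with the Heartbeat method that appears later in this file. The host value here is only an example.

```go
package main

import (
	"context"
	"log"
	"os"

	"github.com/ollama/ollama/api"
)

func main() {
	// Example host value; the documented format is <scheme>://<host>:<port>.
	os.Setenv("OLLAMA_HOST", "http://127.0.0.1:11434")

	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Heartbeat returns nil once the server is up and responsive.
	if err := client.Heartbeat(context.Background()); err != nil {
		log.Fatal(err)
	}
	log.Println("ollama server is reachable")
}
```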
@@ -85,14 +81,6 @@ func NewClient(base *url.URL, http *http.Client) *Client {
|
||||
}
|
||||
}
|
||||
|
||||
func getAuthorizationToken(ctx context.Context, challenge string) (string, error) {
|
||||
token, err := auth.Sign(ctx, []byte(challenge))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return token, nil
|
||||
}
|
||||
|
||||
func (c *Client) do(ctx context.Context, method, path string, reqData, respData any) error {
|
||||
var reqBody io.Reader
|
||||
var data []byte
|
||||
@@ -114,21 +102,6 @@ func (c *Client) do(ctx context.Context, method, path string, reqData, respData
|
||||
}
|
||||
|
||||
requestURL := c.base.JoinPath(path)
|
||||
|
||||
var token string
|
||||
if envconfig.UseAuth() || c.base.Hostname() == "ollama.com" {
|
||||
now := strconv.FormatInt(time.Now().Unix(), 10)
|
||||
chal := fmt.Sprintf("%s,%s?ts=%s", method, path, now)
|
||||
token, err = getAuthorizationToken(ctx, chal)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
q := requestURL.Query()
|
||||
q.Set("ts", now)
|
||||
requestURL.RawQuery = q.Encode()
|
||||
}
|
||||
|
||||
request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), reqBody)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -138,10 +111,6 @@ func (c *Client) do(ctx context.Context, method, path string, reqData, respData
|
||||
request.Header.Set("Accept", "application/json")
|
||||
request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
|
||||
|
||||
if token != "" {
|
||||
request.Header.Set("Authorization", token)
|
||||
}
|
||||
|
||||
respObj, err := c.http.Do(request)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -168,7 +137,7 @@ func (c *Client) do(ctx context.Context, method, path string, reqData, respData
|
||||
const maxBufferSize = 512 * format.KiloByte
|
||||
|
||||
func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
|
||||
var buf io.Reader
|
||||
var buf *bytes.Buffer
|
||||
if data != nil {
|
||||
bts, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
@@ -179,22 +148,6 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
|
||||
}
|
||||
|
||||
requestURL := c.base.JoinPath(path)
|
||||
|
||||
var token string
|
||||
if envconfig.UseAuth() || c.base.Hostname() == "ollama.com" {
|
||||
var err error
|
||||
now := strconv.FormatInt(time.Now().Unix(), 10)
|
||||
chal := fmt.Sprintf("%s,%s?ts=%s", method, path, now)
|
||||
token, err = getAuthorizationToken(ctx, chal)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
q := requestURL.Query()
|
||||
q.Set("ts", now)
|
||||
requestURL.RawQuery = q.Encode()
|
||||
}
|
||||
|
||||
request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), buf)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -204,10 +157,6 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
|
||||
request.Header.Set("Accept", "application/x-ndjson")
|
||||
request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
|
||||
|
||||
if token != "" {
|
||||
request.Header.Set("Authorization", token)
|
||||
}
|
||||
|
||||
response, err := c.http.Do(request)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -221,28 +170,18 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
|
||||
for scanner.Scan() {
|
||||
var errorResponse struct {
|
||||
Error string `json:"error,omitempty"`
|
||||
SigninURL string `json:"signin_url,omitempty"`
|
||||
}
|
||||
|
||||
bts := scanner.Bytes()
|
||||
if err := json.Unmarshal(bts, &errorResponse); err != nil {
|
||||
if response.StatusCode >= http.StatusBadRequest {
|
||||
return StatusError{
|
||||
StatusCode: response.StatusCode,
|
||||
Status: response.Status,
|
||||
ErrorMessage: string(bts),
|
||||
}
|
||||
}
|
||||
return errors.New(string(bts))
|
||||
return fmt.Errorf("unmarshal: %w", err)
|
||||
}
|
||||
|
||||
if response.StatusCode == http.StatusUnauthorized {
|
||||
return AuthorizationError{
|
||||
StatusCode: response.StatusCode,
|
||||
Status: response.Status,
|
||||
SigninURL: errorResponse.SigninURL,
|
||||
if errorResponse.Error != "" {
|
||||
return fmt.Errorf(errorResponse.Error)
|
||||
}
|
||||
} else if response.StatusCode >= http.StatusBadRequest {
|
||||
|
||||
if response.StatusCode >= http.StatusBadRequest {
|
||||
return StatusError{
|
||||
StatusCode: response.StatusCode,
|
||||
Status: response.Status,
|
||||
@@ -250,10 +189,6 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
|
||||
}
|
||||
}
|
||||
|
||||
if errorResponse.Error != "" {
|
||||
return errors.New(errorResponse.Error)
|
||||
}
|
||||
|
||||
if err := fn(bts); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -368,7 +303,7 @@ func (c *Client) List(ctx context.Context) (*ListResponse, error) {
|
||||
return &lr, nil
|
||||
}
|
||||
|
||||
// ListRunning lists running models.
|
||||
// List running models.
|
||||
func (c *Client) ListRunning(ctx context.Context) (*ProcessResponse, error) {
|
||||
var lr ProcessResponse
|
||||
if err := c.do(ctx, http.MethodGet, "/api/ps", nil, &lr); err != nil {
|
||||
@@ -403,7 +338,7 @@ func (c *Client) Show(ctx context.Context, req *ShowRequest) (*ShowResponse, err
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
// Heartbeat checks if the server has started and is responsive; if yes, it
|
||||
// Hearbeat checks if the server has started and is responsive; if yes, it
|
||||
// returns nil, otherwise an error.
|
||||
func (c *Client) Heartbeat(ctx context.Context) error {
|
||||
if err := c.do(ctx, http.MethodHead, "/", nil, nil); err != nil {
|
||||
@@ -412,16 +347,7 @@ func (c *Client) Heartbeat(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Embed generates embeddings from a model.
|
||||
func (c *Client) Embed(ctx context.Context, req *EmbedRequest) (*EmbedResponse, error) {
|
||||
var resp EmbedResponse
|
||||
if err := c.do(ctx, http.MethodPost, "/api/embed", req, &resp); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
// Embeddings generates an embedding from a model.
|
||||
// Embeddings generates embeddings from a model.
|
||||
func (c *Client) Embeddings(ctx context.Context, req *EmbeddingRequest) (*EmbeddingResponse, error) {
|
||||
var resp EmbeddingResponse
|
||||
if err := c.do(ctx, http.MethodPost, "/api/embeddings", req, &resp); err != nil {
|
||||
@@ -448,21 +374,3 @@ func (c *Client) Version(ctx context.Context) (string, error) {
|
||||
|
||||
return version.Version, nil
|
||||
}
|
||||
|
||||
// Signout will signout a client for a local ollama server.
func (c *Client) Signout(ctx context.Context) error {
return c.do(ctx, http.MethodPost, "/api/signout", nil, nil)
}

// Disconnect will disconnect an ollama instance from ollama.com.
func (c *Client) Disconnect(ctx context.Context, encodedKey string) error {
return c.do(ctx, http.MethodDelete, fmt.Sprintf("/api/user/keys/%s", encodedKey), nil, nil)
}

func (c *Client) Whoami(ctx context.Context) (*UserResponse, error) {
var resp UserResponse
if err := c.do(ctx, http.MethodPost, "/api/me", nil, &resp); err != nil {
return nil, err
}
return &resp, nil
}
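A hedged usage sketch (not part of this diff) for the account helpers shown just above, using only the signatures that appear in the hunk: Whoami posts to /api/me and returns the signed-in user, and Signout posts to /api/signout on the local server.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()

	// Whoami decodes the UserResponse returned by /api/me.
	user, err := client.Whoami(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("signed in as: %+v\n", user)

	// Signout signs the local ollama server out.
	if err := client.Signout(ctx); err != nil {
		log.Fatal(err)
	}
}
```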
@@ -1,13 +1,9 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/ollama/ollama/envconfig"
|
||||
)
|
||||
|
||||
func TestClientFromEnvironment(t *testing.T) {
|
||||
@@ -37,6 +33,7 @@ func TestClientFromEnvironment(t *testing.T) {
|
||||
for k, v := range testCases {
|
||||
t.Run(k, func(t *testing.T) {
|
||||
t.Setenv("OLLAMA_HOST", v.value)
|
||||
envconfig.LoadConfig()
|
||||
|
||||
client, err := ClientFromEnvironment()
|
||||
if err != v.err {
|
||||
@@ -49,274 +46,3 @@ func TestClientFromEnvironment(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// testError represents an internal error type with status code and message
|
||||
// this is used since the error response from the server is not a standard error struct
|
||||
type testError struct {
|
||||
message string
|
||||
statusCode int
|
||||
raw bool // if true, write message as-is instead of JSON encoding
|
||||
}
|
||||
|
||||
func (e testError) Error() string {
|
||||
return e.message
|
||||
}
|
||||
|
||||
func TestClientStream(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
responses []any
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "immediate error response",
|
||||
responses: []any{
|
||||
testError{
|
||||
message: "test error message",
|
||||
statusCode: http.StatusBadRequest,
|
||||
},
|
||||
},
|
||||
wantErr: "test error message",
|
||||
},
|
||||
{
|
||||
name: "error after successful chunks, ok response",
|
||||
responses: []any{
|
||||
ChatResponse{Message: Message{Content: "partial response 1"}},
|
||||
ChatResponse{Message: Message{Content: "partial response 2"}},
|
||||
testError{
|
||||
message: "mid-stream error",
|
||||
statusCode: http.StatusOK,
|
||||
},
|
||||
},
|
||||
wantErr: "mid-stream error",
|
||||
},
|
||||
{
|
||||
name: "http status error takes precedence over general error",
|
||||
responses: []any{
|
||||
testError{
|
||||
message: "custom error message",
|
||||
statusCode: http.StatusInternalServerError,
|
||||
},
|
||||
},
|
||||
wantErr: "500",
|
||||
},
|
||||
{
|
||||
name: "successful stream completion",
|
||||
responses: []any{
|
||||
ChatResponse{Message: Message{Content: "chunk 1"}},
|
||||
ChatResponse{Message: Message{Content: "chunk 2"}},
|
||||
ChatResponse{
|
||||
Message: Message{Content: "final chunk"},
|
||||
Done: true,
|
||||
DoneReason: "stop",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "plain text error response",
|
||||
responses: []any{
|
||||
"internal server error",
|
||||
},
|
||||
wantErr: "internal server error",
|
||||
},
|
||||
{
|
||||
name: "HTML error page",
|
||||
responses: []any{
|
||||
"<html><body>404 Not Found</body></html>",
|
||||
},
|
||||
wantErr: "404 Not Found",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
flusher, ok := w.(http.Flusher)
|
||||
if !ok {
|
||||
t.Fatal("expected http.Flusher")
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/x-ndjson")
|
||||
|
||||
for _, resp := range tc.responses {
|
||||
if errResp, ok := resp.(testError); ok {
|
||||
w.WriteHeader(errResp.statusCode)
|
||||
err := json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": errResp.message,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal("failed to encode error response:", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if str, ok := resp.(string); ok {
|
||||
fmt.Fprintln(w, str)
|
||||
flusher.Flush()
|
||||
continue
|
||||
}
|
||||
|
||||
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
||||
t.Fatalf("failed to encode response: %v", err)
|
||||
}
|
||||
flusher.Flush()
|
||||
}
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient)
|
||||
|
||||
var receivedChunks []ChatResponse
|
||||
err := client.stream(t.Context(), http.MethodPost, "/v1/chat", nil, func(chunk []byte) error {
|
||||
var resp ChatResponse
|
||||
if err := json.Unmarshal(chunk, &resp); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal chunk: %w", err)
|
||||
}
|
||||
receivedChunks = append(receivedChunks, resp)
|
||||
return nil
|
||||
})
|
||||
|
||||
if tc.wantErr != "" {
|
||||
if err == nil {
|
||||
t.Fatal("expected error but got nil")
|
||||
}
|
||||
if !strings.Contains(err.Error(), tc.wantErr) {
|
||||
t.Errorf("expected error containing %q, got %v", tc.wantErr, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestClientDo(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
response any
|
||||
wantErr string
|
||||
wantStatusCode int
|
||||
}{
|
||||
{
|
||||
name: "immediate error response",
|
||||
response: testError{
|
||||
message: "test error message",
|
||||
statusCode: http.StatusBadRequest,
|
||||
},
|
||||
wantErr: "test error message",
|
||||
wantStatusCode: http.StatusBadRequest,
|
||||
},
|
||||
{
|
||||
name: "server error response",
|
||||
response: testError{
|
||||
message: "internal error",
|
||||
statusCode: http.StatusInternalServerError,
|
||||
},
|
||||
wantErr: "internal error",
|
||||
wantStatusCode: http.StatusInternalServerError,
|
||||
},
|
||||
{
|
||||
name: "successful response",
|
||||
response: struct {
|
||||
ID string `json:"id"`
|
||||
Success bool `json:"success"`
|
||||
}{
|
||||
ID: "msg_123",
|
||||
Success: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "plain text error response",
|
||||
response: testError{
|
||||
message: "internal server error",
|
||||
statusCode: http.StatusInternalServerError,
|
||||
raw: true,
|
||||
},
|
||||
wantErr: "internal server error",
|
||||
wantStatusCode: http.StatusInternalServerError,
|
||||
},
|
||||
{
|
||||
name: "HTML error page",
|
||||
response: testError{
|
||||
message: "<html><body>404 Not Found</body></html>",
|
||||
statusCode: http.StatusNotFound,
|
||||
raw: true,
|
||||
},
|
||||
wantErr: "<html><body>404 Not Found</body></html>",
|
||||
wantStatusCode: http.StatusNotFound,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if errResp, ok := tc.response.(testError); ok {
|
||||
w.WriteHeader(errResp.statusCode)
|
||||
if !errResp.raw {
|
||||
err := json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": errResp.message,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal("failed to encode error response:", err)
|
||||
}
|
||||
} else {
|
||||
// Write raw message (simulates non-JSON error responses)
|
||||
fmt.Fprint(w, errResp.message)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if err := json.NewEncoder(w).Encode(tc.response); err != nil {
|
||||
t.Fatalf("failed to encode response: %v", err)
|
||||
}
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient)
|
||||
|
||||
var resp struct {
|
||||
ID string `json:"id"`
|
||||
Success bool `json:"success"`
|
||||
}
|
||||
err := client.do(t.Context(), http.MethodPost, "/v1/messages", nil, &resp)
|
||||
|
||||
if tc.wantErr != "" {
|
||||
if err == nil {
|
||||
t.Fatalf("got nil, want error %q", tc.wantErr)
|
||||
}
|
||||
if err.Error() != tc.wantErr {
|
||||
t.Errorf("error message mismatch: got %q, want %q", err.Error(), tc.wantErr)
|
||||
}
|
||||
if tc.wantStatusCode != 0 {
|
||||
if statusErr, ok := err.(StatusError); ok {
|
||||
if statusErr.StatusCode != tc.wantStatusCode {
|
||||
t.Errorf("status code mismatch: got %d, want %d", statusErr.StatusCode, tc.wantStatusCode)
|
||||
}
|
||||
} else {
|
||||
t.Errorf("expected StatusError, got %T", err)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("got error %q, want nil", err)
|
||||
}
|
||||
|
||||
if expectedResp, ok := tc.response.(struct {
|
||||
ID string `json:"id"`
|
||||
Success bool `json:"success"`
|
||||
}); ok {
|
||||
if resp.ID != expectedResp.ID {
|
||||
t.Errorf("response ID mismatch: got %q, want %q", resp.ID, expectedResp.ID)
|
||||
}
|
||||
if resp.Success != expectedResp.Success {
|
||||
t.Errorf("response Success mismatch: got %v, want %v", resp.Success, expectedResp.Success)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +0,0 @@
# Ollama API Examples

Run the examples in this directory with:

```shell
go run example_name/main.go
```

## Chat - Chat with a model
- [chat/main.go](chat/main.go)

## Generate - Generate text from a model
- [generate/main.go](generate/main.go)
- [generate-streaming/main.go](generate-streaming/main.go)

## Pull - Pull a model
- [pull-progress/main.go](pull-progress/main.go)
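A hedged sketch of what a generate example like the ones listed above might look like; it is not taken from the repository and assumes the api package's Generate helper and its streaming response callback (the request type appears in api/types.go later in this diff).

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Assumed fields; "llama3" matches the model used elsewhere in this diff.
	req := &api.GenerateRequest{
		Model:  "llama3",
		Prompt: "Why is the sky blue?",
	}

	// The callback is invoked once per streamed response chunk.
	err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
		fmt.Print(resp.Response)
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println()
}
```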
680
api/types.go
@@ -10,14 +10,9 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"github.com/ollama/ollama/envconfig"
|
||||
"github.com/ollama/ollama/types/model"
|
||||
)
|
||||
|
||||
// StatusError is an error with an HTTP status code and message.
|
||||
// StatusError is an error with and HTTP status code.
|
||||
type StatusError struct {
|
||||
StatusCode int
|
||||
Status string
|
||||
@@ -38,19 +33,6 @@ func (e StatusError) Error() string {
|
||||
}
|
||||
}
|
||||
|
||||
type AuthorizationError struct {
|
||||
StatusCode int
|
||||
Status string
|
||||
SigninURL string `json:"signin_url"`
|
||||
}
|
||||
|
||||
func (e AuthorizationError) Error() string {
|
||||
if e.Status != "" {
|
||||
return e.Status
|
||||
}
|
||||
return "something went wrong, please see the ollama server logs for details"
|
||||
}
|
||||
|
||||
// ImageData represents the raw binary data of an image file.
|
||||
type ImageData []byte
|
||||
|
||||
@@ -65,9 +47,6 @@ type GenerateRequest struct {
|
||||
// Prompt is the textual prompt to send to the model.
|
||||
Prompt string `json:"prompt"`
|
||||
|
||||
// Suffix is the text that comes after the inserted text.
|
||||
Suffix string `json:"suffix"`
|
||||
|
||||
// System overrides the model's default system message/prompt.
|
||||
System string `json:"system"`
|
||||
|
||||
@@ -75,7 +54,7 @@ type GenerateRequest struct {
|
||||
Template string `json:"template"`
|
||||
|
||||
// Context is the context parameter returned from a previous call to
|
||||
// [Client.Generate]. It can be used to keep a short conversational memory.
|
||||
// Generate call. It can be used to keep a short conversational memory.
|
||||
Context []int `json:"context,omitempty"`
|
||||
|
||||
// Stream specifies whether the response is streaming; it is true by default.
|
||||
@@ -85,46 +64,19 @@ type GenerateRequest struct {
|
||||
Raw bool `json:"raw,omitempty"`
|
||||
|
||||
// Format specifies the format to return a response in.
|
||||
Format json.RawMessage `json:"format,omitempty"`
|
||||
Format string `json:"format"`
|
||||
|
||||
// KeepAlive controls how long the model will stay loaded in memory following
|
||||
// this request.
|
||||
KeepAlive *Duration `json:"keep_alive,omitempty"`
|
||||
|
||||
// Images is an optional list of raw image bytes accompanying this
|
||||
// Images is an optional list of base64-encoded images accompanying this
|
||||
// request, for multimodal models.
|
||||
Images []ImageData `json:"images,omitempty"`
|
||||
|
||||
// Options lists model-specific options. For example, temperature can be
|
||||
// set through this field, if the model supports it.
|
||||
Options map[string]any `json:"options"`
|
||||
|
||||
// Think controls whether thinking/reasoning models will think before
|
||||
// responding. Can be a boolean (true/false) or a string ("high", "medium", "low")
|
||||
// for supported models. Needs to be a pointer so we can distinguish between false
|
||||
// (request that thinking _not_ be used) and unset (use the old behavior
|
||||
// before this option was introduced)
|
||||
Think *ThinkValue `json:"think,omitempty"`
|
||||
|
||||
// Truncate is a boolean that, when set to true, truncates the chat history messages
|
||||
// if the rendered prompt exceeds the context length limit.
|
||||
Truncate *bool `json:"truncate,omitempty"`
|
||||
|
||||
// Shift is a boolean that, when set to true, shifts the chat history
|
||||
// when hitting the context length limit instead of erroring.
|
||||
Shift *bool `json:"shift,omitempty"`
|
||||
|
||||
// DebugRenderOnly is a debug option that, when set to true, returns the rendered
|
||||
// template instead of calling the model.
|
||||
DebugRenderOnly bool `json:"_debug_render_only,omitempty"`
|
||||
|
||||
// Logprobs specifies whether to return log probabilities of the output tokens.
|
||||
Logprobs bool `json:"logprobs,omitempty"`
|
||||
|
||||
// TopLogprobs is the number of most likely tokens to return at each token position,
|
||||
// each with an associated log probability. Only applies when Logprobs is true.
|
||||
// Valid values are 0-20. Default is 0 (only return the selected token's logprob).
|
||||
TopLogprobs int `json:"top_logprobs,omitempty"`
|
||||
Options map[string]interface{} `json:"options"`
|
||||
}
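To make the newer field types in the struct above concrete, here is a hedged sketch of filling in a `GenerateRequest` with a structured `Format` (a `json.RawMessage` schema) and a `Think` level; the schema, model name, and option values are illustrative rather than taken from this diff.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/ollama/ollama/api"
)

func main() {
	// Constrain the output to a JSON object (illustrative schema).
	schema := json.RawMessage(`{"type":"object","properties":{"answer":{"type":"string"}}}`)

	req := api.GenerateRequest{
		Model:  "llama3", // illustrative
		Prompt: "Answer in JSON.",
		Format: schema,                         // json.RawMessage on the newer side of this diff
		Think:  &api.ThinkValue{Value: "high"}, // bool or "high"/"medium"/"low"
		Options: map[string]any{
			"temperature": 0.2,
		},
	}
	fmt.Println(req.Model, string(req.Format))
}
```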
|
||||
|
||||
// ChatRequest describes a request sent by [Client.Chat].
|
||||
@@ -135,58 +87,18 @@ type ChatRequest struct {
|
||||
// Messages is the messages of the chat - can be used to keep a chat memory.
|
||||
Messages []Message `json:"messages"`
|
||||
|
||||
// Stream enables streaming of returned responses; true by default.
|
||||
// Stream enable streaming of returned response; true by default.
|
||||
Stream *bool `json:"stream,omitempty"`
|
||||
|
||||
// Format is the format to return the response in (e.g. "json").
|
||||
Format json.RawMessage `json:"format,omitempty"`
|
||||
Format string `json:"format"`
|
||||
|
||||
// KeepAlive controls how long the model will stay loaded into memory
|
||||
// following the request.
|
||||
// followin the request.
|
||||
KeepAlive *Duration `json:"keep_alive,omitempty"`
|
||||
|
||||
// Tools is an optional list of tools the model has access to.
|
||||
Tools `json:"tools,omitempty"`
|
||||
|
||||
// Options lists model-specific options.
|
||||
Options map[string]any `json:"options"`
|
||||
|
||||
// Think controls whether thinking/reasoning models will think before
|
||||
// responding. Can be a boolean (true/false) or a string ("high", "medium", "low")
|
||||
// for supported models.
|
||||
Think *ThinkValue `json:"think,omitempty"`
|
||||
|
||||
// Truncate is a boolean that, when set to true, truncates the chat history messages
|
||||
// if the rendered prompt exceeds the context length limit.
|
||||
Truncate *bool `json:"truncate,omitempty"`
|
||||
|
||||
// Shift is a boolean that, when set to true, shifts the chat history
|
||||
// when hitting the context length limit instead of erroring.
|
||||
Shift *bool `json:"shift,omitempty"`
|
||||
|
||||
// DebugRenderOnly is a debug option that, when set to true, returns the rendered
|
||||
// template instead of calling the model.
|
||||
DebugRenderOnly bool `json:"_debug_render_only,omitempty"`
|
||||
|
||||
// Logprobs specifies whether to return log probabilities of the output tokens.
|
||||
Logprobs bool `json:"logprobs,omitempty"`
|
||||
|
||||
// TopLogprobs is the number of most likely tokens to return at each token position,
|
||||
// each with an associated log probability. Only applies when Logprobs is true.
|
||||
// Valid values are 0-20. Default is 0 (only return the selected token's logprob).
|
||||
TopLogprobs int `json:"top_logprobs,omitempty"`
|
||||
}
|
||||
|
||||
type Tools []Tool
|
||||
|
||||
func (t Tools) String() string {
|
||||
bts, _ := json.Marshal(t)
|
||||
return string(bts)
|
||||
}
|
||||
|
||||
func (t Tool) String() string {
|
||||
bts, _ := json.Marshal(t)
|
||||
return string(bts)
|
||||
Options map[string]interface{} `json:"options"`
|
||||
}
|
||||
|
||||
// Message is a single message in a chat sequence. The message contains the
|
||||
@@ -195,230 +107,22 @@ func (t Tool) String() string {
|
||||
type Message struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
// Thinking contains the text that was inside thinking tags in the
|
||||
// original model output when ChatRequest.Think is enabled.
|
||||
Thinking string `json:"thinking,omitempty"`
|
||||
Images []ImageData `json:"images,omitempty"`
|
||||
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
|
||||
ToolName string `json:"tool_name,omitempty"`
|
||||
ToolCallID string `json:"tool_call_id,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Message) UnmarshalJSON(b []byte) error {
|
||||
type Alias Message
|
||||
var a Alias
|
||||
if err := json.Unmarshal(b, &a); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*m = Message(a)
|
||||
m.Role = strings.ToLower(m.Role)
|
||||
return nil
|
||||
}
|
||||
|
||||
type ToolCall struct {
|
||||
ID string `json:"id,omitempty"`
|
||||
Function ToolCallFunction `json:"function"`
|
||||
}
|
||||
|
||||
type ToolCallFunction struct {
|
||||
Index int `json:"index"`
|
||||
Name string `json:"name"`
|
||||
Arguments ToolCallFunctionArguments `json:"arguments"`
|
||||
}
|
||||
|
||||
type ToolCallFunctionArguments map[string]any
|
||||
|
||||
func (t *ToolCallFunctionArguments) String() string {
|
||||
bts, _ := json.Marshal(t)
|
||||
return string(bts)
|
||||
}
|
||||
|
||||
type Tool struct {
|
||||
Type string `json:"type"`
|
||||
Items any `json:"items,omitempty"`
|
||||
Function ToolFunction `json:"function"`
|
||||
}
|
||||
|
||||
// PropertyType can be either a string or an array of strings
|
||||
type PropertyType []string
|
||||
|
||||
// UnmarshalJSON implements the json.Unmarshaler interface
|
||||
func (pt *PropertyType) UnmarshalJSON(data []byte) error {
|
||||
// Try to unmarshal as a string first
|
||||
var s string
|
||||
if err := json.Unmarshal(data, &s); err == nil {
|
||||
*pt = []string{s}
|
||||
return nil
|
||||
}
|
||||
|
||||
// If that fails, try to unmarshal as an array of strings
|
||||
var a []string
|
||||
if err := json.Unmarshal(data, &a); err != nil {
|
||||
return err
|
||||
}
|
||||
*pt = a
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalJSON implements the json.Marshaler interface
|
||||
func (pt PropertyType) MarshalJSON() ([]byte, error) {
|
||||
if len(pt) == 1 {
|
||||
// If there's only one type, marshal as a string
|
||||
return json.Marshal(pt[0])
|
||||
}
|
||||
// Otherwise marshal as an array
|
||||
return json.Marshal([]string(pt))
|
||||
}
|
||||
|
||||
// String returns a string representation of the PropertyType
|
||||
func (pt PropertyType) String() string {
|
||||
if len(pt) == 0 {
|
||||
return ""
|
||||
}
|
||||
if len(pt) == 1 {
|
||||
return pt[0]
|
||||
}
|
||||
return fmt.Sprintf("%v", []string(pt))
|
||||
}
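A small sketch of the round-trip these two methods implement: a bare JSON string and a one-element array both decode to a single-type `PropertyType`, and a single-type value encodes back to a bare string.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/ollama/ollama/api"
)

func main() {
	var single, union api.PropertyType

	// "string" and ["string"] both become PropertyType{"string"}.
	_ = json.Unmarshal([]byte(`"string"`), &single)
	_ = json.Unmarshal([]byte(`["string","null"]`), &union)

	a, _ := json.Marshal(single) // "string"  (single type encodes as a bare string)
	b, _ := json.Marshal(union)  // ["string","null"]
	fmt.Println(string(a), string(b))
}
```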
|
||||
|
||||
type ToolProperty struct {
|
||||
AnyOf []ToolProperty `json:"anyOf,omitempty"`
|
||||
Type PropertyType `json:"type,omitempty"`
|
||||
Items any `json:"items,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Enum []any `json:"enum,omitempty"`
|
||||
}
|
||||
|
||||
// ToTypeScriptType converts a ToolProperty to a TypeScript type string
|
||||
func (tp ToolProperty) ToTypeScriptType() string {
|
||||
if len(tp.AnyOf) > 0 {
|
||||
var types []string
|
||||
for _, anyOf := range tp.AnyOf {
|
||||
types = append(types, anyOf.ToTypeScriptType())
|
||||
}
|
||||
return strings.Join(types, " | ")
|
||||
}
|
||||
|
||||
if len(tp.Type) == 0 {
|
||||
return "any"
|
||||
}
|
||||
|
||||
if len(tp.Type) == 1 {
|
||||
return mapToTypeScriptType(tp.Type[0])
|
||||
}
|
||||
|
||||
var types []string
|
||||
for _, t := range tp.Type {
|
||||
types = append(types, mapToTypeScriptType(t))
|
||||
}
|
||||
return strings.Join(types, " | ")
|
||||
}
|
||||
|
||||
// mapToTypeScriptType maps JSON Schema types to TypeScript types
|
||||
func mapToTypeScriptType(jsonType string) string {
|
||||
switch jsonType {
|
||||
case "string":
|
||||
return "string"
|
||||
case "number", "integer":
|
||||
return "number"
|
||||
case "boolean":
|
||||
return "boolean"
|
||||
case "array":
|
||||
return "any[]"
|
||||
case "object":
|
||||
return "Record<string, any>"
|
||||
case "null":
|
||||
return "null"
|
||||
default:
|
||||
return "any"
|
||||
}
|
||||
}
|
||||
|
||||
type ToolFunctionParameters struct {
|
||||
Type string `json:"type"`
|
||||
Defs any `json:"$defs,omitempty"`
|
||||
Items any `json:"items,omitempty"`
|
||||
Required []string `json:"required,omitempty"`
|
||||
Properties map[string]ToolProperty `json:"properties"`
|
||||
}
|
||||
|
||||
func (t *ToolFunctionParameters) String() string {
|
||||
bts, _ := json.Marshal(t)
|
||||
return string(bts)
|
||||
}
|
||||
|
||||
type ToolFunction struct {
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Parameters ToolFunctionParameters `json:"parameters"`
|
||||
}
|
||||
|
||||
func (t *ToolFunction) String() string {
|
||||
bts, _ := json.Marshal(t)
|
||||
return string(bts)
|
||||
}
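Putting the tool-related types above together, a chat request with one callable function might be assembled roughly as follows; the weather tool and model name are purely illustrative.

```go
package main

import (
	"fmt"

	"github.com/ollama/ollama/api"
)

func main() {
	weather := api.Tool{
		Type: "function",
		Function: api.ToolFunction{
			Name:        "get_weather", // illustrative tool
			Description: "Look up the current weather for a city",
			Parameters: api.ToolFunctionParameters{
				Type:     "object",
				Required: []string{"city"},
				Properties: map[string]api.ToolProperty{
					"city": {
						Type:        api.PropertyType{"string"},
						Description: "City name",
					},
				},
			},
		},
	}

	req := api.ChatRequest{
		Model:    "llama3", // illustrative
		Messages: []api.Message{{Role: "user", Content: "What's the weather in Paris?"}},
		Tools:    api.Tools{weather},
	}
	fmt.Println(req.Tools.String())
}
```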
|
||||
|
||||
// TokenLogprob represents log probability information for a single token alternative.
|
||||
type TokenLogprob struct {
|
||||
// Token is the text representation of the token.
|
||||
Token string `json:"token"`
|
||||
|
||||
// Logprob is the log probability of this token.
|
||||
Logprob float64 `json:"logprob"`
|
||||
|
||||
// Bytes contains the raw byte representation of the token
|
||||
Bytes []int `json:"bytes,omitempty"`
|
||||
}
|
||||
|
||||
// Logprob contains log probability information for a generated token.
|
||||
type Logprob struct {
|
||||
TokenLogprob
|
||||
|
||||
// TopLogprobs contains the most likely tokens and their log probabilities
|
||||
// at this position, if requested via TopLogprobs parameter.
|
||||
TopLogprobs []TokenLogprob `json:"top_logprobs,omitempty"`
|
||||
}
|
||||
|
||||
// ChatResponse is the response returned by [Client.Chat]. Its fields are
|
||||
// similar to [GenerateResponse].
|
||||
type ChatResponse struct {
|
||||
// Model is the model name that generated the response.
|
||||
Model string `json:"model"`
|
||||
|
||||
// RemoteModel is the name of the upstream model that generated the response.
|
||||
RemoteModel string `json:"remote_model,omitempty"`
|
||||
|
||||
// RemoteHost is the URL of the upstream Ollama host that generated the response.
|
||||
RemoteHost string `json:"remote_host,omitempty"`
|
||||
|
||||
// CreatedAt is the timestamp of the response.
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
|
||||
// Message contains the message or part of a message from the model.
|
||||
Message Message `json:"message"`
|
||||
|
||||
// Done specifies if the response is complete.
|
||||
Done bool `json:"done"`
|
||||
|
||||
// DoneReason is the reason the model stopped generating text.
|
||||
DoneReason string `json:"done_reason,omitempty"`
|
||||
|
||||
DebugInfo *DebugInfo `json:"_debug_info,omitempty"`
|
||||
|
||||
// Logprobs contains log probability information for the generated tokens,
|
||||
// if requested via the Logprobs parameter.
|
||||
Logprobs []Logprob `json:"logprobs,omitempty"`
|
||||
Done bool `json:"done"`
|
||||
|
||||
Metrics
|
||||
}
|
||||
|
||||
// DebugInfo contains debug information for template rendering
|
||||
type DebugInfo struct {
|
||||
RenderedTemplate string `json:"rendered_template"`
|
||||
ImageCount int `json:"image_count,omitempty"`
|
||||
}
|
||||
|
||||
type Metrics struct {
|
||||
TotalDuration time.Duration `json:"total_duration,omitempty"`
|
||||
LoadDuration time.Duration `json:"load_duration,omitempty"`
|
||||
@@ -428,8 +132,8 @@ type Metrics struct {
|
||||
EvalDuration time.Duration `json:"eval_duration,omitempty"`
|
||||
}
|
||||
|
||||
// Options specified in [GenerateRequest]. If you add a new option here, also
|
||||
// add it to the API docs.
|
||||
// Options specified in [GenerateRequest], if you add a new option here add it
|
||||
// to the API docs also.
|
||||
type Options struct {
|
||||
Runner
|
||||
|
||||
@@ -439,56 +143,65 @@ type Options struct {
|
||||
NumPredict int `json:"num_predict,omitempty"`
|
||||
TopK int `json:"top_k,omitempty"`
|
||||
TopP float32 `json:"top_p,omitempty"`
|
||||
MinP float32 `json:"min_p,omitempty"`
|
||||
TFSZ float32 `json:"tfs_z,omitempty"`
|
||||
TypicalP float32 `json:"typical_p,omitempty"`
|
||||
RepeatLastN int `json:"repeat_last_n,omitempty"`
|
||||
Temperature float32 `json:"temperature,omitempty"`
|
||||
RepeatPenalty float32 `json:"repeat_penalty,omitempty"`
|
||||
PresencePenalty float32 `json:"presence_penalty,omitempty"`
|
||||
FrequencyPenalty float32 `json:"frequency_penalty,omitempty"`
|
||||
Mirostat int `json:"mirostat,omitempty"`
|
||||
MirostatTau float32 `json:"mirostat_tau,omitempty"`
|
||||
MirostatEta float32 `json:"mirostat_eta,omitempty"`
|
||||
PenalizeNewline bool `json:"penalize_newline,omitempty"`
|
||||
Stop []string `json:"stop,omitempty"`
|
||||
}
|
||||
|
||||
// Runner options which must be set when the model is loaded into memory
|
||||
type Runner struct {
|
||||
UseNUMA bool `json:"numa,omitempty"`
|
||||
NumCtx int `json:"num_ctx,omitempty"`
|
||||
NumBatch int `json:"num_batch,omitempty"`
|
||||
NumGPU int `json:"num_gpu,omitempty"`
|
||||
MainGPU int `json:"main_gpu,omitempty"`
|
||||
UseMMap *bool `json:"use_mmap,omitempty"`
|
||||
LowVRAM bool `json:"low_vram,omitempty"`
|
||||
F16KV bool `json:"f16_kv,omitempty"`
|
||||
LogitsAll bool `json:"logits_all,omitempty"`
|
||||
VocabOnly bool `json:"vocab_only,omitempty"`
|
||||
UseMMap TriState `json:"use_mmap,omitempty"`
|
||||
UseMLock bool `json:"use_mlock,omitempty"`
|
||||
NumThread int `json:"num_thread,omitempty"`
|
||||
}
|
||||
|
||||
// EmbedRequest is the request passed to [Client.Embed].
|
||||
type EmbedRequest struct {
|
||||
// Model is the model name.
|
||||
Model string `json:"model"`
|
||||
type TriState int
|
||||
|
||||
// Input is the input to embed.
|
||||
Input any `json:"input"`
|
||||
const (
|
||||
TriStateUndefined TriState = -1
|
||||
TriStateFalse TriState = 0
|
||||
TriStateTrue TriState = 1
|
||||
)
|
||||
|
||||
// KeepAlive controls how long the model will stay loaded in memory following
|
||||
// this request.
|
||||
KeepAlive *Duration `json:"keep_alive,omitempty"`
|
||||
|
||||
// Truncate truncates the input to fit the model's max sequence length.
|
||||
Truncate *bool `json:"truncate,omitempty"`
|
||||
|
||||
// Dimensions truncates the output embedding to the specified dimension.
|
||||
Dimensions int `json:"dimensions,omitempty"`
|
||||
|
||||
// Options lists model-specific options.
|
||||
Options map[string]any `json:"options"`
|
||||
func (b *TriState) UnmarshalJSON(data []byte) error {
|
||||
var v bool
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
if v {
|
||||
*b = TriStateTrue
|
||||
}
|
||||
*b = TriStateFalse
|
||||
return nil
|
||||
}
|
||||
|
||||
// EmbedResponse is the response from [Client.Embed].
|
||||
type EmbedResponse struct {
|
||||
Model string `json:"model"`
|
||||
Embeddings [][]float32 `json:"embeddings"`
|
||||
|
||||
TotalDuration time.Duration `json:"total_duration,omitempty"`
|
||||
LoadDuration time.Duration `json:"load_duration,omitempty"`
|
||||
PromptEvalCount int `json:"prompt_eval_count,omitempty"`
|
||||
func (b *TriState) MarshalJSON() ([]byte, error) {
|
||||
if *b == TriStateUndefined {
|
||||
return nil, nil
|
||||
}
|
||||
var v bool
|
||||
if *b == TriStateTrue {
|
||||
v = true
|
||||
}
|
||||
return json.Marshal(v)
|
||||
}
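As printed here, the older `TriState.UnmarshalJSON` assigns `TriStateTrue` and then falls through to `TriStateFalse` unconditionally; for comparison, a self-contained sketch of the presumably intended decoding branches on the decoded boolean.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type TriState int

const (
	TriStateUndefined TriState = -1
	TriStateFalse     TriState = 0
	TriStateTrue      TriState = 1
)

// Sketch only (not the committed code): map true and false to distinct
// states, leaving TriStateUndefined for fields that were never set.
func (b *TriState) UnmarshalJSON(data []byte) error {
	var v bool
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v {
		*b = TriStateTrue
	} else {
		*b = TriStateFalse
	}
	return nil
}

func main() {
	t := TriStateUndefined
	_ = json.Unmarshal([]byte(`true`), &t)
	fmt.Println(t) // 1
}
```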
|
||||
|
||||
// EmbeddingRequest is the request passed to [Client.Embeddings].
|
||||
@@ -504,7 +217,7 @@ type EmbeddingRequest struct {
|
||||
KeepAlive *Duration `json:"keep_alive,omitempty"`
|
||||
|
||||
// Options lists model-specific options.
|
||||
Options map[string]any `json:"options"`
|
||||
Options map[string]interface{} `json:"options"`
|
||||
}
|
||||
|
||||
// EmbeddingResponse is the response from [Client.Embeddings].
|
||||
@@ -514,51 +227,16 @@ type EmbeddingResponse struct {
|
||||
|
||||
// CreateRequest is the request passed to [Client.Create].
|
||||
type CreateRequest struct {
|
||||
// Model is the model name to create.
|
||||
Model string `json:"model"`
|
||||
|
||||
// Stream specifies whether the response is streaming; it is true by default.
|
||||
Path string `json:"path"`
|
||||
Modelfile string `json:"modelfile"`
|
||||
Stream *bool `json:"stream,omitempty"`
|
||||
|
||||
// Quantize is the quantization format for the model; leave blank to not change the quantization level.
|
||||
Quantize string `json:"quantize,omitempty"`
|
||||
|
||||
// From is the name of the model or file to use as the source.
|
||||
From string `json:"from,omitempty"`
|
||||
|
||||
// RemoteHost is the URL of the upstream ollama API for the model (if any).
|
||||
RemoteHost string `json:"remote_host,omitempty"`
|
||||
|
||||
// Files is a map of files include when creating the model.
|
||||
Files map[string]string `json:"files,omitempty"`
|
||||
|
||||
// Adapters is a map of LoRA adapters to include when creating the model.
|
||||
Adapters map[string]string `json:"adapters,omitempty"`
|
||||
|
||||
// Template is the template used when constructing a request to the model.
|
||||
Template string `json:"template,omitempty"`
|
||||
|
||||
// License is a string or list of strings for licenses.
|
||||
License any `json:"license,omitempty"`
|
||||
|
||||
// System is the system prompt for the model.
|
||||
System string `json:"system,omitempty"`
|
||||
|
||||
// Parameters is a map of hyper-parameters which are applied to the model.
|
||||
Parameters map[string]any `json:"parameters,omitempty"`
|
||||
|
||||
// Messages is a list of messages added to the model before chat and generation requests.
|
||||
Messages []Message `json:"messages,omitempty"`
|
||||
|
||||
Renderer string `json:"renderer,omitempty"`
|
||||
Parser string `json:"parser,omitempty"`
|
||||
|
||||
// Info is a map of additional information for the model
|
||||
Info map[string]any `json:"info,omitempty"`
|
||||
|
||||
// Deprecated: set the model name with Model instead
|
||||
// Name is deprecated, see Model
|
||||
Name string `json:"name"`
|
||||
// Deprecated: use Quantize instead
|
||||
|
||||
// Quantization is deprecated, see Quantize
|
||||
Quantization string `json:"quantization,omitempty"`
|
||||
}
|
||||
|
||||
@@ -566,7 +244,7 @@ type CreateRequest struct {
|
||||
type DeleteRequest struct {
|
||||
Model string `json:"model"`
|
||||
|
||||
// Deprecated: set the model name with Model instead
|
||||
// Name is deprecated, see Model
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
@@ -574,14 +252,12 @@ type DeleteRequest struct {
|
||||
type ShowRequest struct {
|
||||
Model string `json:"model"`
|
||||
System string `json:"system"`
|
||||
|
||||
// Template is deprecated
|
||||
Template string `json:"template"`
|
||||
Verbose bool `json:"verbose"`
|
||||
|
||||
Options map[string]any `json:"options"`
|
||||
Options map[string]interface{} `json:"options"`
|
||||
|
||||
// Deprecated: set the model name with Model instead
|
||||
// Name is deprecated, see Model
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
@@ -592,16 +268,10 @@ type ShowResponse struct {
|
||||
Parameters string `json:"parameters,omitempty"`
|
||||
Template string `json:"template,omitempty"`
|
||||
System string `json:"system,omitempty"`
|
||||
Renderer string `json:"renderer,omitempty"`
|
||||
Parser string `json:"parser,omitempty"`
|
||||
Details ModelDetails `json:"details,omitempty"`
|
||||
Messages []Message `json:"messages,omitempty"`
|
||||
RemoteModel string `json:"remote_model,omitempty"`
|
||||
RemoteHost string `json:"remote_host,omitempty"`
|
||||
ModelInfo map[string]any `json:"model_info,omitempty"`
|
||||
ProjectorInfo map[string]any `json:"projector_info,omitempty"`
|
||||
Tensors []Tensor `json:"tensors,omitempty"`
|
||||
Capabilities []model.Capability `json:"capabilities,omitempty"`
|
||||
ModifiedAt time.Time `json:"modified_at,omitempty"`
|
||||
}
|
||||
|
||||
@@ -614,12 +284,12 @@ type CopyRequest struct {
|
||||
// PullRequest is the request passed to [Client.Pull].
|
||||
type PullRequest struct {
|
||||
Model string `json:"model"`
|
||||
Insecure bool `json:"insecure,omitempty"` // Deprecated: ignored
|
||||
Username string `json:"username"` // Deprecated: ignored
|
||||
Password string `json:"password"` // Deprecated: ignored
|
||||
Insecure bool `json:"insecure,omitempty"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Stream *bool `json:"stream,omitempty"`
|
||||
|
||||
// Deprecated: set the model name with Model instead
|
||||
// Name is deprecated, see Model
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
@@ -640,7 +310,7 @@ type PushRequest struct {
|
||||
Password string `json:"password"`
|
||||
Stream *bool `json:"stream,omitempty"`
|
||||
|
||||
// Deprecated: set the model name with Model instead
|
||||
// Name is deprecated, see Model
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
@@ -658,8 +328,6 @@ type ProcessResponse struct {
|
||||
type ListModelResponse struct {
|
||||
Name string `json:"name"`
|
||||
Model string `json:"model"`
|
||||
RemoteModel string `json:"remote_model,omitempty"`
|
||||
RemoteHost string `json:"remote_host,omitempty"`
|
||||
ModifiedAt time.Time `json:"modified_at"`
|
||||
Size int64 `json:"size"`
|
||||
Digest string `json:"digest"`
|
||||
@@ -675,7 +343,6 @@ type ProcessModelResponse struct {
|
||||
Details ModelDetails `json:"details,omitempty"`
|
||||
ExpiresAt time.Time `json:"expires_at"`
|
||||
SizeVRAM int64 `json:"size_vram"`
|
||||
ContextLength int `json:"context_length"`
|
||||
}
|
||||
|
||||
type TokenResponse struct {
|
||||
@@ -687,22 +354,12 @@ type GenerateResponse struct {
|
||||
// Model is the model name that generated the response.
|
||||
Model string `json:"model"`
|
||||
|
||||
// RemoteModel is the name of the upstream model that generated the response.
|
||||
RemoteModel string `json:"remote_model,omitempty"`
|
||||
|
||||
// RemoteHost is the URL of the upstream Ollama host that generated the response.
|
||||
RemoteHost string `json:"remote_host,omitempty"`
|
||||
|
||||
// CreatedAt is the timestamp of the response.
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
|
||||
// Response is the textual response itself.
|
||||
Response string `json:"response"`
|
||||
|
||||
// Thinking contains the text that was inside thinking tags in the
|
||||
// original model output when ChatRequest.Think is enabled.
|
||||
Thinking string `json:"thinking,omitempty"`
|
||||
|
||||
// Done specifies if the response is complete.
|
||||
Done bool `json:"done"`
|
||||
|
||||
@@ -714,14 +371,6 @@ type GenerateResponse struct {
|
||||
Context []int `json:"context,omitempty"`
|
||||
|
||||
Metrics
|
||||
|
||||
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
|
||||
|
||||
DebugInfo *DebugInfo `json:"_debug_info,omitempty"`
|
||||
|
||||
// Logprobs contains log probability information for the generated tokens,
|
||||
// if requested via the Logprobs parameter.
|
||||
Logprobs []Logprob `json:"logprobs,omitempty"`
|
||||
}
|
||||
|
||||
// ModelDetails provides details about a model.
|
||||
@@ -734,25 +383,6 @@ type ModelDetails struct {
|
||||
QuantizationLevel string `json:"quantization_level"`
|
||||
}
|
||||
|
||||
// UserResponse provides information about a user.
|
||||
type UserResponse struct {
|
||||
ID uuid.UUID `json:"id"`
|
||||
Email string `json:"email"`
|
||||
Name string `json:"name"`
|
||||
Bio string `json:"bio,omitempty"`
|
||||
AvatarURL string `json:"avatarurl,omitempty"`
|
||||
FirstName string `json:"firstname,omitempty"`
|
||||
LastName string `json:"lastname,omitempty"`
|
||||
Plan string `json:"plan,omitempty"`
|
||||
}
|
||||
|
||||
// Tensor describes the metadata for a given tensor.
|
||||
type Tensor struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Shape []uint64 `json:"shape"`
|
||||
}
|
||||
|
||||
func (m *Metrics) Summary() {
|
||||
if m.TotalDuration > 0 {
|
||||
fmt.Fprintf(os.Stderr, "total duration: %v\n", m.TotalDuration)
|
||||
@@ -781,7 +411,7 @@ func (m *Metrics) Summary() {
|
||||
}
|
||||
}
|
||||
|
||||
func (opts *Options) FromMap(m map[string]any) error {
|
||||
func (opts *Options) FromMap(m map[string]interface{}) error {
|
||||
valueOpts := reflect.ValueOf(opts).Elem() // names of the fields in the options struct
|
||||
typeOpts := reflect.TypeOf(opts).Elem() // types of the fields in the options struct
|
||||
|
||||
@@ -797,7 +427,7 @@ func (opts *Options) FromMap(m map[string]any) error {
|
||||
for key, val := range m {
|
||||
opt, ok := jsonOpts[key]
|
||||
if !ok {
|
||||
slog.Warn("invalid option provided", "option", key)
|
||||
slog.Warn("invalid option provided", "option", opt.Name)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -807,6 +437,19 @@ func (opts *Options) FromMap(m map[string]any) error {
|
||||
continue
|
||||
}
|
||||
|
||||
if reflect.PointerTo(field.Type()) == reflect.TypeOf((*TriState)(nil)) {
|
||||
val, ok := val.(bool)
|
||||
if !ok {
|
||||
return fmt.Errorf("option %q must be of type boolean", key)
|
||||
}
|
||||
if val {
|
||||
field.SetInt(int64(TriStateTrue))
|
||||
} else {
|
||||
field.SetInt(int64(TriStateFalse))
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
switch field.Kind() {
|
||||
case reflect.Int:
|
||||
switch t := val.(type) {
|
||||
@@ -838,12 +481,12 @@ func (opts *Options) FromMap(m map[string]any) error {
|
||||
}
|
||||
field.SetString(val)
|
||||
case reflect.Slice:
|
||||
// JSON unmarshals to []any, not []string
|
||||
val, ok := val.([]any)
|
||||
// JSON unmarshals to []interface{}, not []string
|
||||
val, ok := val.([]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("option %q must be of type array", key)
|
||||
}
|
||||
// convert []any to []string
|
||||
// convert []interface{} to []string
|
||||
slice := make([]string, len(val))
|
||||
for i, item := range val {
|
||||
str, ok := item.(string)
|
||||
@@ -853,17 +496,6 @@ func (opts *Options) FromMap(m map[string]any) error {
|
||||
slice[i] = str
|
||||
}
|
||||
field.Set(reflect.ValueOf(slice))
|
||||
case reflect.Pointer:
|
||||
var b bool
|
||||
if field.Type() == reflect.TypeOf(&b) {
|
||||
val, ok := val.(bool)
|
||||
if !ok {
|
||||
return fmt.Errorf("option %q must be of type boolean", key)
|
||||
}
|
||||
field.Set(reflect.ValueOf(&val))
|
||||
} else {
|
||||
return fmt.Errorf("unknown type loading config params: %v %v", field.Kind(), field.Type())
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("unknown type loading config params: %v", field.Kind())
|
||||
}
|
||||
@@ -885,131 +517,33 @@ func DefaultOptions() Options {
|
||||
Temperature: 0.8,
|
||||
TopK: 40,
|
||||
TopP: 0.9,
|
||||
TFSZ: 1.0,
|
||||
TypicalP: 1.0,
|
||||
RepeatLastN: 64,
|
||||
RepeatPenalty: 1.1,
|
||||
PresencePenalty: 0.0,
|
||||
FrequencyPenalty: 0.0,
|
||||
Mirostat: 0,
|
||||
MirostatTau: 5.0,
|
||||
MirostatEta: 0.1,
|
||||
PenalizeNewline: true,
|
||||
Seed: -1,
|
||||
|
||||
Runner: Runner{
|
||||
// options set when the model is loaded
|
||||
NumCtx: int(envconfig.ContextLength()),
|
||||
NumCtx: 2048,
|
||||
NumBatch: 512,
|
||||
NumGPU: -1, // -1 here indicates that NumGPU should be set dynamically
|
||||
NumThread: 0, // let the runtime decide
|
||||
UseMMap: nil,
|
||||
LowVRAM: false,
|
||||
F16KV: true,
|
||||
UseMLock: false,
|
||||
UseMMap: TriStateUndefined,
|
||||
UseNUMA: false,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// ThinkValue represents a value that can be a boolean or a string ("high", "medium", "low")
|
||||
type ThinkValue struct {
|
||||
// Value can be a bool or string
|
||||
Value interface{}
|
||||
}
|
||||
|
||||
// IsValid checks if the ThinkValue is valid
|
||||
func (t *ThinkValue) IsValid() bool {
|
||||
if t == nil || t.Value == nil {
|
||||
return true // nil is valid (means not set)
|
||||
}
|
||||
|
||||
switch v := t.Value.(type) {
|
||||
case bool:
|
||||
return true
|
||||
case string:
|
||||
return v == "high" || v == "medium" || v == "low"
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// IsBool returns true if the value is a boolean
|
||||
func (t *ThinkValue) IsBool() bool {
|
||||
if t == nil || t.Value == nil {
|
||||
return false
|
||||
}
|
||||
_, ok := t.Value.(bool)
|
||||
return ok
|
||||
}
|
||||
|
||||
// IsString returns true if the value is a string
|
||||
func (t *ThinkValue) IsString() bool {
|
||||
if t == nil || t.Value == nil {
|
||||
return false
|
||||
}
|
||||
_, ok := t.Value.(string)
|
||||
return ok
|
||||
}
|
||||
|
||||
// Bool returns the value as a bool (true if enabled in any way)
|
||||
func (t *ThinkValue) Bool() bool {
|
||||
if t == nil || t.Value == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
switch v := t.Value.(type) {
|
||||
case bool:
|
||||
return v
|
||||
case string:
|
||||
// Any string value ("high", "medium", "low") means thinking is enabled
|
||||
return v == "high" || v == "medium" || v == "low"
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// String returns the value as a string
|
||||
func (t *ThinkValue) String() string {
|
||||
if t == nil || t.Value == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
switch v := t.Value.(type) {
|
||||
case string:
|
||||
return v
|
||||
case bool:
|
||||
if v {
|
||||
return "medium" // Default level when just true
|
||||
}
|
||||
return ""
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements json.Unmarshaler
|
||||
func (t *ThinkValue) UnmarshalJSON(data []byte) error {
|
||||
// Try to unmarshal as bool first
|
||||
var b bool
|
||||
if err := json.Unmarshal(data, &b); err == nil {
|
||||
t.Value = b
|
||||
return nil
|
||||
}
|
||||
|
||||
// Try to unmarshal as string
|
||||
var s string
|
||||
if err := json.Unmarshal(data, &s); err == nil {
|
||||
// Validate string values
|
||||
if s != "high" && s != "medium" && s != "low" {
|
||||
return fmt.Errorf("invalid think value: %q (must be \"high\", \"medium\", \"low\", true, or false)", s)
|
||||
}
|
||||
t.Value = s
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("think must be a boolean or string (\"high\", \"medium\", \"low\", true, or false)")
|
||||
}
|
||||
|
||||
// MarshalJSON implements json.Marshaler
|
||||
func (t *ThinkValue) MarshalJSON() ([]byte, error) {
|
||||
if t == nil || t.Value == nil {
|
||||
return []byte("null"), nil
|
||||
}
|
||||
return json.Marshal(t.Value)
|
||||
}
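A brief sketch of the two forms the unmarshaler above accepts: `think` may arrive either as a boolean or as one of the effort strings.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/ollama/ollama/api"
)

func main() {
	var a, b api.GenerateRequest

	_ = json.Unmarshal([]byte(`{"think": true}`), &a)
	_ = json.Unmarshal([]byte(`{"think": "high"}`), &b)

	fmt.Println(a.Think.IsBool(), a.Think.Bool())     // true true
	fmt.Println(b.Think.IsString(), b.Think.String()) // true high
}
```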
|
||||
|
||||
type Duration struct {
|
||||
time.Duration
|
||||
}
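For orientation, the custom unmarshaler on this type (partially visible in the hunk below) accepts a bare number of seconds, a Go duration string, or a negative value meaning the model is effectively never unloaded; a short sketch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/ollama/ollama/api"
)

func main() {
	var secs, str, forever api.Duration

	_ = json.Unmarshal([]byte(`42`), &secs)     // bare number -> seconds
	_ = json.Unmarshal([]byte(`"1h30m"`), &str) // Go duration string
	_ = json.Unmarshal([]byte(`-1`), &forever)  // negative -> keep loaded indefinitely

	fmt.Println(secs.Duration, str.Duration, forever.Duration)
}
```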
|
||||
@@ -1034,7 +568,7 @@ func (d *Duration) UnmarshalJSON(b []byte) (err error) {
|
||||
if t < 0 {
|
||||
d.Duration = time.Duration(math.MaxInt64)
|
||||
} else {
|
||||
d.Duration = time.Duration(t * float64(time.Second))
|
||||
d.Duration = time.Duration(int(t) * int(time.Second))
|
||||
}
|
||||
case string:
|
||||
d.Duration, err = time.ParseDuration(t)
|
||||
@@ -1052,7 +586,7 @@ func (d *Duration) UnmarshalJSON(b []byte) (err error) {
|
||||
}
|
||||
|
||||
// FormatParams converts specified parameter options to their correct types
|
||||
func FormatParams(params map[string][]string) (map[string]any, error) {
|
||||
func FormatParams(params map[string][]string) (map[string]interface{}, error) {
|
||||
opts := Options{}
|
||||
valueOpts := reflect.ValueOf(&opts).Elem() // names of the fields in the options struct
|
||||
typeOpts := reflect.TypeOf(opts) // types of the fields in the options struct
|
||||
@@ -1066,7 +600,7 @@ func FormatParams(params map[string][]string) (map[string]any, error) {
|
||||
}
|
||||
}
|
||||
|
||||
out := make(map[string]any)
|
||||
out := make(map[string]interface{})
|
||||
// iterate params and set values based on json struct tags
|
||||
for key, vals := range params {
|
||||
if opt, ok := jsonOpts[key]; !ok {
|
||||
@@ -1074,6 +608,19 @@ func FormatParams(params map[string][]string) (map[string]any, error) {
|
||||
} else {
|
||||
field := valueOpts.FieldByName(opt.Name)
|
||||
if field.IsValid() && field.CanSet() {
|
||||
if reflect.PointerTo(field.Type()) == reflect.TypeOf((*TriState)(nil)) {
|
||||
boolVal, err := strconv.ParseBool(vals[0])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid bool value %s", vals)
|
||||
}
|
||||
if boolVal {
|
||||
out[key] = TriStateTrue
|
||||
} else {
|
||||
out[key] = TriStateFalse
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
switch field.Kind() {
|
||||
case reflect.Float32:
|
||||
floatVal, err := strconv.ParseFloat(vals[0], 32)
|
||||
@@ -1101,17 +648,6 @@ func FormatParams(params map[string][]string) (map[string]any, error) {
|
||||
case reflect.Slice:
|
||||
// TODO: only string slices are supported right now
|
||||
out[key] = vals
|
||||
case reflect.Pointer:
|
||||
var b bool
|
||||
if field.Type() == reflect.TypeOf(&b) {
|
||||
boolVal, err := strconv.ParseBool(vals[0])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid bool value %s", vals)
|
||||
}
|
||||
out[key] = &boolVal
|
||||
} else {
|
||||
return nil, fmt.Errorf("unknown type %s for %s", field.Kind(), key)
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown type %s for %s", field.Kind(), key)
|
||||
}
|
||||
|
||||
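Since `FormatParams` converts raw string parameters into typed option values, a usage sketch may help; the keys come from the `Options` struct shown earlier, while the values are illustrative.

```go
package main

import (
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	// Each parameter arrives as raw strings, e.g. as they might come from
	// Modelfile PARAMETER lines or CLI flags.
	params := map[string][]string{
		"temperature": {"0.7"},
		"num_ctx":     {"4096"},
		"stop":        {"###", "User:"},
	}

	opts, err := api.FormatParams(params)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(opts) // map[num_ctx:4096 stop:[### User:] temperature:0.7]
}
```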
@@ -2,7 +2,7 @@ package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -17,11 +17,6 @@ func TestKeepAliveParsingFromJSON(t *testing.T) {
|
||||
req string
|
||||
exp *Duration
|
||||
}{
|
||||
{
|
||||
name: "Unset",
|
||||
req: `{ }`,
|
||||
exp: nil,
|
||||
},
|
||||
{
|
||||
name: "Positive Integer",
|
||||
req: `{ "keep_alive": 42 }`,
|
||||
@@ -30,7 +25,7 @@ func TestKeepAliveParsingFromJSON(t *testing.T) {
|
||||
{
|
||||
name: "Positive Float",
|
||||
req: `{ "keep_alive": 42.5 }`,
|
||||
exp: &Duration{42500 * time.Millisecond},
|
||||
exp: &Duration{42 * time.Second},
|
||||
},
|
||||
{
|
||||
name: "Positive Integer String",
|
||||
@@ -113,33 +108,31 @@ func TestDurationMarshalUnmarshal(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestUseMmapParsingFromJSON(t *testing.T) {
|
||||
tr := true
|
||||
fa := false
|
||||
tests := []struct {
|
||||
name string
|
||||
req string
|
||||
exp *bool
|
||||
exp TriState
|
||||
}{
|
||||
{
|
||||
name: "Undefined",
|
||||
req: `{ }`,
|
||||
exp: nil,
|
||||
exp: TriStateUndefined,
|
||||
},
|
||||
{
|
||||
name: "True",
|
||||
req: `{ "use_mmap": true }`,
|
||||
exp: &tr,
|
||||
exp: TriStateTrue,
|
||||
},
|
||||
{
|
||||
name: "False",
|
||||
req: `{ "use_mmap": false }`,
|
||||
exp: &fa,
|
||||
exp: TriStateFalse,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
var oMap map[string]any
|
||||
var oMap map[string]interface{}
|
||||
err := json.Unmarshal([]byte(test.req), &oMap)
|
||||
require.NoError(t, err)
|
||||
opts := DefaultOptions()
|
||||
@@ -151,402 +144,63 @@ func TestUseMmapParsingFromJSON(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestUseMmapFormatParams(t *testing.T) {
|
||||
tr := true
|
||||
fa := false
|
||||
tests := []struct {
|
||||
name string
|
||||
req map[string][]string
|
||||
exp *bool
|
||||
exp TriState
|
||||
err error
|
||||
}{
|
||||
{
|
||||
name: "True",
|
||||
req: map[string][]string{
|
||||
"use_mmap": {"true"},
|
||||
"use_mmap": []string{"true"},
|
||||
},
|
||||
exp: &tr,
|
||||
exp: TriStateTrue,
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
name: "False",
|
||||
req: map[string][]string{
|
||||
"use_mmap": {"false"},
|
||||
"use_mmap": []string{"false"},
|
||||
},
|
||||
exp: &fa,
|
||||
exp: TriStateFalse,
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
name: "Numeric True",
|
||||
req: map[string][]string{
|
||||
"use_mmap": {"1"},
|
||||
"use_mmap": []string{"1"},
|
||||
},
|
||||
exp: &tr,
|
||||
exp: TriStateTrue,
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
name: "Numeric False",
|
||||
req: map[string][]string{
|
||||
"use_mmap": {"0"},
|
||||
"use_mmap": []string{"0"},
|
||||
},
|
||||
exp: &fa,
|
||||
exp: TriStateFalse,
|
||||
err: nil,
|
||||
},
|
||||
{
|
||||
name: "invalid string",
|
||||
req: map[string][]string{
|
||||
"use_mmap": {"foo"},
|
||||
"use_mmap": []string{"foo"},
|
||||
},
|
||||
exp: nil,
|
||||
err: errors.New("invalid bool value [foo]"),
|
||||
exp: TriStateUndefined,
|
||||
err: fmt.Errorf("invalid bool value [foo]"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
resp, err := FormatParams(test.req)
|
||||
require.Equal(t, test.err, err)
|
||||
require.Equal(t, err, test.err)
|
||||
respVal, ok := resp["use_mmap"]
|
||||
if test.exp != nil {
|
||||
if test.exp != TriStateUndefined {
|
||||
assert.True(t, ok, "resp: %v", resp)
|
||||
assert.Equal(t, *test.exp, *respVal.(*bool))
|
||||
assert.Equal(t, test.exp, respVal)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMessage_UnmarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{`{"role": "USER", "content": "Hello!"}`, "user"},
|
||||
{`{"role": "System", "content": "Initialization complete."}`, "system"},
|
||||
{`{"role": "assistant", "content": "How can I help you?"}`, "assistant"},
|
||||
{`{"role": "TOOl", "content": "Access granted."}`, "tool"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
var msg Message
|
||||
if err := json.Unmarshal([]byte(test.input), &msg); err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
if msg.Role != test.expected {
|
||||
t.Errorf("role not lowercased: got %v, expected %v", msg.Role, test.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolFunction_UnmarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "valid enum with same types",
|
||||
input: `{
|
||||
"name": "test",
|
||||
"description": "test function",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"required": ["test"],
|
||||
"properties": {
|
||||
"test": {
|
||||
"type": "string",
|
||||
"description": "test prop",
|
||||
"enum": ["a", "b", "c"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}`,
|
||||
wantErr: "",
|
||||
},
|
||||
{
|
||||
name: "empty enum array",
|
||||
input: `{
|
||||
"name": "test",
|
||||
"description": "test function",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"required": ["test"],
|
||||
"properties": {
|
||||
"test": {
|
||||
"type": "string",
|
||||
"description": "test prop",
|
||||
"enum": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}`,
|
||||
wantErr: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var tf ToolFunction
|
||||
err := json.Unmarshal([]byte(tt.input), &tf)
|
||||
|
||||
if tt.wantErr != "" {
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), tt.wantErr)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolFunctionParameters_MarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input ToolFunctionParameters
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple object with string property",
|
||||
input: ToolFunctionParameters{
|
||||
Type: "object",
|
||||
Required: []string{"name"},
|
||||
Properties: map[string]ToolProperty{
|
||||
"name": {Type: PropertyType{"string"}},
|
||||
},
|
||||
},
|
||||
expected: `{"type":"object","required":["name"],"properties":{"name":{"type":"string"}}}`,
|
||||
},
|
||||
{
|
||||
name: "no required",
|
||||
input: ToolFunctionParameters{
|
||||
Type: "object",
|
||||
Properties: map[string]ToolProperty{
|
||||
"name": {Type: PropertyType{"string"}},
|
||||
},
|
||||
},
|
||||
expected: `{"type":"object","properties":{"name":{"type":"string"}}}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
data, err := json.Marshal(test.input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected, string(data))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolCallFunction_IndexAlwaysMarshals(t *testing.T) {
|
||||
fn := ToolCallFunction{
|
||||
Name: "echo",
|
||||
Arguments: ToolCallFunctionArguments{"message": "hi"},
|
||||
}
|
||||
|
||||
data, err := json.Marshal(fn)
|
||||
require.NoError(t, err)
|
||||
|
||||
raw := map[string]any{}
|
||||
require.NoError(t, json.Unmarshal(data, &raw))
|
||||
require.Contains(t, raw, "index")
|
||||
assert.Equal(t, float64(0), raw["index"])
|
||||
|
||||
fn.Index = 3
|
||||
data, err = json.Marshal(fn)
|
||||
require.NoError(t, err)
|
||||
|
||||
raw = map[string]any{}
|
||||
require.NoError(t, json.Unmarshal(data, &raw))
|
||||
require.Contains(t, raw, "index")
|
||||
assert.Equal(t, float64(3), raw["index"])
|
||||
}
|
||||
|
||||
func TestPropertyType_UnmarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected PropertyType
|
||||
}{
|
||||
{
|
||||
name: "string type",
|
||||
input: `"string"`,
|
||||
expected: PropertyType{"string"},
|
||||
},
|
||||
{
|
||||
name: "array of types",
|
||||
input: `["string", "number"]`,
|
||||
expected: PropertyType{"string", "number"},
|
||||
},
|
||||
{
|
||||
name: "array with single type",
|
||||
input: `["string"]`,
|
||||
expected: PropertyType{"string"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
var pt PropertyType
|
||||
if err := json.Unmarshal([]byte(test.input), &pt); err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
if len(pt) != len(test.expected) {
|
||||
t.Errorf("Length mismatch: got %v, expected %v", len(pt), len(test.expected))
|
||||
}
|
||||
|
||||
for i, v := range pt {
|
||||
if v != test.expected[i] {
|
||||
t.Errorf("Value mismatch at index %d: got %v, expected %v", i, v, test.expected[i])
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPropertyType_MarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input PropertyType
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "single type",
|
||||
input: PropertyType{"string"},
|
||||
expected: `"string"`,
|
||||
},
|
||||
{
|
||||
name: "multiple types",
|
||||
input: PropertyType{"string", "number"},
|
||||
expected: `["string","number"]`,
|
||||
},
|
||||
{
|
||||
name: "empty type",
|
||||
input: PropertyType{},
|
||||
expected: `[]`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
data, err := json.Marshal(test.input)
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
if string(data) != test.expected {
|
||||
t.Errorf("Marshaled data mismatch: got %v, expected %v", string(data), test.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestThinking_UnmarshalJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expectedThinking *ThinkValue
|
||||
expectedError bool
|
||||
}{
|
||||
{
|
||||
name: "true",
|
||||
input: `{ "think": true }`,
|
||||
expectedThinking: &ThinkValue{Value: true},
|
||||
},
|
||||
{
|
||||
name: "false",
|
||||
input: `{ "think": false }`,
|
||||
expectedThinking: &ThinkValue{Value: false},
|
||||
},
|
||||
{
|
||||
name: "unset",
|
||||
input: `{ }`,
|
||||
expectedThinking: nil,
|
||||
},
|
||||
{
|
||||
name: "string_high",
|
||||
input: `{ "think": "high" }`,
|
||||
expectedThinking: &ThinkValue{Value: "high"},
|
||||
},
|
||||
{
|
||||
name: "string_medium",
|
||||
input: `{ "think": "medium" }`,
|
||||
expectedThinking: &ThinkValue{Value: "medium"},
|
||||
},
|
||||
{
|
||||
name: "string_low",
|
||||
input: `{ "think": "low" }`,
|
||||
expectedThinking: &ThinkValue{Value: "low"},
|
||||
},
|
||||
{
|
||||
name: "invalid_string",
|
||||
input: `{ "think": "invalid" }`,
|
||||
expectedThinking: nil,
|
||||
expectedError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
var req GenerateRequest
|
||||
err := json.Unmarshal([]byte(test.input), &req)
|
||||
if test.expectedError {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
if test.expectedThinking == nil {
|
||||
assert.Nil(t, req.Think)
|
||||
} else {
|
||||
require.NotNil(t, req.Think)
|
||||
assert.Equal(t, test.expectedThinking.Value, req.Think.Value)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToolFunctionParameters_String(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
params ToolFunctionParameters
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple object with string property",
|
||||
params: ToolFunctionParameters{
|
||||
Type: "object",
|
||||
Required: []string{"name"},
|
||||
Properties: map[string]ToolProperty{
|
||||
"name": {
|
||||
Type: PropertyType{"string"},
|
||||
Description: "The name of the person",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: `{"type":"object","required":["name"],"properties":{"name":{"type":"string","description":"The name of the person"}}}`,
|
||||
},
|
||||
{
|
||||
name: "marshal failure returns empty string",
|
||||
params: ToolFunctionParameters{
|
||||
Type: "object",
|
||||
Defs: func() any {
|
||||
// Create a cycle that will cause json.Marshal to fail
|
||||
type selfRef struct {
|
||||
Self *selfRef
|
||||
}
|
||||
s := &selfRef{}
|
||||
s.Self = s
|
||||
return s
|
||||
}(),
|
||||
Properties: map[string]ToolProperty{},
|
||||
},
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
result := test.params.String()
|
||||
assert.Equal(t, test.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestToolParameterToTypeScriptType(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
param ToolProperty
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "single string type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"string"},
|
||||
},
|
||||
expected: "string",
|
||||
},
|
||||
{
|
||||
name: "single number type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"number"},
|
||||
},
|
||||
expected: "number",
|
||||
},
|
||||
{
|
||||
name: "integer maps to number",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"integer"},
|
||||
},
|
||||
expected: "number",
|
||||
},
|
||||
{
|
||||
name: "boolean type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"boolean"},
|
||||
},
|
||||
expected: "boolean",
|
||||
},
|
||||
{
|
||||
name: "array type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"array"},
|
||||
},
|
||||
expected: "any[]",
|
||||
},
|
||||
{
|
||||
name: "object type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"object"},
|
||||
},
|
||||
expected: "Record<string, any>",
|
||||
},
|
||||
{
|
||||
name: "null type",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"null"},
|
||||
},
|
||||
expected: "null",
|
||||
},
|
||||
{
|
||||
name: "multiple types as union",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"string", "number"},
|
||||
},
|
||||
expected: "string | number",
|
||||
},
|
||||
{
|
||||
name: "string or null union",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"string", "null"},
|
||||
},
|
||||
expected: "string | null",
|
||||
},
|
||||
{
|
||||
name: "anyOf with single types",
|
||||
param: ToolProperty{
|
||||
AnyOf: []ToolProperty{
|
||||
{Type: PropertyType{"string"}},
|
||||
{Type: PropertyType{"number"}},
|
||||
},
|
||||
},
|
||||
expected: "string | number",
|
||||
},
|
||||
{
|
||||
name: "anyOf with multiple types in each branch",
|
||||
param: ToolProperty{
|
||||
AnyOf: []ToolProperty{
|
||||
{Type: PropertyType{"string", "null"}},
|
||||
{Type: PropertyType{"number"}},
|
||||
},
|
||||
},
|
||||
expected: "string | null | number",
|
||||
},
|
||||
{
|
||||
name: "nested anyOf",
|
||||
param: ToolProperty{
|
||||
AnyOf: []ToolProperty{
|
||||
{Type: PropertyType{"boolean"}},
|
||||
{
|
||||
AnyOf: []ToolProperty{
|
||||
{Type: PropertyType{"string"}},
|
||||
{Type: PropertyType{"number"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: "boolean | string | number",
|
||||
},
|
||||
{
|
||||
name: "empty type returns any",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{},
|
||||
},
|
||||
expected: "any",
|
||||
},
|
||||
{
|
||||
name: "unknown type maps to any",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"unknown_type"},
|
||||
},
|
||||
expected: "any",
|
||||
},
|
||||
{
|
||||
name: "multiple types including array",
|
||||
param: ToolProperty{
|
||||
Type: PropertyType{"string", "array", "null"},
|
||||
},
|
||||
expected: "string | any[] | null",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := tt.param.ToTypeScriptType()
|
||||
if result != tt.expected {
|
||||
t.Errorf("ToTypeScriptType() = %q, want %q", result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
10
app/.gitignore
vendored
@@ -1,11 +1 @@
|
||||
ollama.syso
|
||||
*.crt
|
||||
*.exe
|
||||
/app/app
|
||||
/app/squirrel
|
||||
ollama
|
||||
*cover*
|
||||
.vscode
|
||||
.env
|
||||
.DS_Store
|
||||
.claude
|
||||
|
||||
@@ -1,97 +1,22 @@
|
||||
# Ollama for macOS and Windows
|
||||
# Ollama App
|
||||
|
||||
## Download
|
||||
## Linux
|
||||
|
||||
- [macOS](https://github.com/ollama/app/releases/download/latest/Ollama.dmg)
|
||||
- [Windows](https://github.com/ollama/app/releases/download/latest/OllamaSetup.exe)
|
||||
TODO
|
||||
|
||||
## Development
|
||||
## MacOS
|
||||
|
||||
### Desktop App
|
||||
TODO
|
||||
|
||||
```bash
|
||||
go generate ./... &&
|
||||
go run ./cmd/app
|
||||
```
|
||||
|
||||
### UI Development
|
||||
|
||||
#### Setup
|
||||
|
||||
Install required tools:
|
||||
|
||||
```bash
|
||||
go install github.com/tkrajina/typescriptify-golang-structs/tscriptify@latest
|
||||
```
|
||||
|
||||
#### Develop UI (Development Mode)
|
||||
|
||||
1. Start the React development server (with hot-reload):
|
||||
|
||||
```bash
|
||||
cd ui/app
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
2. In a separate terminal, run the Ollama app with the `-dev` flag:
|
||||
|
||||
```bash
|
||||
go generate ./... &&
|
||||
OLLAMA_DEBUG=1 go run ./cmd/app -dev
|
||||
```
|
||||
|
||||
The `-dev` flag enables:
|
||||
|
||||
- Loading the UI from the Vite dev server at http://localhost:5173
|
||||
- Fixed UI server port at http://127.0.0.1:3001 for API requests
|
||||
- CORS headers for cross-origin requests
|
||||
- Hot-reload support for UI development
|
||||
|
||||
## Build
|
||||
|
||||
|
||||
### Windows
|
||||
## Windows
|
||||
|
||||
If you want to build the installer, you'll need to install
|
||||
- https://jrsoftware.org/isinfo.php
|
||||
|
||||
|
||||
**Dependencies** - either build a local copy of ollama, or use a github release
|
||||
```powershell
|
||||
# Local dependencies
|
||||
.\scripts\deps_local.ps1
|
||||
|
||||
# Release dependencies
|
||||
.\scripts\deps_release.ps1 0.6.8
|
||||
```
|
||||
|
||||
**Build**
|
||||
```powershell
|
||||
.\scripts\build_windows.ps1
|
||||
```
|
||||
|
||||
### macOS
|
||||
|
||||
CI builds with Xcode 14.1 for OS compatibility prior to v13. If you want to manually build v11+ support, you can download the older Xcode [here](https://developer.apple.com/services-account/download?path=/Developer_Tools/Xcode_14.1/Xcode_14.1.xip), extract, then `mv ./Xcode.app /Applications/Xcode_14.1.0.app` then activate with:
|
||||
In the top directory of this repo, run the following powershell script
|
||||
to build the ollama CLI, ollama app, and ollama installer.
|
||||
|
||||
```
|
||||
export CGO_CFLAGS=-mmacosx-version-min=12.0
|
||||
export CGO_CXXFLAGS=-mmacosx-version-min=12.0
|
||||
export CGO_LDFLAGS=-mmacosx-version-min=12.0
|
||||
export SDKROOT=/Applications/Xcode_14.1.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
|
||||
export DEVELOPER_DIR=/Applications/Xcode_14.1.0.app/Contents/Developer
|
||||
```
|
||||
|
||||
**Dependencies** - either build a local copy of Ollama, or use a GitHub release:
|
||||
```sh
|
||||
# Local dependencies
|
||||
./scripts/deps_local.sh
|
||||
|
||||
# Release dependencies
|
||||
./scripts/deps_release.sh 0.6.8
|
||||
```
|
||||
|
||||
**Build**
|
||||
```sh
|
||||
./scripts/build_darwin.sh
|
||||
powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps1
|
||||
```
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package assets
|
||||
|
||||
import (
|
||||
|
||||
|
[Binary image changes: one image removed (was 15 KiB); two images resized (115 KiB to 89 KiB, 116 KiB to 91 KiB).]
@@ -1,26 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package auth
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
|
||||
"github.com/ollama/ollama/auth"
|
||||
)
|
||||
|
||||
// BuildConnectURL generates the connect URL with the public key and device name
|
||||
func BuildConnectURL(baseURL string) (string, error) {
|
||||
pubKey, err := auth.GetPublicKey()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get public key: %w", err)
|
||||
}
|
||||
|
||||
encodedKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey))
|
||||
hostname, _ := os.Hostname()
|
||||
encodedDevice := url.QueryEscape(hostname)
|
||||
|
||||
return fmt.Sprintf("%s/connect?name=%s&key=%s&launch=true", baseURL, encodedDevice, encodedKey), nil
|
||||
}
|
||||
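A usage sketch for the removed helper above; the base URL is illustrative, and the import path matches the one used elsewhere in this diff.

```go
package main

import (
	"fmt"
	"log"

	"github.com/ollama/ollama/app/auth"
)

func main() {
	// Build the connect URL carrying this device's hostname and public key.
	url, err := auth.BuildConnectURL("https://ollama.com") // base URL is illustrative
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(url) // .../connect?name=<hostname>&key=<base64 public key>&launch=true
}
```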
@@ -1,7 +0,0 @@
|
||||
#import <Cocoa/Cocoa.h>
|
||||
|
||||
@interface AppDelegate : NSObject <NSApplicationDelegate>
|
||||
|
||||
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification;
|
||||
|
||||
@end
|
||||
@@ -1,473 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/ollama/ollama/app/auth"
|
||||
"github.com/ollama/ollama/app/logrotate"
|
||||
"github.com/ollama/ollama/app/server"
|
||||
"github.com/ollama/ollama/app/store"
|
||||
"github.com/ollama/ollama/app/tools"
|
||||
"github.com/ollama/ollama/app/ui"
|
||||
"github.com/ollama/ollama/app/updater"
|
||||
"github.com/ollama/ollama/app/version"
|
||||
)
|
||||
|
||||
var (
|
||||
wv = &Webview{}
|
||||
uiServerPort int
|
||||
)
|
||||
|
||||
var debug = strings.EqualFold(os.Getenv("OLLAMA_DEBUG"), "true") || os.Getenv("OLLAMA_DEBUG") == "1"
|
||||
|
||||
var (
|
||||
fastStartup = false
|
||||
devMode = false
|
||||
)
|
||||
|
||||
type appMove int
|
||||
|
||||
const (
|
||||
CannotMove appMove = iota
|
||||
UserDeclinedMove
|
||||
MoveCompleted
|
||||
AlreadyMoved
|
||||
LoginSession
|
||||
PermissionDenied
|
||||
MoveError
|
||||
)
|
||||
|
||||
func main() {
|
||||
startHidden := false
|
||||
var urlSchemeRequest string
|
||||
if len(os.Args) > 1 {
|
||||
for _, arg := range os.Args {
|
||||
// Handle URL scheme requests (Windows)
|
||||
if strings.HasPrefix(arg, "ollama://") {
|
||||
urlSchemeRequest = arg
|
||||
slog.Info("received URL scheme request", "url", arg)
|
||||
continue
|
||||
}
|
||||
switch arg {
|
||||
case "serve":
|
||||
fmt.Fprintln(os.Stderr, "serve command not supported, use ollama")
|
||||
os.Exit(1)
|
||||
case "version", "-v", "--version":
|
||||
fmt.Println(version.Version)
|
||||
os.Exit(0)
|
||||
case "background":
|
||||
// When running the process in this "background" mode, we spawn a
|
||||
// child process for the main app. This is necessary so the
|
||||
// "Allow in the Background" setting in MacOS can be unchecked
|
||||
// without breaking the main app. Two copies of the app are
|
||||
// present in the bundle, one for the main app and one for the
|
||||
// background initiator.
|
||||
fmt.Fprintln(os.Stdout, "starting in background")
|
||||
runInBackground()
|
||||
os.Exit(0)
|
||||
case "hidden", "-j", "--hide":
|
||||
// startHidden suppresses the UI on startup, and can be triggered multiple ways
|
||||
// On windows, path based via login startup detection
|
||||
// On MacOS via [NSApp isHidden] from `open -j -a /Applications/Ollama.app` or equivalent
|
||||
// On both via the "hidden" command line argument
|
||||
startHidden = true
|
||||
case "--fast-startup":
|
||||
// Skip optional steps like pending updates to start quickly for immediate use
|
||||
fastStartup = true
|
||||
case "-dev", "--dev":
|
||||
// Development mode: use local dev server and enable CORS
|
||||
devMode = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
level := slog.LevelInfo
|
||||
if debug {
|
||||
level = slog.LevelDebug
|
||||
}
|
||||
|
||||
logrotate.Rotate(appLogPath)
|
||||
if _, err := os.Stat(filepath.Dir(appLogPath)); errors.Is(err, os.ErrNotExist) {
|
||||
if err := os.MkdirAll(filepath.Dir(appLogPath), 0o755); err != nil {
|
||||
slog.Error(fmt.Sprintf("failed to create server log dir %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var logFile io.Writer
|
||||
var err error
|
||||
logFile, err = os.OpenFile(appLogPath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o755)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("failed to create server log %v", err))
|
||||
return
|
||||
}
|
||||
// Detect if we're a GUI app on windows, and if not, send logs to console as well
|
||||
if os.Stderr.Fd() != 0 {
|
||||
// Console app detected
|
||||
logFile = io.MultiWriter(os.Stderr, logFile)
|
||||
}
|
||||
|
||||
handler := slog.NewTextHandler(logFile, &slog.HandlerOptions{
|
||||
Level: level,
|
||||
AddSource: true,
|
||||
ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
|
||||
if attr.Key == slog.SourceKey {
|
||||
source := attr.Value.Any().(*slog.Source)
|
||||
source.File = filepath.Base(source.File)
|
||||
}
|
||||
return attr
|
||||
},
|
||||
})
|
||||
|
||||
slog.SetDefault(slog.New(handler))
|
||||
logStartup()
|
||||
|
||||
// On Windows, check if another instance is running and send URL to it
|
||||
// Do this after logging is set up so we can debug issues
|
||||
if runtime.GOOS == "windows" && urlSchemeRequest != "" {
|
||||
slog.Debug("checking for existing instance", "url", urlSchemeRequest)
|
||||
if checkAndHandleExistingInstance(urlSchemeRequest) {
|
||||
// The function will exit if it successfully sends to another instance
|
||||
// If we reach here, we're the first/only instance
|
||||
} else {
|
||||
// No existing instance found, handle the URL scheme in this instance
|
||||
go func() {
|
||||
handleURLSchemeInCurrentInstance(urlSchemeRequest)
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
if u := os.Getenv("OLLAMA_UPDATE_URL"); u != "" {
|
||||
updater.UpdateCheckURLBase = u
|
||||
}
|
||||
|
||||
// Detect if this is a first start after an upgrade, in
|
||||
// which case we need to do some cleanup
|
||||
var skipMove bool
|
||||
if _, err := os.Stat(updater.UpgradeMarkerFile); err == nil {
|
||||
slog.Debug("first start after upgrade")
|
||||
err = updater.DoPostUpgradeCleanup()
|
||||
if err != nil {
|
||||
slog.Error("failed to cleanup prior version", "error", err)
|
||||
}
|
||||
// We never prompt to move the app after an upgrade
|
||||
skipMove = true
|
||||
// Start hidden after updates to prevent UI from opening automatically
|
||||
startHidden = true
|
||||
}
|
||||
|
||||
if !skipMove && !fastStartup {
|
||||
if maybeMoveAndRestart() == MoveCompleted {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Check if another instance is already running
|
||||
// On Windows, focus the existing instance; on other platforms, kill it
|
||||
handleExistingInstance(startHidden)
|
||||
|
||||
// on macOS, offer the user to create a symlink
|
||||
// from /usr/local/bin/ollama to the app bundle
|
||||
installSymlink()
|
||||
|
||||
var ln net.Listener
|
||||
if devMode {
|
||||
// Use a fixed port in dev mode for predictable API access
|
||||
ln, err = net.Listen("tcp", "127.0.0.1:3001")
|
||||
} else {
|
||||
ln, err = net.Listen("tcp", "127.0.0.1:0")
|
||||
}
|
||||
if err != nil {
|
||||
slog.Error("failed to find available port", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
port := ln.Addr().(*net.TCPAddr).Port
|
||||
token := uuid.NewString()
|
||||
wv.port = port
|
||||
wv.token = token
|
||||
uiServerPort = port
|
||||
|
||||
st := &store.Store{}
|
||||
|
||||
// Enable CORS in development mode
|
||||
if devMode {
|
||||
os.Setenv("OLLAMA_CORS", "1")
|
||||
|
||||
// Check if Vite dev server is running on port 5173
|
||||
var conn net.Conn
|
||||
var err error
|
||||
for _, addr := range []string{"127.0.0.1:5173", "localhost:5173"} {
|
||||
conn, err = net.DialTimeout("tcp", addr, 2*time.Second)
|
||||
if err == nil {
|
||||
conn.Close()
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
slog.Error("Vite dev server not running on port 5173")
|
||||
fmt.Fprintln(os.Stderr, "Error: Vite dev server is not running on port 5173")
|
||||
fmt.Fprintln(os.Stderr, "Please run 'npm run dev' in the ui/app directory to start the UI in development mode")
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize tools registry
|
||||
toolRegistry := tools.NewRegistry()
|
||||
slog.Info("initialized tools registry", "tool_count", len(toolRegistry.List()))
|
||||
|
||||
// ctx is the app-level context that will be used to stop the app
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
// octx is the ollama server context that will be used to stop the ollama server
|
||||
octx, ocancel := context.WithCancel(ctx)
|
||||
|
||||
// TODO (jmorganca): instead we should instantiate the
|
||||
// webview with the store instead of assigning it here, however
|
||||
// making the webview a global variable is easier for now
|
||||
wv.Store = st
|
||||
done := make(chan error, 1)
|
||||
osrv := server.New(st, devMode)
|
||||
go func() {
|
||||
slog.Info("starting ollama server")
|
||||
done <- osrv.Run(octx)
|
||||
}()
|
||||
|
||||
uiServer := ui.Server{
|
||||
Token: token,
|
||||
Restart: func() {
|
||||
ocancel()
|
||||
<-done
|
||||
octx, ocancel = context.WithCancel(ctx)
|
||||
go func() {
|
||||
done <- osrv.Run(octx)
|
||||
}()
|
||||
},
|
||||
Store: st,
|
||||
ToolRegistry: toolRegistry,
|
||||
Dev: devMode,
|
||||
Logger: slog.Default(),
|
||||
}
|
||||
|
||||
srv := &http.Server{
|
||||
Handler: uiServer.Handler(),
|
||||
}
|
||||
|
||||
if _, err := uiServer.UserData(ctx); err != nil {
|
||||
slog.Warn("failed to load user data", "error", err)
|
||||
}
|
||||
|
||||
// Start the UI server
|
||||
slog.Info("starting ui server", "port", port)
|
||||
go func() {
|
||||
slog.Debug("starting ui server on port", "port", port)
|
||||
err = srv.Serve(ln)
|
||||
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||
slog.Warn("desktop server", "error", err)
|
||||
}
|
||||
slog.Debug("background desktop server done")
|
||||
}()
|
||||
|
||||
updater := &updater.Updater{Store: st}
|
||||
updater.StartBackgroundUpdaterChecker(ctx, UpdateAvailable)
|
||||
|
||||
hasCompletedFirstRun, err := st.HasCompletedFirstRun()
|
||||
if err != nil {
|
||||
slog.Error("failed to load has completed first run", "error", err)
|
||||
}
|
||||
|
||||
if !hasCompletedFirstRun {
|
||||
err = st.SetHasCompletedFirstRun(true)
|
||||
if err != nil {
|
||||
slog.Error("failed to set has completed first run", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// capture SIGINT and SIGTERM signals and gracefully shutdown the app
|
||||
signals := make(chan os.Signal, 1)
|
||||
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
|
||||
go func() {
|
||||
<-signals
|
||||
slog.Info("received SIGINT or SIGTERM signal, shutting down")
|
||||
quit()
|
||||
}()
|
||||
|
||||
if urlSchemeRequest != "" {
|
||||
go func() {
|
||||
handleURLSchemeInCurrentInstance(urlSchemeRequest)
|
||||
}()
|
||||
} else {
|
||||
slog.Debug("no URL scheme request to handle")
|
||||
}
|
||||
|
||||
osRun(cancel, hasCompletedFirstRun, startHidden)
|
||||
|
||||
slog.Info("shutting down desktop server")
|
||||
if err := srv.Close(); err != nil {
|
||||
slog.Warn("error shutting down desktop server", "error", err)
|
||||
}
|
||||
|
||||
slog.Info("shutting down ollama server")
|
||||
cancel()
|
||||
<-done
|
||||
}
|
||||
|
||||
func startHiddenTasks() {
|
||||
// If an upgrade is ready and we're in hidden mode, perform it at startup.
|
||||
// If we're not in hidden mode, we want to start as fast as possible and not
|
||||
// slow the user down with an upgrade.
|
||||
if updater.IsUpdatePending() {
|
||||
if fastStartup {
|
||||
// CLI triggered app startup use-case
|
||||
slog.Info("deferring pending update for fast startup")
|
||||
} else {
|
||||
if err := updater.DoUpgradeAtStartup(); err != nil {
|
||||
slog.Info("unable to perform upgrade at startup", "error", err)
|
||||
// Make sure the restart to upgrade menu shows so we can attempt an interactive upgrade to get authorization
|
||||
UpdateAvailable("")
|
||||
} else {
|
||||
slog.Debug("launching new version...")
|
||||
// TODO - consider a timer that aborts if this takes too long and we haven't been killed yet...
|
||||
LaunchNewApp()
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func checkUserLoggedIn(uiServerPort int) bool {
|
||||
if uiServerPort == 0 {
|
||||
slog.Debug("UI server not ready yet, skipping auth check")
|
||||
return false
|
||||
}
|
||||
|
||||
resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:%d/api/v1/me", uiServerPort))
|
||||
if err != nil {
|
||||
slog.Debug("failed to call local auth endpoint", "error", err)
|
||||
return false
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Check if the response is successful
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
slog.Debug("auth endpoint returned non-OK status", "status", resp.StatusCode)
|
||||
return false
|
||||
}
|
||||
|
||||
var user struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(resp.Body).Decode(&user); err != nil {
|
||||
slog.Debug("failed to parse user response", "error", err)
|
||||
return false
|
||||
}
|
||||
|
||||
// Verify we have a valid user with an ID and name
|
||||
if user.ID == "" || user.Name == "" {
|
||||
slog.Debug("user response missing required fields", "id", user.ID, "name", user.Name)
|
||||
return false
|
||||
}
|
||||
|
||||
slog.Debug("user is logged in", "user_id", user.ID, "user_name", user.Name)
|
||||
return true
|
||||
}
|
||||
|
||||
// handleConnectURLScheme fetches the connect URL and opens it in the browser
|
||||
func handleConnectURLScheme() {
|
||||
if checkUserLoggedIn(uiServerPort) {
|
||||
slog.Info("user is already logged in, opening app instead")
|
||||
showWindow(wv.webview.Window())
|
||||
return
|
||||
}
|
||||
|
||||
connectURL, err := auth.BuildConnectURL("https://ollama.com")
|
||||
if err != nil {
|
||||
slog.Error("failed to build connect URL", "error", err)
|
||||
openInBrowser("https://ollama.com/connect")
|
||||
return
|
||||
}
|
||||
|
||||
openInBrowser(connectURL)
|
||||
}
|
||||
|
||||
// openInBrowser opens the specified URL in the default browser
|
||||
func openInBrowser(url string) {
|
||||
var cmd string
|
||||
var args []string
|
||||
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
cmd = "rundll32"
|
||||
args = []string{"url.dll,FileProtocolHandler", url}
|
||||
case "darwin":
|
||||
cmd = "open"
|
||||
args = []string{url}
|
||||
default: // "linux", "freebsd", "openbsd", "netbsd"... should not reach here
|
||||
slog.Warn("unsupported OS for openInBrowser", "os", runtime.GOOS)
|
||||
}
|
||||
|
||||
slog.Info("executing browser command", "cmd", cmd, "args", args)
|
||||
if err := exec.Command(cmd, args...).Start(); err != nil {
|
||||
slog.Error("failed to open URL in browser", "url", url, "cmd", cmd, "args", args, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// parseURLScheme parses an ollama:// URL and validates it
|
||||
// Supports: ollama:// (open app) and ollama://connect (OAuth)
|
||||
func parseURLScheme(urlSchemeRequest string) (isConnect bool, err error) {
|
||||
parsedURL, err := url.Parse(urlSchemeRequest)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("invalid URL: %w", err)
|
||||
}
|
||||
|
||||
// Check if this is a connect URL
|
||||
if parsedURL.Host == "connect" || strings.TrimPrefix(parsedURL.Path, "/") == "connect" {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// Allow bare ollama:// or ollama:/// to open the app
|
||||
if (parsedURL.Host == "" && parsedURL.Path == "") || parsedURL.Path == "/" {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return false, fmt.Errorf("unsupported ollama:// URL path: %s", urlSchemeRequest)
|
||||
}
|
||||
|
||||
// handleURLSchemeInCurrentInstance processes URL scheme requests in the current instance
|
||||
func handleURLSchemeInCurrentInstance(urlSchemeRequest string) {
|
||||
isConnect, err := parseURLScheme(urlSchemeRequest)
|
||||
if err != nil {
|
||||
slog.Error("failed to parse URL scheme request", "url", urlSchemeRequest, "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
if isConnect {
|
||||
handleConnectURLScheme()
|
||||
} else {
|
||||
if wv.webview != nil {
|
||||
showWindow(wv.webview.Window())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,269 +0,0 @@
//go:build windows || darwin

package main

// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework Webkit -framework Cocoa -framework LocalAuthentication -framework ServiceManagement
// #include "app_darwin.h"
// #include "../../updater/updater_darwin.h"
// typedef const char cchar_t;
import "C"

import (
    "log/slog"
    "os"
    "os/exec"
    "path/filepath"
    "strings"
    "time"
    "unsafe"

    "github.com/ollama/ollama/app/updater"
    "github.com/ollama/ollama/app/version"
)

var ollamaPath = func() string {
    if updater.BundlePath != "" {
        return filepath.Join(updater.BundlePath, "Contents", "Resources", "ollama")
    }

    pwd, err := os.Getwd()
    if err != nil {
        slog.Warn("failed to get pwd", "error", err)
        return ""
    }
    return filepath.Join(pwd, "ollama")
}()

var (
    isApp           = updater.BundlePath != ""
    appLogPath      = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "app.log")
    launchAgentPath = filepath.Join(os.Getenv("HOME"), "Library", "LaunchAgents", "com.ollama.ollama.plist")
)

// TODO(jmorganca): pre-create the window and pass
// it to the webview instead of using the internal one
//
//export StartUI
func StartUI(path *C.cchar_t) {
    p := C.GoString(path)
    wv.Run(p)
    styleWindow(wv.webview.Window())
    C.setWindowDelegate(wv.webview.Window())
}

//export ShowUI
func ShowUI() {
    // If webview is already running, just show the window
    if wv.IsRunning() && wv.webview != nil {
        showWindow(wv.webview.Window())
    } else {
        root := C.CString("/")
        defer C.free(unsafe.Pointer(root))
        StartUI(root)
    }
}

//export StopUI
func StopUI() {
    wv.Terminate()
}

//export StartUpdate
func StartUpdate() {
    if err := updater.DoUpgrade(true); err != nil {
        slog.Error("upgrade failed", "error", err)
        return
    }
    slog.Debug("launching new version...")
    // TODO - consider a timer that aborts if this takes too long and we haven't been killed yet...
    LaunchNewApp()
    // not reached if upgrade works, the new app will kill this process
}

//export darwinStartHiddenTasks
func darwinStartHiddenTasks() {
    startHiddenTasks()
}

func init() {
    // Temporary code to mimic Squirrel ShipIt behavior
    if len(os.Args) > 2 {
        if os.Args[1] == "___launch___" {
            path := strings.TrimPrefix(os.Args[2], "file://")
            slog.Info("Ollama binary called as ShipIt - launching", "app", path)
            appName := C.CString(path)
            defer C.free(unsafe.Pointer(appName))
            C.launchApp(appName)
            slog.Info("other instance has been launched")
            time.Sleep(5 * time.Second)
            slog.Info("exiting with zero status")
            os.Exit(0)
        }
    }
}

// maybeMoveAndRestart checks if we should relocate
// and returns true if we did and should immediately exit
func maybeMoveAndRestart() appMove {
    if updater.BundlePath == "" {
        // Typically developer mode with 'go run ./cmd/app'
        return CannotMove
    }
    // Respect users intent if they chose "keep" vs. "replace" when dragging to Applications
    if strings.HasPrefix(updater.BundlePath, strings.TrimSuffix(updater.SystemWidePath, filepath.Ext(updater.SystemWidePath))) {
        return AlreadyMoved
    }

    // Ask to move to applications directory
    status := (appMove)(C.askToMoveToApplications())
    if status == MoveCompleted {
        // Double check
        if _, err := os.Stat(updater.SystemWidePath); err != nil {
            slog.Warn("stat failure after move", "path", updater.SystemWidePath, "error", err)
            return MoveError
        }
    }
    return status
}

// handleExistingInstance handles existing instances on macOS
func handleExistingInstance(_ bool) {
    C.killOtherInstances()
}

func installSymlink() {
    if !isApp {
        return
    }
    cliPath := C.CString(ollamaPath)
    defer C.free(unsafe.Pointer(cliPath))

    // Check the users path first
    cmd, _ := exec.LookPath("ollama")
    if cmd != "" {
        resolved, err := os.Readlink(cmd)
        if err == nil {
            tmp, err := filepath.Abs(resolved)
            if err == nil {
                resolved = tmp
            }
        } else {
            resolved = cmd
        }
        if resolved == ollamaPath {
            slog.Info("ollama already in users PATH", "cli", cmd)
            return
        }
    }

    code := C.installSymlink(cliPath)
    if code != 0 {
        slog.Error("Failed to install symlink")
    }
}

func UpdateAvailable(ver string) error {
    slog.Debug("update detected, adjusting menu")
    // TODO (jmorganca): find a better check for development mode than checking the bundle path
    if updater.BundlePath != "" {
        C.updateAvailable()
    }
    return nil
}

func osRun(_ func(), hasCompletedFirstRun, startHidden bool) {
    registerLaunchAgent(hasCompletedFirstRun)

    // Run the native macOS app
    // Note: this will block until the app is closed
    slog.Debug("starting native darwin event loop")
    C.run(C._Bool(hasCompletedFirstRun), C._Bool(startHidden))
}

func quit() {
    C.quit()
}

func LaunchNewApp() {
    appName := C.CString(updater.BundlePath)
    defer C.free(unsafe.Pointer(appName))
    C.launchApp(appName)
}

// Send a request to the main app thread to load a UI page
func sendUIRequestMessage(path string) {
    p := C.CString(path)
    defer C.free(unsafe.Pointer(p))
    C.uiRequest(p)
}

func registerLaunchAgent(hasCompletedFirstRun bool) {
    // Remove any stale Login Item registrations
    C.unregisterSelfFromLoginItem()

    C.registerSelfAsLoginItem(C._Bool(hasCompletedFirstRun))
}

func logStartup() {
    appPath := updater.BundlePath
    if appPath == updater.SystemWidePath {
        // Detect sandboxed scenario
        exe, err := os.Executable()
        if err == nil {
            p := filepath.Dir(exe)
            if filepath.Base(p) == "MacOS" {
                p = filepath.Dir(filepath.Dir(p))
                if p != appPath {
                    slog.Info("starting sandboxed Ollama", "app", appPath, "sandbox", p)
                    return
                }
            }
        }
    }
    slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
}

func hideWindow(ptr unsafe.Pointer) {
    C.hideWindow(C.uintptr_t(uintptr(ptr)))
}

func showWindow(ptr unsafe.Pointer) {
    C.showWindow(C.uintptr_t(uintptr(ptr)))
}

func styleWindow(ptr unsafe.Pointer) {
    C.styleWindow(C.uintptr_t(uintptr(ptr)))
}

func runInBackground() {
    cmd := exec.Command(filepath.Join(updater.BundlePath, "Contents", "MacOS", "Ollama"), "hidden")
    if cmd != nil {
        err := cmd.Run()
        if err != nil {
            slog.Error("failed to run Ollama", "bundlePath", updater.BundlePath, "error", err)
            os.Exit(1)
        }
    } else {
        slog.Error("failed to start Ollama in background", "bundlePath", updater.BundlePath)
        os.Exit(1)
    }
}

func drag(ptr unsafe.Pointer) {
    C.drag(C.uintptr_t(uintptr(ptr)))
}

func doubleClick(ptr unsafe.Pointer) {
    C.doubleClick(C.uintptr_t(uintptr(ptr)))
}

//export handleConnectURL
func handleConnectURL() {
    handleConnectURLScheme()
}

// checkAndHandleExistingInstance is not needed on non-Windows platforms
func checkAndHandleExistingInstance(_ string) bool {
    return false
}
@@ -1,43 +0,0 @@
#import <Cocoa/Cocoa.h>
#import <Security/Security.h>

@interface AppDelegate : NSObject <NSApplicationDelegate>
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification;
@end

enum AppMove
{
    CannotMove,
    UserDeclinedMove,
    MoveCompleted,
    AlreadyMoved,
    LoginSession,
    PermissionDenied,
    MoveError,
};

void run(bool firstTimeRun, bool startHidden);
void killOtherInstances();
enum AppMove askToMoveToApplications();
int createSymlinkWithAuthorization();
int installSymlink(const char *cliPath);
extern void Restart();
// extern void Quit();
void StartUI(const char *path);
void ShowUI();
void StopUI();
void StartUpdate();
void darwinStartHiddenTasks();
void launchApp(const char *appPath);
void updateAvailable();
void quit();
void uiRequest(char *path);
void registerSelfAsLoginItem(bool firstTimeRun);
void unregisterSelfFromLoginItem();
void setWindowDelegate(void *window);
void showWindow(uintptr_t wndPtr);
void hideWindow(uintptr_t wndPtr);
void styleWindow(uintptr_t wndPtr);
void drag(uintptr_t wndPtr);
void doubleClick(uintptr_t wndPtr);
void handleConnectURL();
@@ -1,441 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
"github.com/ollama/ollama/app/updater"
|
||||
"github.com/ollama/ollama/app/version"
|
||||
"github.com/ollama/ollama/app/wintray"
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
var (
|
||||
u32 = windows.NewLazySystemDLL("User32.dll")
|
||||
pBringWindowToTop = u32.NewProc("BringWindowToTop")
|
||||
pShowWindow = u32.NewProc("ShowWindow")
|
||||
pSendMessage = u32.NewProc("SendMessageA")
|
||||
pGetSystemMetrics = u32.NewProc("GetSystemMetrics")
|
||||
pGetWindowRect = u32.NewProc("GetWindowRect")
|
||||
pSetWindowPos = u32.NewProc("SetWindowPos")
|
||||
pSetForegroundWindow = u32.NewProc("SetForegroundWindow")
|
||||
pSetActiveWindow = u32.NewProc("SetActiveWindow")
|
||||
pIsIconic = u32.NewProc("IsIconic")
|
||||
|
||||
appPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Programs", "Ollama")
|
||||
appLogPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "app.log")
|
||||
startupShortcut = filepath.Join(os.Getenv("APPDATA"), "Microsoft", "Windows", "Start Menu", "Programs", "Startup", "Ollama.lnk")
|
||||
ollamaPath string
|
||||
DesktopAppName = "ollama app.exe"
|
||||
)
|
||||
|
||||
func init() {
|
||||
// With alternate install location use executable location
|
||||
exe, err := os.Executable()
|
||||
if err != nil {
|
||||
slog.Warn("error discovering executable directory", "error", err)
|
||||
} else {
|
||||
appPath = filepath.Dir(exe)
|
||||
}
|
||||
ollamaPath = filepath.Join(appPath, "ollama.exe")
|
||||
|
||||
// Handle developer mode (go run ./cmd/app)
|
||||
if _, err := os.Stat(ollamaPath); err != nil {
|
||||
pwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
slog.Warn("missing ollama.exe and failed to get pwd", "error", err)
|
||||
return
|
||||
}
|
||||
distAppPath := filepath.Join(pwd, "dist", "windows-"+runtime.GOARCH)
|
||||
distOllamaPath := filepath.Join(distAppPath, "ollama.exe")
|
||||
if _, err := os.Stat(distOllamaPath); err == nil {
|
||||
slog.Info("detected developer mode")
|
||||
appPath = distAppPath
|
||||
ollamaPath = distOllamaPath
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func maybeMoveAndRestart() appMove {
|
||||
return 0
|
||||
}
|
||||
|
||||
// handleExistingInstance checks for existing instances and optionally focuses them
|
||||
func handleExistingInstance(startHidden bool) {
|
||||
if wintray.CheckAndFocusExistingInstance(!startHidden) {
|
||||
slog.Info("existing instance found, exiting")
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
func installSymlink() {}
|
||||
|
||||
type appCallbacks struct {
|
||||
t wintray.TrayCallbacks
|
||||
shutdown func()
|
||||
}
|
||||
|
||||
var app = &appCallbacks{}
|
||||
|
||||
func (ac *appCallbacks) UIRun(path string) {
|
||||
wv.Run(path)
|
||||
}
|
||||
|
||||
func (*appCallbacks) UIShow() {
|
||||
if wv.webview != nil {
|
||||
showWindow(wv.webview.Window())
|
||||
} else {
|
||||
wv.Run("/")
|
||||
}
|
||||
}
|
||||
|
||||
func (*appCallbacks) UITerminate() {
|
||||
wv.Terminate()
|
||||
}
|
||||
|
||||
func (*appCallbacks) UIRunning() bool {
|
||||
return wv.IsRunning()
|
||||
}
|
||||
|
||||
func (app *appCallbacks) Quit() {
|
||||
app.t.Quit()
|
||||
wv.Terminate()
|
||||
}
|
||||
|
||||
// TODO - reconcile with above for consistency between mac/windows
|
||||
func quit() {
|
||||
wv.Terminate()
|
||||
}
|
||||
|
||||
func (app *appCallbacks) DoUpdate() {
|
||||
// Safeguard in case we have requests in flight that need to drain...
|
||||
slog.Info("Waiting for server to shutdown")
|
||||
|
||||
app.shutdown()
|
||||
|
||||
if err := updater.DoUpgrade(true); err != nil {
|
||||
slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err))
|
||||
}
|
||||
}
|
||||
|
||||
// HandleURLScheme implements the URLSchemeHandler interface
|
||||
func (app *appCallbacks) HandleURLScheme(urlScheme string) {
|
||||
handleURLSchemeRequest(urlScheme)
|
||||
}
|
||||
|
||||
// handleURLSchemeRequest processes URL scheme requests from other instances
|
||||
func handleURLSchemeRequest(urlScheme string) {
|
||||
isConnect, err := parseURLScheme(urlScheme)
|
||||
if err != nil {
|
||||
slog.Error("failed to parse URL scheme request", "url", urlScheme, "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
if isConnect {
|
||||
handleConnectURLScheme()
|
||||
} else {
|
||||
if wv.webview != nil {
|
||||
showWindow(wv.webview.Window())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func UpdateAvailable(ver string) error {
|
||||
return app.t.UpdateAvailable(ver)
|
||||
}
|
||||
|
||||
func osRun(shutdown func(), hasCompletedFirstRun, startHidden bool) {
|
||||
var err error
|
||||
app.shutdown = shutdown
|
||||
app.t, err = wintray.NewTray(app)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to start: %s", err)
|
||||
}
|
||||
|
||||
signals := make(chan os.Signal, 1)
|
||||
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
// TODO - can this be generalized?
|
||||
go func() {
|
||||
<-signals
|
||||
slog.Debug("shutting down due to signal")
|
||||
app.t.Quit()
|
||||
wv.Terminate()
|
||||
}()
|
||||
|
||||
// On windows, we run the final tasks in the main thread
|
||||
// before starting the tray event loop. These final tasks
|
||||
// may trigger the UI, and must do that from the main thread.
|
||||
if !startHidden {
|
||||
// Determine if the process was started from a shortcut
|
||||
// ~\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup\Ollama
|
||||
const STARTF_TITLEISLINKNAME = 0x00000800
|
||||
var info windows.StartupInfo
|
||||
if err := windows.GetStartupInfo(&info); err != nil {
|
||||
slog.Debug("unable to retrieve startup info", "error", err)
|
||||
} else if info.Flags&STARTF_TITLEISLINKNAME == STARTF_TITLEISLINKNAME {
|
||||
linkPath := windows.UTF16PtrToString(info.Title)
|
||||
if strings.Contains(linkPath, "Startup") {
|
||||
startHidden = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if startHidden {
|
||||
startHiddenTasks()
|
||||
} else {
|
||||
ptr := wv.Run("/")
|
||||
|
||||
// Set the window icon using the tray icon
|
||||
if ptr != nil {
|
||||
iconHandle := app.t.GetIconHandle()
|
||||
if iconHandle != 0 {
|
||||
hwnd := uintptr(ptr)
|
||||
const ICON_SMALL = 0
|
||||
const ICON_BIG = 1
|
||||
const WM_SETICON = 0x0080
|
||||
|
||||
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
|
||||
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
|
||||
}
|
||||
}
|
||||
|
||||
centerWindow(ptr)
|
||||
}
|
||||
|
||||
if !hasCompletedFirstRun {
|
||||
// Only create the login shortcut on first start
|
||||
// so we can respect users deletion of the link
|
||||
err = createLoginShortcut()
|
||||
if err != nil {
|
||||
slog.Warn("unable to create login shortcut", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
app.t.TrayRun() // This will block the main thread
|
||||
}
|
||||
|
||||
func createLoginShortcut() error {
|
||||
// The installer lays down a shortcut for us so we can copy it without
|
||||
// having to resort to calling COM APIs to establish the shortcut
|
||||
shortcutOrigin := filepath.Join(appPath, "lib", "Ollama.lnk")
|
||||
|
||||
_, err := os.Stat(startupShortcut)
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
in, err := os.Open(shortcutOrigin)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to open shortcut %s : %w", shortcutOrigin, err)
|
||||
}
|
||||
defer in.Close()
|
||||
out, err := os.Create(startupShortcut)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to open startup link %s : %w", startupShortcut, err)
|
||||
}
|
||||
defer out.Close()
|
||||
_, err = io.Copy(out, in)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to copy shortcut %s : %w", startupShortcut, err)
|
||||
}
|
||||
err = out.Sync()
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to sync shortcut %s : %w", startupShortcut, err)
|
||||
}
|
||||
slog.Info("Created Startup shortcut", "shortcut", startupShortcut)
|
||||
} else {
|
||||
slog.Warn("unexpected error looking up Startup shortcut", "error", err)
|
||||
}
|
||||
} else {
|
||||
slog.Debug("Startup link already exists", "shortcut", startupShortcut)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Send a request to the main app thread to load a UI page
|
||||
func sendUIRequestMessage(path string) {
|
||||
wintray.SendUIRequestMessage(path)
|
||||
}
|
||||
|
||||
func LaunchNewApp() {
|
||||
}
|
||||
|
||||
func logStartup() {
|
||||
slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
|
||||
}
|
||||
|
||||
const (
|
||||
SW_HIDE = 0 // Hides the window
|
||||
SW_SHOW = 5 // Shows window in its current size/position
|
||||
SW_SHOWNA = 8 // Shows without activating
|
||||
SW_MINIMIZE = 6 // Minimizes the window
|
||||
SW_RESTORE = 9 // Restores to previous size/position
|
||||
SW_SHOWDEFAULT = 10 // Sets show state based on program state
|
||||
SM_CXSCREEN = 0
|
||||
SM_CYSCREEN = 1
|
||||
HWND_TOP = 0
|
||||
SWP_NOSIZE = 0x0001
|
||||
SWP_NOMOVE = 0x0002
|
||||
SWP_NOZORDER = 0x0004
|
||||
SWP_SHOWWINDOW = 0x0040
|
||||
|
||||
// Menu constants
|
||||
MF_STRING = 0x00000000
|
||||
MF_SEPARATOR = 0x00000800
|
||||
MF_GRAYED = 0x00000001
|
||||
TPM_RETURNCMD = 0x0100
|
||||
)
|
||||
|
||||
// POINT structure for cursor position
|
||||
type POINT struct {
|
||||
X int32
|
||||
Y int32
|
||||
}
|
||||
|
||||
// Rect structure for GetWindowRect
|
||||
type Rect struct {
|
||||
Left int32
|
||||
Top int32
|
||||
Right int32
|
||||
Bottom int32
|
||||
}
|
||||
|
||||
func centerWindow(ptr unsafe.Pointer) {
|
||||
hwnd := uintptr(ptr)
|
||||
if hwnd == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
var rect Rect
|
||||
pGetWindowRect.Call(hwnd, uintptr(unsafe.Pointer(&rect)))
|
||||
|
||||
screenWidth, _, _ := pGetSystemMetrics.Call(uintptr(SM_CXSCREEN))
|
||||
screenHeight, _, _ := pGetSystemMetrics.Call(uintptr(SM_CYSCREEN))
|
||||
|
||||
windowWidth := rect.Right - rect.Left
|
||||
windowHeight := rect.Bottom - rect.Top
|
||||
|
||||
x := (int32(screenWidth) - windowWidth) / 2
|
||||
y := (int32(screenHeight) - windowHeight) / 2
|
||||
|
||||
// Ensure the window is not positioned off-screen
|
||||
if x < 0 {
|
||||
x = 0
|
||||
}
|
||||
if y < 0 {
|
||||
y = 0
|
||||
}
|
||||
|
||||
pSetWindowPos.Call(
|
||||
hwnd,
|
||||
uintptr(HWND_TOP),
|
||||
uintptr(x),
|
||||
uintptr(y),
|
||||
uintptr(windowWidth), // Keep original width
|
||||
uintptr(windowHeight), // Keep original height
|
||||
uintptr(SWP_SHOWWINDOW),
|
||||
)
|
||||
}
|
||||
|
||||
func showWindow(ptr unsafe.Pointer) {
|
||||
hwnd := uintptr(ptr)
|
||||
if hwnd != 0 {
|
||||
iconHandle := app.t.GetIconHandle()
|
||||
if iconHandle != 0 {
|
||||
const ICON_SMALL = 0
|
||||
const ICON_BIG = 1
|
||||
const WM_SETICON = 0x0080
|
||||
|
||||
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
|
||||
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
|
||||
}
|
||||
|
||||
// Check if window is minimized
|
||||
isMinimized, _, _ := pIsIconic.Call(hwnd)
|
||||
if isMinimized != 0 {
|
||||
// Restore the window if it's minimized
|
||||
pShowWindow.Call(hwnd, uintptr(SW_RESTORE))
|
||||
}
|
||||
|
||||
// Show the window
|
||||
pShowWindow.Call(hwnd, uintptr(SW_SHOW))
|
||||
|
||||
// Bring window to top
|
||||
pBringWindowToTop.Call(hwnd)
|
||||
|
||||
// Force window to foreground
|
||||
pSetForegroundWindow.Call(hwnd)
|
||||
|
||||
// Make it the active window
|
||||
pSetActiveWindow.Call(hwnd)
|
||||
|
||||
// Ensure window is positioned on top
|
||||
pSetWindowPos.Call(
|
||||
hwnd,
|
||||
uintptr(HWND_TOP),
|
||||
0, 0, 0, 0,
|
||||
uintptr(SWP_NOSIZE|SWP_NOMOVE|SWP_SHOWWINDOW),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// HideWindow hides the application window
|
||||
func hideWindow(ptr unsafe.Pointer) {
|
||||
hwnd := uintptr(ptr)
|
||||
if hwnd != 0 {
|
||||
pShowWindow.Call(
|
||||
hwnd,
|
||||
uintptr(SW_HIDE),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func runInBackground() {
|
||||
exe, err := os.Executable()
|
||||
if err != nil {
|
||||
slog.Error("failed to get executable path", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
cmd := exec.Command(exe, "hidden")
|
||||
if cmd != nil {
|
||||
err = cmd.Run()
|
||||
if err != nil {
|
||||
slog.Error("failed to run Ollama", "exe", exe, "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
} else {
|
||||
slog.Error("failed to start Ollama", "exe", exe)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func drag(ptr unsafe.Pointer) {}
|
||||
|
||||
func doubleClick(ptr unsafe.Pointer) {}
|
||||
|
||||
// checkAndHandleExistingInstance checks if another instance is running and sends the URL to it
|
||||
func checkAndHandleExistingInstance(urlSchemeRequest string) bool {
|
||||
if urlSchemeRequest == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
// Try to send URL to existing instance using wintray messaging
|
||||
if wintray.CheckAndSendToExistingInstance(urlSchemeRequest) {
|
||||
os.Exit(0)
|
||||
return true
|
||||
}
|
||||
|
||||
// No existing instance, we'll handle it ourselves
|
||||
return false
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
#ifndef MENU_H
|
||||
#define MENU_H
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
typedef struct
|
||||
{
|
||||
char *label;
|
||||
int enabled;
|
||||
int separator;
|
||||
} menuItem;
|
||||
|
||||
// TODO (jmorganca): these need to be forward declared in the webview.h file
|
||||
// for now but ideally they should be in this header file on windows too
|
||||
#ifndef WIN32
|
||||
int menu_get_item_count();
|
||||
void *menu_get_items();
|
||||
void menu_handle_selection(char *item);
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -1,528 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package main
|
||||
|
||||
// #include "menu.h"
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
"unsafe"
|
||||
|
||||
"github.com/ollama/ollama/app/dialog"
|
||||
"github.com/ollama/ollama/app/store"
|
||||
"github.com/ollama/ollama/app/webview"
|
||||
)
|
||||
|
||||
type Webview struct {
|
||||
port int
|
||||
token string
|
||||
webview webview.WebView
|
||||
mutex sync.Mutex
|
||||
|
||||
Store *store.Store
|
||||
}
|
||||
|
||||
// Run initializes the webview and starts its event loop.
|
||||
// Note: this must be called from the primary app thread
|
||||
// This returns the OS native window handle to the caller
|
||||
func (w *Webview) Run(path string) unsafe.Pointer {
|
||||
var url string
|
||||
if devMode {
|
||||
// In development mode, use the local dev server
|
||||
url = fmt.Sprintf("http://localhost:5173%s", path)
|
||||
} else {
|
||||
url = fmt.Sprintf("http://127.0.0.1:%d%s", w.port, path)
|
||||
}
|
||||
w.mutex.Lock()
|
||||
defer w.mutex.Unlock()
|
||||
|
||||
if w.webview == nil {
|
||||
// Note: turning on debug on macos throws errors but is marginally functional for debugging
|
||||
// TODO (jmorganca): we should pre-create the window and then provide it here to
|
||||
// webview so we can hide it from the start and make other modifications
|
||||
wv := webview.New(debug)
|
||||
// start the window hidden
|
||||
hideWindow(wv.Window())
|
||||
wv.SetTitle("Ollama")
|
||||
|
||||
// TODO (jmorganca): this isn't working yet since it needs to be set
|
||||
// on the first page load, ideally in an interstitial page like `/token`
|
||||
// that exists only to set the cookie and redirect to /
|
||||
// wv.Init(fmt.Sprintf(`document.cookie = "token=%s; path=/"`, w.token))
|
||||
init := `
|
||||
// Disable reload
|
||||
document.addEventListener('keydown', function(e) {
|
||||
if ((e.ctrlKey || e.metaKey) && e.key === 'r') {
|
||||
e.preventDefault();
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
// Prevent back/forward navigation
|
||||
window.addEventListener('popstate', function(e) {
|
||||
e.preventDefault();
|
||||
history.pushState(null, '', window.location.pathname);
|
||||
return false;
|
||||
});
|
||||
|
||||
// Clear history on load
|
||||
window.addEventListener('load', function() {
|
||||
history.pushState(null, '', window.location.pathname);
|
||||
window.history.replaceState(null, '', window.location.pathname);
|
||||
});
|
||||
|
||||
// Set token cookie
|
||||
document.cookie = "token=` + w.token + `; path=/";
|
||||
`
|
||||
// Windows-specific scrollbar styling
|
||||
if runtime.GOOS == "windows" {
|
||||
init += `
|
||||
// Fix scrollbar styling for Edge WebView2 on Windows only
|
||||
function updateScrollbarStyles() {
|
||||
const isDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
const existingStyle = document.getElementById('scrollbar-style');
|
||||
if (existingStyle) existingStyle.remove();
|
||||
|
||||
const style = document.createElement('style');
|
||||
style.id = 'scrollbar-style';
|
||||
|
||||
if (isDark) {
|
||||
style.textContent = ` + "`" + `
|
||||
::-webkit-scrollbar { width: 6px !important; height: 6px !important; }
|
||||
::-webkit-scrollbar-track { background: #1a1a1a !important; }
|
||||
::-webkit-scrollbar-thumb { background: #404040 !important; border-radius: 6px !important; }
|
||||
::-webkit-scrollbar-thumb:hover { background: #505050 !important; }
|
||||
::-webkit-scrollbar-corner { background: #1a1a1a !important; }
|
||||
::-webkit-scrollbar-button {
|
||||
background: transparent !important;
|
||||
border: none !important;
|
||||
width: 0px !important;
|
||||
height: 0px !important;
|
||||
margin: 0 !important;
|
||||
padding: 0 !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:vertical:start:decrement {
|
||||
background: transparent !important;
|
||||
height: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:vertical:end:increment {
|
||||
background: transparent !important;
|
||||
height: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:horizontal:start:decrement {
|
||||
background: transparent !important;
|
||||
width: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:horizontal:end:increment {
|
||||
background: transparent !important;
|
||||
width: 0px !important;
|
||||
}
|
||||
` + "`" + `;
|
||||
} else {
|
||||
style.textContent = ` + "`" + `
|
||||
::-webkit-scrollbar { width: 6px !important; height: 6px !important; }
|
||||
::-webkit-scrollbar-track { background: #f0f0f0 !important; }
|
||||
::-webkit-scrollbar-thumb { background: #c0c0c0 !important; border-radius: 6px !important; }
|
||||
::-webkit-scrollbar-thumb:hover { background: #a0a0a0 !important; }
|
||||
::-webkit-scrollbar-corner { background: #f0f0f0 !important; }
|
||||
::-webkit-scrollbar-button {
|
||||
background: transparent !important;
|
||||
border: none !important;
|
||||
width: 0px !important;
|
||||
height: 0px !important;
|
||||
margin: 0 !important;
|
||||
padding: 0 !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:vertical:start:decrement {
|
||||
background: transparent !important;
|
||||
height: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:vertical:end:increment {
|
||||
background: transparent !important;
|
||||
height: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:horizontal:start:decrement {
|
||||
background: transparent !important;
|
||||
width: 0px !important;
|
||||
}
|
||||
::-webkit-scrollbar-button:horizontal:end:increment {
|
||||
background: transparent !important;
|
||||
width: 0px !important;
|
||||
}
|
||||
` + "`" + `;
|
||||
}
|
||||
document.head.appendChild(style);
|
||||
}
|
||||
|
||||
window.addEventListener('load', updateScrollbarStyles);
|
||||
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateScrollbarStyles);
|
||||
`
|
||||
}
|
||||
// on windows make ctrl+n open new chat
|
||||
// TODO (jmorganca): later we should use proper accelerators
|
||||
// once we introduce a native menu for the window
|
||||
// this is only used on windows since macOS uses the proper accelerators
|
||||
if runtime.GOOS == "windows" {
|
||||
init += `
|
||||
document.addEventListener('keydown', function(e) {
|
||||
if ((e.ctrlKey || e.metaKey) && e.key === 'n') {
|
||||
e.preventDefault();
|
||||
// Use the existing navigation method
|
||||
history.pushState({}, '', '/c/new');
|
||||
window.dispatchEvent(new PopStateEvent('popstate'));
|
||||
return false;
|
||||
}
|
||||
});
|
||||
`
|
||||
}
|
||||
|
||||
init += `
|
||||
window.OLLAMA_WEBSEARCH = true;
|
||||
`
|
||||
|
||||
wv.Init(init)
|
||||
|
||||
// Add keyboard handler for zoom
|
||||
wv.Init(`
|
||||
window.addEventListener('keydown', function(e) {
|
||||
// CMD/Ctrl + Plus/Equals (zoom in)
|
||||
if ((e.metaKey || e.ctrlKey) && (e.key === '+' || e.key === '=')) {
|
||||
e.preventDefault();
|
||||
window.zoomIn && window.zoomIn();
|
||||
return false;
|
||||
}
|
||||
|
||||
// CMD/Ctrl + Minus (zoom out)
|
||||
if ((e.metaKey || e.ctrlKey) && e.key === '-') {
|
||||
e.preventDefault();
|
||||
window.zoomOut && window.zoomOut();
|
||||
return false;
|
||||
}
|
||||
|
||||
// CMD/Ctrl + 0 (reset zoom)
|
||||
if ((e.metaKey || e.ctrlKey) && e.key === '0') {
|
||||
e.preventDefault();
|
||||
window.zoomReset && window.zoomReset();
|
||||
return false;
|
||||
}
|
||||
}, true);
|
||||
`)
|
||||
|
||||
wv.Bind("zoomIn", func() {
|
||||
current := wv.GetZoom()
|
||||
wv.SetZoom(current + 0.1)
|
||||
})
|
||||
|
||||
wv.Bind("zoomOut", func() {
|
||||
current := wv.GetZoom()
|
||||
wv.SetZoom(current - 0.1)
|
||||
})
|
||||
|
||||
wv.Bind("zoomReset", func() {
|
||||
wv.SetZoom(1.0)
|
||||
})
|
||||
|
||||
wv.Bind("ready", func() {
|
||||
showWindow(wv.Window())
|
||||
})
|
||||
|
||||
wv.Bind("close", func() {
|
||||
hideWindow(wv.Window())
|
||||
})
|
||||
|
||||
// Webviews do not allow access to the file system by default, so we need to
|
||||
// bind file system operations here
|
||||
wv.Bind("selectModelsDirectory", func() {
|
||||
go func() {
|
||||
// Helper function to call the JavaScript callback with data or null
|
||||
callCallback := func(data interface{}) {
|
||||
dataJSON, _ := json.Marshal(data)
|
||||
wv.Dispatch(func() {
|
||||
wv.Eval(fmt.Sprintf("window.__selectModelsDirectoryCallback && window.__selectModelsDirectoryCallback(%s)", dataJSON))
|
||||
})
|
||||
}
|
||||
|
||||
directory, err := dialog.Directory().Title("Select Model Directory").ShowHidden(true).Browse()
|
||||
if err != nil {
|
||||
slog.Debug("Directory selection cancelled or failed", "error", err)
|
||||
callCallback(nil)
|
||||
return
|
||||
}
|
||||
slog.Debug("Directory selected", "path", directory)
|
||||
callCallback(directory)
|
||||
}()
|
||||
})
|
||||
|
||||
// Bind selectFiles function for selecting multiple files at once
|
||||
wv.Bind("selectFiles", func() {
|
||||
go func() {
|
||||
// Helper function to call the JavaScript callback with data or null
|
||||
callCallback := func(data interface{}) {
|
||||
dataJSON, _ := json.Marshal(data)
|
||||
wv.Dispatch(func() {
|
||||
wv.Eval(fmt.Sprintf("window.__selectFilesCallback && window.__selectFilesCallback(%s)", dataJSON))
|
||||
})
|
||||
}
|
||||
|
||||
// Define allowed extensions for native dialog filtering
|
||||
textExts := []string{
|
||||
"pdf", "docx", "txt", "md", "csv", "json", "xml", "html", "htm",
|
||||
"js", "jsx", "ts", "tsx", "py", "java", "cpp", "c", "cc", "h", "cs", "php", "rb",
|
||||
"go", "rs", "swift", "kt", "scala", "sh", "bat", "yaml", "yml", "toml", "ini",
|
||||
"cfg", "conf", "log", "rtf",
|
||||
}
|
||||
imageExts := []string{"png", "jpg", "jpeg", "webp"}
|
||||
allowedExts := append(textExts, imageExts...)
|
||||
|
||||
// Use native multiple file selection with extension filtering
|
||||
filenames, err := dialog.File().
|
||||
Filter("Supported Files", allowedExts...).
|
||||
Title("Select Files").
|
||||
LoadMultiple()
|
||||
if err != nil {
|
||||
slog.Debug("Multiple file selection cancelled or failed", "error", err)
|
||||
callCallback(nil)
|
||||
return
|
||||
}
|
||||
|
||||
if len(filenames) == 0 {
|
||||
callCallback(nil)
|
||||
return
|
||||
}
|
||||
|
||||
var files []map[string]string
|
||||
maxFileSize := int64(10 * 1024 * 1024) // 10MB
|
||||
|
||||
for _, filename := range filenames {
|
||||
// Check file extension (double-check after native dialog filtering)
|
||||
ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(filename), "."))
|
||||
validExt := false
|
||||
for _, allowedExt := range allowedExts {
|
||||
if ext == allowedExt {
|
||||
validExt = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !validExt {
|
||||
slog.Warn("file extension not allowed, skipping", "filename", filepath.Base(filename), "extension", ext)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check file size before reading (pre-filter large files)
|
||||
fileStat, err := os.Stat(filename)
|
||||
if err != nil {
|
||||
slog.Error("failed to get file info", "error", err, "filename", filename)
|
||||
continue
|
||||
}
|
||||
|
||||
if fileStat.Size() > maxFileSize {
|
||||
slog.Warn("file too large, skipping", "filename", filepath.Base(filename), "size", fileStat.Size())
|
||||
continue
|
||||
}
|
||||
|
||||
fileBytes, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
slog.Error("failed to read file", "error", err, "filename", filename)
|
||||
continue
|
||||
}
|
||||
|
||||
mimeType := http.DetectContentType(fileBytes)
|
||||
dataURL := fmt.Sprintf("data:%s;base64,%s", mimeType, base64.StdEncoding.EncodeToString(fileBytes))
|
||||
|
||||
fileResult := map[string]string{
|
||||
"filename": filepath.Base(filename),
|
||||
"path": filename,
|
||||
"dataURL": dataURL,
|
||||
}
|
||||
|
||||
files = append(files, fileResult)
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
callCallback(nil)
|
||||
} else {
|
||||
callCallback(files)
|
||||
}
|
||||
}()
|
||||
})
|
||||
|
||||
wv.Bind("drag", func() {
|
||||
wv.Dispatch(func() {
|
||||
drag(wv.Window())
|
||||
})
|
||||
})
|
||||
|
||||
wv.Bind("doubleClick", func() {
|
||||
wv.Dispatch(func() {
|
||||
doubleClick(wv.Window())
|
||||
})
|
||||
})
|
||||
|
||||
// Add binding for working directory selection
|
||||
wv.Bind("selectWorkingDirectory", func() {
|
||||
go func() {
|
||||
// Helper function to call the JavaScript callback with data or null
|
||||
callCallback := func(data interface{}) {
|
||||
dataJSON, _ := json.Marshal(data)
|
||||
wv.Dispatch(func() {
|
||||
wv.Eval(fmt.Sprintf("window.__selectWorkingDirectoryCallback && window.__selectWorkingDirectoryCallback(%s)", dataJSON))
|
||||
})
|
||||
}
|
||||
|
||||
directory, err := dialog.Directory().Title("Select Working Directory").ShowHidden(true).Browse()
|
||||
if err != nil {
|
||||
slog.Debug("Directory selection cancelled or failed", "error", err)
|
||||
callCallback(nil)
|
||||
return
|
||||
}
|
||||
slog.Debug("Directory selected", "path", directory)
|
||||
callCallback(directory)
|
||||
}()
|
||||
})
|
||||
|
||||
wv.Bind("setContextMenuItems", func(items []map[string]interface{}) error {
|
||||
menuMutex.Lock()
|
||||
defer menuMutex.Unlock()
|
||||
|
||||
if len(menuItems) > 0 {
|
||||
pinner.Unpin()
|
||||
}
|
||||
|
||||
menuItems = nil
|
||||
for _, item := range items {
|
||||
menuItem := C.menuItem{
|
||||
label: C.CString(item["label"].(string)),
|
||||
enabled: 0,
|
||||
separator: 0,
|
||||
}
|
||||
|
||||
if item["enabled"] != nil {
|
||||
menuItem.enabled = 1
|
||||
}
|
||||
|
||||
if item["separator"] != nil {
|
||||
menuItem.separator = 1
|
||||
}
|
||||
menuItems = append(menuItems, menuItem)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
// Debounce resize events
|
||||
var resizeTimer *time.Timer
|
||||
var resizeMutex sync.Mutex
|
||||
|
||||
wv.Bind("resize", func(width, height int) {
|
||||
if w.Store != nil {
|
||||
resizeMutex.Lock()
|
||||
if resizeTimer != nil {
|
||||
resizeTimer.Stop()
|
||||
}
|
||||
resizeTimer = time.AfterFunc(100*time.Millisecond, func() {
|
||||
err := w.Store.SetWindowSize(width, height)
|
||||
if err != nil {
|
||||
slog.Error("failed to set window size", "error", err)
|
||||
}
|
||||
})
|
||||
resizeMutex.Unlock()
|
||||
}
|
||||
})
|
||||
|
||||
// On Darwin, we can't have 2 threads both running global event loops
|
||||
// but on Windows, the event loops are tied to the window, so we're
|
||||
// able to run in both the tray and webview
|
||||
if runtime.GOOS != "darwin" {
|
||||
slog.Debug("starting webview event loop")
|
||||
go func() {
|
||||
wv.Run()
|
||||
slog.Debug("webview event loop exited")
|
||||
}()
|
||||
}
|
||||
|
||||
if w.Store != nil {
|
||||
width, height, err := w.Store.WindowSize()
|
||||
if err != nil {
|
||||
slog.Error("failed to get window size", "error", err)
|
||||
}
|
||||
if width > 0 && height > 0 {
|
||||
wv.SetSize(width, height, webview.HintNone)
|
||||
} else {
|
||||
wv.SetSize(800, 600, webview.HintNone)
|
||||
}
|
||||
}
|
||||
wv.SetSize(800, 600, webview.HintMin)
|
||||
|
||||
w.webview = wv
|
||||
w.webview.Navigate(url)
|
||||
} else {
|
||||
w.webview.Eval(fmt.Sprintf(`
|
||||
history.pushState({}, '', '%s');
|
||||
`, path))
|
||||
showWindow(w.webview.Window())
|
||||
}
|
||||
|
||||
return w.webview.Window()
|
||||
}
|
||||
|
||||
func (w *Webview) Terminate() {
|
||||
w.mutex.Lock()
|
||||
if w.webview == nil {
|
||||
w.mutex.Unlock()
|
||||
return
|
||||
}
|
||||
|
||||
wv := w.webview
|
||||
w.webview = nil
|
||||
w.mutex.Unlock()
|
||||
wv.Terminate()
|
||||
wv.Destroy()
|
||||
}
|
||||
|
||||
func (w *Webview) IsRunning() bool {
|
||||
w.mutex.Lock()
|
||||
defer w.mutex.Unlock()
|
||||
return w.webview != nil
|
||||
}
|
||||
|
||||
var (
|
||||
menuItems []C.menuItem
|
||||
menuMutex sync.RWMutex
|
||||
pinner runtime.Pinner
|
||||
)
|
||||
|
||||
//export menu_get_item_count
|
||||
func menu_get_item_count() C.int {
|
||||
menuMutex.RLock()
|
||||
defer menuMutex.RUnlock()
|
||||
return C.int(len(menuItems))
|
||||
}
|
||||
|
||||
//export menu_get_items
|
||||
func menu_get_items() unsafe.Pointer {
|
||||
menuMutex.RLock()
|
||||
defer menuMutex.RUnlock()
|
||||
|
||||
if len(menuItems) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Return pointer to the slice data
|
||||
pinner.Pin(&menuItems[0])
|
||||
return unsafe.Pointer(&menuItems[0])
|
||||
}
|
||||
|
||||
//export menu_handle_selection
|
||||
func menu_handle_selection(item *C.char) {
|
||||
wv.webview.Eval(fmt.Sprintf("window.handleContextMenuResult('%s')", C.GoString(item)))
|
||||
}
|
||||
@@ -1,40 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>CFBundleDevelopmentRegion</key>
  <string>English</string>
  <key>CFBundleExecutable</key>
  <string>Squirrel</string>
  <key>CFBundleIconFile</key>
  <string/>
  <key>CFBundleIdentifier</key>
  <string>com.github.Squirrel</string>
  <key>CFBundleInfoDictionaryVersion</key>
  <string>6.0</string>
  <key>CFBundleName</key>
  <string>Squirrel</string>
  <key>CFBundlePackageType</key>
  <string>FMWK</string>
  <key>CFBundleShortVersionString</key>
  <string>1.0</string>
  <key>CFBundleSignature</key>
  <string>????</string>
  <key>CFBundleVersion</key>
  <string>1</string>
  <key>DTCompiler</key>
  <string>com.apple.compilers.llvm.clang.1_0</string>
  <key>DTSDKBuild</key>
  <string>22E245</string>
  <key>DTSDKName</key>
  <string>macosx13.3</string>
  <key>DTXcode</key>
  <string>1431</string>
  <key>DTXcodeBuild</key>
  <string>14E300c</string>
  <key>NSHumanReadableCopyright</key>
  <string>Copyright © 2013 GitHub. All rights reserved.</string>
  <key>NSPrincipalClass</key>
  <string/>
</dict>
</plist>
@@ -1,51 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>CFBundleDisplayName</key>
  <string>Ollama</string>
  <key>CFBundleExecutable</key>
  <string>Ollama</string>
  <key>CFBundleIconFile</key>
  <string>icon.icns</string>
  <key>CFBundleIdentifier</key>
  <string>com.electron.ollama</string>
  <key>CFBundleInfoDictionaryVersion</key>
  <string>6.0</string>
  <key>CFBundleName</key>
  <string>Ollama</string>
  <key>CFBundlePackageType</key>
  <string>APPL</string>
  <key>CFBundleShortVersionString</key>
  <string>0.0.0</string>
  <key>CFBundleVersion</key>
  <string>0.0.0</string>
  <key>DTCompiler</key>
  <string>com.apple.compilers.llvm.clang.1_0</string>
  <key>DTSDKBuild</key>
  <string>22E245</string>
  <key>DTSDKName</key>
  <string>macosx14.0</string>
  <key>DTXcode</key>
  <string>1431</string>
  <key>DTXcodeBuild</key>
  <string>14E300c</string>
  <key>LSApplicationCategoryType</key>
  <string>public.app-category.developer-tools</string>
  <key>LSMinimumSystemVersion</key>
  <string>14.0</string>
  <key>LSUIElement</key>
  <true/>
  <key>CFBundleURLTypes</key>
  <array>
    <dict>
      <key>CFBundleURLName</key>
      <string>Ollama URL</string>
      <key>CFBundleURLSchemes</key>
      <array>
        <string>ollama</string>
      </array>
    </dict>
  </array>
</dict>
</plist>
@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>Label</key>
  <string>com.ollama.ollama</string>
  <key>BundleProgram</key>
  <string>Contents/Frameworks/Squirrel.framework/Versions/A/Squirrel</string>
  <key>ProgramArguments</key>
  <array>
    <string>Contents/Frameworks/Squirrel.framework/Versions/A/Squirrel</string>
    <string>background</string>
  </array>
  <key>RunAtLoad</key>
  <true/>
  <key>LimitLoadToSessionType</key>
  <string>Aqua</string>
  <key>POSIXSpawnType</key>
  <string>Interactive</string>
  <key>LSUIElement</key>
  <true/>
  <key>LSBackgroundOnly</key>
  <false/>
</dict>
</plist>
(Eight binary image assets deleted; sizes 374 B, 661 B, 363 B, 745 B, 381 B, 648 B, 412 B, 771 B.)
@@ -1,15 +0,0 @@
ISC License

Copyright (c) 2018, the dialog authors.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -1,43 +0,0 @@
#include <objc/NSObjCRuntime.h>

typedef enum {
    MSG_YESNO,
    MSG_ERROR,
    MSG_INFO,
} AlertStyle;

typedef struct {
    char* msg;
    char* title;
    AlertStyle style;
} AlertDlgParams;

#define LOADDLG 0
#define SAVEDLG 1
#define DIRDLG 2 // browse for directory

typedef struct {
    int mode;          /* which dialog style to invoke (see earlier defines) */
    char* buf;         /* buffer to store selected file */
    int nbuf;          /* number of bytes allocated at buf */
    char* title;       /* title for dialog box (can be nil) */
    void** exts;       /* list of valid extensions (elements actual type is NSString*) */
    int numext;        /* number of items in exts */
    int relaxext;      /* allow other extensions? */
    char* startDir;    /* directory to start in (can be nil) */
    char* filename;    /* default filename for dialog box (can be nil) */
    int showHidden;    /* show hidden files? */
    int allowMultiple; /* allow multiple file selection? */
} FileDlgParams;

typedef enum {
    DLG_OK,
    DLG_CANCEL,
    DLG_URLFAIL,
} DlgResult;

DlgResult alertDlg(AlertDlgParams*);
DlgResult fileDlg(FileDlgParams*);

void* NSStr(void* buf, int len);
void NSRelease(void* obj);
@@ -1,208 +0,0 @@
|
||||
#import <Cocoa/Cocoa.h>
|
||||
#include "dlg.h"
|
||||
#include <string.h>
|
||||
#include <sys/syslimits.h>
|
||||
|
||||
// Import UniformTypeIdentifiers for macOS 11+
|
||||
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 110000
|
||||
#import <UniformTypeIdentifiers/UniformTypeIdentifiers.h>
|
||||
#endif
|
||||
|
||||
void* NSStr(void* buf, int len) {
|
||||
return (void*)[[NSString alloc] initWithBytes:buf length:len encoding:NSUTF8StringEncoding];
|
||||
}
|
||||
|
||||
void checkActivationPolicy() {
|
||||
NSApplicationActivationPolicy policy = [NSApp activationPolicy];
|
||||
// prohibited NSApp will not show the panel at all.
|
||||
// It probably means that this is not run in a GUI app, that would set the policy on its own,
|
||||
// but in a terminal app - setting it to accessory will allow dialogs to show
|
||||
if (policy == NSApplicationActivationPolicyProhibited) {
|
||||
[NSApp setActivationPolicy:NSApplicationActivationPolicyAccessory];
|
||||
}
|
||||
}
|
||||
|
||||
void NSRelease(void* obj) {
|
||||
[(NSObject*)obj release];
|
||||
}
|
||||
|
||||
@interface AlertDlg : NSObject {
|
||||
AlertDlgParams* params;
|
||||
DlgResult result;
|
||||
}
|
||||
+ (AlertDlg*)init:(AlertDlgParams*)params;
|
||||
- (DlgResult)run;
|
||||
@end
|
||||
|
||||
DlgResult alertDlg(AlertDlgParams* params) {
|
||||
return [[AlertDlg init:params] run];
|
||||
}
|
||||
|
||||
@implementation AlertDlg
|
||||
+ (AlertDlg*)init:(AlertDlgParams*)params {
|
||||
AlertDlg* d = [AlertDlg alloc];
|
||||
d->params = params;
|
||||
return d;
|
||||
}
|
||||
|
||||
- (DlgResult)run {
|
||||
if(![NSThread isMainThread]) {
|
||||
[self performSelectorOnMainThread:@selector(run) withObject:nil waitUntilDone:YES];
|
||||
return self->result;
|
||||
}
|
||||
NSAlert* alert = [[NSAlert alloc] init];
|
||||
if(self->params->title != nil) {
|
||||
[[alert window] setTitle:[[NSString alloc] initWithUTF8String:self->params->title]];
|
||||
}
|
||||
[alert setMessageText:[[NSString alloc] initWithUTF8String:self->params->msg]];
|
||||
switch (self->params->style) {
|
||||
case MSG_YESNO:
|
||||
[alert addButtonWithTitle:@"Yes"];
|
||||
[alert addButtonWithTitle:@"No"];
|
||||
break;
|
||||
case MSG_ERROR:
|
||||
[alert setIcon:[NSImage imageNamed:NSImageNameCaution]];
|
||||
[alert addButtonWithTitle:@"OK"];
|
||||
break;
|
||||
case MSG_INFO:
|
||||
[alert setIcon:[NSImage imageNamed:NSImageNameInfo]];
|
||||
[alert addButtonWithTitle:@"OK"];
|
||||
break;
|
||||
}
|
||||
|
||||
checkActivationPolicy();
|
||||
|
||||
self->result = [alert runModal] == NSAlertFirstButtonReturn ? DLG_OK : DLG_CANCEL;
|
||||
return self->result;
|
||||
}
|
||||
@end
|
||||
|
||||
@interface FileDlg : NSObject {
|
||||
FileDlgParams* params;
|
||||
DlgResult result;
|
||||
}
|
||||
+ (FileDlg*)init:(FileDlgParams*)params;
|
||||
- (DlgResult)run;
|
||||
@end
|
||||
|
||||
DlgResult fileDlg(FileDlgParams* params) {
|
||||
return [[FileDlg init:params] run];
|
||||
}
|
||||
|
||||
@implementation FileDlg
|
||||
+ (FileDlg*)init:(FileDlgParams*)params {
|
||||
FileDlg* d = [FileDlg alloc];
|
||||
d->params = params;
|
||||
return d;
|
||||
}
|
||||
|
||||
- (DlgResult)run {
|
||||
if(![NSThread isMainThread]) {
|
||||
[self performSelectorOnMainThread:@selector(run) withObject:nil waitUntilDone:YES];
|
||||
} else if(self->params->mode == SAVEDLG) {
|
||||
self->result = [self save];
|
||||
} else {
|
||||
self->result = [self load];
|
||||
}
|
||||
return self->result;
|
||||
}
|
||||
|
||||
- (NSInteger)runPanel:(NSSavePanel*)panel {
|
||||
[panel setFloatingPanel:YES];
|
||||
[panel setShowsHiddenFiles:self->params->showHidden ? YES : NO];
|
||||
[panel setCanCreateDirectories:YES];
|
||||
if(self->params->title != nil) {
|
||||
[panel setTitle:[[NSString alloc] initWithUTF8String:self->params->title]];
|
||||
}
|
||||
// Use modern allowedContentTypes API for better file type support (especially video files)
|
||||
if(self->params->numext > 0) {
|
||||
NSMutableArray *utTypes = [NSMutableArray arrayWithCapacity:self->params->numext];
|
||||
NSString** exts = (NSString**)self->params->exts;
|
||||
for(int i = 0; i < self->params->numext; i++) {
|
||||
UTType *type = [UTType typeWithFilenameExtension:exts[i]];
|
||||
if(type) {
|
||||
[utTypes addObject:type];
|
||||
}
|
||||
}
|
||||
if([utTypes count] > 0) {
|
||||
[panel setAllowedContentTypes:utTypes];
|
||||
}
|
||||
}
|
||||
if(self->params->relaxext) {
|
||||
[panel setAllowsOtherFileTypes:YES];
|
||||
}
|
||||
if(self->params->startDir) {
|
||||
[panel setDirectoryURL:[NSURL URLWithString:[[NSString alloc] initWithUTF8String:self->params->startDir]]];
|
||||
}
|
||||
if(self->params->filename != nil) {
|
||||
[panel setNameFieldStringValue:[[NSString alloc] initWithUTF8String:self->params->filename]];
|
||||
}
|
||||
|
||||
checkActivationPolicy();
|
||||
|
||||
return [panel runModal];
|
||||
}
|
||||
|
||||
- (DlgResult)save {
|
||||
NSSavePanel* panel = [NSSavePanel savePanel];
|
||||
if(![self runPanel:panel]) {
|
||||
return DLG_CANCEL;
|
||||
} else if(![[panel URL] getFileSystemRepresentation:self->params->buf maxLength:self->params->nbuf]) {
|
||||
return DLG_URLFAIL;
|
||||
}
|
||||
return DLG_OK;
|
||||
}
|
||||
|
||||
- (DlgResult)load {
|
||||
NSOpenPanel* panel = [NSOpenPanel openPanel];
|
||||
if(self->params->mode == DIRDLG) {
|
||||
[panel setCanChooseDirectories:YES];
|
||||
[panel setCanChooseFiles:NO];
|
||||
}
|
||||
|
||||
if(self->params->allowMultiple) {
|
||||
[panel setAllowsMultipleSelection:YES];
|
||||
}
|
||||
|
||||
if(![self runPanel:panel]) {
|
||||
return DLG_CANCEL;
|
||||
}
|
||||
|
||||
NSArray* urls = [panel URLs];
|
||||
if(self->params->allowMultiple && [urls count] >= 1) {
|
||||
// For multiple files, we need to return all paths separated by null bytes
|
||||
char* bufPtr = self->params->buf;
|
||||
int remainingBuf = self->params->nbuf;
|
||||
|
||||
// Calculate total required buffer size first
|
||||
int totalSize = 0;
|
||||
for(NSURL* url in urls) {
|
||||
char tempBuf[PATH_MAX];
|
||||
if(![url getFileSystemRepresentation:tempBuf maxLength:PATH_MAX]) {
|
||||
return DLG_URLFAIL;
|
||||
}
|
||||
totalSize += strlen(tempBuf) + 1; // +1 for null terminator
|
||||
}
|
||||
totalSize += 1; // Final null terminator
|
||||
|
||||
if(totalSize > self->params->nbuf) {
|
||||
// Not enough buffer space
|
||||
return DLG_URLFAIL;
|
||||
}
|
||||
|
||||
// Now actually copy the paths (we know we have space)
|
||||
bufPtr = self->params->buf;
|
||||
for(NSURL* url in urls) {
|
||||
char tempBuf[PATH_MAX];
|
||||
[url getFileSystemRepresentation:tempBuf maxLength:PATH_MAX];
|
||||
int pathLen = strlen(tempBuf);
|
||||
strcpy(bufPtr, tempBuf);
|
||||
bufPtr += pathLen + 1;
|
||||
}
|
||||
*bufPtr = '\0'; // Final null terminator
|
||||
}
|
||||
|
||||
return DLG_OK;
|
||||
}
|
||||
|
||||
@end
|
||||
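The multi-selection branch above writes the chosen paths into the caller's buffer as NUL-separated strings with a trailing empty entry (double NUL). A small illustrative Go sketch of reading that layout back (not the removed code itself; the buffer literal is made up):

```go
package main

import "fmt"

// splitNulList parses NUL-separated entries terminated by an empty entry.
func splitNulList(buf []byte) []string {
    var paths []string
    start := 0
    for i, b := range buf {
        if b != 0 {
            continue
        }
        if i == start { // empty entry => double NUL => end of list
            break
        }
        paths = append(paths, string(buf[start:i]))
        start = i + 1
    }
    return paths
}

func main() {
    buf := []byte("/tmp/a.txt\x00/tmp/b.txt\x00\x00") // hypothetical selection
    fmt.Println(splitNulList(buf))                    // [/tmp/a.txt /tmp/b.txt]
}
```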
@@ -1,183 +0,0 @@
|
||||
package cocoa
|
||||
|
||||
// #cgo darwin LDFLAGS: -framework Cocoa -framework UniformTypeIdentifiers
|
||||
// #include <stdlib.h>
|
||||
// #include <sys/syslimits.h>
|
||||
// #include "dlg.h"
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type AlertParams struct {
|
||||
p C.AlertDlgParams
|
||||
}
|
||||
|
||||
func mkAlertParams(msg, title string, style C.AlertStyle) *AlertParams {
|
||||
a := AlertParams{C.AlertDlgParams{msg: C.CString(msg), style: style}}
|
||||
if title != "" {
|
||||
a.p.title = C.CString(title)
|
||||
}
|
||||
return &a
|
||||
}
|
||||
|
||||
func (a *AlertParams) run() C.DlgResult {
|
||||
return C.alertDlg(&a.p)
|
||||
}
|
||||
|
||||
func (a *AlertParams) free() {
|
||||
C.free(unsafe.Pointer(a.p.msg))
|
||||
if a.p.title != nil {
|
||||
C.free(unsafe.Pointer(a.p.title))
|
||||
}
|
||||
}
|
||||
|
||||
func nsStr(s string) unsafe.Pointer {
|
||||
return C.NSStr(unsafe.Pointer(&[]byte(s)[0]), C.int(len(s)))
|
||||
}
|
||||
|
||||
func YesNoDlg(msg, title string) bool {
|
||||
a := mkAlertParams(msg, title, C.MSG_YESNO)
|
||||
defer a.free()
|
||||
return a.run() == C.DLG_OK
|
||||
}
|
||||
|
||||
func InfoDlg(msg, title string) {
|
||||
a := mkAlertParams(msg, title, C.MSG_INFO)
|
||||
defer a.free()
|
||||
a.run()
|
||||
}
|
||||
|
||||
func ErrorDlg(msg, title string) {
|
||||
a := mkAlertParams(msg, title, C.MSG_ERROR)
|
||||
defer a.free()
|
||||
a.run()
|
||||
}
|
||||
|
||||
const (
|
||||
BUFSIZE = C.PATH_MAX
|
||||
MULTI_FILE_BUF_SIZE = 32768
|
||||
)
|
||||
|
||||
// MultiFileDlg opens a file dialog that allows multiple file selection
|
||||
func MultiFileDlg(title string, exts []string, relaxExt bool, startDir string, showHidden bool) ([]string, error) {
|
||||
return fileDlgWithOptions(C.LOADDLG, title, exts, relaxExt, startDir, "", showHidden, true)
|
||||
}
|
||||
|
||||
// FileDlg opens a file dialog for single file selection (kept for compatibility)
|
||||
func FileDlg(save bool, title string, exts []string, relaxExt bool, startDir string, filename string, showHidden bool) (string, error) {
|
||||
mode := C.LOADDLG
|
||||
if save {
|
||||
mode = C.SAVEDLG
|
||||
}
|
||||
files, err := fileDlgWithOptions(mode, title, exts, relaxExt, startDir, filename, showHidden, false)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(files) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
return files[0], nil
|
||||
}
|
||||
|
||||
func DirDlg(title string, startDir string, showHidden bool) (string, error) {
|
||||
files, err := fileDlgWithOptions(C.DIRDLG, title, nil, false, startDir, "", showHidden, false)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(files) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
return files[0], nil
|
||||
}
|
||||
|
||||
// fileDlgWithOptions is the unified file dialog function that handles both single and multiple selection
|
||||
func fileDlgWithOptions(mode int, title string, exts []string, relaxExt bool, startDir, filename string, showHidden, allowMultiple bool) ([]string, error) {
|
||||
// Use larger buffer for multiple files, smaller for single
|
||||
bufSize := BUFSIZE
|
||||
if allowMultiple {
|
||||
bufSize = MULTI_FILE_BUF_SIZE
|
||||
}
|
||||
|
||||
p := C.FileDlgParams{
|
||||
mode: C.int(mode),
|
||||
nbuf: C.int(bufSize),
|
||||
}
|
||||
|
||||
if allowMultiple {
|
||||
p.allowMultiple = C.int(1) // Enable multiple selection //nolint:structcheck
|
||||
}
|
||||
if showHidden {
|
||||
p.showHidden = 1
|
||||
}
|
||||
|
||||
p.buf = (*C.char)(C.malloc(C.size_t(bufSize)))
|
||||
defer C.free(unsafe.Pointer(p.buf))
|
||||
buf := (*(*[MULTI_FILE_BUF_SIZE]byte)(unsafe.Pointer(p.buf)))[:bufSize]
|
||||
|
||||
if title != "" {
|
||||
p.title = C.CString(title)
|
||||
defer C.free(unsafe.Pointer(p.title))
|
||||
}
|
||||
if startDir != "" {
|
||||
p.startDir = C.CString(startDir)
|
||||
defer C.free(unsafe.Pointer(p.startDir))
|
||||
}
|
||||
if filename != "" {
|
||||
p.filename = C.CString(filename)
|
||||
defer C.free(unsafe.Pointer(p.filename))
|
||||
}
|
||||
|
||||
if len(exts) > 0 {
|
||||
if len(exts) > 999 {
|
||||
panic("more than 999 extensions not supported")
|
||||
}
|
||||
ptrSize := int(unsafe.Sizeof(&title))
|
||||
p.exts = (*unsafe.Pointer)(C.malloc(C.size_t(ptrSize * len(exts))))
|
||||
defer C.free(unsafe.Pointer(p.exts))
|
||||
cext := (*(*[999]unsafe.Pointer)(unsafe.Pointer(p.exts)))[:]
|
||||
for i, ext := range exts {
|
||||
cext[i] = nsStr(ext)
|
||||
defer C.NSRelease(cext[i])
|
||||
}
|
||||
p.numext = C.int(len(exts))
|
||||
if relaxExt {
|
||||
p.relaxext = 1
|
||||
}
|
||||
}
|
||||
|
||||
// Execute dialog and parse results
|
||||
switch C.fileDlg(&p) {
|
||||
case C.DLG_OK:
|
||||
if allowMultiple {
|
||||
// Parse multiple null-terminated strings from buffer
|
||||
var files []string
|
||||
start := 0
|
||||
for i := range len(buf) - 1 {
|
||||
if buf[i] == 0 {
|
||||
if i > start {
|
||||
files = append(files, string(buf[start:i]))
|
||||
}
|
||||
start = i + 1
|
||||
// Check for double null (end of list)
|
||||
if i+1 < len(buf) && buf[i+1] == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return files, nil
|
||||
} else {
|
||||
// Single file - return as array for consistency
|
||||
filename := string(buf[:bytes.Index(buf, []byte{0})])
|
||||
return []string{filename}, nil
|
||||
}
|
||||
case C.DLG_CANCEL:
|
||||
return nil, nil
|
||||
case C.DLG_URLFAIL:
|
||||
return nil, errors.New("failed to get file-system representation for selected URL")
|
||||
}
|
||||
panic("unhandled case")
|
||||
}
|
||||
@@ -1,182 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
// Package dialog provides a simple cross-platform common dialog API.
|
||||
// Eg. to prompt the user with a yes/no dialog:
|
||||
//
|
||||
// if dialog.MsgDlg("%s", "Do you want to continue?").YesNo() {
|
||||
// // user pressed Yes
|
||||
// }
|
||||
//
|
||||
// The general usage pattern is to call one of the toplevel *Dlg functions
|
||||
// which return a *Builder structure. From here you can optionally call
|
||||
// configuration functions (eg. Title) to customise the dialog, before
|
||||
// using a launcher function to run the dialog.
|
||||
package dialog
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// ErrCancelled is an error returned when a user cancels/closes a dialog.
|
||||
var ErrCancelled = errors.New("Cancelled")
|
||||
|
||||
// Cancelled refers to ErrCancelled.
|
||||
// Deprecated: Use ErrCancelled instead.
|
||||
var Cancelled = ErrCancelled
|
||||
|
||||
// Dlg is the common type for dialogs.
|
||||
type Dlg struct {
|
||||
Title string
|
||||
}
|
||||
|
||||
// MsgBuilder is used for creating message boxes.
|
||||
type MsgBuilder struct {
|
||||
Dlg
|
||||
Msg string
|
||||
}
|
||||
|
||||
// Message initialises a MsgBuilder with the provided message.
|
||||
func Message(format string, args ...interface{}) *MsgBuilder {
|
||||
return &MsgBuilder{Msg: fmt.Sprintf(format, args...)}
|
||||
}
|
||||
|
||||
// Title specifies what the title of the message dialog will be.
|
||||
func (b *MsgBuilder) Title(title string) *MsgBuilder {
|
||||
b.Dlg.Title = title
|
||||
return b
|
||||
}
|
||||
|
||||
// YesNo spawns the message dialog with two buttons, "Yes" and "No".
|
||||
// Returns true iff the user selected "Yes".
|
||||
func (b *MsgBuilder) YesNo() bool {
|
||||
return b.yesNo()
|
||||
}
|
||||
|
||||
// Info spawns the message dialog with an information icon and single button, "Ok".
|
||||
func (b *MsgBuilder) Info() {
|
||||
b.info()
|
||||
}
|
||||
|
||||
// Error spawns the message dialog with an error icon and single button, "Ok".
|
||||
func (b *MsgBuilder) Error() {
|
||||
b.error()
|
||||
}
|
||||
|
||||
// FileFilter represents a category of files (eg. audio files, spreadsheets).
|
||||
type FileFilter struct {
|
||||
Desc string
|
||||
Extensions []string
|
||||
}
|
||||
|
||||
// FileBuilder is used for creating file browsing dialogs.
|
||||
type FileBuilder struct {
|
||||
Dlg
|
||||
StartDir string
|
||||
StartFile string
|
||||
Filters []FileFilter
|
||||
ShowHiddenFiles bool
|
||||
}
|
||||
|
||||
// File initialises a FileBuilder using the default configuration.
|
||||
func File() *FileBuilder {
|
||||
return &FileBuilder{}
|
||||
}
|
||||
|
||||
// Title specifies the title to be used for the dialog.
|
||||
func (b *FileBuilder) Title(title string) *FileBuilder {
|
||||
b.Dlg.Title = title
|
||||
return b
|
||||
}
|
||||
|
||||
// Filter adds a category of files to the types allowed by the dialog. Multiple
|
||||
// calls to Filter are cumulative - any of the provided categories will be allowed.
|
||||
// By default all files can be selected.
|
||||
//
|
||||
// The special extension '*' allows all files to be selected when the Filter is active.
|
||||
func (b *FileBuilder) Filter(desc string, extensions ...string) *FileBuilder {
|
||||
filt := FileFilter{desc, extensions}
|
||||
if len(filt.Extensions) == 0 {
|
||||
filt.Extensions = append(filt.Extensions, "*")
|
||||
}
|
||||
b.Filters = append(b.Filters, filt)
|
||||
return b
|
||||
}
|
||||
|
||||
// SetStartDir specifies the initial directory of the dialog.
|
||||
func (b *FileBuilder) SetStartDir(startDir string) *FileBuilder {
|
||||
b.StartDir = startDir
|
||||
return b
|
||||
}
|
||||
|
||||
// SetStartFile specifies the initial file name of the dialog.
|
||||
func (b *FileBuilder) SetStartFile(startFile string) *FileBuilder {
|
||||
b.StartFile = startFile
|
||||
return b
|
||||
}
|
||||
|
||||
// ShowHiddenFiles sets whether hidden files should be visible in the dialog.
|
||||
func (b *FileBuilder) ShowHidden(show bool) *FileBuilder {
|
||||
b.ShowHiddenFiles = show
|
||||
return b
|
||||
}
|
||||
|
||||
// Load spawns the file selection dialog using the configured settings,
|
||||
// asking the user to select a single file. Returns ErrCancelled as the error
|
||||
// if the user cancels or closes the dialog.
|
||||
func (b *FileBuilder) Load() (string, error) {
|
||||
return b.load()
|
||||
}
|
||||
|
||||
// LoadMultiple spawns the file selection dialog using the configured settings,
|
||||
// asking the user to select multiple files. Returns ErrCancelled as the error
|
||||
// if the user cancels or closes the dialog.
|
||||
func (b *FileBuilder) LoadMultiple() ([]string, error) {
|
||||
return b.loadMultiple()
|
||||
}
|
||||
|
||||
// Save spawns the file selection dialog using the configured settings,
|
||||
// asking the user for a filename to save as. If the chosen file exists, the
|
||||
// user is prompted whether they want to overwrite the file. Returns
|
||||
// ErrCancelled as the error if the user cancels/closes the dialog, or selects
|
||||
// not to overwrite the file.
|
||||
func (b *FileBuilder) Save() (string, error) {
|
||||
return b.save()
|
||||
}
|
||||
|
||||
// DirectoryBuilder is used for directory browse dialogs.
|
||||
type DirectoryBuilder struct {
|
||||
Dlg
|
||||
StartDir string
|
||||
ShowHiddenFiles bool
|
||||
}
|
||||
|
||||
// Directory initialises a DirectoryBuilder using the default configuration.
|
||||
func Directory() *DirectoryBuilder {
|
||||
return &DirectoryBuilder{}
|
||||
}
|
||||
|
||||
// Browse spawns the directory selection dialog using the configured settings,
|
||||
// asking the user to select a single folder. Returns ErrCancelled as the error
|
||||
// if the user cancels or closes the dialog.
|
||||
func (b *DirectoryBuilder) Browse() (string, error) {
|
||||
return b.browse()
|
||||
}
|
||||
|
||||
// Title specifies the title to be used for the dialog.
|
||||
func (b *DirectoryBuilder) Title(title string) *DirectoryBuilder {
|
||||
b.Dlg.Title = title
|
||||
return b
|
||||
}
|
||||
|
||||
// StartDir specifies the initial directory to be used for the dialog.
|
||||
func (b *DirectoryBuilder) SetStartDir(dir string) *DirectoryBuilder {
|
||||
b.StartDir = dir
|
||||
return b
|
||||
}
|
||||
|
||||
// ShowHiddenFiles sets whether hidden files should be visible in the dialog.
|
||||
func (b *DirectoryBuilder) ShowHidden(show bool) *DirectoryBuilder {
|
||||
b.ShowHiddenFiles = show
|
||||
return b
|
||||
}
|
||||
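For reference, a short usage sketch of the builder API defined above. The import path is taken from the cocoa import later in this diff (github.com/ollama/ollama/app/dialog); the filter extension and titles are only examples:

```go
package main

import (
    "errors"
    "fmt"

    "github.com/ollama/ollama/app/dialog"
)

func main() {
    // Yes/No message box via the MsgBuilder chain.
    if dialog.Message("%s", "Do you want to continue?").Title("Confirm").YesNo() {
        fmt.Println("user pressed Yes")
    }

    // Single-file open dialog via the FileBuilder chain.
    path, err := dialog.File().Title("Pick a file").Filter("GGUF files", "gguf").Load()
    if errors.Is(err, dialog.ErrCancelled) {
        fmt.Println("cancelled")
        return
    }
    if err != nil {
        fmt.Println("dialog error:", err)
        return
    }
    fmt.Println("selected:", path)
}
```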
@@ -1,82 +0,0 @@
|
||||
package dialog
|
||||
|
||||
import (
|
||||
"github.com/ollama/ollama/app/dialog/cocoa"
|
||||
)
|
||||
|
||||
func (b *MsgBuilder) yesNo() bool {
|
||||
return cocoa.YesNoDlg(b.Msg, b.Dlg.Title)
|
||||
}
|
||||
|
||||
func (b *MsgBuilder) info() {
|
||||
cocoa.InfoDlg(b.Msg, b.Dlg.Title)
|
||||
}
|
||||
|
||||
func (b *MsgBuilder) error() {
|
||||
cocoa.ErrorDlg(b.Msg, b.Dlg.Title)
|
||||
}
|
||||
|
||||
func (b *FileBuilder) load() (string, error) {
|
||||
return b.run(false)
|
||||
}
|
||||
|
||||
func (b *FileBuilder) loadMultiple() ([]string, error) {
|
||||
return b.runMultiple()
|
||||
}
|
||||
|
||||
func (b *FileBuilder) save() (string, error) {
|
||||
return b.run(true)
|
||||
}
|
||||
|
||||
func (b *FileBuilder) run(save bool) (string, error) {
|
||||
star := false
|
||||
var exts []string
|
||||
for _, filt := range b.Filters {
|
||||
for _, ext := range filt.Extensions {
|
||||
if ext == "*" {
|
||||
star = true
|
||||
} else {
|
||||
exts = append(exts, ext)
|
||||
}
|
||||
}
|
||||
}
|
||||
if star && save {
|
||||
/* OSX doesn't allow the user to switch visible file types/extensions. Also
|
||||
** NSSavePanel's allowsOtherFileTypes property has no effect for an open
|
||||
** dialog, so if "*" is a possible extension we must always show all files. */
|
||||
exts = nil
|
||||
}
|
||||
f, err := cocoa.FileDlg(save, b.Dlg.Title, exts, star, b.StartDir, b.StartFile, b.ShowHiddenFiles)
|
||||
if f == "" && err == nil {
|
||||
return "", ErrCancelled
|
||||
}
|
||||
return f, err
|
||||
}
|
||||
|
||||
func (b *FileBuilder) runMultiple() ([]string, error) {
|
||||
star := false
|
||||
var exts []string
|
||||
for _, filt := range b.Filters {
|
||||
for _, ext := range filt.Extensions {
|
||||
if ext == "*" {
|
||||
star = true
|
||||
} else {
|
||||
exts = append(exts, ext)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
files, err := cocoa.MultiFileDlg(b.Dlg.Title, exts, star, b.StartDir, b.ShowHiddenFiles)
|
||||
if len(files) == 0 && err == nil {
|
||||
return nil, ErrCancelled
|
||||
}
|
||||
return files, err
|
||||
}
|
||||
|
||||
func (b *DirectoryBuilder) browse() (string, error) {
|
||||
f, err := cocoa.DirDlg(b.Dlg.Title, b.StartDir, b.ShowHiddenFiles)
|
||||
if f == "" && err == nil {
|
||||
return "", ErrCancelled
|
||||
}
|
||||
return f, err
|
||||
}
|
||||
@@ -1,241 +0,0 @@
|
||||
package dialog
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"syscall"
|
||||
"unicode/utf16"
|
||||
"unsafe"
|
||||
|
||||
"github.com/TheTitanrain/w32"
|
||||
)
|
||||
|
||||
const multiFileBufferSize = w32.MAX_PATH * 10
|
||||
|
||||
type WinDlgError int
|
||||
|
||||
func (e WinDlgError) Error() string {
|
||||
return fmt.Sprintf("CommDlgExtendedError: %#x", e)
|
||||
}
|
||||
|
||||
func err() error {
|
||||
e := w32.CommDlgExtendedError()
|
||||
if e == 0 {
|
||||
return ErrCancelled
|
||||
}
|
||||
return WinDlgError(e)
|
||||
}
|
||||
|
||||
func (b *MsgBuilder) yesNo() bool {
|
||||
r := w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Confirm?"), w32.MB_YESNO)
|
||||
return r == w32.IDYES
|
||||
}
|
||||
|
||||
func (b *MsgBuilder) info() {
|
||||
w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Information"), w32.MB_OK|w32.MB_ICONINFORMATION)
|
||||
}
|
||||
|
||||
func (b *MsgBuilder) error() {
|
||||
w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Error"), w32.MB_OK|w32.MB_ICONERROR)
|
||||
}
|
||||
|
||||
type filedlg struct {
|
||||
buf []uint16
|
||||
filters []uint16
|
||||
opf *w32.OPENFILENAME
|
||||
}
|
||||
|
||||
func (d filedlg) Filename() string {
|
||||
i := 0
|
||||
for i < len(d.buf) && d.buf[i] != 0 {
|
||||
i++
|
||||
}
|
||||
return string(utf16.Decode(d.buf[:i]))
|
||||
}
|
||||
|
||||
func (d filedlg) parseMultipleFilenames() []string {
|
||||
var files []string
|
||||
i := 0
|
||||
|
||||
// Find first null terminator (directory path)
|
||||
for i < len(d.buf) && d.buf[i] != 0 {
|
||||
i++
|
||||
}
|
||||
|
||||
if i >= len(d.buf) {
|
||||
return files
|
||||
}
|
||||
|
||||
// Get directory path
|
||||
dirPath := string(utf16.Decode(d.buf[:i]))
|
||||
i++ // Skip null terminator
|
||||
|
||||
// Check if there are more files (multiple selection)
|
||||
if i < len(d.buf) && d.buf[i] != 0 {
|
||||
// Multiple files selected - parse filenames
|
||||
for i < len(d.buf) {
|
||||
start := i
|
||||
// Find next null terminator
|
||||
for i < len(d.buf) && d.buf[i] != 0 {
|
||||
i++
|
||||
}
|
||||
if i >= len(d.buf) {
|
||||
break
|
||||
}
|
||||
|
||||
if start < i {
|
||||
filename := string(utf16.Decode(d.buf[start:i]))
|
||||
if dirPath != "" {
|
||||
files = append(files, dirPath+"\\"+filename)
|
||||
} else {
|
||||
files = append(files, filename)
|
||||
}
|
||||
}
|
||||
i++ // Skip null terminator
|
||||
if i >= len(d.buf) || d.buf[i] == 0 {
|
||||
break // End of list
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Single file selected
|
||||
files = append(files, dirPath)
|
||||
}
|
||||
|
||||
return files
|
||||
}
|
||||
|
||||
func (b *FileBuilder) load() (string, error) {
|
||||
d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR, b)
|
||||
if w32.GetOpenFileName(d.opf) {
|
||||
return d.Filename(), nil
|
||||
}
|
||||
return "", err()
|
||||
}
|
||||
|
||||
func (b *FileBuilder) loadMultiple() ([]string, error) {
|
||||
d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR|w32.OFN_ALLOWMULTISELECT|w32.OFN_EXPLORER, b)
|
||||
d.buf = make([]uint16, multiFileBufferSize)
|
||||
d.opf.File = utf16ptr(d.buf)
|
||||
d.opf.MaxFile = uint32(len(d.buf))
|
||||
|
||||
if w32.GetOpenFileName(d.opf) {
|
||||
return d.parseMultipleFilenames(), nil
|
||||
}
|
||||
return nil, err()
|
||||
}
|
||||
|
||||
func (b *FileBuilder) save() (string, error) {
|
||||
d := openfile(w32.OFN_OVERWRITEPROMPT|w32.OFN_NOCHANGEDIR, b)
|
||||
if w32.GetSaveFileName(d.opf) {
|
||||
return d.Filename(), nil
|
||||
}
|
||||
return "", err()
|
||||
}
|
||||
|
||||
/* syscall.UTF16PtrFromString not sufficient because we need to encode embedded NUL bytes */
|
||||
func utf16ptr(utf16 []uint16) *uint16 {
|
||||
if utf16[len(utf16)-1] != 0 {
|
||||
panic("refusing to make ptr to non-NUL terminated utf16 slice")
|
||||
}
|
||||
h := (*reflect.SliceHeader)(unsafe.Pointer(&utf16))
|
||||
return (*uint16)(unsafe.Pointer(h.Data))
|
||||
}
|
||||
|
||||
func utf16slice(ptr *uint16) []uint16 { //nolint:unused
|
||||
hdr := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(ptr)), Len: 1, Cap: 1}
|
||||
slice := *((*[]uint16)(unsafe.Pointer(&hdr))) //nolint:govet
|
||||
i := 0
|
||||
for slice[len(slice)-1] != 0 {
|
||||
i++
|
||||
}
|
||||
hdr.Len = i
|
||||
slice = *((*[]uint16)(unsafe.Pointer(&hdr))) //nolint:govet
|
||||
return slice
|
||||
}
|
||||
|
||||
func openfile(flags uint32, b *FileBuilder) (d filedlg) {
|
||||
d.buf = make([]uint16, w32.MAX_PATH)
|
||||
if b.StartFile != "" {
|
||||
initialName, _ := syscall.UTF16FromString(b.StartFile)
|
||||
for i := 0; i < len(initialName) && i < w32.MAX_PATH; i++ {
|
||||
d.buf[i] = initialName[i]
|
||||
}
|
||||
}
|
||||
d.opf = &w32.OPENFILENAME{
|
||||
File: utf16ptr(d.buf),
|
||||
MaxFile: uint32(len(d.buf)),
|
||||
Flags: flags,
|
||||
}
|
||||
d.opf.StructSize = uint32(unsafe.Sizeof(*d.opf))
|
||||
if b.StartDir != "" {
|
||||
d.opf.InitialDir, _ = syscall.UTF16PtrFromString(b.StartDir)
|
||||
}
|
||||
if b.Dlg.Title != "" {
|
||||
d.opf.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
|
||||
}
|
||||
for _, filt := range b.Filters {
|
||||
/* build utf16 string of form "Music File\0*.mp3;*.ogg;*.wav;\0" */
|
||||
d.filters = append(d.filters, utf16.Encode([]rune(filt.Desc))...)
|
||||
d.filters = append(d.filters, 0)
|
||||
for _, ext := range filt.Extensions {
|
||||
s := fmt.Sprintf("*.%s;", ext)
|
||||
d.filters = append(d.filters, utf16.Encode([]rune(s))...)
|
||||
}
|
||||
d.filters = append(d.filters, 0)
|
||||
}
|
||||
if d.filters != nil {
|
||||
d.filters = append(d.filters, 0, 0) // two extra NUL chars to terminate the list
|
||||
d.opf.Filter = utf16ptr(d.filters)
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
type dirdlg struct {
|
||||
bi *w32.BROWSEINFO
|
||||
}
|
||||
|
||||
const (
|
||||
bffm_INITIALIZED = 1
|
||||
bffm_SELCHANGED = 2
|
||||
bffm_VALIDATEFAILEDA = 3
|
||||
bffm_VALIDATEFAILEDW = 4
|
||||
bffm_SETSTATUSTEXTA = (w32.WM_USER + 100)
|
||||
bffm_SETSTATUSTEXTW = (w32.WM_USER + 104)
|
||||
bffm_ENABLEOK = (w32.WM_USER + 101)
|
||||
bffm_SETSELECTIONA = (w32.WM_USER + 102)
|
||||
bffm_SETSELECTIONW = (w32.WM_USER + 103)
|
||||
bffm_SETOKTEXT = (w32.WM_USER + 105)
|
||||
bffm_SETEXPANDED = (w32.WM_USER + 106)
|
||||
bffm_SETSTATUSTEXT = bffm_SETSTATUSTEXTW
|
||||
bffm_SETSELECTION = bffm_SETSELECTIONW
|
||||
bffm_VALIDATEFAILED = bffm_VALIDATEFAILEDW
|
||||
)
|
||||
|
||||
func callbackDefaultDir(hwnd w32.HWND, msg uint, lParam, lpData uintptr) int {
|
||||
if msg == bffm_INITIALIZED {
|
||||
_ = w32.SendMessage(hwnd, bffm_SETSELECTION, w32.TRUE, lpData)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func selectdir(b *DirectoryBuilder) (d dirdlg) {
|
||||
d.bi = &w32.BROWSEINFO{Flags: w32.BIF_RETURNONLYFSDIRS | w32.BIF_NEWDIALOGSTYLE}
|
||||
if b.Dlg.Title != "" {
|
||||
d.bi.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
|
||||
}
|
||||
if b.StartDir != "" {
|
||||
s16, _ := syscall.UTF16PtrFromString(b.StartDir)
|
||||
d.bi.LParam = uintptr(unsafe.Pointer(s16))
|
||||
d.bi.CallbackFunc = syscall.NewCallback(callbackDefaultDir)
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
func (b *DirectoryBuilder) browse() (string, error) {
|
||||
d := selectdir(b)
|
||||
res := w32.SHBrowseForFolder(d.bi)
|
||||
if res == 0 {
|
||||
return "", ErrCancelled
|
||||
}
|
||||
return w32.SHGetPathFromIDList(res), nil
|
||||
}
|
||||
@@ -1,12 +0,0 @@
//go:build windows

package dialog

func firstOf(args ...string) string {
    for _, arg := range args {
        if arg != "" {
            return arg
        }
    }
    return ""
}
@@ -1,30 +0,0 @@
//go:build windows || darwin

package format

import (
    "strings"
    "unicode"
)

// KebabCase converts a string from camelCase or PascalCase to kebab-case.
// (e.g. "camelCase" -> "camel-case")
func KebabCase(str string) string {
    var result strings.Builder

    for i, char := range str {
        if i > 0 {
            prevChar := rune(str[i-1])

            // Add hyphen before uppercase letters
            if unicode.IsUpper(char) &&
                (unicode.IsLower(prevChar) || unicode.IsDigit(prevChar) ||
                    (i < len(str)-1 && unicode.IsLower(rune(str[i+1])))) {
                result.WriteRune('-')
            }
        }
        result.WriteRune(unicode.ToLower(char))
    }

    return result.String()
}
@@ -1,34 +0,0 @@
//go:build windows || darwin

package format

import "testing"

func TestKebabCase(t *testing.T) {
    tests := []struct {
        input    string
        expected string
    }{
        {"already-kebab-case", "already-kebab-case"},
        {"simpleCamelCase", "simple-camel-case"},
        {"PascalCase", "pascal-case"},
        {"camelCaseWithNumber123", "camel-case-with-number123"},
        {"APIResponse", "api-response"},
        {"mixedCASE", "mixed-case"},
        {"WithACRONYMS", "with-acronyms"},
        {"ALLCAPS", "allcaps"},
        {"camelCaseWITHMixedACRONYMS", "camel-case-with-mixed-acronyms"},
        {"numbers123in456string", "numbers123in456string"},
        {"5", "5"},
        {"S", "s"},
    }

    for _, tt := range tests {
        t.Run(tt.input, func(t *testing.T) {
            result := KebabCase(tt.input)
            if result != tt.expected {
                t.Errorf("toKebabCase(%q) = %q, want %q", tt.input, result, tt.expected)
            }
        })
    }
}
9
app/lifecycle/getstarted_nonwindows.go
Normal file
@@ -0,0 +1,9 @@
//go:build !windows

package lifecycle

import "fmt"

func GetStarted() error {
    return fmt.Errorf("GetStarted not implemented")
}
44
app/lifecycle/getstarted_windows.go
Normal file
@@ -0,0 +1,44 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os"
    "os/exec"
    "path/filepath"
    "syscall"
)

func GetStarted() error {
    const CREATE_NEW_CONSOLE = 0x00000010
    var err error
    bannerScript := filepath.Join(AppDir, "ollama_welcome.ps1")
    args := []string{
        // TODO once we're signed, the execution policy bypass should be removed
        "powershell", "-noexit", "-ExecutionPolicy", "Bypass", "-nologo", "-file", bannerScript,
    }
    args[0], err = exec.LookPath(args[0])
    if err != nil {
        return err
    }

    // Make sure the script actually exists
    _, err = os.Stat(bannerScript)
    if err != nil {
        return fmt.Errorf("getting started banner script error %s", err)
    }

    slog.Info(fmt.Sprintf("opening getting started terminal with %v", args))
    attrs := &os.ProcAttr{
        Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
        Sys:   &syscall.SysProcAttr{CreationFlags: CREATE_NEW_CONSOLE, HideWindow: false},
    }
    proc, err := os.StartProcess(args[0], args, attrs)

    if err != nil {
        return fmt.Errorf("unable to start getting started shell %w", err)
    }

    slog.Debug(fmt.Sprintf("getting started terminal PID: %d", proc.Pid))
    return proc.Release()
}
92
app/lifecycle/lifecycle.go
Normal file
@@ -0,0 +1,92 @@
package lifecycle

import (
    "context"
    "fmt"
    "log"
    "log/slog"
    "os"
    "os/signal"
    "syscall"

    "github.com/ollama/ollama/app/store"
    "github.com/ollama/ollama/app/tray"
)

func Run() {
    InitLogging()

    ctx, cancel := context.WithCancel(context.Background())
    var done chan int

    t, err := tray.NewTray()
    if err != nil {
        log.Fatalf("Failed to start: %s", err)
    }
    callbacks := t.GetCallbacks()

    signals := make(chan os.Signal, 1)
    signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)

    go func() {
        slog.Debug("starting callback loop")
        for {
            select {
            case <-callbacks.Quit:
                slog.Debug("quit called")
                t.Quit()
            case <-signals:
                slog.Debug("shutting down due to signal")
                t.Quit()
            case <-callbacks.Update:
                err := DoUpgrade(cancel, done)
                if err != nil {
                    slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err))
                }
            case <-callbacks.ShowLogs:
                ShowLogs()
            case <-callbacks.DoFirstUse:
                err := GetStarted()
                if err != nil {
                    slog.Warn(fmt.Sprintf("Failed to launch getting started shell: %s", err))
                }
            }
        }
    }()

    // Are we first use?
    if !store.GetFirstTimeRun() {
        slog.Debug("First time run")
        err = t.DisplayFirstUseNotification()
        if err != nil {
            slog.Debug(fmt.Sprintf("XXX failed to display first use notification %v", err))
        }
        store.SetFirstTimeRun(true)
    } else {
        slog.Debug("Not first time, skipping first run notification")
    }

    if IsServerRunning(ctx) {
        slog.Info("Detected another instance of ollama running, exiting")
        os.Exit(1)
    } else {
        done, err = SpawnServer(ctx, CLIName)
        if err != nil {
            // TODO - should we retry in a backoff loop?
            // TODO - should we pop up a warning and maybe add a menu item to view application logs?
            slog.Error(fmt.Sprintf("Failed to spawn ollama server %s", err))
            done = make(chan int, 1)
            done <- 1
        }
    }

    StartBackgroundUpdaterChecker(ctx, t.UpdateAvailable)

    t.Run()
    cancel()
    slog.Info("Waiting for ollama server to shutdown...")
    if done != nil {
        <-done
    }
    slog.Info("Ollama app exiting")
}
80
app/lifecycle/logging.go
Normal file
@@ -0,0 +1,80 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os"
    "path/filepath"
    "strconv"
    "strings"

    "github.com/ollama/ollama/envconfig"
)

func InitLogging() {
    level := slog.LevelInfo

    if envconfig.Debug {
        level = slog.LevelDebug
    }

    var logFile *os.File
    var err error
    // Detect if we're a GUI app on windows, and if not, send logs to console
    if os.Stderr.Fd() != 0 {
        // Console app detected
        logFile = os.Stderr
        // TODO - write one-line to the app.log file saying we're running in console mode to help avoid confusion
    } else {
        rotateLogs(AppLogFile)
        logFile, err = os.OpenFile(AppLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0755)
        if err != nil {
            slog.Error(fmt.Sprintf("failed to create server log %v", err))
            return
        }
    }
    handler := slog.NewTextHandler(logFile, &slog.HandlerOptions{
        Level:     level,
        AddSource: true,
        ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
            if attr.Key == slog.SourceKey {
                source := attr.Value.Any().(*slog.Source)
                source.File = filepath.Base(source.File)
            }
            return attr
        },
    })

    slog.SetDefault(slog.New(handler))

    slog.Info("ollama app started")
}

func rotateLogs(logFile string) {
    if _, err := os.Stat(logFile); os.IsNotExist(err) {
        return
    }
    index := strings.LastIndex(logFile, ".")
    pre := logFile[:index]
    post := "." + logFile[index+1:]
    for i := LogRotationCount; i > 0; i-- {
        older := pre + "-" + strconv.Itoa(i) + post
        newer := pre + "-" + strconv.Itoa(i-1) + post
        if i == 1 {
            newer = pre + post
        }
        if _, err := os.Stat(newer); err == nil {
            if _, err := os.Stat(older); err == nil {
                err := os.Remove(older)
                if err != nil {
                    slog.Warn("Failed to remove older log", "older", older, "error", err)
                    continue
                }
            }
            err := os.Rename(newer, older)
            if err != nil {
                slog.Warn("Failed to rotate log", "older", older, "newer", newer, "error", err)
            }
        }
    }
}
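To make the rotation scheme above concrete: with a log named "app.log" and LogRotationCount = 5, rotateLogs shifts app-4.log to app-5.log, then app-3.log to app-4.log, and so on down to app.log becoming app-1.log, dropping the oldest file if every slot is taken. A tiny standalone sketch of that naming arithmetic (hypothetical file names, not part of the app):

```go
package main

import (
    "fmt"
    "strings"
)

func main() {
    logFile := "app.log" // hypothetical name
    count := 5           // mirrors LogRotationCount

    index := strings.LastIndex(logFile, ".")
    pre, post := logFile[:index], "."+logFile[index+1:]

    // Oldest-first shift order, matching the loop in rotateLogs.
    for i := count; i > 0; i-- {
        newer := pre + "-" + fmt.Sprint(i-1) + post
        if i == 1 {
            newer = logFile
        }
        older := pre + "-" + fmt.Sprint(i) + post
        fmt.Printf("rename %s -> %s (if it exists)\n", newer, older)
    }
}
```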
9
app/lifecycle/logging_nonwindows.go
Normal file
@@ -0,0 +1,9 @@
//go:build !windows

package lifecycle

import "log/slog"

func ShowLogs() {
    slog.Warn("ShowLogs not yet implemented")
}
44
app/lifecycle/logging_test.go
Normal file
@@ -0,0 +1,44 @@
package lifecycle

import (
    "os"
    "path/filepath"
    "strconv"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestRotateLogs(t *testing.T) {
    logDir := t.TempDir()
    logFile := filepath.Join(logDir, "testlog.log")

    // No log exists
    rotateLogs(logFile)

    require.NoError(t, os.WriteFile(logFile, []byte("1"), 0644))
    assert.FileExists(t, logFile)
    // First rotation
    rotateLogs(logFile)
    assert.FileExists(t, filepath.Join(logDir, "testlog-1.log"))
    assert.NoFileExists(t, filepath.Join(logDir, "testlog-2.log"))
    assert.NoFileExists(t, logFile)

    // Should be a no-op without a new log
    rotateLogs(logFile)
    assert.FileExists(t, filepath.Join(logDir, "testlog-1.log"))
    assert.NoFileExists(t, filepath.Join(logDir, "testlog-2.log"))
    assert.NoFileExists(t, logFile)

    for i := 2; i <= LogRotationCount+1; i++ {
        require.NoError(t, os.WriteFile(logFile, []byte(strconv.Itoa(i)), 0644))
        assert.FileExists(t, logFile)
        rotateLogs(logFile)
        assert.NoFileExists(t, logFile)
        for j := 1; j < i; j++ {
            assert.FileExists(t, filepath.Join(logDir, "testlog-"+strconv.Itoa(j)+".log"))
        }
        assert.NoFileExists(t, filepath.Join(logDir, "testlog-"+strconv.Itoa(i+1)+".log"))
    }
}
19
app/lifecycle/logging_windows.go
Normal file
@@ -0,0 +1,19 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os/exec"
    "syscall"
)

func ShowLogs() {
    cmd_path := "c:\\Windows\\system32\\cmd.exe"
    slog.Debug(fmt.Sprintf("viewing logs with start %s", AppDataDir))
    cmd := exec.Command(cmd_path, "/c", "start", AppDataDir)
    cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: false, CreationFlags: 0x08000000}
    err := cmd.Start()
    if err != nil {
        slog.Error(fmt.Sprintf("Failed to open log dir: %s", err))
    }
}
79
app/lifecycle/paths.go
Normal file
@@ -0,0 +1,79 @@
package lifecycle

import (
    "errors"
    "fmt"
    "log/slog"
    "os"
    "path/filepath"
    "runtime"
    "strings"
)

var (
    AppName    = "ollama app"
    CLIName    = "ollama"
    AppDir     = "/opt/Ollama"
    AppDataDir = "/opt/Ollama"
    // TODO - should there be a distinct log dir?
    UpdateStageDir   = "/tmp"
    AppLogFile       = "/tmp/ollama_app.log"
    ServerLogFile    = "/tmp/ollama.log"
    UpgradeLogFile   = "/tmp/ollama_update.log"
    Installer        = "OllamaSetup.exe"
    LogRotationCount = 5
)

func init() {
    if runtime.GOOS == "windows" {
        AppName += ".exe"
        CLIName += ".exe"
        // Logs, configs, downloads go to LOCALAPPDATA
        localAppData := os.Getenv("LOCALAPPDATA")
        AppDataDir = filepath.Join(localAppData, "Ollama")
        UpdateStageDir = filepath.Join(AppDataDir, "updates")
        AppLogFile = filepath.Join(AppDataDir, "app.log")
        ServerLogFile = filepath.Join(AppDataDir, "server.log")
        UpgradeLogFile = filepath.Join(AppDataDir, "upgrade.log")

        // Executables are stored in APPDATA
        AppDir = filepath.Join(localAppData, "Programs", "Ollama")

        // Make sure we have PATH set correctly for any spawned children
        paths := strings.Split(os.Getenv("PATH"), ";")
        // Start with whatever we find in the PATH/LD_LIBRARY_PATH
        found := false
        for _, path := range paths {
            d, err := filepath.Abs(path)
            if err != nil {
                continue
            }
            if strings.EqualFold(AppDir, d) {
                found = true
            }
        }
        if !found {
            paths = append(paths, AppDir)

            pathVal := strings.Join(paths, ";")
            slog.Debug("setting PATH=" + pathVal)
            err := os.Setenv("PATH", pathVal)
            if err != nil {
                slog.Error(fmt.Sprintf("failed to update PATH: %s", err))
            }
        }

        // Make sure our logging dir exists
        _, err := os.Stat(AppDataDir)
        if errors.Is(err, os.ErrNotExist) {
            if err := os.MkdirAll(AppDataDir, 0o755); err != nil {
                slog.Error(fmt.Sprintf("create ollama dir %s: %v", AppDataDir, err))
            }
        }
    } else if runtime.GOOS == "darwin" {
        // TODO
        AppName += ".app"
        // } else if runtime.GOOS == "linux" {
        // TODO
    }
}
180
app/lifecycle/server.go
Normal file
@@ -0,0 +1,180 @@
|
||||
package lifecycle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/ollama/ollama/api"
|
||||
)
|
||||
|
||||
func getCLIFullPath(command string) string {
|
||||
var cmdPath string
|
||||
appExe, err := os.Executable()
|
||||
if err == nil {
|
||||
cmdPath = filepath.Join(filepath.Dir(appExe), command)
|
||||
_, err := os.Stat(cmdPath)
|
||||
if err == nil {
|
||||
return cmdPath
|
||||
}
|
||||
}
|
||||
cmdPath, err = exec.LookPath(command)
|
||||
if err == nil {
|
||||
_, err := os.Stat(cmdPath)
|
||||
if err == nil {
|
||||
return cmdPath
|
||||
}
|
||||
}
|
||||
pwd, err := os.Getwd()
|
||||
if err == nil {
|
||||
cmdPath = filepath.Join(pwd, command)
|
||||
_, err = os.Stat(cmdPath)
|
||||
if err == nil {
|
||||
return cmdPath
|
||||
}
|
||||
}
|
||||
|
||||
return command
|
||||
}
|
||||
|
||||
func start(ctx context.Context, command string) (*exec.Cmd, error) {
|
||||
cmd := getCmd(ctx, getCLIFullPath(command))
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to spawn server stdout pipe: %w", err)
|
||||
}
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to spawn server stderr pipe: %w", err)
|
||||
}
|
||||
|
||||
rotateLogs(ServerLogFile)
|
||||
logFile, err := os.OpenFile(ServerLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0755)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create server log: %w", err)
|
||||
}
|
||||
|
||||
logDir := filepath.Dir(ServerLogFile)
|
||||
_, err = os.Stat(logDir)
|
||||
if err != nil {
|
||||
if !errors.Is(err, os.ErrNotExist) {
|
||||
return nil, fmt.Errorf("stat ollama server log dir %s: %v", logDir, err)
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(logDir, 0o755); err != nil {
|
||||
return nil, fmt.Errorf("create ollama server log dir %s: %v", logDir, err)
|
||||
}
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer logFile.Close()
|
||||
io.Copy(logFile, stdout) //nolint:errcheck
|
||||
}()
|
||||
go func() {
|
||||
defer logFile.Close()
|
||||
io.Copy(logFile, stderr) //nolint:errcheck
|
||||
}()
|
||||
|
||||
// Re-wire context done behavior to attempt a graceful shutdown of the server
|
||||
cmd.Cancel = func() error {
|
||||
if cmd.Process != nil {
|
||||
err := terminate(cmd)
|
||||
if err != nil {
|
||||
slog.Warn("error trying to gracefully terminate server", "err", err)
|
||||
return cmd.Process.Kill()
|
||||
}
|
||||
|
||||
tick := time.NewTicker(10 * time.Millisecond)
|
||||
defer tick.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-tick.C:
|
||||
exited, err := isProcessExited(cmd.Process.Pid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if exited {
|
||||
return nil
|
||||
}
|
||||
case <-time.After(5 * time.Second):
|
||||
slog.Warn("graceful server shutdown timeout, killing", "pid", cmd.Process.Pid)
|
||||
return cmd.Process.Kill()
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// run the command and wait for it to finish
|
||||
if err := cmd.Start(); err != nil {
|
||||
return nil, fmt.Errorf("failed to start server %w", err)
|
||||
}
|
||||
if cmd.Process != nil {
|
||||
slog.Info(fmt.Sprintf("started ollama server with pid %d", cmd.Process.Pid))
|
||||
}
|
||||
slog.Info(fmt.Sprintf("ollama server logs %s", ServerLogFile))
|
||||
|
||||
return cmd, nil
|
||||
}
|
||||
|
||||
func SpawnServer(ctx context.Context, command string) (chan int, error) {
|
||||
done := make(chan int)
|
||||
|
||||
go func() {
|
||||
// Keep the server running unless we're shutting down the app
|
||||
crashCount := 0
|
||||
for {
|
||||
slog.Info("starting server...")
|
||||
cmd, err := start(ctx, command)
|
||||
if err != nil {
|
||||
crashCount++
|
||||
slog.Error(fmt.Sprintf("failed to start server %s", err))
|
||||
time.Sleep(500 * time.Millisecond * time.Duration(crashCount))
|
||||
continue
|
||||
}
|
||||
|
||||
cmd.Wait() //nolint:errcheck
|
||||
var code int
|
||||
if cmd.ProcessState != nil {
|
||||
code = cmd.ProcessState.ExitCode()
|
||||
}
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
slog.Info(fmt.Sprintf("server shutdown with exit code %d", code))
|
||||
done <- code
|
||||
return
|
||||
default:
|
||||
crashCount++
|
||||
slog.Warn(fmt.Sprintf("server crash %d - exit code %d - respawning", crashCount, code))
|
||||
time.Sleep(500 * time.Millisecond * time.Duration(crashCount))
|
||||
break
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return done, nil
|
||||
}
|
||||
|
||||
func IsServerRunning(ctx context.Context) bool {
|
||||
client, err := api.ClientFromEnvironment()
|
||||
if err != nil {
|
||||
slog.Info("unable to connect to server")
|
||||
return false
|
||||
}
|
||||
err = client.Heartbeat(ctx)
|
||||
if err != nil {
|
||||
slog.Debug(fmt.Sprintf("heartbeat from server: %s", err))
|
||||
slog.Info("unable to connect to server")
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
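The Cancel hook above attempts a graceful terminate when the context is done and falls back to killing the process. A minimal sketch of the same exec.Cmd cancellation pattern (assumes Go 1.20+ and a POSIX `sleep` binary; the five-second grace period mirrors the timeout used above):

```go
package main

import (
    "context"
    "os"
    "os/exec"
    "time"
)

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    defer cancel()

    cmd := exec.CommandContext(ctx, "sleep", "60")
    cmd.Cancel = func() error {
        // Ask politely first; exec kills the process after WaitDelay expires.
        return cmd.Process.Signal(os.Interrupt)
    }
    cmd.WaitDelay = 5 * time.Second

    _ = cmd.Run() // returns once the deadline fires and the process exits
}
```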
38
app/lifecycle/server_unix.go
Normal file
@@ -0,0 +1,38 @@
//go:build !windows

package lifecycle

import (
    "context"
    "errors"
    "fmt"
    "os"
    "os/exec"
    "syscall"
)

func getCmd(ctx context.Context, cmd string) *exec.Cmd {
    return exec.CommandContext(ctx, cmd, "serve")
}

func terminate(cmd *exec.Cmd) error {
    return cmd.Process.Signal(os.Interrupt)
}

func isProcessExited(pid int) (bool, error) {
    proc, err := os.FindProcess(pid)
    if err != nil {
        return false, fmt.Errorf("failed to find process: %v", err)
    }

    err = proc.Signal(syscall.Signal(0))
    if err != nil {
        if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) {
            return true, nil
        }

        return false, fmt.Errorf("error signaling process: %v", err)
    }

    return false, nil
}
91
app/lifecycle/server_windows.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package lifecycle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
"syscall"
|
||||
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
func getCmd(ctx context.Context, exePath string) *exec.Cmd {
|
||||
cmd := exec.CommandContext(ctx, exePath, "serve")
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{
|
||||
HideWindow: true,
|
||||
CreationFlags: windows.CREATE_NEW_PROCESS_GROUP,
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func terminate(cmd *exec.Cmd) error {
|
||||
dll, err := windows.LoadDLL("kernel32.dll")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
//nolint:errcheck
|
||||
defer dll.Release()
|
||||
|
||||
pid := cmd.Process.Pid
|
||||
|
||||
f, err := dll.FindProc("AttachConsole")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err := f.Call(uintptr(pid))
|
||||
if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED {
|
||||
return err
|
||||
}
|
||||
|
||||
f, err = dll.FindProc("SetConsoleCtrlHandler")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(0, 1)
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
f, err = dll.FindProc("GenerateConsoleCtrlEvent")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid))
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid))
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
const STILL_ACTIVE = 259
|
||||
|
||||
func isProcessExited(pid int) (bool, error) {
|
||||
hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid))
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to open process: %v", err)
|
||||
}
|
||||
//nolint:errcheck
|
||||
defer windows.CloseHandle(hProcess)
|
||||
|
||||
var exitCode uint32
|
||||
err = windows.GetExitCodeProcess(hProcess, &exitCode)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get exit code: %v", err)
|
||||
}
|
||||
|
||||
if exitCode == STILL_ACTIVE {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
@@ -1,6 +1,4 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package updater
|
||||
package lifecycle
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -17,28 +15,17 @@ import (
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ollama/ollama/app/store"
|
||||
"github.com/ollama/ollama/app/version"
|
||||
"github.com/ollama/ollama/auth"
|
||||
"github.com/ollama/ollama/version"
|
||||
)
|
||||
|
||||
var (
|
||||
UpdateCheckURLBase = "https://api.github.com/repos/likelovewant/ollama-for-amd/releases/:id"
|
||||
UpdateCheckURLBase = "https://ollama.com/api/update"
|
||||
UpdateDownloaded = false
|
||||
UpdateCheckInterval = 60 * 60 * time.Second
|
||||
UpdateCheckInitialDelay = 3 * time.Second // 30 * time.Second
|
||||
|
||||
UpdateStageDir string
|
||||
UpgradeLogFile string
|
||||
UpgradeMarkerFile string
|
||||
Installer string
|
||||
UserAgentOS string
|
||||
|
||||
VerifyDownload func() error
|
||||
)
|
||||
|
||||
// TODO - maybe move up to the API package?
|
||||
@@ -47,7 +34,7 @@ type UpdateResponse struct {
|
||||
UpdateVersion string `json:"version"`
|
||||
}
|
||||
|
||||
func (u *Updater) checkForUpdate(ctx context.Context) (bool, UpdateResponse) {
|
||||
func IsNewReleaseAvailable(ctx context.Context) (bool, UpdateResponse) {
|
||||
var updateResp UpdateResponse
|
||||
|
||||
requestURL, err := url.Parse(UpdateCheckURLBase)
|
||||
@@ -59,31 +46,20 @@ func (u *Updater) checkForUpdate(ctx context.Context) (bool, UpdateResponse) {
|
||||
query.Add("os", runtime.GOOS)
|
||||
query.Add("arch", runtime.GOARCH)
|
||||
query.Add("version", version.Version)
|
||||
query.Add("ts", strconv.FormatInt(time.Now().Unix(), 10))
|
||||
|
||||
// The original macOS app used to use the device ID
|
||||
// to check for updates so include it if present
|
||||
if runtime.GOOS == "darwin" {
|
||||
if id, err := u.Store.ID(); err == nil && id != "" {
|
||||
query.Add("id", id)
|
||||
}
|
||||
}
|
||||
|
||||
var signature string
|
||||
query.Add("ts", fmt.Sprintf("%d", time.Now().Unix()))
|
||||
|
||||
nonce, err := auth.NewNonce(rand.Reader, 16)
|
||||
if err != nil {
|
||||
// Don't sign if we haven't yet generated a key pair for the server
|
||||
slog.Debug("unable to generate nonce for update check request", "error", err)
|
||||
} else {
|
||||
return false, updateResp
|
||||
}
|
||||
|
||||
query.Add("nonce", nonce)
|
||||
requestURL.RawQuery = query.Encode()
|
||||
|
||||
data := []byte(fmt.Sprintf("%s,%s", http.MethodGet, requestURL.RequestURI()))
|
||||
signature, err = auth.Sign(ctx, data)
|
||||
signature, err := auth.Sign(ctx, data)
|
||||
if err != nil {
|
||||
slog.Debug("unable to generate signature for update check request", "error", err)
|
||||
}
|
||||
return false, updateResp
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, requestURL.String(), nil)
|
||||
@@ -91,13 +67,10 @@ func (u *Updater) checkForUpdate(ctx context.Context) (bool, UpdateResponse) {
|
||||
slog.Warn(fmt.Sprintf("failed to check for update: %s", err))
|
||||
return false, updateResp
|
||||
}
|
||||
if signature != "" {
|
||||
req.Header.Set("Authorization", signature)
|
||||
}
|
||||
ua := fmt.Sprintf("ollama/%s %s Go/%s %s", version.Version, runtime.GOARCH, runtime.Version(), UserAgentOS)
|
||||
req.Header.Set("User-Agent", ua)
|
||||
req.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
|
||||
|
||||
slog.Debug("checking for available update", "requestURL", requestURL, "User-Agent", ua)
|
||||
slog.Debug("checking for available update", "requestURL", requestURL)
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintf("failed to check for update: %s", err))
|
||||
@@ -130,27 +103,13 @@ func (u *Updater) checkForUpdate(ctx context.Context) (bool, UpdateResponse) {
|
||||
return true, updateResp
|
||||
}
|
||||
|
||||
func (u *Updater) DownloadNewRelease(ctx context.Context, updateResp UpdateResponse) error {
|
||||
func DownloadNewRelease(ctx context.Context, updateResp UpdateResponse) error {
|
||||
// Do a head first to check etag info
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodHead, updateResp.UpdateURL, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// In case of slow downloads, continue the update check in the background
|
||||
bgctx, cancel := context.WithCancel(ctx)
|
||||
defer cancel()
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-bgctx.Done():
|
||||
return
|
||||
case <-time.After(UpdateCheckInterval):
|
||||
u.checkForUpdate(bgctx)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error checking update: %w", err)
|
||||
@@ -175,11 +134,11 @@ func (u *Updater) DownloadNewRelease(ctx context.Context, updateResp UpdateRespo
|
||||
// Check to see if we already have it downloaded
|
||||
_, err = os.Stat(stageFilename)
|
||||
if err == nil {
|
||||
slog.Info("update already downloaded", "bundle", stageFilename)
|
||||
slog.Info("update already downloaded")
|
||||
return nil
|
||||
}
|
||||
|
||||
cleanupOldDownloads(UpdateStageDir)
|
||||
cleanupOldDownloads()
|
||||
|
||||
req.Method = http.MethodGet
|
||||
resp, err = http.DefaultClient.Do(req)
|
||||
@@ -216,16 +175,12 @@ func (u *Updater) DownloadNewRelease(ctx context.Context, updateResp UpdateRespo
|
||||
}
|
||||
slog.Info("new update downloaded " + stageFilename)
|
||||
|
||||
if err := VerifyDownload(); err != nil {
|
||||
_ = os.Remove(stageFilename)
|
||||
return fmt.Errorf("%s - %s", resp.Request.URL.String(), err)
|
||||
}
|
||||
UpdateDownloaded = true
|
||||
return nil
|
||||
}
|
||||
|
||||
func cleanupOldDownloads(stageDir string) {
|
||||
files, err := os.ReadDir(stageDir)
|
||||
func cleanupOldDownloads() {
|
||||
files, err := os.ReadDir(UpdateStageDir)
|
||||
if err != nil && errors.Is(err, os.ErrNotExist) {
|
||||
// Expected behavior on first run
|
||||
return
|
||||
@@ -234,7 +189,7 @@ func cleanupOldDownloads(stageDir string) {
|
||||
return
|
||||
}
|
||||
for _, file := range files {
|
||||
fullname := filepath.Join(stageDir, file.Name())
|
||||
fullname := filepath.Join(UpdateStageDir, file.Name())
|
||||
slog.Debug("cleaning up old download: " + fullname)
|
||||
err = os.RemoveAll(fullname)
|
||||
if err != nil {
|
||||
@@ -243,28 +198,24 @@ func cleanupOldDownloads(stageDir string) {
|
||||
}
|
||||
}
|
||||
|
||||
type Updater struct {
|
||||
Store *store.Store
|
||||
}
|
||||
|
||||
func (u *Updater) StartBackgroundUpdaterChecker(ctx context.Context, cb func(string) error) {
|
||||
func StartBackgroundUpdaterChecker(ctx context.Context, cb func(string) error) {
|
||||
go func() {
|
||||
// Don't blast an update message immediately after startup
|
||||
time.Sleep(UpdateCheckInitialDelay)
|
||||
slog.Info("beginning update checker", "interval", UpdateCheckInterval)
|
||||
// time.Sleep(30 * time.Second)
|
||||
time.Sleep(3 * time.Second)
|
||||
|
||||
for {
|
||||
available, resp := u.checkForUpdate(ctx)
|
||||
available, resp := IsNewReleaseAvailable(ctx)
|
||||
if available {
|
||||
err := u.DownloadNewRelease(ctx, resp)
|
||||
err := DownloadNewRelease(ctx, resp)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("failed to download new release: %s", err))
|
||||
} else {
|
||||
}
|
||||
err = cb(resp.UpdateVersion)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintf("failed to register update available with tray: %s", err))
|
||||
}
|
||||
}
|
||||
}
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
slog.Debug("stopping background update checker")
|
||||
12
app/lifecycle/updater_nonwindows.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build !windows
|
||||
|
||||
package lifecycle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func DoUpgrade(cancel context.CancelFunc, done chan int) error {
|
||||
return fmt.Errorf("DoUpgrade not yet implemented")
|
||||
}
|
||||
77
app/lifecycle/updater_windows.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package lifecycle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
func DoUpgrade(cancel context.CancelFunc, done chan int) error {
|
||||
files, err := filepath.Glob(filepath.Join(UpdateStageDir, "*", "*.exe")) // TODO generalize for multiplatform
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to lookup downloads: %s", err)
|
||||
}
|
||||
if len(files) == 0 {
|
||||
return fmt.Errorf("no update downloads found")
|
||||
} else if len(files) > 1 {
|
||||
// Shouldn't happen
|
||||
slog.Warn(fmt.Sprintf("multiple downloads found, using first one %v", files))
|
||||
}
|
||||
installerExe := files[0]
|
||||
|
||||
slog.Info("starting upgrade with " + installerExe)
|
||||
slog.Info("upgrade log file " + UpgradeLogFile)
|
||||
|
||||
// When running in debug mode, we'll be "verbose" and let the installer pop up and prompt
|
||||
installArgs := []string{
|
||||
"/CLOSEAPPLICATIONS", // Quit the tray app if it's still running
|
||||
"/LOG=" + filepath.Base(UpgradeLogFile), // Only relative seems reliable, so set pwd
|
||||
"/FORCECLOSEAPPLICATIONS", // Force close the tray app - might be needed
|
||||
}
|
||||
// make the upgrade as quiet as possible (no GUI, no prompts)
|
||||
installArgs = append(installArgs,
|
||||
"/SP", // Skip the "This will install... Do you wish to continue" prompt
|
||||
"/SUPPRESSMSGBOXES",
|
||||
"/SILENT",
|
||||
"/VERYSILENT",
|
||||
)
|
||||
|
||||
// Safeguard in case we have requests in flight that need to drain...
|
||||
slog.Info("Waiting for server to shutdown")
|
||||
cancel()
|
||||
if done != nil {
|
||||
<-done
|
||||
} else {
|
||||
// Shouldn't happen
|
||||
slog.Warn("done chan was nil, not actually waiting")
|
||||
}
|
||||
|
||||
slog.Debug(fmt.Sprintf("starting installer: %s %v", installerExe, installArgs))
|
||||
os.Chdir(filepath.Dir(UpgradeLogFile)) //nolint:errcheck
|
||||
cmd := exec.Command(installerExe, installArgs...)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
return fmt.Errorf("unable to start ollama app %w", err)
|
||||
}
|
||||
|
||||
if cmd.Process != nil {
|
||||
err = cmd.Process.Release()
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("failed to release server process: %s", err))
|
||||
}
|
||||
} else {
|
||||
// TODO - some details about why it didn't start, or is this a pedantic error case?
|
||||
return fmt.Errorf("installer process did not start")
|
||||
}
|
||||
|
||||
// TODO should we linger for a moment and check to make sure it's actually running by checking the pid?
|
||||
|
||||
slog.Info("Installer started in background, exiting")
|
||||
|
||||
os.Exit(0)
|
||||
// Not reached
|
||||
return nil
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
// package logrotate provides utilities for rotating logs
|
||||
// TODO (jmorgan): this most likely doesn't need it's own
|
||||
// package and can be moved to app where log files are created
|
||||
package logrotate
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const MaxLogFiles = 5
|
||||
|
||||
func Rotate(filename string) {
|
||||
if _, err := os.Stat(filename); os.IsNotExist(err) {
|
||||
return
|
||||
}
|
||||
|
||||
index := strings.LastIndex(filename, ".")
|
||||
pre := filename[:index]
|
||||
post := "." + filename[index+1:]
|
||||
for i := MaxLogFiles; i > 0; i-- {
|
||||
older := pre + "-" + strconv.Itoa(i) + post
|
||||
newer := pre + "-" + strconv.Itoa(i-1) + post
|
||||
if i == 1 {
|
||||
newer = pre + post
|
||||
}
|
||||
if _, err := os.Stat(newer); err == nil {
|
||||
if _, err := os.Stat(older); err == nil {
|
||||
err := os.Remove(older)
|
||||
if err != nil {
|
||||
slog.Warn("Failed to remove older log", "older", older, "error", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
err := os.Rename(newer, older)
|
||||
if err != nil {
|
||||
slog.Warn("Failed to rotate log", "older", older, "newer", newer, "error", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package logrotate
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRotate(t *testing.T) {
|
||||
logDir := t.TempDir()
|
||||
logFile := filepath.Join(logDir, "testlog.log")
|
||||
|
||||
// No log exists
|
||||
Rotate(logFile)
|
||||
|
||||
if err := os.WriteFile(logFile, []byte("1"), 0o644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if _, err := os.Stat(logFile); os.IsNotExist(err) {
|
||||
t.Fatal("expected log file to exist")
|
||||
}
|
||||
|
||||
// First rotation
|
||||
Rotate(logFile)
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
|
||||
t.Fatal("expected rotated log file to exist")
|
||||
}
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
|
||||
t.Fatal("expected no second rotated log file")
|
||||
}
|
||||
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
|
||||
t.Fatal("expected original log file to be moved")
|
||||
}
|
||||
|
||||
// Should be a no-op without a new log
|
||||
Rotate(logFile)
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
|
||||
t.Fatal("expected rotated log file to still exist")
|
||||
}
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
|
||||
t.Fatal("expected no second rotated log file")
|
||||
}
|
||||
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
|
||||
t.Fatal("expected no original log file")
|
||||
}
|
||||
|
||||
for i := 2; i <= MaxLogFiles+1; i++ {
|
||||
if err := os.WriteFile(logFile, []byte(strconv.Itoa(i)), 0o644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if _, err := os.Stat(logFile); os.IsNotExist(err) {
|
||||
t.Fatal("expected log file to exist")
|
||||
}
|
||||
Rotate(logFile)
|
||||
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
|
||||
t.Fatal("expected log file to be moved")
|
||||
}
|
||||
for j := 1; j < i; j++ {
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(j)+".log")); os.IsNotExist(err) {
|
||||
t.Fatalf("expected rotated log file %d to exist", j)
|
||||
}
|
||||
}
|
||||
if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(i+1)+".log")); !os.IsNotExist(err) {
|
||||
t.Fatalf("expected no rotated log file %d", i+1)
|
||||
}
|
||||
}
|
||||
}
|
||||
12
app/main.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package main
|
||||
|
||||
// Compile with the following to get rid of the cmd pop up on windows
|
||||
// go build -ldflags="-H windowsgui" .
|
||||
|
||||
import (
|
||||
"github.com/ollama/ollama/app/lifecycle"
|
||||
)
|
||||
|
||||
func main() {
|
||||
lifecycle.Run()
|
||||
}
|
||||
265
app/ollama.iss
@@ -28,8 +28,8 @@ AppPublisher={#MyAppPublisher}
|
||||
AppPublisherURL={#MyAppURL}
|
||||
AppSupportURL={#MyAppURL}
|
||||
AppUpdatesURL={#MyAppURL}
|
||||
;ArchitecturesAllowed=x64compatible arm64
|
||||
;ArchitecturesInstallIn64BitMode=x64compatible arm64
|
||||
ArchitecturesAllowed=x64 arm64
|
||||
ArchitecturesInstallIn64BitMode=x64 arm64
|
||||
DefaultDirName={localappdata}\Programs\{#MyAppName}
|
||||
DefaultGroupName={#MyAppName}
|
||||
DisableProgramGroupPage=yes
|
||||
@@ -37,10 +37,8 @@ PrivilegesRequired=lowest
|
||||
OutputBaseFilename="OllamaSetup"
|
||||
SetupIconFile={#MyIcon}
|
||||
UninstallDisplayIcon={uninstallexe}
|
||||
Compression=lzma2/ultra64
|
||||
LZMAUseSeparateProcess=yes
|
||||
LZMANumBlockThreads=8
|
||||
SolidCompression=yes
|
||||
Compression=lzma2
|
||||
SolidCompression=no
|
||||
WizardStyle=modern
|
||||
ChangesEnvironment=yes
|
||||
OutputDir=..\dist\
|
||||
@@ -48,15 +46,14 @@ OutputDir=..\dist\
|
||||
; Disable logging once everything's battle tested
|
||||
; Filename will be %TEMP%\Setup Log*.txt
|
||||
SetupLogging=yes
|
||||
CloseApplications=no
|
||||
CloseApplications=yes
|
||||
RestartApplications=no
|
||||
RestartIfNeededByRun=no
|
||||
|
||||
; https://jrsoftware.org/ishelp/index.php?topic=setup_wizardimagefile
|
||||
WizardSmallImageFile=.\assets\setup.bmp
|
||||
|
||||
; Ollama requires Windows 10 22H2 or newer for proper unicode rendering
|
||||
; TODO: consider setting this to 10.0.19045
|
||||
; TODO verifty actual min windows version...
|
||||
; OG Win 10
|
||||
MinVersion=10.0.10240
|
||||
|
||||
; First release that supports WinRT UI Composition for win32 apps
|
||||
@@ -70,6 +67,7 @@ DisableFinishedPage=yes
|
||||
DisableReadyMemo=yes
|
||||
DisableReadyPage=yes
|
||||
DisableStartupPrompt=yes
|
||||
DisableWelcomePage=yes
|
||||
|
||||
; TODO - percentage can't be set less than 100, so how to make it shorter?
|
||||
; WizardSizePercent=100,80
|
||||
@@ -88,42 +86,28 @@ Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||
DialogFontSize=12
|
||||
|
||||
[Files]
|
||||
#if FileExists("..\dist\windows-ollama-app-amd64.exe")
|
||||
Source: "..\dist\windows-ollama-app-amd64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: not IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
|
||||
Source: "..\dist\windows-amd64\vc_redist.x64.exe"; DestDir: "{tmp}"; Check: not IsArm64() and vc_redist_needed(); Flags: deleteafterinstall
|
||||
Source: "..\dist\windows-amd64\ollama.exe"; DestDir: "{app}"; Check: not IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('ollama.exe')
|
||||
Source: "..\dist\windows-amd64\lib\ollama\*"; DestDir: "{app}\lib\ollama\"; Check: not IsArm64(); Flags: ignoreversion 64bit recursesubdirs
|
||||
#endif
|
||||
|
||||
; For local development, rely on binary compatibility at runtime since we can't cross compile
|
||||
#if FileExists("..\dist\windows-ollama-app-arm64.exe")
|
||||
Source: "..\dist\windows-ollama-app-arm64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
|
||||
#else
|
||||
Source: "..\dist\windows-ollama-app-amd64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
|
||||
#endif
|
||||
|
||||
#if FileExists("..\dist\windows-arm64\ollama.exe")
|
||||
Source: "..\dist\windows-arm64\vc_redist.arm64.exe"; DestDir: "{tmp}"; Check: IsArm64() and vc_redist_needed(); Flags: deleteafterinstall
|
||||
Source: "..\dist\windows-arm64\ollama.exe"; DestDir: "{app}"; Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('ollama.exe')
|
||||
#endif
|
||||
|
||||
Source: ".\app.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ; Flags: ignoreversion 64bit
|
||||
Source: "..\ollama.exe"; DestDir: "{app}"; Flags: ignoreversion 64bit
|
||||
Source: "..\dist\windows-{#ARCH}\ollama_runners\*"; DestDir: "{app}\ollama_runners"; Flags: ignoreversion 64bit recursesubdirs
|
||||
Source: "..\dist\ollama_welcome.ps1"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: ".\assets\app.ico"; DestDir: "{app}"; Flags: ignoreversion
|
||||
#if DirExists("..\dist\windows-amd64\cuda")
|
||||
Source: "..\dist\windows-amd64\cuda\*"; DestDir: "{app}\cuda\"; Flags: ignoreversion recursesubdirs
|
||||
#endif
|
||||
#if DirExists("..\dist\windows-amd64\oneapi")
|
||||
Source: "..\dist\windows-amd64\oneapi\*"; DestDir: "{app}\oneapi\"; Flags: ignoreversion recursesubdirs
|
||||
#endif
|
||||
#if DirExists("..\dist\windows-amd64\rocm")
|
||||
Source: "..\dist\windows-amd64\rocm\*"; DestDir: "{app}\rocm\"; Flags: ignoreversion recursesubdirs
|
||||
#endif
|
||||
|
||||
|
||||
[Icons]
|
||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
|
||||
Name: "{app}\lib\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
|
||||
Name: "{userstartup}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
|
||||
Name: "{userprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
|
||||
|
||||
[InstallDelete]
|
||||
Type: files; Name: "{%LOCALAPPDATA}\Ollama\updates"
|
||||
|
||||
[Run]
|
||||
#if DirExists("..\dist\windows-arm64")
|
||||
Filename: "{tmp}\vc_redist.arm64.exe"; Parameters: "/install /passive /norestart"; Check: IsArm64() and vc_redist_needed(); StatusMsg: "Installing VC++ Redistributables..."; Flags: waituntilterminated
|
||||
#endif
|
||||
#if DirExists("..\dist\windows-amd64")
|
||||
Filename: "{tmp}\vc_redist.x64.exe"; Parameters: "/install /passive /norestart"; Check: not IsArm64() and vc_redist_needed(); StatusMsg: "Installing VC++ Redistributables..."; Flags: waituntilterminated
|
||||
#endif
|
||||
Filename: "{cmd}"; Parameters: "/C set PATH={app};%PATH% & ""{app}\{#MyAppExeName}"""; Flags: postinstall nowait runhidden
|
||||
|
||||
[UninstallRun]
|
||||
@@ -139,32 +123,24 @@ Filename: "{cmd}"; Parameters: "/c timeout 5"; Flags: runhidden
|
||||
Type: filesandordirs; Name: "{%TEMP}\ollama*"
|
||||
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Ollama"
|
||||
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama"
|
||||
Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\models"
|
||||
Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\history"
|
||||
Type: filesandordirs; Name: "{userstartup}\{#MyAppName}.lnk"
|
||||
; NOTE: if the user has a custom OLLAMA_MODELS it will be preserved
|
||||
|
||||
[InstallDelete]
|
||||
Type: filesandordirs; Name: "{%TEMP}\ollama*"
|
||||
Type: filesandordirs; Name: "{app}\lib\ollama"
|
||||
|
||||
[Messages]
|
||||
WizardReady=Ollama
|
||||
WizardReady=Ollama Windows Preview
|
||||
ReadyLabel1=%nLet's get you up and running with your own large language models.
|
||||
SetupAppRunningError=Another Ollama installer is running.%n%nPlease cancel or finish the other installer, then click OK to continue with this install, or Cancel to exit.
|
||||
|
||||
|
||||
;FinishedHeadingLabel=Run your first model
|
||||
;FinishedLabel=%nRun this command in a PowerShell or cmd terminal.%n%n%n ollama run llama3.2
|
||||
;FinishedLabel=%nRun this command in a PowerShell or cmd terminal.%n%n%n ollama run llama3
|
||||
;ClickFinish=%n
|
||||
|
||||
[Registry]
|
||||
Root: HKCU; Subkey: "Environment"; \
|
||||
ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}"; \
|
||||
Check: NeedsAddPath('{app}')
|
||||
; Register ollama:// URL protocol
|
||||
Root: HKCU; Subkey: "Software\Classes\ollama"; ValueType: string; ValueName: ""; ValueData: "URL:Ollama Protocol"; Flags: uninsdeletekey
|
||||
Root: HKCU; Subkey: "Software\Classes\ollama"; ValueType: string; ValueName: "URL Protocol"; ValueData: ""; Flags: uninsdeletekey
|
||||
Root: HKCU; Subkey: "Software\Classes\ollama\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#MyAppExeName}"" ""%1"""; Flags: uninsdeletekey
|
||||
|
||||
[Code]
|
||||
|
||||
@@ -183,192 +159,3 @@ begin
|
||||
{ Pos() returns 0 if not found }
|
||||
Result := Pos(';' + ExpandConstant(Param) + ';', ';' + OrigPath + ';') = 0;
|
||||
end;
|
||||
|
||||
{ --- VC Runtime libraries discovery code - Only install vc_redist if it isn't already installed ----- }
|
||||
const VCRTL_MIN_V1 = 14;
|
||||
const VCRTL_MIN_V2 = 40;
|
||||
const VCRTL_MIN_V3 = 33807;
|
||||
const VCRTL_MIN_V4 = 0;
|
||||
|
||||
// check if the minimum required vc redist is installed (by looking the registry)
|
||||
function vc_redist_needed (): Boolean;
|
||||
var
|
||||
sRegKey: string;
|
||||
v1: Cardinal;
|
||||
v2: Cardinal;
|
||||
v3: Cardinal;
|
||||
v4: Cardinal;
|
||||
begin
|
||||
if (IsArm64()) then begin
|
||||
sRegKey := 'SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\arm64';
|
||||
end else begin
|
||||
sRegKey := 'SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64';
|
||||
end;
|
||||
if (RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Major', v1) and
|
||||
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Minor', v2) and
|
||||
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Bld', v3) and
|
||||
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'RBld', v4)) then
|
||||
begin
|
||||
Log ('VC Redist version: ' + IntToStr (v1) +
|
||||
'.' + IntToStr (v2) + '.' + IntToStr (v3) +
|
||||
'.' + IntToStr (v4));
|
||||
{ Version info was found. Return true if later or equal to our
|
||||
minimal required version RTL_MIN_Vx }
|
||||
Result := not (
|
||||
(v1 > VCRTL_MIN_V1) or ((v1 = VCRTL_MIN_V1) and
|
||||
((v2 > VCRTL_MIN_V2) or ((v2 = VCRTL_MIN_V2) and
|
||||
((v3 > VCRTL_MIN_V3) or ((v3 = VCRTL_MIN_V3) and
|
||||
(v4 >= VCRTL_MIN_V4)))))));
|
||||
end
|
||||
else
|
||||
Result := TRUE;
|
||||
end;
|
||||
|
||||
function GetDirSize(Path: String): Int64;
|
||||
var
|
||||
FindRec: TFindRec;
|
||||
FilePath: string;
|
||||
Size: Int64;
|
||||
begin
|
||||
if FindFirst(Path + '\*', FindRec) then begin
|
||||
Result := 0;
|
||||
try
|
||||
repeat
|
||||
if (FindRec.Name <> '.') and (FindRec.Name <> '..') then begin
|
||||
FilePath := Path + '\' + FindRec.Name;
|
||||
if (FindRec.Attributes and FILE_ATTRIBUTE_DIRECTORY) <> 0 then begin
|
||||
Size := GetDirSize(FilePath);
|
||||
end else begin
|
||||
Size := Int64(FindRec.SizeHigh) shl 32 + FindRec.SizeLow;
|
||||
end;
|
||||
Result := Result + Size;
|
||||
end;
|
||||
until not FindNext(FindRec);
|
||||
finally
|
||||
FindClose(FindRec);
|
||||
end;
|
||||
end else begin
|
||||
Log(Format('Failed to list %s', [Path]));
|
||||
Result := -1;
|
||||
end;
|
||||
end;
|
||||
|
||||
var
|
||||
DeleteModelsChecked: Boolean;
|
||||
ModelsDir: string;
|
||||
|
||||
procedure InitializeUninstallProgressForm();
|
||||
var
|
||||
UninstallPage: TNewNotebookPage;
|
||||
UninstallButton: TNewButton;
|
||||
DeleteModelsCheckbox: TNewCheckBox;
|
||||
OriginalPageNameLabel: string;
|
||||
OriginalPageDescriptionLabel: string;
|
||||
OriginalCancelButtonEnabled: Boolean;
|
||||
OriginalCancelButtonModalResult: Integer;
|
||||
ctrl: TWinControl;
|
||||
ModelDirA: AnsiString;
|
||||
ModelsSize: Int64;
|
||||
begin
|
||||
if not UninstallSilent then begin
|
||||
ctrl := UninstallProgressForm.CancelButton;
|
||||
UninstallButton := TNewButton.Create(UninstallProgressForm);
|
||||
UninstallButton.Parent := UninstallProgressForm;
|
||||
UninstallButton.Left := ctrl.Left - ctrl.Width - ScaleX(10);
|
||||
UninstallButton.Top := ctrl.Top;
|
||||
UninstallButton.Width := ctrl.Width;
|
||||
UninstallButton.Height := ctrl.Height;
|
||||
UninstallButton.TabOrder := ctrl.TabOrder;
|
||||
UninstallButton.Caption := 'Uninstall';
|
||||
UninstallButton.ModalResult := mrOK;
|
||||
UninstallProgressForm.CancelButton.TabOrder := UninstallButton.TabOrder + 1;
|
||||
UninstallPage := TNewNotebookPage.Create(UninstallProgressForm);
|
||||
UninstallPage.Notebook := UninstallProgressForm.InnerNotebook;
|
||||
UninstallPage.Parent := UninstallProgressForm.InnerNotebook;
|
||||
UninstallPage.Align := alClient;
|
||||
UninstallProgressForm.InnerNotebook.ActivePage := UninstallPage;
|
||||
|
||||
ctrl := UninstallProgressForm.StatusLabel;
|
||||
with TNewStaticText.Create(UninstallProgressForm) do begin
|
||||
Parent := UninstallPage;
|
||||
Top := ctrl.Top;
|
||||
Left := ctrl.Left;
|
||||
Width := ctrl.Width;
|
||||
Height := ctrl.Height;
|
||||
AutoSize := False;
|
||||
ShowAccelChar := False;
|
||||
Caption := '';
|
||||
end;
|
||||
|
||||
if (DirExists(GetEnv('USERPROFILE') + '\.ollama\models\blobs')) then begin
|
||||
ModelsDir := GetEnv('USERPROFILE') + '\.ollama\models';
|
||||
ModelsSize := GetDirSize(ModelsDir);
|
||||
end;
|
||||
|
||||
DeleteModelsCheckbox := TNewCheckBox.Create(UninstallProgressForm);
|
||||
DeleteModelsCheckbox.Parent := UninstallPage;
|
||||
DeleteModelsCheckbox.Top := ctrl.Top + ScaleY(30);
|
||||
DeleteModelsCheckbox.Left := ctrl.Left;
|
||||
DeleteModelsCheckbox.Width := ScaleX(300);
|
||||
if ModelsSize > 1024*1024*1024 then begin
|
||||
DeleteModelsCheckbox.Caption := 'Remove models (' + IntToStr(ModelsSize/(1024*1024*1024)) + ' GB) ' + ModelsDir;
|
||||
end else if ModelsSize > 1024*1024 then begin
|
||||
DeleteModelsCheckbox.Caption := 'Remove models (' + IntToStr(ModelsSize/(1024*1024)) + ' MB) ' + ModelsDir;
|
||||
end else begin
|
||||
DeleteModelsCheckbox.Caption := 'Remove models ' + ModelsDir;
|
||||
end;
|
||||
DeleteModelsCheckbox.Checked := True;
|
||||
|
||||
OriginalPageNameLabel := UninstallProgressForm.PageNameLabel.Caption;
|
||||
OriginalPageDescriptionLabel := UninstallProgressForm.PageDescriptionLabel.Caption;
|
||||
OriginalCancelButtonEnabled := UninstallProgressForm.CancelButton.Enabled;
|
||||
OriginalCancelButtonModalResult := UninstallProgressForm.CancelButton.ModalResult;
|
||||
|
||||
UninstallProgressForm.PageNameLabel.Caption := '';
|
||||
UninstallProgressForm.PageDescriptionLabel.Caption := '';
|
||||
UninstallProgressForm.CancelButton.Enabled := True;
|
||||
UninstallProgressForm.CancelButton.ModalResult := mrCancel;
|
||||
|
||||
if UninstallProgressForm.ShowModal = mrCancel then Abort;
|
||||
|
||||
UninstallButton.Visible := False;
|
||||
UninstallProgressForm.PageNameLabel.Caption := OriginalPageNameLabel;
|
||||
UninstallProgressForm.PageDescriptionLabel.Caption := OriginalPageDescriptionLabel;
|
||||
UninstallProgressForm.CancelButton.Enabled := OriginalCancelButtonEnabled;
|
||||
UninstallProgressForm.CancelButton.ModalResult := OriginalCancelButtonModalResult;
|
||||
|
||||
UninstallProgressForm.InnerNotebook.ActivePage := UninstallProgressForm.InstallingPage;
|
||||
|
||||
if DeleteModelsCheckbox.Checked then begin
|
||||
DeleteModelsChecked:=True;
|
||||
end else begin
|
||||
DeleteModelsChecked:=False;
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep);
|
||||
begin
|
||||
if CurUninstallStep = usDone then begin
|
||||
if DeleteModelsChecked then begin
|
||||
Log('user requested model cleanup');
|
||||
if (VarIsEmpty(ModelsDir)) then begin
|
||||
Log('cleaning up home directory models')
|
||||
DelTree(GetEnv('USERPROFILE') + '\.ollama\models', True, True, True);
|
||||
end else begin
|
||||
Log('cleaning up custom directory models ' + ModelsDir)
|
||||
DelTree(ModelsDir + '\blobs', True, True, True);
|
||||
DelTree(ModelsDir + '\manifests', True, True, True);
|
||||
end;
|
||||
end else begin
|
||||
Log('user requested to preserve model dir');
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
procedure TaskKill(FileName: String);
|
||||
var
|
||||
ResultCode: Integer;
|
||||
begin
|
||||
Exec('taskkill.exe', '/f /im ' + '"' + FileName + '"', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
|
||||
end;
|
||||
|
||||
8
app/ollama_welcome.ps1
Normal file
@@ -0,0 +1,8 @@
|
||||
# TODO - consider ANSI colors and maybe ASCII art...
|
||||
write-host ""
|
||||
write-host "Welcome to Ollama!"
|
||||
write-host ""
|
||||
write-host "Run your first model:"
|
||||
write-host ""
|
||||
write-host "`tollama run llama3"
|
||||
write-host ""
|
||||
@@ -1,357 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package server
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ollama/ollama/app/logrotate"
|
||||
"github.com/ollama/ollama/app/store"
|
||||
)
|
||||
|
||||
const restartDelay = time.Second
|
||||
|
||||
// Server is a managed ollama server process
|
||||
type Server struct {
|
||||
store *store.Store
|
||||
bin string // resolved path to `ollama`
|
||||
log io.WriteCloser
|
||||
dev bool // true if running with the dev flag
|
||||
}
|
||||
|
||||
type InferenceCompute struct {
|
||||
Library string
|
||||
Variant string
|
||||
Compute string
|
||||
Driver string
|
||||
Name string
|
||||
VRAM string
|
||||
}
|
||||
|
||||
func New(s *store.Store, devMode bool) *Server {
|
||||
p := resolvePath("ollama")
|
||||
return &Server{store: s, bin: p, dev: devMode}
|
||||
}
|
||||
|
||||
func resolvePath(name string) string {
|
||||
// look in the app bundle first
|
||||
if exe, _ := os.Executable(); exe != "" {
|
||||
var dir string
|
||||
if runtime.GOOS == "windows" {
|
||||
dir = filepath.Dir(exe)
|
||||
} else {
|
||||
dir = filepath.Join(filepath.Dir(exe), "..", "Resources")
|
||||
}
|
||||
if _, err := os.Stat(filepath.Join(dir, name)); err == nil {
|
||||
return filepath.Join(dir, name)
|
||||
}
|
||||
}
|
||||
|
||||
// check the development dist path
|
||||
for _, path := range []string{
|
||||
filepath.Join("dist", runtime.GOOS, name),
|
||||
filepath.Join("dist", runtime.GOOS+"-"+runtime.GOARCH, name),
|
||||
} {
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
return path
|
||||
}
|
||||
}
|
||||
|
||||
// fallback to system path
|
||||
if p, _ := exec.LookPath(name); p != "" {
|
||||
return p
|
||||
}
|
||||
|
||||
return name
|
||||
}
|
||||
|
||||
// cleanup checks the pid file for a running ollama process
|
||||
// and shuts it down gracefully if it is running
|
||||
func cleanup() error {
|
||||
data, err := os.ReadFile(pidFile)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
defer os.Remove(pidFile)
|
||||
|
||||
pid, err := strconv.Atoi(strings.TrimSpace(string(data)))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
proc, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
ok, err := terminated(pid)
|
||||
if err != nil {
|
||||
slog.Debug("cleanup: error checking if terminated", "pid", pid, "err", err)
|
||||
}
|
||||
if ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
slog.Info("detected previous ollama process, cleaning up", "pid", pid)
|
||||
return stop(proc)
|
||||
}
|
||||
|
||||
// stop waits for a process with the provided pid to exit by polling
|
||||
// `terminated(pid)`. If the process has not exited within 5 seconds, it logs a
|
||||
// warning and kills the process.
|
||||
func stop(proc *os.Process) error {
|
||||
if proc == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := terminate(proc); err != nil {
|
||||
slog.Warn("graceful terminate failed, killing", "err", err)
|
||||
return proc.Kill()
|
||||
}
|
||||
|
||||
deadline := time.NewTimer(5 * time.Second)
|
||||
defer deadline.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-deadline.C:
|
||||
slog.Warn("timeout waiting for graceful shutdown; killing", "pid", proc.Pid)
|
||||
return proc.Kill()
|
||||
default:
|
||||
ok, err := terminated(proc.Pid)
|
||||
if err != nil {
|
||||
slog.Error("error checking if ollama process is terminated", "err", err)
|
||||
return err
|
||||
}
|
||||
if ok {
|
||||
return nil
|
||||
}
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) Run(ctx context.Context) error {
|
||||
l, err := openRotatingLog()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.log = l
|
||||
defer s.log.Close()
|
||||
|
||||
if err := cleanup(); err != nil {
|
||||
slog.Warn("failed to cleanup previous ollama process", "err", err)
|
||||
}
|
||||
|
||||
reaped := false
|
||||
for ctx.Err() == nil {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-time.After(restartDelay):
|
||||
}
|
||||
|
||||
cmd, err := s.cmd(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0o644)
|
||||
if err != nil {
|
||||
slog.Warn("failed to write pid file", "file", pidFile, "err", err)
|
||||
}
|
||||
|
||||
if err = cmd.Wait(); err != nil && !errors.Is(err, context.Canceled) {
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) && exitErr.ExitCode() == 1 && !s.dev && !reaped {
|
||||
reaped = true
|
||||
// This could be a port conflict, try to kill any existing ollama processes
|
||||
if err := reapServers(); err != nil {
|
||||
slog.Warn("failed to stop existing ollama server", "err", err)
|
||||
} else {
|
||||
slog.Debug("conflicting server stopped, waiting for port to be released")
|
||||
continue
|
||||
}
|
||||
}
|
||||
slog.Error("ollama exited", "err", err)
|
||||
}
|
||||
}
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
func (s *Server) cmd(ctx context.Context) (*exec.Cmd, error) {
|
||||
settings, err := s.store.Settings()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cmd := commandContext(ctx, s.bin, "serve")
|
||||
cmd.Stdout, cmd.Stderr = s.log, s.log
|
||||
|
||||
// Copy and mutate the environment to merge in settings the user has specified without dups
|
||||
env := map[string]string{}
|
||||
for _, kv := range os.Environ() {
|
||||
s := strings.SplitN(kv, "=", 2)
|
||||
env[s[0]] = s[1]
|
||||
}
|
||||
if settings.Expose {
|
||||
env["OLLAMA_HOST"] = "0.0.0.0"
|
||||
}
|
||||
if settings.Browser {
|
||||
env["OLLAMA_ORIGINS"] = "*"
|
||||
}
|
||||
if settings.Models != "" {
|
||||
if _, err := os.Stat(settings.Models); err == nil {
|
||||
env["OLLAMA_MODELS"] = settings.Models
|
||||
} else {
|
||||
slog.Warn("models path not accessible, clearing models setting", "path", settings.Models, "err", err)
|
||||
settings.Models = ""
|
||||
s.store.SetSettings(settings)
|
||||
}
|
||||
}
|
||||
if settings.ContextLength > 0 {
|
||||
env["OLLAMA_CONTEXT_LENGTH"] = strconv.Itoa(settings.ContextLength)
|
||||
}
|
||||
cmd.Env = []string{}
|
||||
for k, v := range env {
|
||||
cmd.Env = append(cmd.Env, k+"="+v)
|
||||
}
|
||||
|
||||
cmd.Cancel = func() error {
|
||||
if cmd.Process == nil {
|
||||
return nil
|
||||
}
|
||||
return stop(cmd.Process)
|
||||
}
|
||||
|
||||
return cmd, nil
|
||||
}
|
||||
|
||||
func openRotatingLog() (io.WriteCloser, error) {
|
||||
// TODO consider rotation based on size or time, not just every server invocation
|
||||
dir := filepath.Dir(serverLogPath)
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil {
|
||||
return nil, fmt.Errorf("create log directory: %w", err)
|
||||
}
|
||||
|
||||
logrotate.Rotate(serverLogPath)
|
||||
f, err := os.OpenFile(serverLogPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("open log file: %w", err)
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// Attempt to retrieve inference compute information from the server
|
||||
// log. Set ctx to timeout to control how long to wait for the logs to appear
|
||||
func GetInferenceComputer(ctx context.Context) ([]InferenceCompute, error) {
|
||||
inference := []InferenceCompute{}
|
||||
marker := regexp.MustCompile(`inference compute.*library=`)
|
||||
q := `inference compute.*%s=["]([^"]*)["]`
|
||||
nq := `inference compute.*%s=(\S+)\s`
|
||||
type regex struct {
|
||||
q *regexp.Regexp
|
||||
nq *regexp.Regexp
|
||||
}
|
||||
regexes := map[string]regex{
|
||||
"library": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "library")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "library")),
|
||||
},
|
||||
"variant": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "variant")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "variant")),
|
||||
},
|
||||
"compute": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "compute")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "compute")),
|
||||
},
|
||||
"driver": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "driver")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "driver")),
|
||||
},
|
||||
"name": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "name")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "name")),
|
||||
},
|
||||
"total": {
|
||||
q: regexp.MustCompile(fmt.Sprintf(q, "total")),
|
||||
nq: regexp.MustCompile(fmt.Sprintf(nq, "total")),
|
||||
},
|
||||
}
|
||||
get := func(field, line string) string {
|
||||
regex, ok := regexes[field]
|
||||
if !ok {
|
||||
slog.Warn("missing field", "field", field)
|
||||
return ""
|
||||
}
|
||||
match := regex.q.FindStringSubmatch(line)
|
||||
|
||||
if len(match) > 1 {
|
||||
return match[1]
|
||||
}
|
||||
match = regex.nq.FindStringSubmatch(line)
|
||||
if len(match) > 1 {
|
||||
return match[1]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, fmt.Errorf("timeout scanning server log for inference compute details")
|
||||
default:
|
||||
}
|
||||
file, err := os.Open(serverLogPath)
|
||||
if err != nil {
|
||||
slog.Debug("failed to open server log", "log", serverLogPath, "error", err)
|
||||
time.Sleep(time.Second)
|
||||
continue
|
||||
}
|
||||
defer file.Close()
|
||||
scanner := bufio.NewScanner(file)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
match := marker.FindStringSubmatch(line)
|
||||
if len(match) > 0 {
|
||||
ic := InferenceCompute{
|
||||
Library: get("library", line),
|
||||
Variant: get("variant", line),
|
||||
Compute: get("compute", line),
|
||||
Driver: get("driver", line),
|
||||
Name: get("name", line),
|
||||
VRAM: get("total", line),
|
||||
}
|
||||
|
||||
slog.Info("Matched", "inference compute", ic)
|
||||
inference = append(inference, ic)
|
||||
} else {
|
||||
// Break out on first non matching line after we start matching
|
||||
if len(inference) > 0 {
|
||||
return inference, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
@@ -1,249 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package server
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/ollama/ollama/app/store"
|
||||
)
|
||||
|
||||
func TestNew(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
st := &store.Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer st.Close() // Ensure database is closed before cleanup
|
||||
s := New(st, false)
|
||||
|
||||
if s == nil {
|
||||
t.Fatal("expected non-nil server")
|
||||
}
|
||||
|
||||
if s.bin == "" {
|
||||
t.Error("expected non-empty bin path")
|
||||
}
|
||||
}
|
||||
|
||||
func TestServerCmd(t *testing.T) {
|
||||
os.Unsetenv("OLLAMA_HOST")
|
||||
os.Unsetenv("OLLAMA_ORIGINS")
|
||||
os.Unsetenv("OLLAMA_MODELS")
|
||||
var defaultModels string
|
||||
home, err := os.UserHomeDir()
|
||||
if err == nil {
|
||||
defaultModels = filepath.Join(home, ".ollama", "models")
|
||||
os.MkdirAll(defaultModels, 0o755)
|
||||
}
|
||||
|
||||
tmpModels := t.TempDir()
|
||||
tests := []struct {
|
||||
name string
|
||||
settings store.Settings
|
||||
want []string
|
||||
dont []string
|
||||
}{
|
||||
{
|
||||
name: "default",
|
||||
settings: store.Settings{},
|
||||
want: []string{"OLLAMA_MODELS=" + defaultModels},
|
||||
dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="},
|
||||
},
|
||||
{
|
||||
name: "expose",
|
||||
settings: store.Settings{Expose: true},
|
||||
want: []string{"OLLAMA_HOST=0.0.0.0", "OLLAMA_MODELS=" + defaultModels},
|
||||
dont: []string{"OLLAMA_ORIGINS="},
|
||||
},
|
||||
{
|
||||
name: "browser",
|
||||
settings: store.Settings{Browser: true},
|
||||
want: []string{"OLLAMA_ORIGINS=*", "OLLAMA_MODELS=" + defaultModels},
|
||||
dont: []string{"OLLAMA_HOST="},
|
||||
},
|
||||
{
|
||||
name: "models",
|
||||
settings: store.Settings{Models: tmpModels},
|
||||
want: []string{"OLLAMA_MODELS=" + tmpModels},
|
||||
dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="},
|
||||
},
|
||||
{
|
||||
name: "inaccessible_models",
|
||||
settings: store.Settings{Models: "/nonexistent/external/drive/models"},
|
||||
want: []string{},
|
||||
dont: []string{"OLLAMA_MODELS="},
|
||||
},
|
||||
{
|
||||
name: "all",
|
||||
settings: store.Settings{
|
||||
Expose: true,
|
||||
Browser: true,
|
||||
Models: tmpModels,
|
||||
},
|
||||
want: []string{
|
||||
"OLLAMA_HOST=0.0.0.0",
|
||||
"OLLAMA_ORIGINS=*",
|
||||
"OLLAMA_MODELS=" + tmpModels,
|
||||
},
|
||||
dont: []string{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
st := &store.Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer st.Close() // Ensure database is closed before cleanup
|
||||
st.SetSettings(tt.settings)
|
||||
s := &Server{
|
||||
store: st,
|
||||
}
|
||||
|
||||
cmd, err := s.cmd(t.Context())
|
||||
if err != nil {
|
||||
t.Fatalf("s.cmd() error = %v", err)
|
||||
}
|
||||
|
||||
for _, want := range tt.want {
|
||||
found := false
|
||||
for _, env := range cmd.Env {
|
||||
if strings.Contains(env, want) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("expected environment variable containing %s", want)
|
||||
}
|
||||
}
|
||||
|
||||
for _, dont := range tt.dont {
|
||||
for _, env := range cmd.Env {
|
||||
if strings.Contains(env, dont) {
|
||||
t.Errorf("unexpected environment variable: %s", env)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cmd.Cancel == nil {
|
||||
t.Error("expected non-nil cancel function")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetInferenceComputer(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
log string
|
||||
exp []InferenceCompute
|
||||
}{
|
||||
{
|
||||
name: "metal",
|
||||
log: `time=2025-06-30T09:23:07.374-07:00 level=DEBUG source=sched.go:108 msg="starting llm scheduler"
|
||||
time=2025-06-30T09:23:07.416-07:00 level=INFO source=types.go:130 msg="inference compute" id=0 library=metal variant="" compute="" driver=0.0 name="" total="96.0 GiB" available="96.0 GiB"
|
||||
time=2025-06-30T09:25:56.197-07:00 level=DEBUG source=ggml.go:155 msg="key not found" key=general.alignment default=32
|
||||
`,
|
||||
exp: []InferenceCompute{{
|
||||
Library: "metal",
|
||||
Driver: "0.0",
|
||||
VRAM: "96.0 GiB",
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "cpu",
|
||||
log: `time=2025-07-01T17:59:51.470Z level=INFO source=gpu.go:377 msg="no compatible GPUs were discovered"
|
||||
time=2025-07-01T17:59:51.470Z level=INFO source=types.go:130 msg="inference compute" id=0 library=cpu variant="" compute="" driver=0.0 name="" total="31.3 GiB" available="30.4 GiB"
|
||||
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
|
||||
`,
|
||||
exp: []InferenceCompute{{
|
||||
Library: "cpu",
|
||||
Driver: "0.0",
|
||||
VRAM: "31.3 GiB",
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "cuda1",
|
||||
log: `time=2025-07-01T19:33:43.162Z level=DEBUG source=amd_linux.go:419 msg="amdgpu driver not detected /sys/module/amdgpu"
|
||||
releasing cuda driver library
|
||||
time=2025-07-01T19:33:43.162Z level=INFO source=types.go:130 msg="inference compute" id=GPU-452cac9f-6960-839c-4fb3-0cec83699196 library=cuda variant=v12 compute=6.1 driver=12.7 name="NVIDIA GeForce GT 1030" total="3.9 GiB" available="3.9 GiB"
|
||||
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
|
||||
`,
|
||||
exp: []InferenceCompute{{
|
||||
Library: "cuda",
|
||||
Variant: "v12",
|
||||
Compute: "6.1",
|
||||
Driver: "12.7",
|
||||
Name: "NVIDIA GeForce GT 1030",
|
||||
VRAM: "3.9 GiB",
|
||||
}},
|
||||
},
|
||||
{
|
||||
name: "frank",
|
||||
log: `time=2025-07-01T19:36:13.315Z level=INFO source=amd_linux.go:386 msg="amdgpu is supported" gpu=GPU-9abb57639fa80c50 gpu_type=gfx1030
|
||||
releasing cuda driver library
|
||||
time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-d6de3398-9932-6902-11ec-fee8e424c8a2 library=cuda variant=v12 compute=7.5 driver=12.8 name="NVIDIA GeForce RTX 2080 Ti" total="10.6 GiB" available="10.4 GiB"
|
||||
time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-9abb57639fa80c50 library=rocm variant="" compute=gfx1030 driver=6.3 name=1002:73bf total="16.0 GiB" available="1.3 GiB"
|
||||
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
|
||||
`,
|
||||
exp: []InferenceCompute{
|
||||
{
|
||||
Library: "cuda",
|
||||
Variant: "v12",
|
||||
Compute: "7.5",
|
||||
Driver: "12.8",
|
||||
Name: "NVIDIA GeForce RTX 2080 Ti",
|
||||
VRAM: "10.6 GiB",
|
||||
},
|
||||
{
|
||||
Library: "rocm",
|
||||
Compute: "gfx1030",
|
||||
Driver: "6.3",
|
||||
Name: "1002:73bf",
|
||||
VRAM: "16.0 GiB",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
serverLogPath = filepath.Join(tmpDir, "server.log")
|
||||
err := os.WriteFile(serverLogPath, []byte(tt.log), 0o644)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to write log file %s: %s", serverLogPath, err)
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond)
|
||||
defer cancel()
|
||||
ics, err := GetInferenceComputer(ctx)
|
||||
if err != nil {
|
||||
t.Fatalf(" failed to get inference compute: %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(ics, tt.exp) {
|
||||
t.Fatalf("got:\n%#v\nwant:\n%#v", ics, tt.exp)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetInferenceComputerTimeout(t *testing.T) {
|
||||
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond)
|
||||
defer cancel()
|
||||
tmpDir := t.TempDir()
|
||||
serverLogPath = filepath.Join(tmpDir, "server.log")
|
||||
err := os.WriteFile(serverLogPath, []byte("foo\nbar\nbaz\n"), 0o644)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to write log file %s: %s", serverLogPath, err)
|
||||
}
|
||||
_, err = GetInferenceComputer(ctx)
|
||||
if err == nil {
|
||||
t.Fatal("expected timeout")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "timeout") {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
}
|
||||
@@ -1,104 +0,0 @@
|
||||
//go:build darwin
|
||||
|
||||
package server
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
var (
|
||||
pidFile = filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "ollama.pid")
|
||||
serverLogPath = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "server.log")
|
||||
)
|
||||
|
||||
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
|
||||
return exec.CommandContext(ctx, name, arg...)
|
||||
}
|
||||
|
||||
func terminate(proc *os.Process) error {
|
||||
return proc.Signal(os.Interrupt)
|
||||
}
|
||||
|
||||
func terminated(pid int) (bool, error) {
|
||||
proc, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to find process: %v", err)
|
||||
}
|
||||
|
||||
err = proc.Signal(syscall.Signal(0))
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, fmt.Errorf("error signaling process: %v", err)
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// reapServers kills all ollama processes except our own
|
||||
func reapServers() error {
|
||||
// Get our own PID to avoid killing ourselves
|
||||
currentPID := os.Getpid()
|
||||
|
||||
// Use pkill to kill ollama processes
|
||||
// -x matches the whole command name exactly
|
||||
// We'll get the list first, then kill selectively
|
||||
cmd := exec.Command("pgrep", "-x", "ollama")
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
// No ollama processes found
|
||||
slog.Debug("no ollama processes found")
|
||||
return nil //nolint:nilerr
|
||||
}
|
||||
|
||||
pidsStr := strings.TrimSpace(string(output))
|
||||
if pidsStr == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
pids := strings.Split(pidsStr, "\n")
|
||||
for _, pidStr := range pids {
|
||||
pidStr = strings.TrimSpace(pidStr)
|
||||
if pidStr == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
pid, err := strconv.Atoi(pidStr)
|
||||
if err != nil {
|
||||
slog.Debug("failed to parse PID", "pidStr", pidStr, "err", err)
|
||||
continue
|
||||
}
|
||||
if pid == currentPID {
|
||||
continue
|
||||
}
|
||||
|
||||
proc, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
slog.Debug("failed to find process", "pid", pid, "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if err := proc.Signal(syscall.SIGTERM); err != nil {
|
||||
// Try SIGKILL if SIGTERM fails
|
||||
if err := proc.Signal(syscall.SIGKILL); err != nil {
|
||||
slog.Warn("failed to stop external ollama process", "pid", pid, "err", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
slog.Info("stopped external ollama process", "pid", pid)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,149 +0,0 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
var (
|
||||
pidFile = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "ollama.pid")
|
||||
serverLogPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "server.log")
|
||||
)
|
||||
|
||||
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
|
||||
cmd := exec.CommandContext(ctx, name, arg...)
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{
|
||||
HideWindow: true,
|
||||
CreationFlags: windows.CREATE_NEW_PROCESS_GROUP,
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func terminate(proc *os.Process) error {
|
||||
dll, err := windows.LoadDLL("kernel32.dll")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dll.Release()
|
||||
|
||||
pid := proc.Pid
|
||||
|
||||
f, err := dll.FindProc("AttachConsole")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err := f.Call(uintptr(pid))
|
||||
if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED {
|
||||
return err
|
||||
}
|
||||
|
||||
f, err = dll.FindProc("SetConsoleCtrlHandler")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(0, 1)
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
f, err = dll.FindProc("GenerateConsoleCtrlEvent")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid))
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid))
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
const STILL_ACTIVE = 259
|
||||
|
||||
func terminated(pid int) (bool, error) {
|
||||
hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid))
|
||||
if err != nil {
|
||||
if errno, ok := err.(windows.Errno); ok && errno == windows.ERROR_INVALID_PARAMETER {
|
||||
return true, nil
|
||||
}
|
||||
return false, fmt.Errorf("failed to open process: %v", err)
|
||||
}
|
||||
defer windows.CloseHandle(hProcess)
|
||||
|
||||
var exitCode uint32
|
||||
err = windows.GetExitCodeProcess(hProcess, &exitCode)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get exit code: %v", err)
|
||||
}
|
||||
|
||||
if exitCode == STILL_ACTIVE {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// reapServers kills all ollama processes except our own
|
||||
func reapServers() error {
|
||||
// Get current process ID to avoid killing ourselves
|
||||
currentPID := os.Getpid()
|
||||
|
||||
// Use wmic to find ollama processes
|
||||
cmd := exec.Command("wmic", "process", "where", "name='ollama.exe'", "get", "ProcessId")
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: true}
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
// No ollama processes found
|
||||
slog.Debug("no ollama processes found")
|
||||
return nil //nolint:nilerr
|
||||
}
|
||||
|
||||
lines := strings.Split(string(output), "\n")
|
||||
var pids []string
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" || line == "ProcessId" {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, err := strconv.Atoi(line); err == nil {
|
||||
pids = append(pids, line)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pidStr := range pids {
|
||||
pid, err := strconv.Atoi(pidStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if pid == currentPID {
|
||||
continue
|
||||
}
|
||||
|
||||
cmd := exec.Command("taskkill", "/F", "/PID", pidStr)
|
||||
if err := cmd.Run(); err != nil {
|
||||
slog.Warn("failed to kill ollama process", "pid", pid, "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,407 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package store
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
func TestSchemaMigrations(t *testing.T) {
|
||||
t.Run("schema comparison after migration", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
migratedDBPath := filepath.Join(tmpDir, "migrated.db")
|
||||
migratedDB := loadV2Schema(t, migratedDBPath)
|
||||
defer migratedDB.Close()
|
||||
|
||||
if err := migratedDB.migrate(); err != nil {
|
||||
t.Fatalf("migration failed: %v", err)
|
||||
}
|
||||
|
||||
// Create fresh database with current schema
|
||||
freshDBPath := filepath.Join(tmpDir, "fresh.db")
|
||||
freshDB, err := newDatabase(freshDBPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create fresh database: %v", err)
|
||||
}
|
||||
defer freshDB.Close()
|
||||
|
||||
// Extract tables and indexes from both databases, directly comparing their schemas won't work due to ordering
|
||||
migratedSchema := schemaMap(migratedDB)
|
||||
freshSchema := schemaMap(freshDB)
|
||||
|
||||
if !cmp.Equal(migratedSchema, freshSchema) {
|
||||
t.Errorf("Schema difference found:\n%s", cmp.Diff(freshSchema, migratedSchema))
|
||||
}
|
||||
|
||||
// Verify both databases have the same final schema version
|
||||
migratedVersion, _ := migratedDB.getSchemaVersion()
|
||||
freshVersion, _ := freshDB.getSchemaVersion()
|
||||
if migratedVersion != freshVersion {
|
||||
t.Errorf("schema version mismatch: migrated=%d, fresh=%d", migratedVersion, freshVersion)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("idempotent migrations", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
db := loadV2Schema(t, dbPath)
|
||||
defer db.Close()
|
||||
|
||||
// Run migration twice
|
||||
if err := db.migrate(); err != nil {
|
||||
t.Fatalf("first migration failed: %v", err)
|
||||
}
|
||||
|
||||
if err := db.migrate(); err != nil {
|
||||
t.Fatalf("second migration failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify schema version is still correct
|
||||
version, err := db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
if version != currentSchemaVersion {
|
||||
t.Errorf("expected schema version %d after double migration, got %d", currentSchemaVersion, version)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("init database has correct schema version", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Get the schema version from the newly initialized database
|
||||
version, err := db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
|
||||
// Verify it matches the currentSchemaVersion constant
|
||||
if version != currentSchemaVersion {
|
||||
t.Errorf("expected schema version %d in initialized database, got %d", currentSchemaVersion, version)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestChatDeletionWithCascade(t *testing.T) {
|
||||
t.Run("chat deletion cascades to related messages", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Create test chat
|
||||
testChatID := "test-chat-cascade-123"
|
||||
testChat := Chat{
|
||||
ID: testChatID,
|
||||
Title: "Test Chat for Cascade Delete",
|
||||
CreatedAt: time.Now(),
|
||||
Messages: []Message{
|
||||
{
|
||||
Role: "user",
|
||||
Content: "Hello, this is a test message",
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
{
|
||||
Role: "assistant",
|
||||
Content: "Hi there! This is a response.",
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Save the chat with messages
|
||||
if err := db.saveChat(testChat); err != nil {
|
||||
t.Fatalf("failed to save test chat: %v", err)
|
||||
}
|
||||
|
||||
// Verify chat and messages exist
|
||||
chatCount := countRows(t, db, "chats")
|
||||
messageCount := countRows(t, db, "messages")
|
||||
|
||||
if chatCount != 1 {
|
||||
t.Errorf("expected 1 chat, got %d", chatCount)
|
||||
}
|
||||
if messageCount != 2 {
|
||||
t.Errorf("expected 2 messages, got %d", messageCount)
|
||||
}
|
||||
|
||||
// Verify specific chat exists
|
||||
var exists bool
|
||||
err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check chat existence: %v", err)
|
||||
}
|
||||
if !exists {
|
||||
t.Error("test chat should exist before deletion")
|
||||
}
|
||||
|
||||
// Verify messages exist for this chat
|
||||
messageCountForChat := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
|
||||
if messageCountForChat != 2 {
|
||||
t.Errorf("expected 2 messages for test chat, got %d", messageCountForChat)
|
||||
}
|
||||
|
||||
// Delete the chat
|
||||
if err := db.deleteChat(testChatID); err != nil {
|
||||
t.Fatalf("failed to delete chat: %v", err)
|
||||
}
|
||||
|
||||
// Verify chat is deleted
|
||||
chatCountAfter := countRows(t, db, "chats")
|
||||
if chatCountAfter != 0 {
|
||||
t.Errorf("expected 0 chats after deletion, got %d", chatCountAfter)
|
||||
}
|
||||
|
||||
// Verify messages are CASCADE deleted
|
||||
messageCountAfter := countRows(t, db, "messages")
|
||||
if messageCountAfter != 0 {
|
||||
t.Errorf("expected 0 messages after CASCADE deletion, got %d", messageCountAfter)
|
||||
}
|
||||
|
||||
// Verify specific chat no longer exists
|
||||
err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check chat existence after deletion: %v", err)
|
||||
}
|
||||
if exists {
|
||||
t.Error("test chat should not exist after deletion")
|
||||
}
|
||||
|
||||
// Verify no orphaned messages remain
|
||||
orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
|
||||
if orphanedCount != 0 {
|
||||
t.Errorf("expected 0 orphaned messages, got %d", orphanedCount)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("foreign keys are enabled", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Verify foreign keys are enabled
|
||||
var foreignKeysEnabled int
|
||||
err = db.conn.QueryRow("PRAGMA foreign_keys").Scan(&foreignKeysEnabled)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check foreign keys: %v", err)
|
||||
}
|
||||
if foreignKeysEnabled != 1 {
|
||||
t.Errorf("expected foreign keys to be enabled (1), got %d", foreignKeysEnabled)
|
||||
}
|
||||
})
|
||||
|
||||
// This test is only relevant for v8 migrations, but we keep it here for now
|
||||
// since it's a useful test to ensure that we don't introduce any new orphaned data
|
||||
t.Run("cleanup orphaned data", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// First disable foreign keys to simulate the bug from ollama/ollama#11785
|
||||
_, err = db.conn.Exec("PRAGMA foreign_keys = OFF")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to disable foreign keys: %v", err)
|
||||
}
|
||||
|
||||
// Create a chat and message
|
||||
testChatID := "orphaned-test-chat"
|
||||
testMessageID := int64(999)
|
||||
|
||||
_, err = db.conn.Exec("INSERT INTO chats (id, title) VALUES (?, ?)", testChatID, "Orphaned Test Chat")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to insert test chat: %v", err)
|
||||
}
|
||||
|
||||
_, err = db.conn.Exec("INSERT INTO messages (id, chat_id, role, content) VALUES (?, ?, ?, ?)",
|
||||
testMessageID, testChatID, "user", "test message")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to insert test message: %v", err)
|
||||
}
|
||||
|
||||
// Delete chat but keep message (simulating the bug from ollama/ollama#11785)
|
||||
_, err = db.conn.Exec("DELETE FROM chats WHERE id = ?", testChatID)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to delete chat: %v", err)
|
||||
}
|
||||
|
||||
// Verify we have orphaned message
|
||||
orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
|
||||
if orphanedCount != 1 {
|
||||
t.Errorf("expected 1 orphaned message, got %d", orphanedCount)
|
||||
}
|
||||
|
||||
// Run cleanup
|
||||
if err := db.cleanupOrphanedData(); err != nil {
|
||||
t.Fatalf("failed to cleanup orphaned data: %v", err)
|
||||
}
|
||||
|
||||
// Verify orphaned message is gone
|
||||
orphanedCountAfter := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
|
||||
if orphanedCountAfter != 0 {
|
||||
t.Errorf("expected 0 orphaned messages after cleanup, got %d", orphanedCountAfter)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func countRows(t *testing.T, db *database, table string) int {
|
||||
t.Helper()
|
||||
var count int
|
||||
err := db.conn.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM %s", table)).Scan(&count)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to count rows in %s: %v", table, err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func countRowsWithCondition(t *testing.T, db *database, table, condition string, args ...interface{}) int {
|
||||
t.Helper()
|
||||
var count int
|
||||
query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", table, condition)
|
||||
err := db.conn.QueryRow(query, args...).Scan(&count)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to count rows with condition: %v", err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Test helpers for schema migration testing
|
||||
|
||||
// schemaMap returns both tables/columns and indexes (ignoring order)
|
||||
func schemaMap(db *database) map[string]interface{} {
|
||||
result := make(map[string]any)
|
||||
|
||||
result["tables"] = columnMap(db)
|
||||
result["indexes"] = indexMap(db)
|
||||
|
||||
return result
|
||||
}
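// Illustrative sketch (not part of the original file): schemaMap returns a
// nested map roughly of the form
//
//	map[string]any{
//		"tables":  map[string][]string{"chats": {...}, "messages": {...}},
//		"indexes": map[string]string{"idx_messages_chat_id": "CREATE INDEX ..."},
//	}
//
// so cmp.Diff can compare the migrated and fresh databases without being
// sensitive to column or index ordering.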
|
||||
|
||||
// columnMap returns a map of table names to their column sets (ignoring order)
|
||||
func columnMap(db *database) map[string][]string {
|
||||
result := make(map[string][]string)
|
||||
|
||||
// Get all table names
|
||||
tableQuery := `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`
|
||||
rows, _ := db.conn.Query(tableQuery)
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var tableName string
|
||||
rows.Scan(&tableName)
|
||||
|
||||
// Get columns for this table
|
||||
colQuery := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
|
||||
colRows, _ := db.conn.Query(colQuery)
|
||||
|
||||
var columns []string
|
||||
for colRows.Next() {
|
||||
var cid int
|
||||
var name, dataType sql.NullString
|
||||
var notNull, primaryKey int
|
||||
var defaultValue sql.NullString
|
||||
|
||||
colRows.Scan(&cid, &name, &dataType, ¬Null, &defaultValue, &primaryKey)
|
||||
|
||||
// Create a normalized column description
|
||||
colDesc := fmt.Sprintf("%s %s", name.String, dataType.String)
|
||||
if notNull == 1 {
|
||||
colDesc += " NOT NULL"
|
||||
}
|
||||
if defaultValue.Valid && defaultValue.String != "" {
|
||||
// Skip DEFAULT for schema_version as it doesn't get updated during migrations
|
||||
if name.String != "schema_version" {
|
||||
colDesc += " DEFAULT " + defaultValue.String
|
||||
}
|
||||
}
|
||||
if primaryKey == 1 {
|
||||
colDesc += " PRIMARY KEY"
|
||||
}
|
||||
|
||||
columns = append(columns, colDesc)
|
||||
}
|
||||
colRows.Close()
|
||||
|
||||
// Sort columns to ignore order differences
|
||||
sort.Strings(columns)
|
||||
result[tableName] = columns
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// indexMap returns a map of index names to their definitions
|
||||
func indexMap(db *database) map[string]string {
|
||||
result := make(map[string]string)
|
||||
|
||||
// Get all indexes (excluding auto-created primary key indexes)
|
||||
indexQuery := `SELECT name, sql FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%' AND sql IS NOT NULL ORDER BY name`
|
||||
rows, _ := db.conn.Query(indexQuery)
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var name, sql string
|
||||
rows.Scan(&name, &sql)
|
||||
|
||||
// Normalize the SQL by removing extra whitespace
|
||||
sql = strings.Join(strings.Fields(sql), " ")
|
||||
result[name] = sql
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// loadV2Schema loads the version 2 schema from testdata/schema.sql
|
||||
func loadV2Schema(t *testing.T, dbPath string) *database {
|
||||
t.Helper()
|
||||
|
||||
// Read the v2 schema file
|
||||
schemaFile := filepath.Join("testdata", "schema.sql")
|
||||
schemaSQL, err := os.ReadFile(schemaFile)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read schema file: %v", err)
|
||||
}
|
||||
|
||||
// Open database connection
|
||||
conn, err := sql.Open("sqlite3", dbPath+"?_foreign_keys=on&_journal_mode=WAL&_busy_timeout=5000&_txlock=immediate")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to open database: %v", err)
|
||||
}
|
||||
|
||||
// Execute the v2 schema
|
||||
_, err = conn.Exec(string(schemaSQL))
|
||||
if err != nil {
|
||||
conn.Close()
|
||||
t.Fatalf("failed to execute v1 schema: %v", err)
|
||||
}
|
||||
|
||||
return &database{conn: conn}
|
||||
}
|
||||
@@ -1,128 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package store
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Image struct {
|
||||
Filename string `json:"filename"`
|
||||
Path string `json:"path"`
|
||||
Size int64 `json:"size,omitempty"`
|
||||
MimeType string `json:"mime_type,omitempty"`
|
||||
}
|
||||
|
||||
// Bytes loads the image data from disk for this Image reference
|
||||
func (i *Image) Bytes() ([]byte, error) {
|
||||
return ImgBytes(i.Path)
|
||||
}
|
||||
|
||||
// ImgBytes reads image data from the specified file path
|
||||
func ImgBytes(path string) ([]byte, error) {
|
||||
if path == "" {
|
||||
return nil, fmt.Errorf("empty image path")
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("read image file %s: %w", path, err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// ImgDir returns the base directory used to store chat images; each chat gets its own subdirectory beneath it
|
||||
func (s *Store) ImgDir() string {
|
||||
dbPath := s.DBPath
|
||||
if dbPath == "" {
|
||||
dbPath = defaultDBPath
|
||||
}
|
||||
storeDir := filepath.Dir(dbPath)
|
||||
return filepath.Join(storeDir, "cache", "images")
|
||||
}
|
||||
|
||||
// ImgToFile saves image data to disk and returns an Image reference
|
||||
func (s *Store) ImgToFile(chatID string, imageBytes []byte, filename, mimeType string) (Image, error) {
|
||||
baseImageDir := s.ImgDir()
|
||||
if err := os.MkdirAll(baseImageDir, 0o755); err != nil {
|
||||
return Image{}, fmt.Errorf("create base image directory: %w", err)
|
||||
}
|
||||
|
||||
// Root prevents path traversal issues
|
||||
root, err := os.OpenRoot(baseImageDir)
|
||||
if err != nil {
|
||||
return Image{}, fmt.Errorf("open image root directory: %w", err)
|
||||
}
|
||||
defer root.Close()
|
||||
|
||||
// Create chat-specific subdirectory within the root
|
||||
chatDir := sanitize(chatID)
|
||||
if err := root.Mkdir(chatDir, 0o755); err != nil && !os.IsExist(err) {
|
||||
return Image{}, fmt.Errorf("create chat directory: %w", err)
|
||||
}
|
||||
|
||||
// Generate a unique filename to avoid conflicts
|
||||
// Use hash of content + original filename for uniqueness
|
||||
hash := sha256.Sum256(imageBytes)
|
||||
hashStr := hex.EncodeToString(hash[:])[:16] // Use first 16 chars of hash
|
||||
|
||||
// Extract file extension from original filename or mime type
|
||||
ext := filepath.Ext(filename)
|
||||
if ext == "" {
|
||||
switch mimeType {
|
||||
case "image/jpeg":
|
||||
ext = ".jpg"
|
||||
case "image/png":
|
||||
ext = ".png"
|
||||
case "image/webp":
|
||||
ext = ".webp"
|
||||
default:
|
||||
ext = ".img"
|
||||
}
|
||||
}
|
||||
|
||||
// Create unique filename: hash + original name + extension
|
||||
baseFilename := sanitize(strings.TrimSuffix(filename, ext))
|
||||
uniqueFilename := fmt.Sprintf("%s_%s%s", hashStr, baseFilename, ext)
|
||||
relativePath := filepath.Join(chatDir, uniqueFilename)
|
||||
file, err := root.Create(relativePath)
|
||||
if err != nil {
|
||||
return Image{}, fmt.Errorf("create image file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
if _, err := file.Write(imageBytes); err != nil {
|
||||
return Image{}, fmt.Errorf("write image data: %w", err)
|
||||
}
|
||||
|
||||
return Image{
|
||||
Filename: uniqueFilename,
|
||||
Path: filepath.Join(baseImageDir, relativePath),
|
||||
Size: int64(len(imageBytes)),
|
||||
MimeType: mimeType,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// sanitize removes unsafe characters from filenames
|
||||
func sanitize(filename string) string {
|
||||
// Convert to safe characters only
|
||||
safe := strings.Map(func(r rune) rune {
|
||||
if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' {
|
||||
return r
|
||||
}
|
||||
return '_'
|
||||
}, filename)
|
||||
|
||||
// Clean up and validate
|
||||
safe = strings.Trim(safe, "_")
|
||||
if safe == "" {
|
||||
return "image"
|
||||
}
|
||||
return safe
|
||||
}
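// Minimal usage sketch (not part of the original file; the chat ID, filename
// and bytes are hypothetical): an image is written once via ImgToFile and
// re-read from disk on demand via Bytes.
func exampleSaveAndLoadImage(s *Store, pngBytes []byte) error {
	// Stored under <store dir>/cache/images/<sanitized chat ID>/<hash>_<name><ext>
	img, err := s.ImgToFile("chat-123", pngBytes, "photo.png", "image/png")
	if err != nil {
		return err
	}
	// Image holds only metadata; the data itself is re-read from img.Path.
	_, err = img.Bytes()
	return err
}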
|
||||
@@ -1,231 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package store
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestConfigMigration(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
// Create a legacy config.json
|
||||
legacyConfig := legacyData{
|
||||
ID: "test-device-id-12345",
|
||||
FirstTimeRun: true, // In the old system, true meant "has completed first run"
|
||||
}
|
||||
|
||||
configData, err := json.MarshalIndent(legacyConfig, "", " ")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
configPath := filepath.Join(tmpDir, "config.json")
|
||||
if err := os.WriteFile(configPath, configData, 0o644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Override the legacy config path for testing
|
||||
oldLegacyConfigPath := legacyConfigPath
|
||||
legacyConfigPath = configPath
|
||||
defer func() { legacyConfigPath = oldLegacyConfigPath }()
|
||||
|
||||
// Create store with database in same directory
|
||||
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer s.Close()
|
||||
|
||||
// First access should trigger migration
|
||||
id, err := s.ID()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get ID: %v", err)
|
||||
}
|
||||
|
||||
if id != "test-device-id-12345" {
|
||||
t.Errorf("expected migrated ID 'test-device-id-12345', got '%s'", id)
|
||||
}
|
||||
|
||||
// Check HasCompletedFirstRun
|
||||
hasCompleted, err := s.HasCompletedFirstRun()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get has completed first run: %v", err)
|
||||
}
|
||||
|
||||
if !hasCompleted {
|
||||
t.Error("expected has completed first run to be true after migration")
|
||||
}
|
||||
|
||||
// Verify migration is marked as complete
|
||||
migrated, err := s.db.isConfigMigrated()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check migration status: %v", err)
|
||||
}
|
||||
|
||||
if !migrated {
|
||||
t.Error("expected config to be marked as migrated")
|
||||
}
|
||||
|
||||
// Create a new store instance to verify migration doesn't run again
|
||||
s2 := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer s2.Close()
|
||||
|
||||
// Delete the config file to ensure we're not reading from it
|
||||
os.Remove(configPath)
|
||||
|
||||
// Verify data is still there
|
||||
id2, err := s2.ID()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get ID from second store: %v", err)
|
||||
}
|
||||
|
||||
if id2 != "test-device-id-12345" {
|
||||
t.Errorf("expected persisted ID 'test-device-id-12345', got '%s'", id2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNoConfigToMigrate(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
// Override the legacy config path for testing
|
||||
oldLegacyConfigPath := legacyConfigPath
|
||||
legacyConfigPath = filepath.Join(tmpDir, "config.json")
|
||||
defer func() { legacyConfigPath = oldLegacyConfigPath }()
|
||||
|
||||
// Create store without any config.json
|
||||
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer s.Close()
|
||||
|
||||
// Should generate a new ID
|
||||
id, err := s.ID()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get ID: %v", err)
|
||||
}
|
||||
|
||||
if id == "" {
|
||||
t.Error("expected auto-generated ID, got empty string")
|
||||
}
|
||||
|
||||
// HasCompletedFirstRun should be false (default)
|
||||
hasCompleted, err := s.HasCompletedFirstRun()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get has completed first run: %v", err)
|
||||
}
|
||||
|
||||
if hasCompleted {
|
||||
t.Error("expected has completed first run to be false by default")
|
||||
}
|
||||
|
||||
// Migration should still be marked as complete
|
||||
migrated, err := s.db.isConfigMigrated()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check migration status: %v", err)
|
||||
}
|
||||
|
||||
if !migrated {
|
||||
t.Error("expected config to be marked as migrated even with no config.json")
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
v1Schema = `
|
||||
CREATE TABLE IF NOT EXISTS settings (
|
||||
id INTEGER PRIMARY KEY CHECK (id = 1),
|
||||
device_id TEXT NOT NULL DEFAULT '',
|
||||
has_completed_first_run BOOLEAN NOT NULL DEFAULT 0,
|
||||
expose BOOLEAN NOT NULL DEFAULT 0,
|
||||
browser BOOLEAN NOT NULL DEFAULT 0,
|
||||
models TEXT NOT NULL DEFAULT '',
|
||||
remote TEXT NOT NULL DEFAULT '',
|
||||
agent BOOLEAN NOT NULL DEFAULT 0,
|
||||
tools BOOLEAN NOT NULL DEFAULT 0,
|
||||
working_dir TEXT NOT NULL DEFAULT '',
|
||||
window_width INTEGER NOT NULL DEFAULT 0,
|
||||
window_height INTEGER NOT NULL DEFAULT 0,
|
||||
config_migrated BOOLEAN NOT NULL DEFAULT 0,
|
||||
schema_version INTEGER NOT NULL DEFAULT 1
|
||||
);
|
||||
|
||||
-- Insert default settings row if it doesn't exist
|
||||
INSERT OR IGNORE INTO settings (id) VALUES (1);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS chats (
|
||||
id TEXT PRIMARY KEY,
|
||||
title TEXT NOT NULL DEFAULT '',
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS messages (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
chat_id TEXT NOT NULL,
|
||||
role TEXT NOT NULL,
|
||||
content TEXT NOT NULL DEFAULT '',
|
||||
thinking TEXT NOT NULL DEFAULT '',
|
||||
stream BOOLEAN NOT NULL DEFAULT 0,
|
||||
model_name TEXT,
|
||||
model_cloud BOOLEAN,
|
||||
model_ollama_host BOOLEAN,
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
thinking_time_start TIMESTAMP,
|
||||
thinking_time_end TIMESTAMP,
|
||||
FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS tool_calls (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
message_id INTEGER NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
function_name TEXT NOT NULL,
|
||||
function_arguments TEXT NOT NULL,
|
||||
function_result TEXT,
|
||||
FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id);
|
||||
`
|
||||
)
|
||||
|
||||
func TestMigrationFromEpoc(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
|
||||
defer s.Close()
|
||||
// Open database connection
|
||||
conn, err := sql.Open("sqlite3", s.DBPath+"?_foreign_keys=on&_journal_mode=WAL")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Test the connection
|
||||
if err := conn.Ping(); err != nil {
|
||||
conn.Close()
|
||||
t.Fatal(err)
|
||||
}
|
||||
s.db = &database{conn: conn}
|
||||
t.Logf("DB created: %s", s.DBPath)
|
||||
_, err = s.db.conn.Exec(v1Schema)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
version, err := s.db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
if version != 1 {
|
||||
t.Fatalf("expected: %d\n got: %d", 1, version)
|
||||
}
|
||||
|
||||
t.Logf("v1 schema created")
|
||||
if err := s.db.migrate(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Logf("migrations completed")
|
||||
version, err = s.db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
if version != currentSchemaVersion {
|
||||
t.Fatalf("expected: %d\n got: %d", currentSchemaVersion, version)
|
||||
}
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
-- This is the version 2 schema for the app database, the first released schema to users.
|
||||
-- Do not modify this file. It is used to test that the database schema stays in a consistent state between schema migrations.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS settings (
|
||||
id INTEGER PRIMARY KEY CHECK (id = 1),
|
||||
device_id TEXT NOT NULL DEFAULT '',
|
||||
has_completed_first_run BOOLEAN NOT NULL DEFAULT 0,
|
||||
expose BOOLEAN NOT NULL DEFAULT 0,
|
||||
survey BOOLEAN NOT NULL DEFAULT TRUE,
|
||||
browser BOOLEAN NOT NULL DEFAULT 0,
|
||||
models TEXT NOT NULL DEFAULT '',
|
||||
remote TEXT NOT NULL DEFAULT '',
|
||||
agent BOOLEAN NOT NULL DEFAULT 0,
|
||||
tools BOOLEAN NOT NULL DEFAULT 0,
|
||||
working_dir TEXT NOT NULL DEFAULT '',
|
||||
context_length INTEGER NOT NULL DEFAULT 4096,
|
||||
window_width INTEGER NOT NULL DEFAULT 0,
|
||||
window_height INTEGER NOT NULL DEFAULT 0,
|
||||
config_migrated BOOLEAN NOT NULL DEFAULT 0,
|
||||
schema_version INTEGER NOT NULL DEFAULT 2
|
||||
);
|
||||
|
||||
-- Insert default settings row if it doesn't exist
|
||||
INSERT OR IGNORE INTO settings (id) VALUES (1);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS chats (
|
||||
id TEXT PRIMARY KEY,
|
||||
title TEXT NOT NULL DEFAULT '',
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS messages (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
chat_id TEXT NOT NULL,
|
||||
role TEXT NOT NULL,
|
||||
content TEXT NOT NULL DEFAULT '',
|
||||
thinking TEXT NOT NULL DEFAULT '',
|
||||
stream BOOLEAN NOT NULL DEFAULT 0,
|
||||
model_name TEXT,
|
||||
model_cloud BOOLEAN,
|
||||
model_ollama_host BOOLEAN,
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
thinking_time_start TIMESTAMP,
|
||||
thinking_time_end TIMESTAMP,
|
||||
FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS tool_calls (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
message_id INTEGER NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
function_name TEXT NOT NULL,
|
||||
function_arguments TEXT NOT NULL,
|
||||
function_result TEXT,
|
||||
FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id);
|
||||
@@ -1,60 +0,0 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
package store
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSchemaVersioning(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
// Override legacy config path to avoid migration logs
|
||||
oldLegacyConfigPath := legacyConfigPath
|
||||
legacyConfigPath = filepath.Join(tmpDir, "config.json")
|
||||
defer func() { legacyConfigPath = oldLegacyConfigPath }()
|
||||
|
||||
t.Run("new database has correct schema version", func(t *testing.T) {
|
||||
dbPath := filepath.Join(tmpDir, "new_db.sqlite")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Check schema version
|
||||
version, err := db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
|
||||
if version != currentSchemaVersion {
|
||||
t.Errorf("expected schema version %d, got %d", currentSchemaVersion, version)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("can update schema version", func(t *testing.T) {
|
||||
dbPath := filepath.Join(tmpDir, "update_db.sqlite")
|
||||
db, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Set a different version
|
||||
testVersion := 42
|
||||
if err := db.setSchemaVersion(testVersion); err != nil {
|
||||
t.Fatalf("failed to set schema version: %v", err)
|
||||
}
|
||||
|
||||
// Verify it was updated
|
||||
version, err := db.getSchemaVersion()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get schema version: %v", err)
|
||||
}
|
||||
|
||||
if version != testVersion {
|
||||
t.Errorf("expected schema version %d, got %d", testVersion, version)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,495 +1,97 @@
|
||||
//go:build windows || darwin
|
||||
|
||||
// Package store provides a simple JSON file store for the desktop application
|
||||
// to save and load data such as ollama server configuration, messages,
|
||||
// login information and more.
|
||||
package store
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/ollama/ollama/app/types/not"
|
||||
)
|
||||
|
||||
type File struct {
|
||||
Filename string `json:"filename"`
|
||||
Data []byte `json:"data"`
|
||||
}
|
||||
|
||||
type User struct {
|
||||
Name string `json:"name"`
|
||||
Email string `json:"email"`
|
||||
Plan string `json:"plan"`
|
||||
CachedAt time.Time `json:"cachedAt"`
|
||||
}
|
||||
|
||||
type Message struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
Thinking string `json:"thinking"`
|
||||
Stream bool `json:"stream"`
|
||||
Model string `json:"model,omitempty"`
|
||||
Attachments []File `json:"attachments,omitempty"`
|
||||
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
|
||||
ToolCall *ToolCall `json:"tool_call,omitempty"`
|
||||
ToolName string `json:"tool_name,omitempty"`
|
||||
ToolResult *json.RawMessage `json:"tool_result,omitempty"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
ThinkingTimeStart *time.Time `json:"thinkingTimeStart,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"`
|
||||
ThinkingTimeEnd *time.Time `json:"thinkingTimeEnd,omitempty" ts_type:"Date | undefined" ts_transform:"__VALUE__ && new Date(__VALUE__)"`
|
||||
}
|
||||
|
||||
// MessageOptions contains optional parameters for creating a Message
|
||||
type MessageOptions struct {
|
||||
Model string
|
||||
Attachments []File
|
||||
Stream bool
|
||||
Thinking string
|
||||
ToolCalls []ToolCall
|
||||
ToolCall *ToolCall
|
||||
ToolResult *json.RawMessage
|
||||
ThinkingTimeStart *time.Time
|
||||
ThinkingTimeEnd *time.Time
|
||||
}
|
||||
|
||||
// NewMessage creates a new Message with the given options
|
||||
func NewMessage(role, content string, opts *MessageOptions) Message {
|
||||
now := time.Now()
|
||||
msg := Message{
|
||||
Role: role,
|
||||
Content: content,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
|
||||
if opts != nil {
|
||||
msg.Model = opts.Model
|
||||
msg.Attachments = opts.Attachments
|
||||
msg.Stream = opts.Stream
|
||||
msg.Thinking = opts.Thinking
|
||||
msg.ToolCalls = opts.ToolCalls
|
||||
msg.ToolCall = opts.ToolCall
|
||||
msg.ToolResult = opts.ToolResult
|
||||
msg.ThinkingTimeStart = opts.ThinkingTimeStart
|
||||
msg.ThinkingTimeEnd = opts.ThinkingTimeEnd
|
||||
}
|
||||
|
||||
return msg
|
||||
}
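// Minimal usage sketch (not part of the original file; the model name and
// prompt are hypothetical): a plain user message needs no options, while an
// assistant message can carry model and streaming metadata via MessageOptions.
func exampleMessages() []Message {
	userMsg := NewMessage("user", "Why is the sky blue?", nil)
	assistantMsg := NewMessage("assistant", "", &MessageOptions{
		Model:  "llama3.2",
		Stream: true,
	})
	return []Message{userMsg, assistantMsg}
}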
|
||||
|
||||
type ToolCall struct {
|
||||
Type string `json:"type"`
|
||||
Function ToolFunction `json:"function"`
|
||||
}
|
||||
|
||||
type ToolFunction struct {
|
||||
Name string `json:"name"`
|
||||
Arguments string `json:"arguments"`
|
||||
Result any `json:"result,omitempty"`
|
||||
}
|
||||
|
||||
type Model struct {
|
||||
Model string `json:"model"` // Model name
|
||||
Digest string `json:"digest,omitempty"` // Model digest from the registry
|
||||
ModifiedAt *time.Time `json:"modified_at,omitempty"` // When the model was last modified locally
|
||||
}
|
||||
|
||||
type Chat struct {
|
||||
ID string `json:"id"`
|
||||
Messages []Message `json:"messages"`
|
||||
Title string `json:"title"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
BrowserState json.RawMessage `json:"browser_state,omitempty" ts_type:"BrowserStateData"`
|
||||
}
|
||||
|
||||
// NewChat creates a new Chat with the given ID and an initialized CreatedAt timestamp
|
||||
func NewChat(id string) *Chat {
|
||||
return &Chat{
|
||||
ID: id,
|
||||
Messages: []Message{},
|
||||
CreatedAt: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
type Settings struct {
|
||||
// Expose is a boolean that indicates if the ollama server should
|
||||
// be exposed to the network
|
||||
Expose bool
|
||||
|
||||
// Browser is a boolean that indicates if the ollama server should
|
||||
// be exposed to browser windows (e.g. CORS set to allow all origins)
|
||||
Browser bool
|
||||
|
||||
// Survey is a boolean that indicates if the user allows anonymous
|
||||
// inference information to be shared with Ollama
|
||||
Survey bool
|
||||
|
||||
// Models is a string that contains the models to load on startup
|
||||
Models string
|
||||
|
||||
// TODO(parthsareen): temporary for experimentation
|
||||
// Agent indicates if the app should use multi-turn tools to fulfill user requests
|
||||
Agent bool
|
||||
|
||||
// Tools indicates if the app should use single-turn tools to fulfill user requests
|
||||
Tools bool
|
||||
|
||||
// WorkingDir specifies the working directory for all agent operations
|
||||
WorkingDir string
|
||||
|
||||
// ContextLength specifies the context length for the ollama server (using OLLAMA_CONTEXT_LENGTH)
|
||||
ContextLength int
|
||||
|
||||
// AirplaneMode when true, turns off Ollama Turbo features and only uses local models
|
||||
AirplaneMode bool
|
||||
|
||||
// TurboEnabled indicates if Ollama Turbo features are enabled
|
||||
TurboEnabled bool
|
||||
|
||||
// WebSearchEnabled maps the gpt-oss-specific frontend name 'BrowserToolEnabled' to the db field 'websearch_enabled'
|
||||
WebSearchEnabled bool
|
||||
|
||||
// ThinkEnabled indicates if thinking is enabled
|
||||
ThinkEnabled bool
|
||||
|
||||
// ThinkLevel indicates the level of thinking to use for models that support multiple levels
|
||||
ThinkLevel string
|
||||
|
||||
// SelectedModel stores the last model that the user selected
|
||||
SelectedModel string
|
||||
|
||||
// SidebarOpen indicates if the chat sidebar is open
|
||||
SidebarOpen bool
|
||||
}
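// Minimal usage sketch (not part of the original file): settings are loaded
// and saved as a whole through the Store, so changing one field means reading
// the current Settings, mutating it, and writing it back.
func exampleEnableBrowserAccess(s *Store) error {
	settings, err := s.Settings()
	if err != nil {
		return err
	}
	settings.Browser = true // allow browser windows (CORS opened to all origins)
	return s.SetSettings(settings)
}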
|
||||
|
||||
type Store struct {
|
||||
// DBPath allows overriding the default database path (mainly for testing)
|
||||
DBPath string
|
||||
|
||||
// dbMu protects database initialization only
|
||||
dbMu sync.Mutex
|
||||
db *database
|
||||
}
|
||||
|
||||
var defaultDBPath = func() string {
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
return filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "db.sqlite")
|
||||
case "darwin":
|
||||
return filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "db.sqlite")
|
||||
default:
|
||||
return filepath.Join(os.Getenv("HOME"), ".ollama", "db.sqlite")
|
||||
}
|
||||
}()
|
||||
|
||||
// legacyConfigPath is the path to the old config.json file
|
||||
var legacyConfigPath = func() string {
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
return filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "config.json")
|
||||
case "darwin":
|
||||
return filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "config.json")
|
||||
default:
|
||||
return filepath.Join(os.Getenv("HOME"), ".ollama", "config.json")
|
||||
}
|
||||
}()
|
||||
|
||||
// legacyData represents the old config.json structure (only fields we need to migrate)
|
||||
type legacyData struct {
|
||||
ID string `json:"id"`
|
||||
FirstTimeRun bool `json:"first-time-run"`
|
||||
}
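// Illustrative sketch (not part of the original file): migrateFromConfig only
// reads the two fields above, so a legacy config.json like
//
//	{"id": "test-device-id-12345", "first-time-run": true}
//
// becomes the device ID and the has_completed_first_run flag in the settings
// table; everything else in the old file is ignored.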
|
||||
|
||||
func (s *Store) ensureDB() error {
|
||||
// Fast path: check if db is already initialized
|
||||
if s.db != nil {
|
||||
return nil
|
||||
var (
|
||||
lock sync.Mutex
|
||||
store Store
|
||||
)
|
||||
|
||||
func GetID() string {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
if store.ID == "" {
|
||||
initStore()
|
||||
}
|
||||
return store.ID
|
||||
}
|
||||
|
||||
// Slow path: initialize database with lock
|
||||
s.dbMu.Lock()
|
||||
defer s.dbMu.Unlock()
|
||||
|
||||
// Double-check after acquiring lock
|
||||
if s.db != nil {
|
||||
return nil
|
||||
func GetFirstTimeRun() bool {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
if store.ID == "" {
|
||||
initStore()
|
||||
}
|
||||
return store.FirstTimeRun
|
||||
}
|
||||
|
||||
dbPath := s.DBPath
|
||||
if dbPath == "" {
|
||||
dbPath = defaultDBPath
|
||||
func SetFirstTimeRun(val bool) {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
if store.FirstTimeRun == val {
|
||||
return
|
||||
}
|
||||
store.FirstTimeRun = val
|
||||
writeStore(getStorePath())
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
if err := os.MkdirAll(filepath.Dir(dbPath), 0o755); err != nil {
|
||||
return fmt.Errorf("create db directory: %w", err)
|
||||
}
|
||||
|
||||
database, err := newDatabase(dbPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("open database: %w", err)
|
||||
}
|
||||
|
||||
// Generate device ID if needed
|
||||
id, err := database.getID()
|
||||
if err != nil || id == "" {
|
||||
// Generate new UUID for device
|
||||
u, err := uuid.NewV7()
|
||||
// lock must be held
|
||||
func initStore() {
|
||||
storeFile, err := os.Open(getStorePath())
|
||||
if err == nil {
|
||||
database.setID(u.String())
|
||||
}
|
||||
}
|
||||
|
||||
s.db = database
|
||||
|
||||
// Check if we need to migrate from config.json
|
||||
migrated, err := database.isConfigMigrated()
|
||||
if err != nil || !migrated {
|
||||
if err := s.migrateFromConfig(database); err != nil {
|
||||
slog.Warn("failed to migrate from config.json", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// migrateFromConfig attempts to migrate ID and FirstTimeRun from config.json
|
||||
func (s *Store) migrateFromConfig(database *database) error {
|
||||
configPath := legacyConfigPath
|
||||
|
||||
// Check if config.json exists
|
||||
if _, err := os.Stat(configPath); os.IsNotExist(err) {
|
||||
// No config to migrate, mark as migrated
|
||||
return database.setConfigMigrated(true)
|
||||
}
|
||||
|
||||
// Read the config file
|
||||
b, err := os.ReadFile(configPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("read legacy config: %w", err)
|
||||
}
|
||||
|
||||
var legacy legacyData
|
||||
if err := json.Unmarshal(b, &legacy); err != nil {
|
||||
// If we can't parse it, just mark as migrated and move on
|
||||
slog.Warn("failed to parse legacy config.json", "error", err)
|
||||
return database.setConfigMigrated(true)
|
||||
}
|
||||
|
||||
// Migrate the ID if present
|
||||
if legacy.ID != "" {
|
||||
if err := database.setID(legacy.ID); err != nil {
|
||||
return fmt.Errorf("migrate device ID: %w", err)
|
||||
}
|
||||
slog.Info("migrated device ID from config.json")
|
||||
}
|
||||
|
||||
hasCompleted := legacy.FirstTimeRun // If old FirstTimeRun is true, it means first run was completed
|
||||
if err := database.setHasCompletedFirstRun(hasCompleted); err != nil {
|
||||
return fmt.Errorf("migrate first time run: %w", err)
|
||||
}
|
||||
slog.Info("migrated first run status from config.json", "hasCompleted", hasCompleted)
|
||||
|
||||
// Mark as migrated
|
||||
if err := database.setConfigMigrated(true); err != nil {
|
||||
return fmt.Errorf("mark config as migrated: %w", err)
|
||||
}
|
||||
|
||||
slog.Info("successfully migrated settings from config.json")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) ID() (string, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return s.db.getID()
|
||||
}
|
||||
|
||||
func (s *Store) HasCompletedFirstRun() (bool, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return s.db.getHasCompletedFirstRun()
|
||||
}
|
||||
|
||||
func (s *Store) SetHasCompletedFirstRun(hasCompleted bool) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.setHasCompletedFirstRun(hasCompleted)
|
||||
}
|
||||
|
||||
func (s *Store) Settings() (Settings, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return Settings{}, fmt.Errorf("load settings: %w", err)
|
||||
}
|
||||
|
||||
settings, err := s.db.getSettings()
|
||||
if err != nil {
|
||||
return Settings{}, err
|
||||
}
|
||||
|
||||
// Set default models directory if not set
|
||||
if settings.Models == "" {
|
||||
dir := os.Getenv("OLLAMA_MODELS")
|
||||
if dir != "" {
|
||||
settings.Models = dir
|
||||
} else {
|
||||
home, err := os.UserHomeDir()
|
||||
defer storeFile.Close()
|
||||
err = json.NewDecoder(storeFile).Decode(&store)
|
||||
if err == nil {
|
||||
settings.Models = filepath.Join(home, ".ollama", "models")
|
||||
slog.Debug(fmt.Sprintf("loaded existing store %s - ID: %s", getStorePath(), store.ID))
|
||||
return
|
||||
}
|
||||
} else if !errors.Is(err, os.ErrNotExist) {
|
||||
slog.Debug(fmt.Sprintf("unexpected error searching for store: %s", err))
|
||||
}
|
||||
}
|
||||
|
||||
return settings, nil
|
||||
slog.Debug("initializing new store")
|
||||
store.ID = uuid.New().String()
|
||||
writeStore(getStorePath())
|
||||
}
|
||||
|
||||
func (s *Store) SetSettings(settings Settings) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
func writeStore(storeFilename string) {
|
||||
ollamaDir := filepath.Dir(storeFilename)
|
||||
_, err := os.Stat(ollamaDir)
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
if err := os.MkdirAll(ollamaDir, 0o755); err != nil {
|
||||
slog.Error(fmt.Sprintf("create ollama dir %s: %v", ollamaDir, err))
|
||||
return
|
||||
}
|
||||
|
||||
return s.db.setSettings(settings)
|
||||
}
|
||||
|
||||
func (s *Store) Chats() ([]Chat, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s.db.getAllChats()
|
||||
}
|
||||
|
||||
func (s *Store) Chat(id string) (*Chat, error) {
|
||||
return s.ChatWithOptions(id, true)
|
||||
}
|
||||
|
||||
func (s *Store) ChatWithOptions(id string, loadAttachmentData bool) (*Chat, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
chat, err := s.db.getChatWithOptions(id, loadAttachmentData)
|
||||
payload, err := json.Marshal(store)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: chat %s", not.Found, id)
|
||||
slog.Error(fmt.Sprintf("failed to marshal store: %s", err))
|
||||
return
|
||||
}
|
||||
|
||||
return chat, nil
|
||||
}
|
||||
|
||||
func (s *Store) SetChat(chat Chat) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.saveChat(chat)
|
||||
}
|
||||
|
||||
func (s *Store) DeleteChat(id string) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Delete from database
|
||||
if err := s.db.deleteChat(id); err != nil {
|
||||
return fmt.Errorf("%w: chat %s", not.Found, id)
|
||||
}
|
||||
|
||||
// Also delete associated images
|
||||
chatImgDir := filepath.Join(s.ImgDir(), id)
|
||||
if err := os.RemoveAll(chatImgDir); err != nil {
|
||||
// Log error but don't fail the deletion
|
||||
slog.Warn("failed to delete chat images", "chat_id", id, "error", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) WindowSize() (int, int, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
return s.db.getWindowSize()
|
||||
}
|
||||
|
||||
func (s *Store) SetWindowSize(width, height int) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.setWindowSize(width, height)
|
||||
}
|
||||
|
||||
func (s *Store) UpdateLastMessage(chatID string, message Message) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.updateLastMessage(chatID, message)
|
||||
}
|
||||
|
||||
func (s *Store) AppendMessage(chatID string, message Message) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.appendMessage(chatID, message)
|
||||
}
|
||||
|
||||
func (s *Store) UpdateChatBrowserState(chatID string, state json.RawMessage) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.updateChatBrowserState(chatID, state)
|
||||
}
|
||||
|
||||
func (s *Store) User() (*User, error) {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s.db.getUser()
|
||||
}
|
||||
|
||||
func (s *Store) SetUser(user User) error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
user.CachedAt = time.Now()
|
||||
return s.db.setUser(user)
|
||||
}
|
||||
|
||||
func (s *Store) ClearUser() error {
|
||||
if err := s.ensureDB(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.db.clearUser()
|
||||
}
|
||||
|
||||
func (s *Store) Close() error {
|
||||
s.dbMu.Lock()
|
||||
defer s.dbMu.Unlock()
|
||||
|
||||
if s.db != nil {
|
||||
return s.db.Close()
|
||||
}
|
||||
return nil
|
||||
fp, err := os.OpenFile(storeFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("write store payload %s: %v", storeFilename, err))
|
||||
return
|
||||
}
|
||||
defer fp.Close()
|
||||
if n, err := fp.Write(payload); err != nil || n != len(payload) {
|
||||
slog.Error(fmt.Sprintf("write store payload %s: %d vs %d -- %v", storeFilename, n, len(payload), err))
|
||||
return
|
||||
}
|
||||
slog.Debug("Store contents: " + string(payload))
|
||||
slog.Info(fmt.Sprintf("wrote store: %s", storeFilename))
|
||||
}
|
||||
|
||||
13
app/store/store_darwin.go
Normal file
@@ -0,0 +1,13 @@
package store

import (
	"os"
	"path/filepath"
)

func getStorePath() string {
	// TODO - system wide location?

	home := os.Getenv("HOME")
	return filepath.Join(home, "Library", "Application Support", "Ollama", "config.json")
}
16
app/store/store_linux.go
Normal file
@@ -0,0 +1,16 @@
package store

import (
	"os"
	"path/filepath"
)

func getStorePath() string {
	if os.Geteuid() == 0 {
		// TODO where should we store this on linux for system-wide operation?
		return "/etc/ollama/config.json"
	}

	home := os.Getenv("HOME")
	return filepath.Join(home, ".ollama", "config.json")
}