From b42a596425037148286281a1942dbff0bc9733f5 Mon Sep 17 00:00:00 2001
From: Jeffrey Morgan
Date: Sun, 17 Nov 2024 11:48:12 -0800
Subject: [PATCH 01/11] docs: add customization section in linux.md (#7709)

---
 docs/linux.md | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/docs/linux.md b/docs/linux.md
index 0eec014f..8204ece5 100644
--- a/docs/linux.md
+++ b/docs/linux.md
@@ -112,6 +112,21 @@ sudo systemctl status ollama
 > https://www.amd.com/en/support/linux-drivers for best support of your Radeon
 > GPU.
 
+## Customizing
+
+To customize the installation of Ollama, you can edit the systemd service file or the environment variables by running:
+
+```
+sudo systemctl edit ollama
+```
+
+Alternatively, create an override file manually in `/etc/systemd/system/ollama.service.d/override.conf`:
+
+```ini
+[Service]
+Environment="OLLAMA_DEBUG=1"
+```
+
 ## Updating
 
 Update Ollama by running the install script again:
@@ -129,7 +144,7 @@ sudo tar -C /usr -xzf ollama-linux-amd64.tgz
 
 ## Installing specific versions
 
-Use `OLLAMA_VERSION` environment variable with the install script to install a specific version of Ollama, including pre-releases. You can find the version numbers in the [releases page](https://github.com/ollama/ollama/releases).
+Use `OLLAMA_VERSION` environment variable with the install script to install a specific version of Ollama, including pre-releases. You can find the version numbers in the [releases page](https://github.com/ollama/ollama/releases). For example:

From 8b4b243f5fd31000515548e52bf66bcdb72f70e5 Mon Sep 17 00:00:00 2001
From: Jeffrey Morgan
Date: Sun, 17 Nov 2024 13:01:04 -0800
Subject: [PATCH 02/11] server: fix warnings in prompt_test.go (#7710)

---
 server/prompt_test.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/prompt_test.go b/server/prompt_test.go
index 6d04db53..21a5fd62 100644
--- a/server/prompt_test.go
+++ b/server/prompt_test.go
@@ -32,7 +32,7 @@ func TestChatPrompt(t *testing.T) {
 	mllamaModel := Model{Template: tmpl, ProjectorPaths: []string{"vision"}, Config: ConfigV2{ModelFamilies: []string{"mllama"}}}
 
 	createImg := func(width, height int) ([]byte, error) {
-		img := image.NewRGBA(image.Rect(0, 0, 5, 5))
+		img := image.NewRGBA(image.Rect(0, 0, width, height))
 		var buf bytes.Buffer
 
 		if err := png.Encode(&buf, img); err != nil {

From 1c041171141f76b64669f990ca1ff228ce2968b6 Mon Sep 17 00:00:00 2001
From: Vinh Nguyen <1097578+vinhnx@users.noreply.github.com>
Date: Mon, 18 Nov 2024 05:35:41 +0700
Subject: [PATCH 03/11] readme: add the VT app to the community integrations section (#7706)

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 1f5cf8fd..0bdab5fd 100644
--- a/README.md
+++ b/README.md
@@ -335,6 +335,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
 - [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
 - [Reddit Rate]((https://github.com/rapidarchitect/reddit_analyzer)) (Search and Rate Reddit topics with a weighted summation)
+- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app with dynamic conversation routing, support both models backend by Ollama)
 
 ### Terminal
 

From d5da2ab7e82a04ec72e62b830a3f59c6ca601be6 Mon Sep 17 00:00:00 2001
From: Tushar Adhatrao <40828350+tusharad@users.noreply.github.com>
Date: Mon, 18 Nov 2024 04:48:04 +0530
Subject: [PATCH 04/11] readme: add ollama-haskell library to community integrations (#7451)

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 0bdab5fd..49938b37 100644
--- a/README.md
+++ b/README.md
@@ -420,6 +420,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [Agents-Flex for Java](https://github.com/agents-flex/agents-flex) with [example](https://github.com/agents-flex/agents-flex/tree/main/agents-flex-llm/agents-flex-llm-ollama/src/test/java/com/agentsflex/llm/ollama)
 - [Ollama for Swift](https://github.com/mattt/ollama-swift)
 - [GoLamify](https://github.com/prasad89/golamify)
+- [Ollama for Haskell](https://github.com/tusharad/ollama-haskell)
 
 ### Mobile
 

From c9a5aca3daf3bd8e704f668c6995926b56a7e65f Mon Sep 17 00:00:00 2001
From: Darius Kocar <60488234+DariusKocar@users.noreply.github.com>
Date: Sun, 17 Nov 2024 15:19:26 -0800
Subject: [PATCH 05/11] readme: add Perfect Memory AI to community integrations (#7431)

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 49938b37..9083b3b2 100644
--- a/README.md
+++ b/README.md
@@ -333,6 +333,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [ARGO](https://github.com/xark-argo/argo) (Locally download and run Ollama and Huggingface models with RAG on Mac/Windows/Linux)
 - [G1](https://github.com/bklieger-groq/g1) (Prototype of using prompting strategies to improve the LLM's reasoning through o1-like reasoning chains.)
 - [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
+- [Perfect Memory AI](https://www.perfectmemory.ai/) (Productivity AI assists personalized by what you have seen on your screen, heard and said in the meetings)
 - [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
 - [Reddit Rate]((https://github.com/rapidarchitect/reddit_analyzer)) (Search and Rate Reddit topics with a weighted summation)
 - [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app with dynamic conversation routing, support both models backend by Ollama)

From 760cfa27e503cd56ee61207c5ac9dfd66761ff44 Mon Sep 17 00:00:00 2001
From: Nicolas Bonamy
Date: Sun, 17 Nov 2024 18:33:10 -0600
Subject: [PATCH 06/11] readme: add Witsy and multi-llm-ts to community integrations (#7713)

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 9083b3b2..9623af65 100644
--- a/README.md
+++ b/README.md
@@ -337,6 +337,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
 - [Reddit Rate]((https://github.com/rapidarchitect/reddit_analyzer)) (Search and Rate Reddit topics with a weighted summation)
 - [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app with dynamic conversation routing, support both models backend by Ollama)
+- [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application avaiable for Mac/Windows/Linux)
 
 ### Terminal
 
@@ -422,6 +423,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [Ollama for Swift](https://github.com/mattt/ollama-swift)
 - [GoLamify](https://github.com/prasad89/golamify)
 - [Ollama for Haskell](https://github.com/tusharad/ollama-haskell)
+- [multi-llm-ts](https://github.com/nbonamy/multi-llm-ts) (A Typescript/JavaScript library allowing access to different LLM in unified API)
 
 ### Mobile
 

From a14f76491d694b2f5a0dec6473514b7f93beeea0 Mon Sep 17 00:00:00 2001
From: Vinh Nguyen <1097578+vinhnx@users.noreply.github.com>
Date: Mon, 18 Nov 2024 10:30:22 +0700
Subject: [PATCH 07/11] readme: improve Community Integrations section (#7718)

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 9623af65..62ba4aba 100644
--- a/README.md
+++ b/README.md
@@ -335,8 +335,8 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
 - [Perfect Memory AI](https://www.perfectmemory.ai/) (Productivity AI assists personalized by what you have seen on your screen, heard and said in the meetings)
 - [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
-- [Reddit Rate]((https://github.com/rapidarchitect/reddit_analyzer)) (Search and Rate Reddit topics with a weighted summation)
-- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app with dynamic conversation routing, support both models backend by Ollama)
+- [Reddit Rate](https://github.com/rapidarchitect/reddit_analyzer) (Search and Rate Reddit topics with a weighted summation)
+- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app, with dynamic conversation routing. Supports local models via Ollama)
 - [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application avaiable for Mac/Windows/Linux)
 
 ### Terminal

From 81d55d3e4d3e18404414900dd341438aad329656 Mon Sep 17 00:00:00 2001
From: Daniel Hiltgen
Date: Mon, 18 Nov 2024 11:48:13 -0800
Subject: [PATCH 08/11] fix index out of range on zero layer metal load (#7696)

If the model doesn't fit any layers on metal, and we load zero layers we would panic trying to look up the GPU size during scheduling ops
---
 llm/server.go | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/llm/server.go b/llm/server.go
index 96815826..624acbf8 100644
--- a/llm/server.go
+++ b/llm/server.go
@@ -1092,7 +1092,9 @@ func (s *llmServer) EstimatedTotal() uint64 {
 func (s *llmServer) EstimatedVRAMByGPU(gpuID string) uint64 {
 	for i, gpu := range s.gpus {
 		if gpu.ID == gpuID {
-			return s.estimate.GPUSizes[i]
+			if i < len(s.estimate.GPUSizes) {
+				return s.estimate.GPUSizes[i]
+			}
 		}
 	}
 	return 0

From 35096a7eff0bc19e50def69f75138b55244d31c5 Mon Sep 17 00:00:00 2001
From: Daniel Hiltgen
Date: Mon, 18 Nov 2024 14:39:52 -0800
Subject: [PATCH 09/11] win: add right click menu support (#7727)

Enable both left and right click on the pop-up menu
---
 app/tray/wintray/tray.go   | 2 +-
 app/tray/wintray/w32api.go | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/app/tray/wintray/tray.go b/app/tray/wintray/tray.go
index 6f827893..19fa98e9 100644
--- a/app/tray/wintray/tray.go
+++ b/app/tray/wintray/tray.go
@@ -361,7 +361,7 @@ func (t *winTray) showMenu() error {
 
 	boolRet, _, err = pTrackPopupMenu.Call(
 		uintptr(t.menus[0]),
-		TPM_BOTTOMALIGN|TPM_LEFTALIGN,
+		TPM_BOTTOMALIGN|TPM_LEFTALIGN|TPM_RIGHTBUTTON,
 		uintptr(p.X),
 		uintptr(p.Y),
 		0,
diff --git a/app/tray/wintray/w32api.go b/app/tray/wintray/w32api.go
index 7c7c0ac8..d23bfd97 100644
--- a/app/tray/wintray/w32api.go
+++ b/app/tray/wintray/w32api.go
@@ -67,6 +67,7 @@ const (
 	SW_HIDE         = 0
 	TPM_BOTTOMALIGN = 0x0020
 	TPM_LEFTALIGN   = 0x0000
+	TPM_RIGHTBUTTON = 0x0002
 	WM_CLOSE        = 0x0010
 	WM_USER         = 0x0400
 	WS_CAPTION      = 0x00C00000

From 5c18e66384de7f8106fc3b26bfafe0145ed5f7a9 Mon Sep 17 00:00:00 2001
From: frob
Date: Tue, 19 Nov 2024 00:02:41 +0100
Subject: [PATCH 10/11] Notify the user if systemd is not running (#6693)

Co-authored-by: Richard Lyons
---
 scripts/install.sh | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/scripts/install.sh b/scripts/install.sh
index ec58ddbd..850800a0 100644
--- a/scripts/install.sh
+++ b/scripts/install.sh
@@ -4,9 +4,12 @@
 
 set -eu
 
+red="$( (/usr/bin/tput bold; /usr/bin/tput setaf 1; :) 2>&-)"
+plain="$( (/usr/bin/tput sgr0; :) 2>&-)"
+
 status() { echo ">>> $*" >&2; }
-error() { echo "ERROR $*"; exit 1; }
-warning() { echo "WARNING: $*"; }
+error() { echo "${red}ERROR:${plain} $*"; exit 1; }
+warning() { echo "${red}WARNING:${plain} $*"; }
 
 TEMP_DIR=$(mktemp -d)
 cleanup() { rm -rf $TEMP_DIR; }
@@ -162,6 +165,12 @@ EOF
             start_service() { $SUDO systemctl restart ollama; }
             trap start_service EXIT
             ;;
+        *)
+            warning "systemd is not running"
+            if [ "$IS_WSL2" = true ]; then
+                warning "see https://learn.microsoft.com/en-us/windows/wsl/systemd#how-to-enable-systemd to enable it"
+            fi
+            ;;
     esac
 }
 

From 6cdf27d154e7df12d6b39cc059364a37f78679a2 Mon Sep 17 00:00:00 2001
From: Patrick Sy
Date: Tue, 19 Nov 2024 04:33:23 +0100
Subject: [PATCH 11/11] readme: add Alfred Ollama to community integrations (#7724)

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 62ba4aba..e51be1e4 100644
--- a/README.md
+++ b/README.md
@@ -467,6 +467,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
 - [QodeAssist](https://github.com/Palm1r/QodeAssist) (AI-powered coding assistant plugin for Qt Creator)
 - [Obsidian Quiz Generator plugin](https://github.com/ECuiDev/obsidian-quiz-generator)
 - [TextCraft](https://github.com/suncloudsmoon/TextCraft) (Copilot in Word alternative using Ollama)
+- [Alfred Ollama](https://github.com/zeitlings/alfred-ollama) (Alfred Workflow)
 
 ### Supported backends