Fixed ollama package

This commit is contained in:
Alexandre 2025-12-26 23:45:13 +01:00
parent 7e7ea34ca6
commit cc588a1947
3 changed files with 20 additions and 4 deletions

View File

@@ -1,6 +1,8 @@
+{ pkgs ,... }:
+
 {
   # Enpty file for future imports
   imports = [
-    (import ../../common/ai.nix { gpu = "metal"; })
+    (import ../../common/ai.nix { inherit pkgs; })
   ];
 }

View File

@@ -1,3 +1,5 @@
+{ pkgs ,... }:
+
 {
   # This file is used to import all modules required by this setup.
   imports =
@@ -8,6 +10,6 @@
     ../../modules/nixos/gamming/steam.nix # For Steam/Proton/Lutris/MangoHUD/Heroic/Bottles
     ../../modules/common/spotify.nix # Spotify/Spicetify
     ../../modules/common/ssh.nix
-    (import ../../modules/common/ai.nix { gpu = "cuda"; })
+    (import ../../modules/common/ai.nix { inherit pkgs; })
   ];
 }

View File

@@ -1,11 +1,23 @@
-{ gpu, ... }:
+{ pkgs, ... }:
+let
+  # Détection simple du GPU pour choisir le package Ollama
+  gpu =
+    if builtins.hasAttr "nvidia" pkgs then
+      pkgs.ollama-cuda
+    else if builtins.hasAttr "vulkan" pkgs then
+      pkgs.ollama-vulkan
+    else if builtins.hasAttr "rocm" pkgs then
+      pkgs.ollama-rocm
+    else
+      pkgs.ollama; # CPU fallback
+in
 {
   services = {
     ollama = {
       enable = true;
       loadModels = [ "codegemma:7b-instruct" "codegemma:7b-code" "codegemma:2b" "starcoder2:7b" ];
-      acceleration = gpu;
+      package = gpu;
     };
     open-webui = {
       enable = true;