Fixed ollama

This commit is contained in:
Alexandre 2025-12-27 22:13:42 +01:00
parent c0545f59a8
commit 92ee670ee3
4 changed files with 11 additions and 6 deletions

View File

@@ -4,6 +4,6 @@
# Empty file for future imports
imports = [
../../modules/macos/ollama.nix
(import ../../modules/common/ai.nix { inherit pkgs; })
../../modules/common/ai.nix
];
}

View File

@@ -92,6 +92,7 @@
playerctl
# Nvidia packages
nvtopPackages.nvidia
# Video accel
libva
libva-utils

View File

@@ -12,6 +12,6 @@
../../modules/nixos/gamming/steam.nix # For Steam/Proton/Lutris/MangoHUD/Heroic/Bottles
../../modules/common/spotify.nix # Spotify/Spicetify
../../modules/common/ssh.nix
(import ../../modules/common/ai.nix { inherit pkgs; })
../../modules/common/ai.nix
];
}

View File

@@ -1,14 +1,18 @@
{ pkgs, ... }:
{ pkgs, lib, config, ... }:
let
# Détection simple du GPU pour choisir le package Ollama
hasNvidiaGpu = config.hardware.nvidia.package or null != null;
gpu =
if builtins.hasAttr "nvidia" pkgs then
if hasNvidiaGpu then
pkgs.ollama-cuda
else if builtins.hasAttr "vulkan" pkgs then
pkgs.ollama-vulkan
else if builtins.hasAttr "rocm" pkgs then
else if builtins.hasAttr "amd" pkgs then
pkgs.ollama-rocm
else if pkgs.stdenv.isDarwin then
pkgs.ollama # Ollama uses Metal by default
else
pkgs.ollama; # CPU fallback
in