feat(hass): use LLM as voice assistant

matt1432 2024-09-02 22:14:49 -04:00
parent fe75a68081
commit 761e0487ee
9 changed files with 80 additions and 0 deletions

View file

@@ -1,4 +1,5 @@
{
  pkgs,
  self,
  wakewords-src,
  ...
@@ -13,12 +14,17 @@
      "esphome"
      "holiday"
      "met"
      "ollama"
      "spotify"
      "upnp"
      "wyoming"
      "yamaha_musiccast"
    ];
    customComponents = builtins.attrValues {
      inherit (self.legacyPackages.${pkgs.system}.hass-addons) home-llm;
    };
    config = {
      http = {
        server_host = "0.0.0.0";
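
For context, a minimal sketch of how these options hang together in nixpkgs' services.home-assistant module; the surrounding attribute path and the enable line are assumptions, since the hunk only shows the inner lines:

    # Sketch only; everything outside the diffed lines is assumed.
    services.home-assistant = {
      enable = true;

      # Integrations shipped with Home Assistant, enabled by name.
      extraComponents = ["ollama" "wyoming"];

      # Out-of-tree integrations packaged with buildHomeAssistantComponent,
      # here the home-llm conversation agent from this flake's own package set.
      customComponents = builtins.attrValues {
        inherit (self.legacyPackages.${pkgs.system}.hass-addons) home-llm;
      };

      config.http.server_host = "0.0.0.0";
    };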

View file

@@ -14,6 +14,7 @@ in {
    ./modules/docker
    ./modules/jellyfin
    ./modules/mergerfs.nix
    ./modules/ollama.nix
    ./modules/qbittorrent
    ./modules/snapraid.nix
    ./modules/subtitles

View file

@@ -0,0 +1,11 @@
{...}: {
  services.ollama = {
    enable = true;
    acceleration = "cuda";
    host = "100.64.0.4";
    port = 11434;
    loadModels = ["fixt/home-3b-v3"];
  };
}
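
Since Ollama is bound to a 100.64.0.0/10 address (the CGNAT range used by Tailscale/Headscale tailnets, and this flake pins headscale), Home Assistant would reach it over the tailnet. If the NixOS firewall filters that interface, a rule along these lines would be needed; the interface name, and whether the rule is required at all, are assumptions:

    # Sketch only: allow the Ollama API port on the (assumed) tailnet interface.
    networking.firewall.interfaces."tailscale0".allowedTCPPorts = [11434];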

View file

@@ -495,6 +495,22 @@
        "type": "github"
      }
    },
    "home-llm-src": {
      "flake": false,
      "locked": {
        "lastModified": 1724280404,
        "narHash": "sha256-mfMnESuXJH8vCzBcvv3lzhV7k/Hg1GfZn/4055dVuqk=",
        "owner": "acon96",
        "repo": "home-llm",
        "rev": "90c0edc0907e4567eeca89a6f89b43c0b0b807eb",
        "type": "github"
      },
      "original": {
        "owner": "acon96",
        "repo": "home-llm",
        "type": "github"
      }
    },
    "home-manager": {
      "inputs": {
        "nixpkgs": [
@@ -1653,6 +1669,7 @@
        "gtk-session-lock": "gtk-session-lock",
        "gtk-theme-src": "gtk-theme-src",
        "headscale": "headscale",
        "home-llm-src": "home-llm-src",
        "home-manager": "home-manager",
        "hyprgrass": "hyprgrass",
        "hyprland": "hyprland",

View file

@@ -111,6 +111,12 @@
      rev = "022fb24cd92035470496d50d86bf8c9ee74b1e7e";
      type = "github";
    };
    home-llm-src = {
      flake = false;
      owner = "acon96";
      repo = "home-llm";
      type = "github";
    };
    home-manager = {
      inputs.nixpkgs.follows = "nixpkgs";
      owner = "nix-community";

View file

@@ -177,6 +177,10 @@ let
  srcs = [
    # Home-assistant
    {
      owner = "acon96";
      repo = "home-llm";
    }
    {
      name = "wakewords-src";
      owner = "fwartner";

View file

@@ -11,6 +11,7 @@
    (pkgs.callPackage file ({inherit mkVersion;} // inputs));
in {
  dracula = mkScope ./dracula;
  hass-addons = mkScope ./hass-addons;
  firefoxAddons = mkScope ./firefox-addons;
  mpvScripts = mkScope ./mpv-scripts;
}

View file

@@ -0,0 +1,7 @@
{pkgs, ...} @ inputs:
pkgs.lib.makeScope pkgs.newScope (hass: let
  buildHassAddon = file:
    hass.callPackage file (inputs // {});
in {
  home-llm = buildHassAddon ./home-llm.nix;
})

View file

@@ -0,0 +1,27 @@
{
  buildHomeAssistantComponent,
  home-llm-src,
  python3Packages,
  ...
}: let
  manifest = builtins.fromJSON (builtins.readFile "${home-llm-src}/custom_components/llama_conversation/manifest.json");
in
  buildHomeAssistantComponent {
    owner = "acon96";
    inherit (manifest) domain version;
    src = home-llm-src;
    postPatch = ''
      substituteInPlace ./custom_components/llama_conversation/manifest.json \
        --replace-warn "huggingface-hub==0.23.0" "huggingface-hub>=0.23.0" \
        --replace-warn "webcolors<=1.13" "webcolors>=1.13"
    '';
    propagatedBuildInputs = with python3Packages; [
      huggingface-hub
      requests
      webcolors
    ];
  }
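
builtins.fromJSON turns the component's manifest into a Nix attrset, from which only domain and version are consumed here; a rough sketch of the relevant fields, with placeholder values where the real ones come from the pinned home-llm source:

    # Sketch only: approximate shape of the parsed manifest.json.
    {
      domain = "llama_conversation"; # matches custom_components/llama_conversation
      version = "0.0.0";             # placeholder; the real value is read at eval time
      requirements = [
        "huggingface-hub==0.23.0"    # pins relaxed by the postPatch above
        "webcolors<=1.13"
      ];
    }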