initial separation from system flake repo

This commit is contained in:
TheK0tYaRa 2026-02-24 01:59:36 +02:00
commit 214f035438
14 changed files with 1505 additions and 0 deletions

6
default.nix Normal file
View file

@ -0,0 +1,6 @@
# Nixpkgs overlay entry point: hands the final/prev package sets
# (plus prev's lib) to the package tree under ./pkgs.
final: prev:
import ./pkgs {
  pkgs = final;
  inherit prev;
  inherit (prev) lib;
}

39
flake.nix Normal file
View file

@ -0,0 +1,39 @@
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";

  outputs =
    { self, nixpkgs, ... }:
    let
      inherit (nixpkgs) lib;

      # Systems this flake provides packages for.
      supportedSystems = [
        "x86_64-linux"
        # "aarch64-linux"
      ];
      forAllSystems = lib.genAttrs supportedSystems;

      overlay = import ./default.nix;

      # nixpkgs without and with this flake's overlay applied.
      basePkgs = system: import nixpkgs { inherit system; };
      overlaidPkgs =
        system:
        import nixpkgs {
          inherit system;
          overlays = [ overlay ];
        };

      # Only the attributes contributed by the overlay itself.
      ownPkgs = system: overlay (overlaidPkgs system) (basePkgs system);

      # Prefer `raganything` as the default package; otherwise take the
      # alphabetically first attribute of the set.
      pickDefault =
        ps: if ps ? raganything then ps.raganything else ps.${builtins.head (lib.attrNames ps)};
    in
    {
      overlays.default = overlay;

      packages = forAllSystems (
        system:
        let
          ps = ownPkgs system;
        in
        ps // { default = pickDefault ps; }
      );
    };
}

View file

@ -0,0 +1,46 @@
# Produces the `pythonPackagesExtensions` fragment of the overlay:
# extends every Python package set with packages discovered under
# ./pythonPackages (per-directory packages plus an optional declarative
# prefab-builder spec file).
{
  pkgs,
  prev,
  lib,
  packagesFromTree,
}:
let
  pythonPackagesDir = ./pythonPackages;
  # One extension that loads every <shard>/<name>/default.nix under
  # ./pythonPackages via the Python set's callPackage.  The helper name
  # "python-packages" is stripped so it doesn't leak into the set.
  treeExts =
    if builtins.pathExists pythonPackagesDir then
      [
        (
          python-final: python-prev:
          removeAttrs (packagesFromTree pythonPackagesDir python-final.callPackage) [
            "python-packages"
          ]
        )
      ]
    else
      [ ];
  # One extension that evaluates prefab-builder.nix against the Python
  # package set augmented with a few top-level helpers.
  fileExts =
    if builtins.pathExists (pythonPackagesDir + "/prefab-builder.nix") then
      [
        (
          python-final: python-prev:
          prev.lib.customisation.callPackagesWith (
            python-prev
            // {
              lib = prev.lib;
              pkgs = prev; # so prefab-builder can call pkgs.callPackage
              pyPkgs = python-prev; # explicit handle on the Python package set
              fetchFromGitHub = prev.fetchFromGitHub;
              # fetchPypi is usually already in python-prev; if not, provide it:
              fetchPypi = prev.fetchPypi;
            }
          ) (pythonPackagesDir + "/prefab-builder.nix") { }
        )
      ]
    else
      [ ];
in
{
  # Append to any extensions already registered by earlier overlays.
  pythonPackagesExtensions = (prev.pythonPackagesExtensions or [ ]) ++ treeExts ++ fileExts;
}

View file

@ -0,0 +1,280 @@
{
  # NOTE: upstream's runtime and optional dependencies (haystack-ai, httpx,
  # jsonschema, networkx, pandas, pydantic, transformers, elasticsearch,
  # faiss, opensearch-py, ...) are not packaged yet and are intentionally
  # left out while this derivation is work-in-progress; runtime dependency
  # checking is disabled below for the same reason.
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  hatchling,
  hatch-vcs,
}:
buildPythonPackage rec {
  pname = "haystack-experimental";
  version = "0.19.0";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "deepset-ai";
    repo = "haystack-experimental";
    tag = "v${version}";
    hash = "sha256-G3rmlIApgG3CEpIG/9/wgTIMIrD35hZZMJCKdlwKpxA=";
  };

  # hatchling/hatch-vcs are PEP 517 build backends, not runtime
  # dependencies: declare them as the build system instead of
  # propagating them into the runtime closure.
  build-system = [
    hatchling
    hatch-vcs
  ];

  # Runtime deps are not packaged yet (see the note at the top), so the
  # automatic runtime dependency check would always fail.
  dontCheckRuntimeDeps = true;

  # The build needs a writable HOME (hatch-vcs/hatchling cache).
  # Derivation attributes are NOT shell-expanded, so `env.HOME =
  # "$(mktemp -d)"` would set HOME to that literal string; export it from
  # a build hook instead, where the shell does the expansion.
  preBuild = ''
    export HOME="$(mktemp -d)"
  '';

  # The test setup is intensive; hopefully it can be enabled at some point.
  doCheck = false;
  # pythonImportsCheck = [ "haystack" ];

  meta = {
    description = "Experimental features for Haystack";
    longDescription = ''
      Experimental features for Haystack
    '';
    # NOTE(review): relies on fetchFromGitHub exposing `tag` on src —
    # confirm with the nixpkgs version in use.
    changelog = "https://github.com/deepset-ai/haystack-experimental/releases/tag/${src.tag}";
    homepage = "https://github.com/deepset-ai/haystack-experimental";
    license = lib.licenses.asl20;
    maintainers = with lib.maintainers; [ happysalada ];
    # https://github.com/deepset-ai/haystack/issues/5304
    # broken = false;
  };
}

View file

@ -0,0 +1,61 @@
{
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  pythonOlder,
  setuptools,
  wheel,
  #
  llama-cpp-python,
  pydantic,
  requests,
  docstring-parser,
  aiohttp,
}:
buildPythonPackage rec {
  pname = "llama-cpp-agent";
  version = "0.2.35";
  pyproject = true;

  disabled = pythonOlder "3.9";

  src = fetchFromGitHub {
    owner = "Maximilian-Winter";
    repo = "llama-cpp-agent";
    # Upstream tags are bare version numbers (no "v" prefix);
    # no interpolation needed around `version`.
    rev = version;
    hash = "sha256-r4bJK18JbuXndoNh6xdUvUjiUiw4Opuj/IQ+Tal0viQ=";
  };

  # PEP 517 build backend.
  build-system = [ setuptools ];

  # pythonRelaxDeps = [
  #   "llama-cpp-python"
  # ];

  dependencies = [
    llama-cpp-python
    pydantic
    requests
    docstring-parser
    aiohttp
  ];

  pythonImportsCheck = [ "llama_cpp_agent" ];

  meta = {
    description = "Framework for easy interaction with Large Language Models (LLMs), supporting chat, structured function calls, and structured output";
    longDescription = ''
      The llama-cpp-agent framework is a tool designed for easy interaction with Large Language Models (LLMs).
      Allowing users to chat with LLM models, execute structured function calls and get structured output.
      Works also with models not fine-tuned to JSON output and function calls.
    '';
    homepage = "https://github.com/Maximilian-Winter/llama-cpp-agent";
    license = lib.licenses.mit;
    broken = true;
    maintainers = [ ];
  };
}

View file

@ -0,0 +1,23 @@
# Entry point for the "prefab" Python packages: instantiates the prefab
# helper library and feeds it the declarative specs next to this file.
{
  pkgs,
  lib,
  pyPkgs,
  fetchFromGitHub,
  fetchPypi,
}:
let
  prefabLib = pkgs.callPackage ../../lib/prefab.nix {
    inherit
      lib
      fetchFromGitHub
      fetchPypi
      pyPkgs
      ;
  };
  # Resolve the spec file's arguments against the Python package set,
  # with `lib` supplied explicitly on top.
  specs = lib.customisation.callPackageWith pyPkgs ./prefab-specs.nix {
    inherit lib;
  };
in
prefabLib.mkPrefabsRec specs

View file

@ -0,0 +1,414 @@
# Declarative specs for prefab-built Python packages (consumed by
# pkgs/lib/prefab.nix).  Each top-level attribute is a spec: a source
# `url` (GitHub tree/tag/commit or PyPI), a fetch `hash`, dependency
# lists, and optional overrides under `force`.
{
  lib,
  hatchling,
  poetry-core,
  huggingface-hub,
  tqdm,
  aiohttp,
  configparser,
  google-api-core,
  google-genai,
  json-repair,
  networkx,
  numpy,
  pandas,
  pydantic,
  pypinyin,
  python-dotenv,
  tenacity,
  tiktoken,
  xlsxwriter,
  boto3,
  click,
  loguru,
  pdfminer-six,
  requests,
  httpx,
  pillow,
  pypdfium2,
  pypdf,
  reportlab,
  modelscope,
  opencv-python,
  scikit-image,
  openai,
  beautifulsoup4,
  magika,
  av,
  pdm-backend,
  fasttext-predict,
  pydantic-settings,
  aiofiles,
  hatch-fancy-pypi-readme,
  colorlog,
  torch,
  torchvision,
  wcwidth,
  matplotlib,
  pyyaml,
  scipy,
  psutil,
  py-cpuinfo,
  seaborn,
  albumentations,
  transformers,
  accelerate,
  ultralytics,
  dill,
  ftfy,
  shapely,
  pyclipper,
  omegaconf,
  onnxruntime,
  fastapi,
  python-multipart,
  uvicorn,
  gradio,
  gradio-pdf,
  ultralytics-thop,
}:
# `self` is the fixpoint of the spec set: specs may depend on each
# other through it (see mkPrefabsRec in prefab.nix).
self:
let
  inherit (self) # locally defined packages
    lightrag-hku
    mineru
    nano-vectordb
    pipmaster
    ascii-colors
    pdftext
    fast-langdetect
    mineru-vl-utils
    qwen-vl-utils
    httpx-retries
    robust-downloader
    doclayout-yolo
    decord
    ;
in
{
  # Top-level package this repo exists for; pulls mineru with its "core"
  # extras enabled.
  raganything = {
    url = "https://github.com/HKUDS/RAG-Anything/tree/v1.2.9";
    hash = "sha256-yepiLYzPD6UcJRbAbovg/BwFE8nh903o/tHypiHGKSw=";
    propagatedBuildDeps = [
      huggingface-hub
      lightrag-hku
      # mineru
      (mineru.optionalDeps [ "core" ])
      tqdm
    ];
    pythonImportsCheck = [ "raganything" ];
    meta = {
      description = "RAGAnything: All-in-One RAG System";
      license = lib.licenses.mit;
    };
  };
  lightrag-hku = {
    url = "https://github.com/HKUDS/LightRAG/tree/v1.4.9.11";
    hash = "sha256-TX/HSF2ZqoDo2SRlFzM+bkUxQXiUpnrl6kwI+lljjdo=";
    propagatedBuildDeps = [
      aiohttp
      configparser
      google-api-core
      google-genai
      json-repair
      nano-vectordb
      networkx
      numpy
      pandas
      pipmaster
      pydantic
      pypinyin
      python-dotenv
      tenacity
      tiktoken
      xlsxwriter
    ];
    meta = {
      description = "LightRAG: Simple and Fast Retrieval-Augmented Generation";
      license = lib.licenses.mit;
    };
  };
  # Pinned to a commit; force.version supplies the release number the
  # URL's rev does not contain.
  pipmaster = {
    url = "https://github.com/ParisNeo/pipmaster/tree/820acdbc9d541443942bd8afd6ba968036bd8452";
    hash = "sha256-H1R/hXPyjn1r6Dal0QMEQeBR5xUlgacxFsuDUZVwV+s=";
    force.version = "1.1.0";
    propagatedBuildDeps = [
      ascii-colors
    ];
    meta = with lib; {
      description = "A versatile Python package manager utility for simplifying package installation, updates, checks, and environment management.";
      license = licenses.asl20;
    };
  };
  nano-vectordb = {
    url = "https://github.com/gusye1234/nano-vectordb/tree/03f821348f04a93a9c36bb575faae05b61f4c02e";
    hash = "sha256-jLco+1TAncF8Ep+VGd3DhsoiZTW7G/H8fAEwnNsovTY=";
    force.version = "0.0.4.3";
    propagatedBuildDeps = [
      numpy
    ];
    meta = {
      description = "A simple, easy-to-hack Vector Database implementation";
      license = lib.licenses.mit;
    };
  };
  ascii-colors = {
    url = "https://github.com/ParisNeo/ascii_colors/tree/817a21485136635e3da89ff08444183254b25aab";
    hash = "sha256-zK4vM2sOfEFDJBpFjcJmQXljg3pgprP+VFuuk5JNWnE=";
    force.version = "0.11.12";
    propagatedBuildDeps = [
      wcwidth
    ];
    meta = {
      description = "A python library for displaying stuff on the console in a pretty way";
      license = lib.licenses.asl20;
    };
  };
  mineru = {
    url = "https://github.com/opendatalab/MinerU/tree/mineru-2.7.6-released";
    hash = "sha256-A/nFNQYGEfmWUdpX8N1lbex3mdiF8+eN8s0UKQFc42E=";
    propagatedBuildDeps = [
      boto3
      click
      loguru
      numpy
      pdfminer-six
      tqdm
      requests
      httpx
      pillow
      pypdfium2
      pypdf
      reportlab
      pdftext
      modelscope
      huggingface-hub
      json-repair
      opencv-python
      fast-langdetect
      scikit-image
      openai
      beautifulsoup4
      magika
      mineru-vl-utils
      qwen-vl-utils
    ];
    optional-dependencies =
      let
        # Capture the real gradio package before the rec attrset below
        # shadows the name with the "gradio" extras group.
        _gradio = gradio;
      in
      rec {
        vlm = [
          torch
          transformers
          accelerate
        ];
        # vllm = [
        #   vllm
        # ];
        # lmdeploy = [
        #   lmdeploy
        # ];
        # mlx = [
        #   mlx-vlm
        # ];
        pipeline = [
          matplotlib
          ultralytics
          doclayout-yolo
          dill
          pyyaml
          ftfy
          shapely
          pyclipper
          omegaconf
          torch
          torchvision
          transformers
          onnxruntime
        ];
        api = [
          fastapi
          python-multipart
          uvicorn
        ];
        gradio = [
          _gradio
          gradio-pdf
        ];
        core = vlm ++ pipeline ++ api ++ gradio;
        # all = core ++ mlx ++ vllm ++ lmdeploy;
      };
    pythonRelaxDeps = [
      "fast-langdetect"
    ];
    meta = {
      description = "Transforms complex documents like PDFs into LLM-ready markdown/JSON for your Agentic workflows.";
      license = lib.licenses.gpl3;
    };
  };
  # Fetched from PyPI (sdist) rather than GitHub.
  doclayout-yolo = {
    url = "https://pypi.org/project/doclayout-yolo/0.0.4/";
    hash = "sha256-gDEdEL7QPPiExb/MYkv+D9z/bnhLp2eZNHsCkNedy00=";
    propagatedBuildDeps = [
      matplotlib
      opencv-python
      pillow
      pyyaml
      requests
      scipy
      torch
      torchvision
      tqdm
      psutil
      py-cpuinfo
      pandas
      seaborn
      albumentations
      huggingface-hub
    ]
    ++ [
      ultralytics-thop # thop
    ];
    postInstall = ''
      rm -f "$out/bin/yolo" # collision with $'{pkgs.python3Packages.ultralytics}/bin/.yolo-wrapped
    '';
    pythonRelaxDeps = [
      "thop"
    ];
    pythonRemoveDeps = [
      "thop"
    ];
    meta = {
      description = "DocLayout-YOLO: Enhancing Document Layout Analysis through Diverse Synthetic Data and Global-to-Local Adaptive Perception";
      license = lib.licenses.agpl3Only;
    };
  };
  # thop = { url = "use ultralytics-thop instead"; };
  # Subdirectory build: the URL's trailing path selects sourceRoot.
  qwen-vl-utils = {
    url = "https://github.com/QwenLM/Qwen3-VL/tree/fe12058/qwen-vl-utils";
    hash = "sha256-Vha/Tc4q2v5RCL31hB9U4ZrfIFxfZjgwM6PYlvDeoAQ=";
    force = {
      version = "0.0.14";
    };
    propagatedBuildDeps = [
      hatchling
      av
      pillow
      requests
      torch
      torchvision
    ];
    pythonImportsCheck = [ "qwen_vl_utils" ];
    optional-dependencies = {
      decord = [
        decord
      ];
    };
    meta = {
      description = "Qwen-VL Utils contains a set of helper functions for processing and integrating visual language information with Qwen-VL Series Model.";
      license = lib.licenses.asl20;
    };
  };
  # NOTE(review): no `hash` here — prefab.nix defaults it to "", which
  # will fail at fetch time; fill it in before enabling this spec.
  decord = {
    url = "https://github.com/dmlc/decord/tree/v0.6.0/python";
    meta = {
      description = "An efficient video loader for deep learning with smart shuffling that's super easy to digest";
      license = lib.licenses.asl20;
    };
  };
  mineru-vl-utils = {
    url = "https://github.com/opendatalab/mineru-vl-utils/tree/mineru_vl_utils-0.1.22-released";
    hash = "sha256-hpTW/1nwXPxfld4nx0XHZBMerWj+UL1vzDhYpwjezRU=";
    propagatedBuildDeps = [
      httpx
      httpx-retries
      aiofiles
      pillow
      pydantic
      loguru
    ];
    optional-dependencies = {
      transformers = [
        torch
        transformers
        accelerate
        torchvision
      ];
      # vllm = [
      #   vllm
      # ];
      # mlx = [
      #   mlx-vlm
      # ];
      # lmdeploy = [
      #   lmdeploy
      #   qwen-vl-utils
      # ];
    };
    meta = {
      description = "A Python package for interacting with the MinerU Vision-Language Model.";
      license = lib.licenses.gpl3;
    };
  };
  httpx-retries = {
    url = "https://github.com/will-ockmore/httpx-retries/tree/0.4.5";
    hash = "sha256-zJ3ExSEWxlHFluSdYA8/XZ3zb4KBelU+IOFyUu4ezvo=";
    propagatedBuildDeps = [
      hatchling
      hatch-fancy-pypi-readme
      httpx
    ];
    meta = {
      description = "A retry layer for HTTPX.";
      license = lib.licenses.mit;
    };
  };
  fast-langdetect = {
    url = "https://github.com/LlmKira/fast-langdetect/tree/pypi_1.0.0";
    hash = "sha256-pj46gHG9cjkSjnYc88bSctL/1LAUe0jkBuM/GZWMsUI=";
    propagatedBuildDeps = [
      pdm-backend
      robust-downloader
      requests
      fasttext-predict
    ];
    meta = {
      description = " 80x faster Fasttext language detection out of the box | Split text by language ";
      license = lib.licenses.mit;
    };
  };
  robust-downloader = {
    url = "https://github.com/fedebotu/robust-downloader/tree/0.0.2";
    hash = "sha256-UmzfEIPiMtUkOG6sIMYgLxc8YwL5wRgMBRlywqKomv0=";
    propagatedBuildDeps = [
      tqdm
      colorlog
      requests
    ];
    meta = {
      description = "Minimal Python downloader with robustness in mind - resumable downloads, retries, and more";
      license = lib.licenses.asl20;
    };
  };
  pdftext = {
    url = "https://github.com/datalab-to/pdftext/tree/v0.6.3";
    hash = "sha256-EGVjzjDWtdcEPX//cOm5+xm9FvX0aP+h6fsD25hC8gA=";
    propagatedBuildDeps = [
      poetry-core
      click
      pydantic
      pydantic-settings
      pypdfium2
    ];
    pythonRelaxDeps = [
      "pypdfium2"
    ];
    meta = {
      description = "Extract structured text from pdfs quickly";
      license = lib.licenses.asl20;
    };
  };
}

View file

@ -0,0 +1,114 @@
{
  lib,
  stdenv,
  fetchFromGitHub,
  nodejs_20,
  yarn,
  fetchYarnDeps,
  fixup-yarn-lock,
  python3,
  pkg-config,
  # makeWrapper is used in installPhase below, so it must be an input.
  makeWrapper,
  # openssl,
  # sqlite,
  vips,
  vite,
}:
let
  pname = "anything-llm";
  version = "1.10.0";
  src = fetchFromGitHub {
    owner = "Mintplex-Labs";
    repo = "anything-llm";
    rev = "v${version}";
    hash = "sha256-W7wpgEJxo+IfIVRAWJNTmJL4RezO4NdlQEpV6dJp5IA=";
    fetchSubmodules = true;
  };
  # NOTE(review): offlineCacheFrontend is fetched but never registered
  # with yarn, and the frontend is built without an install step —
  # confirm `cd frontend && yarn build` actually finds its node_modules.
  offlineCacheFrontend = fetchYarnDeps {
    yarnLock = "${src}/frontend/yarn.lock";
    hash = "sha256-ebAV+1Ux4aL6hIodfaRVjEUFSWQplI7c7sybTEzucdw=";
  };
  offlineCacheServer = fetchYarnDeps {
    yarnLock = "${src}/server/yarn.lock";
    hash = "sha256-+agJhFItPGLBUGLhhdNATiHuId51sDWQbG/Z1FyIVWM=";
  };
in
stdenv.mkDerivation {
  inherit pname version src;
  nativeBuildInputs = [
    nodejs_20
    yarn
    python3
    pkg-config
    stdenv.cc # compiler/linker for native node modules
    fixup-yarn-lock
    makeWrapper
    vite
  ];
  buildInputs = [
    vips
    nodejs_20.dev
  ];
  # Keep sharp from downloading its vendored libvips and ignoring the
  # system one.
  SHARP_FORCE_GLOBAL_LIBVIPS = "1"; # always try to use the global libvips
  npm_config_build_from_source = "sharp"; # ask sharp to build from source
  configurePhase = ''
    export HOME="$(mktemp -d)"
    pushd server
    fixup-yarn-lock yarn.lock
    yarn config --offline set yarn-offline-mirror "${offlineCacheServer}"
    # 1) ставим deps без postinstall'ов (иначе sharp полезет в сеть)
    yarn install --offline --frozen-lockfile --ignore-scripts --no-progress
    # 2) теперь собираем sharp из исходников против system libvips
    export PKG_CONFIG_PATH="${vips.dev}/lib/pkgconfig:${vips}/lib/pkgconfig:$PKG_CONFIG_PATH"
    export LD_LIBRARY_PATH="${vips}/lib:$LD_LIBRARY_PATH"
    export npm_config_nodedir="${nodejs_20.dev}"
    # yarn v1: rebuild прогоняет lifecycle для native-модулей
    ${nodejs_20}/bin/npm rebuild sharp --build-from-source --nodedir="${nodejs_20.dev}"
    popd
  '';
  postPatch = ''
    # frontend: build without Qt/X11 postbuild
    substituteInPlace frontend/package.json \
      --replace '"build": "vite build && node scripts/postbuild.js"' '"build": "vite build"'
  '';
  buildPhase = ''
    runHook preBuild
    (export QT_QPA_PLATFORM=offscreen QTWEBENGINE_DISABLE_SANDBOX=1 QTWEBENGINE_CHROMIUM_FLAGS="--headless --disable-gpu --no-sandbox"
    cd frontend && yarn build) # --offline
    # server/collector build steps, если нужны, добавите отдельно
    runHook postBuild
  '';
  installPhase = ''
    runHook preInstall
    mkdir -p $out/lib/${pname} $out/bin
    cp -r frontend server collector $out/lib/${pname}/
    makeWrapper ${nodejs_20}/bin/node $out/bin/anything-llm-server \
      --chdir $out/lib/${pname}/server \
      --set NODE_ENV production \
      --set-default PORT 3001 \
      --add-flags "index.js"
    runHook postInstall
  '';
  meta = with lib; {
    description = "AnythingLLM (server + frontend + collector)";
    homepage = "https://github.com/Mintplex-Labs/anything-llm";
    license = licenses.mit;
    platforms = platforms.linux;
  };
}

View file

@ -0,0 +1,82 @@
# LM Studio packaged from the upstream AppImage; the image is extracted
# once so a desktop entry and icon can be installed alongside the
# wrapped binary.
{
  lib,
  fetchurl,
  appimageTools,
  imagemagick,
}:
let
  pname = "lmstudio";
  version = "0.4.2-2";
  url = "https://installers.lmstudio.ai/linux/x64/${version}/LM-Studio-${version}-x64.AppImage";
  src = fetchurl {
    inherit url;
    hash = "sha256-JxGlqgsuLcW81mOIcntVFSHv19zSFouIChgz/egc+J0=";
  };
  # Extracted AppImage contents, used only to harvest .desktop and icon.
  contents = appimageTools.extractType2 { inherit pname version src; };
in
appimageTools.wrapType2 {
  inherit pname version src;
  # imagemagick provides `magick` for the icon conversion below.
  nativeBuildInputs = [ imagemagick ];
  extraInstallCommands = ''
    set -euo pipefail
    mkdir -p "$out/share/applications"
    mkdir -p "$out/share/icons/hicolor/256x256/apps"
    # ---- desktop: create fresh, never edit in place ----
    desktopSrc="$(find "${contents}" -type f -name '*.desktop' | head -n1 || true)"
    if [ -n "$desktopSrc" ]; then
      tmp="$(mktemp)"
      # Rewrite keys deterministically; keep everything else.
      awk -v exec="${pname}" -v icon="${pname}" '
        BEGIN{hasExec=0;hasTry=0;hasIcon=0}
        /^Exec=/{print "Exec="exec; hasExec=1; next}
        /^TryExec=/{print "TryExec="exec; hasTry=1; next}
        /^Icon=/{print "Icon="icon; hasIcon=1; next}
        {print}
        END{
          if(!hasExec) print "Exec="exec
          if(!hasTry) print "TryExec="exec
          if(!hasIcon) print "Icon="icon
        }
      ' "$desktopSrc" > "$tmp"
      rm -f "$out/share/applications/${pname}.desktop"
      install -m444 "$tmp" "$out/share/applications/${pname}.desktop"
      rm -f "$tmp"
    fi
    # ---- icon: pick one, convert to canonical name ----
    icon=""
    for cand in \
      "${contents}/.DirIcon" \
      "${contents}/AppIcon.png" \
      "${contents}/usr/share/icons/hicolor/512x512/apps/"*.png \
      "${contents}/usr/share/icons/hicolor/256x256/apps/"*.png \
    ; do
      if [ -f "$cand" ]; then icon="$cand"; break; fi
    done
    if [ -z "$icon" ]; then
      icon="$(find "${contents}" -type f -name '*.png' | head -n1 || true)"
    fi
    if [ -n "$icon" ]; then
      magick "$icon" -resize 256x256 "$out/share/icons/hicolor/256x256/apps/${pname}.png"
    fi
  '';
  meta = with lib; {
    description = "LM Studio (AppImage)";
    homepage = "https://lmstudio.ai/";
    license = licenses.unfreeRedistributable;
    platforms = [ "x86_64-linux" ];
    mainProgram = pname;
  };
}

View file

@ -0,0 +1,120 @@
{
  description = "stable-diffusion.cpp as a NixOS programs.* module (system nixpkgs)";
  outputs = { self, ... }: {
    # The module builds stable-diffusion.cpp from the system's nixpkgs
    # (pkgs from the module args), optionally with Vulkan, and wraps the
    # `sd` binary with configurable environment variables.
    nixosModules.default = { config, lib, pkgs, ... }:
      let
        cfg = config.programs.stable-diffusion-cpp;
        # Default package: built from upstream source when cfg.package is null.
        builtPackage =
          pkgs.stdenv.mkDerivation {
            pname = "stable-diffusion-cpp";
            version = "git";
            src = pkgs.fetchFromGitHub {
              owner = "leejet";
              repo = "stable-diffusion.cpp";
              rev = "master-453-4ff2c8c"; # pin to a commit for reproducibility
              hash = "sha256-0Hl3M6NQK1ZfIH4eIdy/XiPZTeBCnRCtode88NipPp4=";
              fetchSubmodules = true;
            };
            nativeBuildInputs = with pkgs; [ cmake ninja pkg-config ];
            # Vulkan dependencies are only pulled in when enabled.
            buildInputs =
              (lib.optionals cfg.vulkan.enable (with pkgs; [
                vulkan-headers
                vulkan-loader
                shaderc
              ]));
            cmakeFlags = [
              "-DCMAKE_BUILD_TYPE=Release"
              "-DCMAKE_POLICY_VERSION_MINIMUM=3.5"
              "-DSD_VULKAN=${if cfg.vulkan.enable then "ON" else "OFF"}"
            ];
            # Upstream has no install target here; copy the binary from
            # whichever location the build produced.
            installPhase = ''
              runHook preInstall
              mkdir -p $out/bin
              cp -v bin/sd $out/bin/ || cp -v sd $out/bin/
              runHook postInstall
            '';
          };
        selectedPackage = if cfg.package != null then cfg.package else builtPackage;
        # One `--set KEY VALUE` pair per cfg.extraEnv entry, shell-escaped.
        extraEnvArgs =
          lib.concatStringsSep " \\\n  "
            (lib.mapAttrsToList (k: v:
              "--set ${lib.escapeShellArg k} ${lib.escapeShellArg v}"
            ) cfg.extraEnv);
        # symlinkJoin copy of the package with $out/bin/sd wrapped to
        # inject the Vulkan ICD / XDG_DATA_DIRS / extraEnv settings.
        wrappedPackage =
          pkgs.symlinkJoin {
            name = "${selectedPackage.pname or "stable-diffusion-cpp"}";
            paths = [ selectedPackage ];
            nativeBuildInputs = [ pkgs.makeWrapper ];
            postBuild = ''
              if [ -x "$out/bin/sd" ]; then
                wrapProgram "$out/bin/sd" \
                  ${lib.optionalString (cfg.vulkan.enable && cfg.vulkan.icdFile != null)
                    "--set-default VK_ICD_FILENAMES ${lib.escapeShellArg (toString cfg.vulkan.icdFile)}"} \
                  ${lib.optionalString (cfg.vulkan.enable && cfg.vulkan.prefixOpenGLXdgDataDirs)
                    "--prefix XDG_DATA_DIRS : /run/opengl-driver/share"} \
                  ${extraEnvArgs}
              fi
            '';
          };
        finalPackage = if cfg.wrap then wrappedPackage else selectedPackage;
      in {
        options.programs.stable-diffusion-cpp = {
          enable = lib.mkEnableOption "stable-diffusion.cpp";
          package = lib.mkOption {
            type = lib.types.nullOr lib.types.package;
            default = null;
            description = "Override the package used (otherwise build from upstream source).";
          };
          wrap = lib.mkOption {
            type = lib.types.bool;
            default = true;
            description = "Wrap the sd binary to inject environment variables.";
          };
          extraEnv = lib.mkOption {
            type = lib.types.attrsOf lib.types.str;
            default = { };
            description = "Extra environment variables added to the sd wrapper.";
          };
          vulkan = {
            enable = lib.mkOption {
              type = lib.types.bool;
              default = true;
              description = "Build with Vulkan support (toggles -DSD_VULKAN and Vulkan deps).";
            };
            icdFile = lib.mkOption {
              type = lib.types.nullOr lib.types.path;
              default = null;
              example = /run/opengl-driver/share/vulkan/icd.d/intel_icd.x86_64.json;
              description = "Optional VK_ICD_FILENAMES value to select a Vulkan ICD.";
            };
            prefixOpenGLXdgDataDirs = lib.mkOption {
              type = lib.types.bool;
              default = true;
              description = "Prefix XDG_DATA_DIRS with /run/opengl-driver/share (useful for Vulkan layers/ICDs on NixOS).";
            };
          };
        };
        config = lib.mkIf cfg.enable {
          environment.systemPackages = [ finalPackage ];
        };
      };
  };
}

43
pkgs/default.nix Normal file
View file

@ -0,0 +1,43 @@
{
  pkgs,
  prev,
  lib,
  ...
}:
let
  # Names of the immediate subdirectories of `path`.
  dirs = path: lib.attrNames (lib.filterAttrs (_: t: t == "directory") (builtins.readDir path));

  # Walk a two-level tree (<base>/<shard>/<name>/default.nix) and call
  # every package found, nixpkgs `by-name` style.  Later entries win on
  # duplicate names, matching attrset `//` semantics.
  packagesFromTree =
    base: callPackage:
    let
      shardAttrs =
        shard:
        let
          shardDir = base + "/${shard}";
        in
        lib.concatMap (
          name:
          let
            pkgDir = shardDir + "/${name}";
          in
          lib.optional (builtins.pathExists (pkgDir + "/default.nix")) {
            ${name} = callPackage pkgDir { };
          }
        ) (dirs shardDir);
    in
    lib.foldl' lib.mergeAttrs { } (lib.concatMap shardAttrs (dirs base));

  byNamePkgs =
    if builtins.pathExists ./by-name then packagesFromTree ./by-name pkgs.callPackage else { };

  byCategoryPkgs = import ./by-category {
    inherit
      pkgs
      prev
      lib
      packagesFromTree
      ;
  };
in
byNamePkgs // byCategoryPkgs

View file

@ -0,0 +1,40 @@
# { lib }:
# url:
# let
# u0 = builtins.head (builtins.split "[?#]" url);
# u = lib.strings.removeSuffix "/" u0;
# m = builtins.match "https://github.com/([^/]+)/([^/]+)(/(tree|tag|commit)/([^/]+)(/(.*))?)?" u;
# in
# assert m != null;
# let
# owner = builtins.elemAt m 0;
# repo = builtins.elemAt m 1;
# kind = builtins.elemAt m 3;
# rev = builtins.elemAt m 4;
# subdir = builtins.elemAt m 6;
# in
# {
# inherit
# owner
# repo
# kind
# rev
# subdir
# ;
# version =
# if rev == null then
# null
# else
# let
# vm = builtins.match "^.*([0-9]+(\\.[0-9]+)+).*$" rev;
# in
# if vm == null then null else builtins.elemAt vm 0;
# pname = repo;
# homepage = "https://github.com/${owner}/${repo}";
# }

View file

@ -0,0 +1,75 @@
# Parses a source reference URL into a structured attrset.
# Supported forms: GitHub tree/tag/commit URLs (optionally with a
# subdirectory), PyPI project pages, and a `pypi:name@version` shorthand.
{ lib }:
url:
let
  # Drop any ?query / #fragment part and a trailing slash.
  bare = builtins.head (builtins.split "[?#]" url);
  cleaned = lib.removeSuffix "/" bare;

  # GitHub: https://github.com/owner/repo/(tree|tag|commit)/rev/(optional/subdir)
  ghMatch = builtins.match "https://github.com/([^/]+)/([^/]+)(/(tree|tag|commit)/([^/]+)(/(.*))?)?" cleaned;
  # PyPI project page: https://pypi.org/project/name/version
  pypiProjectMatch = builtins.match "https://pypi.org/project/([^/]+)(/([^/]+))?" cleaned;
  # PyPI short forms: pypi:requests@2.32.3 | pypi:requests==2.32.3 | pypi:requests/2.32.3
  pypiShortMatch = builtins.match "^pypi:([^@=/]+)(==|@|/)?([^/]+)?$" cleaned;

  # Both PyPI spellings produce the same record shape.
  mkPypiRef = pname: version: {
    type = "pypi";
    homepage = "https://pypi.org/project/${pname}/";
    inherit pname version;
    subdir = null;
  };

  mkGithubRef =
    m:
    let
      owner = builtins.elemAt m 0;
      repo = builtins.elemAt m 1;
      kind = builtins.elemAt m 3; # null if absent
      rev = builtins.elemAt m 4; # null if absent
      subdir = builtins.elemAt m 6; # null or "a/b"
      # Best-effort version: first dotted number sequence found in rev.
      versionMatch = if rev == null then null else builtins.match "^.*([0-9]+(\\.[0-9]+)+).*$" rev;
      version = if versionMatch == null then null else builtins.elemAt versionMatch 0;
    in
    {
      type = "github";
      pname = repo;
      homepage = "https://github.com/${owner}/${repo}";
      inherit
        owner
        repo
        kind
        rev
        subdir
        version
        ;
    };
in
if ghMatch != null then
  mkGithubRef ghMatch
else if pypiShortMatch != null then
  mkPypiRef (builtins.elemAt pypiShortMatch 0) (builtins.elemAt pypiShortMatch 2)
else if pypiProjectMatch != null then
  mkPypiRef (builtins.elemAt pypiProjectMatch 0) (builtins.elemAt pypiProjectMatch 2)
else
  throw "parse-source-ref: unsupported url: ${url}"

162
pkgs/lib/prefab.nix Normal file
View file

@ -0,0 +1,162 @@
# Helper library for building Python packages from compact declarative
# specs ("prefabs").  A spec gives a source URL, a hash, dependency
# lists, and optional overrides; mkPrefab turns it into a derivation.
{
  lib,
  fetchFromGitHub,
  fetchPypi,
  pyPkgs,
}:
let
  inherit (pyPkgs)
    buildPythonPackage
    setuptools
    wheel
    python
    ;
  # URL -> { type, pname, version, rev, subdir, homepage, ... }
  parseRef = import ./parse-source-ref.nix { inherit lib; };
  # Build one package from one spec attrset.
  mkPrefab =
    {
      url,
      hash ? "",
      force ? { }, # per-field overrides (pname, version, rev, src, format, ...)
      meta ? { },
      passthru ? { },
      propagatedBuildDeps ? [ ],
      nativeBuildDeps ? [ ],
      pythonRelaxDeps ? [ ],
      pythonRemoveDeps ? [ ],
      postInstall ? null,
      pythonImportsCheck ? null,
      optional-dependencies ? { },
    }:
    let
      ref = parseRef url;
      pname = force.pname or ref.pname;
      version = force.version or ref.version or "unstable";
      isWheel = (force.format or null) == "wheel";
      rev = force.rev or ref.rev or null;
      src =
        if (force.src or null) != null then
          force.src
        else if ref.type == "github" then
          (
            assert rev != null;
            fetchFromGitHub {
              owner = ref.owner;
              repo = ref.repo;
              inherit rev hash;
            }
          )
        else if ref.type == "pypi" then
          (
            # PyPI sources need an explicit version to construct the URL.
            assert (force.version or ref.version or null) != null;
            fetchPypi (
              lib.filterAttrs (_: v: v != null) {
                # sdist filenames use underscores.
                pname = lib.replaceStrings [ "-" ] [ "_" ] pname;
                inherit version hash;
                format = force.format or null;
                dist = force.dist or null;
                python = force.python or null;
                abi = force.abi or null;
                platform = force.platform or null;
              }
            )
          )
        else
          throw "mkPrefab: unsupported ref.type=${ref.type}";
      # Default import check: pname with dashes folded to underscores.
      defaultImports = [ (lib.toLower (lib.replaceStrings [ "-" ] [ "_" ] pname)) ];
      imports = if pythonImportsCheck == null then defaultImports else pythonImportsCheck;
      homepage = force.homepage or ref.homepage;
      passthru' =
        passthru
        // lib.optionalAttrs (optional-dependencies != { }) {
          optional-dependencies = optional-dependencies;
        };
    in
    let
      # Build the derivation with `extraDeps` appended (used by the
      # optionalDeps escape hatch below).
      mkDrv =
        extraDeps:
        buildPythonPackage (
          {
            inherit pname version src;
            dependencies = propagatedBuildDeps ++ extraDeps;
            pythonRelaxDeps = pythonRelaxDeps;
            pythonRemoveDeps = pythonRemoveDeps;
            pythonImportsCheck = imports;
            dontBuild = (force.format or null) == "wheel";
            # Sanity check: each declared module imports and reports a version.
            installCheckPhase = ''
              runHook preInstallCheck
              ${builtins.concatStringsSep "\n" (
                map (mod: ''
                  ${python.interpreter} -c "import ${mod}; print(getattr(${mod},'__version__','unknown'))"
                '') imports
              )}
              runHook postInstallCheck
            '';
            passthru = passthru';
            meta = {
              inherit homepage;
            }
            // meta;
          }
          # Only set sourceRoot when a subdir was given: a literal `null`
          # attribute would be passed to the derivation (and fail string
          # coercion), unlike omitting it — same pattern as postInstall.
          // lib.optionalAttrs (ref.subdir != null) { sourceRoot = "source/${ref.subdir}"; }
          // lib.optionalAttrs (postInstall != null) { inherit postInstall; }
          // lib.optionalAttrs isWheel {
            format = "wheel";
            nativeBuildInputs = nativeBuildDeps;
          }
          // lib.optionalAttrs (!isWheel) {
            pyproject = true;
            build-system = [
              setuptools
              wheel
            ]
            ++ nativeBuildDeps;
          }
        );
      drv = mkDrv [ ];
    in
    if optional-dependencies == { } then
      drv
    else
      let
        # Resolve an extras request: strings name declared extras groups,
        # lists are spliced in, bare packages are taken as-is.
        extraPkgs =
          extras:
          lib.concatLists (
            map (
              x:
              if builtins.isString x then
                (optional-dependencies.${x} or (throw "${pname}.optionalDeps: unknown extra '${x}'"))
              else if builtins.isList x then
                x
              else
                [ x ]
            ) extras
          );
      in
      # NOTE(review): `drv // { ... }` yields a plain attrset, not a
      # derivation with passthru — consider lib.extendDerivation; kept
      # as-is since callers rely on the current shape.
      drv
      // {
        # Rebuild the package with the named extras' deps included.
        optionalDeps = extras: mkDrv (extraPkgs extras);
      };
  # Apply mkPrefab to every spec in an attrset.
  mkPrefabs = specs: lib.mapAttrs (_: spec: mkPrefab spec) specs;
  # Fixpoint variant: specsFn receives the finished package set so
  # specs can reference each other.
  mkPrefabsRec = specsFn: lib.fix (self: mkPrefabs (specsFn self));
in
{
  inherit mkPrefab mkPrefabs mkPrefabsRec;
}