nixpkgs-extension/pkgs/by-category/pythonPackages/ll/llama-cpp-agent/default.nix

61 lines
1.4 KiB
Nix

{
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  pythonOlder,
  setuptools,
  # runtime dependencies
  llama-cpp-python,
  pydantic,
  requests,
  docstring-parser,
  aiohttp,
}:
buildPythonPackage rec {
  pname = "llama-cpp-agent";
  version = "0.2.35";

  # Upstream ships a pyproject.toml (setuptools backend); build via PEP 517.
  # `wheel` is intentionally not listed: setuptools-based pyproject builds in
  # nixpkgs do not need it as an explicit build input.
  pyproject = true;

  disabled = pythonOlder "3.9";

  src = fetchFromGitHub {
    owner = "Maximilian-Winter";
    repo = "llama-cpp-agent";
    # Upstream tags releases with the bare version number (no "v" prefix).
    # Use the variable directly instead of the "${version}" pure-interpolation
    # antipattern flagged by nixpkgs lints.
    rev = version;
    hash = "sha256-r4bJK18JbuXndoNh6xdUvUjiUiw4Opuj/IQ+Tal0viQ=";
  };

  nativeBuildInputs = [
    # pythonRelaxDepsHook
    setuptools
  ];

  # pythonRelaxDeps = [
  #   "llama-cpp-python"
  # ];

  propagatedBuildInputs = [
    llama-cpp-python
    pydantic
    requests
    docstring-parser
    aiohttp
  ];

  # Smoke test: the top-level module must be importable after installation.
  pythonImportsCheck = [ "llama_cpp_agent" ];

  meta = with lib; {
    # nixpkgs convention: description must not start with an article ("A"/"The").
    description = "Framework for easy interaction with Large Language Models (LLMs), supporting chat, structured function calls, and structured output";
    longDescription = ''
      The llama-cpp-agent framework is a tool designed for easy interaction with Large Language Models (LLMs).
      Allowing users to chat with LLM models, execute structured function calls and get structured output.
      Works also with models not fine-tuned to JSON output and function calls.
    '';
    homepage = "https://github.com/Maximilian-Winter/llama-cpp-agent";
    license = licenses.mit;
    # NOTE(review): marked broken by the original packager; the reason is not
    # recorded here — confirm (likely the strict llama-cpp-python pin, see the
    # commented-out pythonRelaxDeps above) before unmarking.
    broken = true;
    maintainers = with maintainers; [ ];
  };
}