paperless-field-extractor/Cargo.toml
ju6ge 3cf2d7fe7c
fix temperature sampling
using temperature requires randomized picking of the final token, since the
token that is most probable before applying temperature stays the most
probable after, so greedy selection cannot be used!
2025-12-13 15:01:45 +01:00
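
A minimal sketch of the point above, assuming nothing beyond the rand crate already in the dependency list (the function name and values are illustrative, not the project's actual sampler): dividing logits by a temperature rescales the distribution but never changes which logit is largest, so a greedy argmax ignores temperature entirely and the token has to be drawn at random from the softmax probabilities instead.

use rand::Rng;

/// Draw a token index from raw logits at the given temperature (> 0).
fn sample_with_temperature(logits: &[f32], temperature: f32) -> usize {
    // Scaling by temperature sharpens (<1) or flattens (>1) the distribution,
    // but the argmax stays the same; greedy picking therefore ignores it.
    let scaled: Vec<f32> = logits.iter().map(|l| l / temperature).collect();

    // Numerically stable softmax weights (unnormalized).
    let max = scaled.iter().copied().fold(f32::NEG_INFINITY, f32::max);
    let weights: Vec<f32> = scaled.iter().map(|l| (l - max).exp()).collect();
    let total: f32 = weights.iter().sum();

    // Draw from the distribution instead of taking the argmax.
    let mut threshold = rand::rng().random::<f32>() * total;
    for (i, w) in weights.iter().enumerate() {
        threshold -= w;
        if threshold <= 0.0 {
            return i;
        }
    }
    weights.len() - 1 // guard against floating-point rounding
}

fn main() {
    let logits = [2.0_f32, 1.0, 0.5];
    println!("picked token {}", sample_with_temperature(&logits, 0.8));
}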


[package]
name = "paperless-llm-workflows"
version = "0.3.2"
edition = "2024"
authors = ["ju6ge <judge@felixrichter.tech>"]
license = "AGPL-3.0"

[package.metadata.release]
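# Release settings read by cargo-release: tag releases with the bare version
# number and skip `cargo publish`.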
tag-name = "{{version}}"
publish = false

[dependencies]
tokio = { version = "1.47.1", features = ["full"] }
paperless-api-client = "6.0.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.143"
futures = "0.3.31"
log = "0.4.27"
colog = "1.3.0"
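# Git forks (see branch names): gbnf with const-array grammar support,
# llama-cpp-2 allowing EOS tokens to be decoded.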
#gbnf = "0.2.5"
gbnf = { git = "https://github.com/ju6ge/gbnf", branch = "feature/const-array-support", default-features = false }
llama-cpp-2 = { git = "https://github.com/ju6ge/llama-cpp-rs", branch = "fix/allow-decoding-eos-tokens", default-features = false }
encoding_rs = "0.8.35"
schemars = { version = "1.0.4", features = ["chrono04", "derive"] }
chrono = "0.4.41"
thiserror = "2.0.17"
toml = "0.9.8"
clap = { version = "3.2.22", features = [ "derive" ] }
actix-web = "4.11.0"
regex = "1.12.2"
regex_static = "0.1.1"
once_cell = "1.21.3"
itertools = "0.14.0"
utoipa = { version = "5.4.0", features = ["actix_extras"] }
utoipa-swagger-ui = { version = "9.0.2", features = ["actix-web"] }
rand = "0.9.2"

[features]
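# Optional compute backends forwarded to llama-cpp-2; enable with e.g.
# `cargo build --features vulkan`.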
vulkan = [ "llama-cpp-2/vulkan" ]
openmp = [ "llama-cpp-2/openmp" ]
cuda = [ "llama-cpp-2/cuda" ]