Still not sure whether this works at all, or how quickly I'll blow past the free tier limits with it. But bart-large-cnn is pretty awful, so I want to try something better.
name = "miniflux-ai"
|
|
main = "build/worker/shim.mjs"
|
|
compatibility_date = "2024-09-06"
|
|
|
|
[[kv_namespaces]]
|
|
binding = "entries"
|
|
id = "265898697f934ce483e6ede9bc16edd1"
|
|
|
|
[build]
|
|
command = "cargo install -q worker-build && worker-build --release"
|
|
|
|
[triggers]
|
|
crons = ["*/15 * * * *"]
|
|
|
|
[observability]
|
|
enabled = true
|
|
head_sampling_rate = 1 # optional. default = 1.
|
|
|
|
[vars]
|
|
CF_AI_MODEL = "@hf/meta-llama/meta-llama-3-8b-instruct"
|
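For reference, the build command implies the worker itself is Rust compiled via worker-build. Here is a minimal sketch of how the pieces in this config line up on the Rust side, assuming the workers-rs `worker` crate; the `run` helper and the `last_run` key are illustrative placeholders, not the actual feed-summarization logic:

```rust
use worker::*;

// Entry point for the cron trigger defined in [triggers] ("*/15 * * * *").
#[event(scheduled)]
pub async fn scheduled(_event: ScheduledEvent, env: Env, _ctx: ScheduleContext) {
    if let Err(e) = run(&env).await {
        console_error!("scheduled run failed: {e}");
    }
}

async fn run(env: &Env) -> Result<()> {
    // The model name comes from [vars] above, so swapping models is a config-only change.
    let model = env.var("CF_AI_MODEL")?.to_string();
    console_log!("using model {model}");

    // "entries" matches the [[kv_namespaces]] binding above; the real worker would
    // cache per-entry summaries here so each feed item is only summarized once.
    let kv = env.kv("entries")?;
    let previous = kv.get("last_run").text().await?;
    console_log!("previous run marker: {previous:?}");
    kv.put("last_run", Date::now().as_millis().to_string())?
        .execute()
        .await?;
    Ok(())
}
```

The `enabled = true` under `[observability]` is what makes those `console_log!` lines show up in the Workers dashboard, sampled at `head_sampling_rate`.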