Event JSON
{
  "id": "4746e22a0c8ed01b4e7d3f5f09e86190b5dbd3183c426bfcbe7870e01153c502",
  "pubkey": "efe5d120df0cc290fa748727fb45ac487caad346d4f2293ab069e8f01fc51981",
  "created_at": 1750693001,
  "kind": 1,
  "tags": [
    ["r", "wss://nos.lol/"],
    ["r", "wss://nostr.mom/"],
    ["r", "wss://nostr.oxtr.dev/"],
    ["r", "wss://nostr.wine/"],
    ["r", "wss://offchain.pub/"],
    ["r", "wss://premium.primal.net/"],
    ["r", "wss://relay.disobey.dev/", "read"],
    ["r", "wss://relay.mostr.pub/"],
    ["r", "wss://nostr-pub.wellorder.net/"]
  ],
"content": "ollama proxy with free mode -\u003e if you just need *some* llm backend to work i've made an ollama/openai proxy that pulls currently free models from openrouter and routes to them, if one fails the next one is hit. \n\nyou can use filters to say you only want mistral models out of the free models for example. It also supports paid models\n\nhttps://github.com/aljazceru/ollama-free-model-proxy",
"sig": "8a5db237fcc5a5bbacc5b1962b091b1a0da5a4c8cc0a33226de529555ea2bcaa5090ef2197a32e40ac16ad288597aee44390dfbeb2aace9ef3fd03dcf3e74506"
}
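
Event id derivation

For context on the fields above: per NIP-01, the id is the SHA-256 hash of a canonical serialization of the event, and sig is a BIP-340 Schnorr signature over that hash, verifiable against pubkey with a BIP-340 implementation. Below is a minimal Python sketch of the id computation, standard library only; note that json.dumps with these options approximates NIP-01's escaping rules, which a strict implementation must follow exactly.

import hashlib
import json

def event_id(pubkey: str, created_at: int, kind: int, tags: list, content: str) -> str:
    # NIP-01: serialize [0, pubkey, created_at, kind, tags, content] as
    # compact UTF-8 JSON (no whitespace, non-ASCII left unescaped), then
    # take the SHA-256 hex digest as the event id.
    serialized = json.dumps(
        [0, pubkey, created_at, kind, tags, content],
        separators=(",", ":"),
        ensure_ascii=False,
    )
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

Feeding this function an event's pubkey, created_at, kind, tags, and content fields should reproduce its id field for a well-formed event.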
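
Failover routing sketch

The note describes the proxy's core idea: enumerate OpenRouter's currently free models, optionally filter them by name, and try each in turn until one responds. The sketch below illustrates that scheme and is not the repo's actual code; it assumes OpenRouter's public OpenAI-compatible endpoints (/api/v1/models and /api/v1/chat/completions) and treats a model as free when its listed prompt and completion prices are both zero.

import requests

OPENROUTER = "https://openrouter.ai/api/v1"

def free_models(api_key: str, name_filter: str | None = None) -> list[str]:
    # List model ids whose prompt and completion prices are both zero,
    # optionally keeping only ids that contain name_filter.
    resp = requests.get(
        f"{OPENROUTER}/models",
        headers={"Authorization": f"Bearer {api_key}"},
        timeout=30,
    )
    resp.raise_for_status()
    ids = []
    for m in resp.json()["data"]:
        pricing = m.get("pricing", {})
        if float(pricing.get("prompt", "1")) == 0 and float(pricing.get("completion", "1")) == 0:
            if name_filter is None or name_filter in m["id"]:
                ids.append(m["id"])
    return ids

def complete_with_failover(api_key: str, messages: list[dict], name_filter: str | None = None) -> str:
    # Try each free model in turn; on any HTTP or network error,
    # fall through to the next one.
    for model in free_models(api_key, name_filter):
        try:
            resp = requests.post(
                f"{OPENROUTER}/chat/completions",
                headers={"Authorization": f"Bearer {api_key}"},
                json={"model": model, "messages": messages},
                timeout=60,
            )
            resp.raise_for_status()
            return resp.json()["choices"][0]["message"]["content"]
        except requests.RequestException:
            continue  # this model failed; hit the next one
    raise RuntimeError("no free model produced a response")

For example, complete_with_failover(key, [{"role": "user", "content": "hi"}], name_filter="mistral") restricts the rotation to free Mistral variants, matching the filter behaviour the note mentions; the "mistral" substring match is only an illustration of how such a filter might work.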