Event JSON
{
  "id": "13c83e9f7fecbbaadacb64a37121994f200e72aa54fd5b980cdbae38a84b33a6",
  "pubkey": "f2e15ad6a570214535a3262db4ad0e9a08063401b57d80002255424929beaa27",
  "created_at": 1739310131,
  "kind": 1,
  "tags": [
    [
      "p",
      "4ebb1885240ebc43fff7e4ff71a4f4a1b75f4e296809b61932f10de3e34c026b",
      "wss://relay.mostr.pub"
    ],
    [
      "p",
      "8b0be93ed69c30e9a68159fd384fd8308ce4bbf16c39e840e0803dcb6c08720e",
      "wss://relay.mostr.pub"
    ],
    [
      "proxy",
      "https://social.coop/users/harris/statuses/113987428761131789",
      "activitypub"
    ]
  ],
  "content": "nostr:nprofile1qy2hwumn8ghj7un9d3shjtnddaehgu3wwp6kyqpqf6a33pfyp67y8llhunlhrf855xm47n3fdqymvxfj7yx78c6vqf4sy8ssyg what do you know about running local LLMs on mobile? What are the existing options out there? (I figured there would probably be something on your blog about this, but after searching around a bit, I couldn’t find much recent, so thought I’d ask directly!)",
  "sig": "d1fe4a19b0a030b9dc50a03e2e402fd186da67b77739992cc3d1c13ba1781b334dc9e8e543ae83ff4a8583d8b79623201e48f14dfee9f38b43838602f7ae6d6c"
}