Event JSON
{
  "id": "719ee20f6e34df54d1229a46dda6a033459e29eb20339038231c731fb83fa9ef",
  "pubkey": "ece105128c1a6aaad34fe356c2c684286b1bc15d0da9b01303b2df02a88fe7cd",
  "created_at": 1731501998,
  "kind": 1,
  "tags": [
    [
      "p",
      "4ebb1885240ebc43fff7e4ff71a4f4a1b75f4e296809b61932f10de3e34c026b",
      "wss://relay.mostr.pub"
    ],
    [
      "p",
      "8b0be93ed69c30e9a68159fd384fd8308ce4bbf16c39e840e0803dcb6c08720e",
      "wss://relay.mostr.pub"
    ],
    [
      "proxy",
      "https://mastodon.online/users/theofrancis/statuses/113475715005562499",
      "activitypub"
    ]
  ],
  "content": "nostr:npub1f6a33pfyp67y8llhunlhrf855xm47n3fdqymvxfj7yx78c6vqf4scxpnql Is there a good way to incorporate binary quantization using llm? Referring to https://simonwillison.net/2024/Nov/11/binary-vector-embeddings/",
  "sig": "f81f316655d94c2b4f124d47ddd257a651e15a29d699811c5a92530b79b84926591defbe59132911e2dccc704c368eb23d4cae6e26c71146b3034e3ce1cdbfc5"
}
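
For reference, a minimal sketch (not part of the original page, assuming the standard NIP-01 serialization) of how the "id" field of a Nostr event like this one can be recomputed: the id is the SHA-256 hash of the compact JSON serialization of [0, pubkey, created_at, kind, tags, content]. Verifying the "sig" field (a Schnorr signature over the id with the "pubkey" key) is omitted here, as it requires a secp256k1 library.

import hashlib
import json

# The event as shown above (tags and content copied verbatim from the JSON).
event = {
    "pubkey": "ece105128c1a6aaad34fe356c2c684286b1bc15d0da9b01303b2df02a88fe7cd",
    "created_at": 1731501998,
    "kind": 1,
    "tags": [
        ["p", "4ebb1885240ebc43fff7e4ff71a4f4a1b75f4e296809b61932f10de3e34c026b", "wss://relay.mostr.pub"],
        ["p", "8b0be93ed69c30e9a68159fd384fd8308ce4bbf16c39e840e0803dcb6c08720e", "wss://relay.mostr.pub"],
        ["proxy", "https://mastodon.online/users/theofrancis/statuses/113475715005562499", "activitypub"],
    ],
    "content": "nostr:npub1f6a33pfyp67y8llhunlhrf855xm47n3fdqymvxfj7yx78c6vqf4scxpnql Is there a good way to incorporate binary quantization using llm? Referring to https://simonwillison.net/2024/Nov/11/binary-vector-embeddings/",
}

# NIP-01: the event id is sha256 of the JSON array
# [0, pubkey, created_at, kind, tags, content], serialized without extra whitespace.
serialized = json.dumps(
    [0, event["pubkey"], event["created_at"], event["kind"], event["tags"], event["content"]],
    separators=(",", ":"),
    ensure_ascii=False,
)
event_id = hashlib.sha256(serialized.encode("utf-8")).hexdigest()

# If the event above is intact, this should print the same value as its "id" field.
print(event_id)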