Event JSON
{
  "id": "8c2229571245d2fe34d5c8f4df5aad1eeea45ec937bf47aaed8ef1826214424a",
  "pubkey": "4315a187e024818492e61938093ba683dae66624d202cd43738de5b8ba198c0f",
  "created_at": 1712672889,
  "kind": 1,
  "tags": [
    [
      "p",
      "4315a187e024818492e61938093ba683dae66624d202cd43738de5b8ba198c0f"
    ],
    [
      "p",
      "619c61d73c01588af2cd569316040f56c2e81f835b4d07122eb423f7126024dc"
    ],
    [
      "e",
      "980e885943dca83627dd1b642c57da43d95a81cd8aaedbd7247b8eca0837e9e0",
      "",
      "reply"
    ],
    [
      "p",
      "b8fcf3fa16a90df5527b31715505e245a962248ce8e86cdbf914151bcb9998fc"
    ],
    [
      "e",
      "e4f2053cd891fc468b614c0a78407636e36a73f55911e77ba4c75bedb8624b46",
      "",
      "root"
    ],
    [
      "proxy",
      "https://fedi.simonwillison.net/users/simon/statuses/112241730454954878",
      "activitypub"
    ],
    [
      "L",
      "pink.momostr"
    ],
    [
      "l",
      "pink.momostr.activitypub:https://fedi.simonwillison.net/users/simon/statuses/112241730454954878",
      "pink.momostr"
    ]
  ],
  "content": "one of the many reasons I'm so interested in smaller models that can run directly on a laptop or phone - like Mistral 7B - is that the inference energy costs are then reduced to the cost of running one of those devices - and the one-time training cost gets shared between everyone using that model",
  "sig": "f159ed751eb6c21f7c559ae262a8850d8f827b65e74b60662586bbc36004aaab1c411afdbd455e6e3ca911114d34ea87e2654a3b75f4cceae92ab31bf33a91f5"
}
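
If this is a standard Nostr (NIP-01) event, its "id" field should be the SHA-256 hash of a compact JSON serialization of [0, pubkey, created_at, kind, tags, content]. The following is a minimal sketch of that check in Python, not taken from any particular client; it uses only the standard library, and the compact json.dumps serialization approximates NIP-01's exact escaping rules, which is sufficient for ordinary content like the event above.

import hashlib
import json

def compute_event_id(event: dict) -> str:
    # NIP-01 hashes the compact JSON array
    # [0, pubkey, created_at, kind, tags, content] with SHA-256
    # and uses the lowercase hex digest as the event id.
    serialized = json.dumps(
        [
            0,
            event["pubkey"],
            event["created_at"],
            event["kind"],
            event["tags"],
            event["content"],
        ],
        separators=(",", ":"),
        ensure_ascii=False,
    )
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

# Usage (hypothetical): load the JSON shown above into `event`, then
# compute_event_id(event) == event["id"] should hold for a valid event.

Verifying the "sig" field takes an additional step not shown here: a Schnorr signature check of the id against the pubkey, which needs a secp256k1 library rather than the standard library alone.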