Event JSON
{
  "id": "36d2e42bc88235c3850c81fe89bc1a7e208675cff9b9848aa5b00a2f69b52efd",
  "pubkey": "4aee237f443ac092d8fc51074f2e19bff395a08219997f77e4f7f79f0b70f088",
  "created_at": 1683697753,
  "kind": 1,
  "tags": [
    [
      "p",
      "62214198b6d9e72600f9a261eeab0a2085b671c99040e418df2013ab6d396c1e",
      "wss://relay.mostr.pub"
    ],
    [
      "p",
      "19132ebf4163a2399191bc90c23f33d1fe4945cbe9c807b0d196b172a2701961",
      "wss://relay.mostr.pub"
    ],
    [
      "e",
      "38c3c4f1a1396fee9ce318d8139cc653e4465e2a96de80b11f10e86c125f3fac",
      "wss://relay.mostr.pub",
      "reply"
    ],
    [
      "mostr",
      "https://furry.engineer/users/zunelhak/statuses/110342815975160901"
    ]
  ],
  "content": "nostr:npub1vgs5rx9km8njvq8e5fs7a2c2yzzmvuwfjpqwgxxlyqf6kmfeds0q7suja8 Man machine learning is full of this. Every time a new cutting edge model comes out, it means we're abandoning the model that came before, often times without really understanding how it works or what potential unexplored benefits it might have. Multiply that by the weirdly siloed nature of machine learning within different fields like linguistics vs. image recognition, they all use different families of models and understand little about the other fields' uses.",
  "sig": "0d5b27469f1a9c21d2470864f1f869b9d144f6e446871522a36dceab0c2de1c0854495013e6d33f0259d916fecd310f65ad066213b407dd0618601088c1647c9"
}