Event JSON
{
  "id": "6674d5ef1cff7d77e5fe8bf7df967cd11782901e50151626094bbe27d0d5410b",
  "pubkey": "0d75d3e7943673b244fbdfeb4a4f0e76f1f9f5c05870cf295ca67efd1044ad3c",
  "created_at": 1719932409,
  "kind": 1,
  "tags": [
    [
      "proxy",
      "https://mastodon.opencloud.lu/@BrideOfLinux/112717490377618669",
      "web"
    ],
    [
      "proxy",
      "https://mastodon.opencloud.lu/users/BrideOfLinux/statuses/112717490377618669",
      "activitypub"
    ],
    [
      "L",
      "pink.momostr"
    ],
    [
      "l",
      "pink.momostr.activitypub:https://mastodon.opencloud.lu/users/BrideOfLinux/statuses/112717490377618669",
      "pink.momostr"
    ],
    [
      "expiration",
      "1722524488"
    ]
  ],
  "content": "About 18 months ago, the term “AI factory” would have referred to the infrastructure necessary to build and train the big frontier models – e.g., OpenAI’s ChatGPT-4o, etc. Today, however, “AI factory” better describes the product of inferencing – the predictions or conclusions produced by running data through a live, already-trained AI model: What is an AI Factory? https://www.nojitter.com/ai-automation/what-ai-factory",
  "sig": "17c02f6945671d613864a2b1b7be4fc9d9082564bf735cffa58946d1a9a11d301f774a768da59c0008c5e442d012090713b00c140ad86d2609286bd998f6d062"
}
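
The fields above follow NIP-01 (id, pubkey, created_at, kind, tags, content, sig); the "proxy" tags are NIP-48 proxy references to the original web and ActivityPub objects, the "L"/"l" pair is NIP-32 labeling under the pink.momostr namespace, and "expiration" is a NIP-40 expiration timestamp. Below is a minimal sketch, using only the Python standard library, of how the "id" can be recomputed and the expiration tag checked, assuming NIP-01's compact, UTF-8, minimally escaped serialization; the helper names compute_event_id and is_expired are illustrative, not from any particular library.

import hashlib
import json

def compute_event_id(event: dict) -> str:
    # NIP-01: the id is the SHA-256 hex digest of the JSON array
    # [0, pubkey, created_at, kind, tags, content], serialized with
    # no extra whitespace and non-ASCII characters left unescaped.
    serialized = json.dumps(
        [0, event["pubkey"], event["created_at"], event["kind"],
         event["tags"], event["content"]],
        separators=(",", ":"),
        ensure_ascii=False,
    )
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

def is_expired(event: dict, now: int) -> bool:
    # NIP-40: an "expiration" tag carries a unix timestamp after which
    # relays may stop serving the event.
    for tag in event["tags"]:
        if tag and tag[0] == "expiration":
            return now >= int(tag[1])
    return False

For this event, compute_event_id(event) should reproduce the "id" value starting with 6674d5ef…, and is_expired(event, now) returns True once the clock passes 1722524488. Verifying "sig" additionally requires a BIP-340 Schnorr check of that id against "pubkey", which needs a secp256k1 library and is omitted from this sketch.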