Event JSON
{
  "id": "bf9161d90152bbd658be17fe88f9cdec0ce85614613d00ede0237a97a2b4706c",
  "pubkey": "dc9fca0c311cbbf9886e13164c6475fa3d88b0645ad97502e7102af2dbcb0d09",
  "created_at": 1736079637,
  "kind": 1,
  "tags": [
    [
      "p",
      "fa34eb82d212f4f5774577b3a210dbab394b411cefab77d2c64a2bbf937b7d70",
      "wss://nostr.sprovoost.nl"
    ],
    [
      "p",
      "1f6673a47c24df3dbaad19754044f7e97930ca1df86aa65a46f3abc5aad11d81",
      "wss://nostr.sprovoost.nl"
    ],
    [
      "e",
      "43de8c3e3ee48f59ff02c8f13db8e88aef4ae0bd16943dbca86fbcfa5f81d8b4",
      "wss://nostr.sprovoost.nl",
      "reply"
    ],
    [
      "proxy",
      "https://fosstodon.org/users/bert_hubert/statuses/113775715137457682",
      "activitypub"
    ]
  ],
  "content": "nostr:npub1lg6whqkjzt602a69w7e6yyxm4vu5ksgua74h05kxfg4mlymm04cq7wuxwl This is because out of the box, scrapy does not check robots.txt... https://scrapy-doc.readthedocs.io/en/latest/using.html",
  "sig": "8eaab9534d8278930e324e3838c9d6f638b9a4efef6714181a2891a07762ebe3a37e3550a2222d8640f743d14c505ccb62964687b5012811888e97f5f5c0963c"
}
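
The id field of a Nostr event is defined in NIP-01 as the SHA-256 hash of the canonical serialization [0, pubkey, created_at, kind, tags, content]. Below is a minimal Python sketch of that computation; the helper name compute_event_id and the event.json filename are illustrative, not part of the event above, and verifying the sig additionally requires a Schnorr signature check over this same hash.

```python
import hashlib
import json

def compute_event_id(event: dict) -> str:
    """Compute a Nostr event id per NIP-01: SHA-256 of the UTF-8
    serialization of [0, pubkey, created_at, kind, tags, content]
    with no extra whitespace."""
    canonical = json.dumps(
        [
            0,
            event["pubkey"],
            event["created_at"],
            event["kind"],
            event["tags"],
            event["content"],
        ],
        separators=(",", ":"),  # no spaces or line breaks in the serialization
        ensure_ascii=False,     # keep non-ASCII characters unescaped (UTF-8)
    )
    return hashlib.sha256(canonical.encode("utf-8")).hexdigest()

# Hypothetical usage: save the event shown above as event.json, then check
# that the recomputed hash matches its "id" field.
# with open("event.json") as f:
#     event = json.load(f)
# assert compute_event_id(event) == event["id"]
```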