"""Facepile helpers: build `Face` and `WebmentionReply` entries from inbox
objects and incoming webmentions."""
import datetime
from dataclasses import dataclass
from datetime import timezone
from typing import Any
from typing import Optional

from loguru import logger

from app import media
from app.models import InboxObject
from app.models import Webmention
from app.utils.datetime import parse_isoformat
from app.utils.url import must_make_abs


@dataclass
class Face:
    ap_actor_id: str | None
    url: str
    name: str
    picture_url: str
    created_at: datetime.datetime

    @classmethod
    def from_inbox_object(cls, like: InboxObject) -> "Face":
        return cls(
            ap_actor_id=like.actor.ap_id,
            url=like.actor.url,  # type: ignore
            name=like.actor.handle,  # type: ignore
            picture_url=like.actor.resized_icon_url,
            created_at=like.created_at,  # type: ignore
        )

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["Face"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            # An h-card describes the author directly; an h-entry carries the
            # author in its "author" property.
            if item["type"][0] == "h-card":
                try:
                    return cls(
                        ap_actor_id=None,
                        url=(
                            must_make_abs(
                                item["properties"]["url"][0], webmention.source
                            )
                            if item["properties"].get("url")
                            else webmention.source
                        ),
                        name=item["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                item["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break
            elif item["type"][0] == "h-entry":
                author = item["properties"]["author"][0]
                try:
                    return cls(
                        ap_actor_id=None,
                        url=webmention.source,
                        name=author["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                author["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break

        return None
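
# Illustrative note (shape assumed from the parsing code above, example values
# are made up): `Webmention.source_microformats` is expected to hold a
# microformats2 JSON document, roughly:
#
#     {
#         "items": [
#             {
#                 "type": ["h-card"],
#                 "properties": {
#                     "name": ["Alice"],
#                     "url": ["https://alice.example"],
#                     "photo": ["https://alice.example/avatar.jpg"],
#                 },
#             },
#         ],
#     }
#
# `Face.from_webmention` returns a `Face` for the first h-card (or h-entry
# author) it can parse, and `None` otherwise.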


def merge_faces(faces: list[Face]) -> list[Face]:
    return sorted(
        faces,
        key=lambda f: f.created_at,
        reverse=True,
    )[:10]
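
# Illustrative sketch (the names `likes` and `webmentions` are hypothetical,
# not defined in this module): a facepile is typically assembled by turning
# likes/announces and webmentions into `Face` objects and merging them:
#
#     faces = [Face.from_inbox_object(like) for like in likes]
#     faces += [f for wm in webmentions if (f := Face.from_webmention(wm))]
#     facepile = merge_faces(faces)  # newest first, at most 10 faces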


def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None:
    # Build a Face from the first h-card in `items` (used for the "author"
    # property of an h-entry).
    for item in items:
        if item["type"][0] == "h-card":
            try:
                return Face(
                    ap_actor_id=None,
                    url=(
                        must_make_abs(item["properties"]["url"][0], webmention.source)
                        if item["properties"].get("url")
                        else webmention.source
                    ),
                    name=item["properties"]["name"][0],
                    picture_url=media.resized_media_url(
                        must_make_abs(
                            item["properties"]["photo"][0], webmention.source
                        ),  # type: ignore
                        50,
                    ),
                    created_at=webmention.created_at,  # type: ignore
                )
            except Exception:
                logger.exception(
                    f"Failed to build Face for webmention id={webmention.id}"
                )
                break

    return None


@dataclass
class WebmentionReply:
    face: Face
    content: str
    url: str
    published_at: datetime.datetime
    in_reply_to: str
    webmention_id: int

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            if item["type"][0] == "h-entry":
                try:
                    face = _parse_face(webmention, item["properties"].get("author", []))
                    if not face:
                        logger.info(
                            "Failed to build WebmentionReply/Face for "
                            f"webmention id={webmention.id}"
                        )
                        break

                    if "published" in item["properties"]:
                        # Normalize the h-entry's published date to naive UTC
                        published_at = (
                            parse_isoformat(item["properties"]["published"][0])
                            .astimezone(timezone.utc)
                            .replace(tzinfo=None)
                        )
                    else:
                        published_at = webmention.created_at  # type: ignore

                    return cls(
                        face=face,
                        content=item["properties"]["content"][0]["html"],
                        url=must_make_abs(
                            item["properties"]["url"][0], webmention.source
                        ),
                        published_at=published_at,
                        in_reply_to=webmention.target,  # type: ignore
                        webmention_id=webmention.id,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build WebmentionReply for "
                        f"webmention id={webmention.id}"
                    )
                    break

        return None
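
# Illustrative sketch (hypothetical usage, `webmentions_for_post` is not
# defined in this module): the reply section of a post can be built by running
# each incoming webmention through `WebmentionReply.from_webmention` and
# keeping the ones that parse:
#
#     replies = [
#         reply
#         for wm in webmentions_for_post
#         if (reply := WebmentionReply.from_webmention(wm))
#     ]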