import binascii
import json
import logging
import mimetypes
import os
import traceback
import urllib
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from functools import wraps
from io import BytesIO
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from urllib.parse import urlencode
from urllib.parse import urlparse

import bleach
import emoji_unicode
import mf2py
import requests
import timeago
from bson.objectid import ObjectId
from dateutil import parser
from flask import Flask
from flask import Response
from flask import abort
from flask import jsonify as flask_jsonify
from flask import make_response
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from flask_wtf.csrf import CSRFProtect
from html2text import html2text
from itsdangerous import BadSignature
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import _to_list
from little_boxes.activitypub import clean_activity
from little_boxes.activitypub import format_datetime
from little_boxes.activitypub import get_backend
from little_boxes.content_helper import parse_markdown
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import ActivityNotFoundError
from little_boxes.errors import BadActivityError
from little_boxes.errors import Error
from little_boxes.errors import NotAnActivityError
from little_boxes.errors import NotFromOutboxError
from little_boxes.httpsig import HTTPSigAuth
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_actor_url
from little_boxes.webfinger import get_remote_follow_template
from passlib.hash import bcrypt
from requests.exceptions import HTTPError
from u2flib_server import u2f
from werkzeug.utils import secure_filename

import activitypub
import config
from activitypub import Box
from activitypub import _answer_key
from activitypub import embed_collection
from config import ADMIN_API_KEY
from config import BASE_URL
from config import DB
from config import DEBUG_MODE
from config import DOMAIN
from config import EMOJI_TPL
from config import EMOJIS
from config import HEADERS
from config import ICON_URL
from config import ID
from config import JWT
from config import KEY
from config import ME
from config import MEDIA_CACHE
from config import PASS
from config import USER_AGENT
from config import USERNAME
from config import VERSION
from config import _drop_db
from poussetaches import PousseTaches
from utils import opengraph
from utils.key import get_secret_key
from utils.lookup import lookup
from utils.media import Kind
p = PousseTaches(
    os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"),
    os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"),
)

# p = PousseTaches("http://localhost:7991", "http://localhost:5000")

back = activitypub.MicroblogPubBackend()
ap.use_backend(back)

MY_PERSON = ap.Person(**ME)

app = Flask(__name__)
app.secret_key = get_secret_key("flask")
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf = CSRFProtect(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
else:
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)

SIG_AUTH = HTTPSigAuth(KEY)


def verify_pass(pwd):
    return bcrypt.verify(pwd, PASS)


@app.context_processor
def inject_config():
    q = {
        "type": "Create",
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.public": True,
    }
    notes_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    with_replies_count = DB.activities.find(
        {
            "box": Box.OUTBOX.value,
            "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
            "meta.undo": False,
            "meta.deleted": False,
            "meta.public": True,
        }
    ).count()
    liked_count = DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "type": ActivityType.LIKE.value,
        }
    )
    followers_q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    following_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }

    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=session.get("logged_in", False),
        followers_count=DB.activities.count(followers_q),
        following_count=DB.activities.count(following_q),
        notes_count=notes_count,
        liked_count=liked_count,
        with_replies_count=with_replies_count,
        me=ME,
        base_url=config.BASE_URL,
    )


@app.after_request
def set_x_powered_by(response):
    response.headers["X-Powered-By"] = "microblog.pub"
    return response


# HTML/templates helper
ALLOWED_TAGS = [
    "a",
    "abbr",
    "acronym",
    "b",
    "br",
    "blockquote",
    "code",
    "pre",
    "em",
    "i",
    "li",
    "ol",
    "strong",
    "ul",
    "span",
    "div",
    "p",
    "h1",
    "h2",
    "h3",
    "h4",
    "h5",
    "h6",
]


def clean_html(html):
    try:
        return bleach.clean(html, tags=ALLOWED_TAGS)
    except Exception:
        return ""


_GRIDFS_CACHE: Dict[Tuple[Kind, str, Optional[int]], str] = {}


def _get_file_url(url, size, kind):
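    """Return the local /media URL for a remote file cached in GridFS.

    Falls back to the original remote URL when the file is not in the
    media cache yet.
    """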
    k = (kind, url, size)
    cached = _GRIDFS_CACHE.get(k)
    if cached:
        return cached

    doc = MEDIA_CACHE.get_file(url, size, kind)
    if doc:
        u = f"/media/{str(doc._id)}"
        _GRIDFS_CACHE[k] = u
        return u

    # MEDIA_CACHE.cache(url, kind)
    app.logger.error(f"cache not available for {url}/{size}/{kind}")
    return url


@app.template_filter()
def emojify(text):
    return emoji_unicode.replace(
        text, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode)
    )


@app.template_filter()
def gtone(n):
    return n > 1


@app.template_filter()
def gtnow(dtstr):
    return format_datetime(datetime.now().astimezone()) > dtstr


@app.template_filter()
def remove_mongo_id(dat):
    if isinstance(dat, list):
        return [remove_mongo_id(item) for item in dat]
    if "_id" in dat:
        dat["_id"] = str(dat["_id"])
    for k, v in dat.items():
        if isinstance(v, dict):
            dat[k] = remove_mongo_id(dat[k])
    return dat


@app.template_filter()
def get_video_link(data):
    for link in data:
        if link.get("mimeType", "").startswith("video/"):
            return link.get("href")
    return None


@app.template_filter()
def get_actor_icon_url(url, size):
    return _get_file_url(url, size, Kind.ACTOR_ICON)


@app.template_filter()
def get_attachment_url(url, size):
    return _get_file_url(url, size, Kind.ATTACHMENT)


@app.template_filter()
def get_og_image_url(url, size=100):
    try:
        return _get_file_url(url, size, Kind.OG_IMAGE)
    except Exception:
        return ""


@app.template_filter()
def permalink_id(val):
    return str(hash(val))


@app.template_filter()
def quote_plus(t):
    return urllib.parse.quote_plus(t)


@app.template_filter()
def is_from_outbox(t):
    return t.startswith(ID)


@app.template_filter()
def clean(html):
    out = clean_html(html)
    return emoji_unicode.replace(
        out, lambda e: EMOJI_TPL.format(filename=e.code_points, raw=e.unicode)
    )


@app.template_filter()
def html2plaintext(body):
    return html2text(body)


@app.template_filter()
def domain(url):
    return urlparse(url).netloc


@app.template_filter()
def url_or_id(d):
    if isinstance(d, dict):
        if "url" in d:
            return d["url"]
        else:
            return d["id"]
    return ""


@app.template_filter()
def get_url(u):
    print(f"GET_URL({u!r})")
    if isinstance(u, list):
        for l in u:
            if l.get("mimeType") == "text/html":
                u = l
    if isinstance(u, dict):
        return u["href"]
    elif isinstance(u, str):
        return u
    else:
        return u


@app.template_filter()
def get_actor(url):
    if not url:
        return None
    if isinstance(url, list):
        url = url[0]
    if isinstance(url, dict):
        url = url.get("id")
    print(f"GET_ACTOR {url}")
    try:
        return get_backend().fetch_iri(url)
    except (ActivityNotFoundError, ActivityGoneError):
        return f"Deleted<{url}>"
    except Exception as exc:
        return f"Error<{url}/{exc!r}>"


@app.template_filter()
def format_time(val):
    if val:
        dt = parser.parse(val)
        return datetime.strftime(dt, "%B %d, %Y, %H:%M %p")
    return val


@app.template_filter()
def format_ts(val):
    return datetime.fromtimestamp(val).strftime("%B %d, %Y, %H:%M %p")


@app.template_filter()
def gt_ts(val):
    return datetime.now() > datetime.fromtimestamp(val)


@app.template_filter()
def format_timeago(val):
    if val:
        dt = parser.parse(val)
        return timeago.format(dt.astimezone(timezone.utc), datetime.now(timezone.utc))
    return val


@app.template_filter()
def has_type(doc, _types):
    for _type in _to_list(_types):
        if _type in _to_list(doc["type"]):
            return True
    return False


@app.template_filter()
def has_actor_type(doc):
    # FIXME(tsileo): skip the last type ("Question") because Mastodon sends question results
    # as an Update coming from the question itself... Does Pleroma do that too?
    for t in ap.ACTOR_TYPES[:-1]:
        if has_type(doc, t.value):
            return True
    return False


def _is_img(filename):
    filename = filename.lower()
    if (
        filename.endswith(".png")
        or filename.endswith(".jpg")
        or filename.endswith(".jpeg")
        or filename.endswith(".gif")
        or filename.endswith(".svg")
    ):
        return True
    return False


@app.template_filter()
def not_only_imgs(attachment):
    for a in attachment:
        if isinstance(a, dict) and not _is_img(a["url"]):
            return True
        if isinstance(a, str) and not _is_img(a):
            return True
    return False


@app.template_filter()
def is_img(filename):
    return _is_img(filename)


@app.template_filter()
def get_answer_count(choice, meta):
    print(choice, meta)
    return meta.get("question_answers", {}).get(_answer_key(choice), 0)


def add_response_headers(headers={}):
    """This decorator adds the headers passed in to the response"""

    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            resp = make_response(f(*args, **kwargs))
            h = resp.headers
            for header, value in headers.items():
                h[header] = value
            return resp

        return decorated_function

    return decorator


def noindex(f):
    """This decorator passes X-Robots-Tag: noindex, nofollow"""
    return add_response_headers({"X-Robots-Tag": "noindex, nofollow"})(f)


def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session.get("logged_in"):
            return redirect(url_for("admin_login", next=request.url))
        return f(*args, **kwargs)

    return decorated_function


def _api_required():
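    """Ensure the request is authorized.

    Accepts either a logged-in admin session (with a CSRF check for
    non-GET requests) or a JWT-signed Bearer/IndieAuth access token;
    raises BadSignature on an invalid token.
    """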
    if session.get("logged_in"):
        if request.method not in ["GET", "HEAD"]:
            # If a standard API request is made with a "login session", it must have a CSRF token
            csrf.protect()
        return

    # Token verification
    token = request.headers.get("Authorization", "").replace("Bearer ", "")
    if not token:
        # IndieAuth token
        token = request.form.get("access_token", "")

    # Will raise a BadSignature on bad auth
    payload = JWT.loads(token)
    logger.info(f"api call by {payload}")


def api_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            _api_required()
        except BadSignature:
            abort(401)

        return f(*args, **kwargs)

    return decorated_function


def jsonify(**data):
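    """Like Flask's jsonify, but defaults "@context" and serves the ActivityPub media type.

    Responses use application/activity+json, or plain application/json in debug mode.
    """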
    if "@context" not in data:
        data["@context"] = config.DEFAULT_CTX
    return Response(
        response=json.dumps(data),
        headers={
            "Content-Type": "application/json"
            if app.debug
            else "application/activity+json"
        },
    )


def _get_ip():
    """Guess the IP address from the request. Only used for security purposes (failed logins or bad payloads).

    Geoip info will be returned if the "broxy" headers are set (it does Geoip
    lookups against an offline database and appends these special headers).
    """
    ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    geoip = None
    if request.headers.get("Broxy-Geoip-Country"):
        geoip = (
            request.headers.get("Broxy-Geoip-Country")
            + "/"
            + request.headers.get("Broxy-Geoip-Region")
        )
    return ip, geoip


def is_api_request():
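    # Content negotiation: requests whose Accept header asks for an
    # ActivityPub/JSON representation are treated as API requests.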
    h = request.headers.get("Accept")
    if h is None:
        return False
    h = h.split(",")[0]
    if h in HEADERS or h == "application/json":
        return True
    return False


@app.errorhandler(ValueError)
def handle_value_error(error):
    logger.error(
        f"caught value error: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(message=error.args[0])
    response.status_code = 400
    return response


@app.errorhandler(Error)
def handle_activitypub_error(error):
    logger.error(
        f"caught activitypub error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(error.to_dict())
    response.status_code = error.status_code
    return response


class TaskError(Exception):
    """Raised to log the error for poussetaches."""

    def __init__(self):
        self.message = traceback.format_exc()


@app.errorhandler(TaskError)
def handle_task_error(error):
    logger.error(
        f"caught activitypub error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify({"traceback": error.message})
    response.status_code = 500
    return response


# @app.errorhandler(Exception)
# def handle_other_error(error):
#     logger.error(
#         f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
#     )
#     response = flask_jsonify({})
#     response.status_code = 500
#     return response


# App routes

ROBOTS_TXT = """User-agent: *
Disallow: /login
Disallow: /admin/
Disallow: /static/
Disallow: /media/
Disallow: /uploads/"""


@app.route("/robots.txt")
def robots_txt():
    return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"})


@app.route("/media/<media_id>")
@noindex
def serve_media(media_id):
    f = MEDIA_CACHE.fs.get(ObjectId(media_id))
    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    resp.headers.set("Content-Length", f.length)
    resp.headers.set("ETag", f.md5)
    resp.headers.set(
        "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
    )
    resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
    resp.headers.set("Content-Encoding", "gzip")
    return resp


@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
    f = MEDIA_CACHE.fs.get(ObjectId(oid))
    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    resp.headers.set("Content-Length", f.length)
    resp.headers.set("ETag", f.md5)
    resp.headers.set(
        "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
    )
    resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
    resp.headers.set("Content-Encoding", "gzip")
    return resp


#######
# Login


@app.route("/admin/logout")
@login_required
def admin_logout():
    session["logged_in"] = False
    return redirect("/")


@app.route("/login", methods=["POST", "GET"])
@noindex
def admin_login():
    if session.get("logged_in") is True:
        return redirect(url_for("admin_notifications"))

    devices = [doc["device"] for doc in DB.u2f.find()]
    u2f_enabled = True if devices else False
    if request.method == "POST":
        csrf.protect()
        # 1. Check regular password login flow
        pwd = request.form.get("pass")
        if pwd:
            if verify_pass(pwd):
                session["logged_in"] = True
                return redirect(
                    request.args.get("redirect") or url_for("admin_notifications")
                )
            else:
                abort(403)
        # 2. Check for U2F payload, if any
        elif devices:
            resp = json.loads(request.form.get("resp"))
            try:
                u2f.complete_authentication(session["challenge"], resp)
            except ValueError as exc:
                print("failed", exc)
                abort(403)
                return
            finally:
                session["challenge"] = None

            session["logged_in"] = True
            return redirect(
                request.args.get("redirect") or url_for("admin_notifications")
            )
        else:
            abort(401)

    payload = None
    if devices:
        payload = u2f.begin_authentication(ID, devices)
        session["challenge"] = payload

    return render_template("login.html", u2f_enabled=u2f_enabled, payload=payload)


@app.route("/remote_follow", methods=["GET", "POST"])
def remote_follow():
    if request.method == "GET":
        return render_template("remote_follow.html")

    csrf.protect()
    profile = request.form.get("profile")
    if not profile.startswith("@"):
        profile = f"@{profile}"
    return redirect(
        get_remote_follow_template(profile).format(uri=f"{USERNAME}@{DOMAIN}")
    )


@app.route("/authorize_follow", methods=["GET", "POST"])
@login_required
def authorize_follow():
    if request.method == "GET":
        return render_template(
            "authorize_remote_follow.html", profile=request.args.get("profile")
        )

    actor = get_actor_url(request.form.get("profile"))
    if not actor:
        abort(500)

    q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
        "activity.object": actor,
    }
    if DB.activities.count(q) > 0:
        return redirect("/following")

    follow = ap.Follow(actor=MY_PERSON.id, object=actor)
    post_to_outbox(follow)

    return redirect("/following")


@app.route("/u2f/register", methods=["GET", "POST"])
@login_required
def u2f_register():
    # TODO(tsileo): ensure no duplicates
    if request.method == "GET":
        payload = u2f.begin_registration(ID)
        session["challenge"] = payload
        return render_template("u2f.html", payload=payload)
    else:
        resp = json.loads(request.form.get("resp"))
        device, device_cert = u2f.complete_registration(session["challenge"], resp)
        session["challenge"] = None
        DB.u2f.insert_one({"device": device, "cert": device_cert})
        session["logged_in"] = False
        return redirect("/login")


#######
# Activity pub routes


@app.route("/drop_cache")
@login_required
def drop_cache():
    DB.actors.drop()
    return "Done"


def paginated_query(db, q, limit=25, sort_key="_id"):
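    """Run a cursor-paginated query against a MongoDB collection.

    Pagination is driven by the older_than/newer_than request args and the
    function returns a (documents, older_than, newer_than) tuple used to
    build the previous/next page links.
    """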
    older_than = newer_than = None
    query_sort = -1
    first_page = not request.args.get("older_than") and not request.args.get(
        "newer_than"
    )

    query_older_than = request.args.get("older_than")
    query_newer_than = request.args.get("newer_than")

    if query_older_than:
        q["_id"] = {"$lt": ObjectId(query_older_than)}
    elif query_newer_than:
        q["_id"] = {"$gt": ObjectId(query_newer_than)}
        query_sort = 1

    outbox_data = list(db.find(q, limit=limit + 1).sort(sort_key, query_sort))
    outbox_len = len(outbox_data)
    outbox_data = sorted(
        outbox_data[:limit], key=lambda x: str(x[sort_key]), reverse=True
    )

    if query_older_than:
        newer_than = str(outbox_data[0]["_id"])
        if outbox_len == limit + 1:
            older_than = str(outbox_data[-1]["_id"])
    elif query_newer_than:
        older_than = str(outbox_data[-1]["_id"])
        if outbox_len == limit + 1:
            newer_than = str(outbox_data[0]["_id"])
    elif first_page and outbox_len == limit + 1:
        older_than = str(outbox_data[-1]["_id"])

    return outbox_data, older_than, newer_than


CACHING = True


def _get_cached(type_="html", arg=None):
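    """Return a previously cached response for the current path, or None.

    Responses are only served from the cache for anonymous visitors.
    """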
    if not CACHING:
        return None
    logged_in = session.get("logged_in")
    if not logged_in:
        cached = DB.cache2.find_one({"path": request.path, "type": type_, "arg": arg})
        if cached:
            app.logger.info("from cache")
            return cached["response_data"]
    return None


def _cache(resp, type_="html", arg=None):
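    """Store the rendered response for the current path so anonymous requests can reuse it."""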
    if not CACHING:
        return None
    logged_in = session.get("logged_in")
    if not logged_in:
        DB.cache2.update_one(
            {"path": request.path, "type": type_, "arg": arg},
            {"$set": {"response_data": resp, "date": datetime.now(timezone.utc)}},
            upsert=True,
        )
    return None


@app.route("/")
def index():
    if is_api_request():
        return jsonify(**ME)
    cache_arg = (
        f"{request.args.get('older_than', '')}:{request.args.get('newer_than', '')}"
    )
    cached = _get_cached("html", cache_arg)
    if cached:
        return cached

    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
    }
    print(list(DB.activities.find(q)))

    pinned = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get("newer_than"):
        q_pinned = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.CREATE.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.pinned": True,
        }
        pinned = list(DB.activities.find(q_pinned))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned)
    )

    resp = render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=pinned,
    )
    _cache(resp, "html", cache_arg)
    return resp


@app.route("/with_replies")
@login_required
def with_replies():
    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.deleted": False,
        "meta.public": True,
        "meta.undo": False,
    }
    outbox_data, older_than, newer_than = paginated_query(DB.activities, q)

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
    )


def _build_thread(data, include_children=True):  # noqa: C901
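    """Assemble the whole thread for an activity (root post and all its replies).

    The tree is built from the meta.thread_root_parent index and then flattened
    into a list where each node carries its nesting depth in "_level".
    """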
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    root_id = data["meta"].get("thread_root_parent", data["activity"]["object"]["id"])

    query = {
        "$or": [{"meta.thread_root_parent": root_id}, {"activity.object.id": root_id}],
        "meta.deleted": False,
    }
    replies = [data]
    for dat in DB.activities.find(query):
        if dat["type"][0] == ActivityType.CREATE.value:
            replies.append(dat)
        else:
            # Make a Note/Question/... look like a Create
            dat = {
                "activity": {"object": dat["activity"]},
                "meta": dat["meta"],
                "_id": dat["_id"],
            }
            replies.append(dat)

    replies = sorted(replies, key=lambda d: d["activity"]["object"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"]["inReplyTo"])
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
            idx[node["activity"]["object"]["id"]]["_nodes"],
            key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        app.logger.info(f"{root_id} is not there! skipping")

    return thread


@app.route("/note/<note_id>")
def note_by_id(note_id):
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(note_id)}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    raw_likes = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.LIKE.value,
                "$or": [
                    # FIXME(tsileo): remove all the useless $or
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    likes = []
    for doc in raw_likes:
        try:
            likes.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"likes={likes!r}")

    raw_shares = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.ANNOUNCE.value,
                "$or": [
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    shares = []
    for doc in raw_shares:
        try:
            shares.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"shares={shares!r}")

    return render_template(
        "note.html", likes=likes, shares=shares, thread=thread, note=data
    )


@app.route("/nodeinfo")
def nodeinfo():
    response = _get_cached("api")
    cached = True
    if not response:
        cached = False
        q = {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,  # TODO(tsileo): retrieve deleted and expose tombstone
            "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        }

        response = json.dumps(
            {
                "version": "2.0",
                "software": {
                    "name": "microblogpub",
                    "version": f"Microblog.pub {VERSION}",
                },
                "protocols": ["activitypub"],
                "services": {"inbound": [], "outbound": []},
                "openRegistrations": False,
                "usage": {"users": {"total": 1}, "localPosts": DB.activities.count(q)},
                "metadata": {
                    "sourceCode": "https://github.com/tsileo/microblog.pub",
                    "nodeName": f"@{USERNAME}@{DOMAIN}",
                },
            }
        )

    if not cached:
        _cache(response, "api")
    return Response(
        headers={
            "Content-Type": "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.0#"
        },
        response=response,
    )


@app.route("/.well-known/nodeinfo")
def wellknown_nodeinfo():
    return flask_jsonify(
        links=[
            {
                "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
                "href": f"{ID}/nodeinfo",
            }
        ]
    )


@app.route("/.well-known/webfinger")
def wellknown_webfinger():
    """Enable WebFinger support, required for Mastodon interoperability."""
    # TODO(tsileo): move this to little-boxes?
    resource = request.args.get("resource")
    if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
        abort(404)

    out = {
        "subject": f"acct:{USERNAME}@{DOMAIN}",
        "aliases": [ID],
        "links": [
            {
                "rel": "http://webfinger.net/rel/profile-page",
                "type": "text/html",
                "href": BASE_URL,
            },
            {"rel": "self", "type": "application/activity+json", "href": ID},
            {
                "rel": "http://ostatus.org/schema/1.0/subscribe",
                "template": BASE_URL + "/authorize_follow?profile={uri}",
            },
            {"rel": "magic-public-key", "href": KEY.to_magic_key()},
            {
                "href": ICON_URL,
                "rel": "http://webfinger.net/rel/avatar",
                "type": mimetypes.guess_type(ICON_URL)[0],
            },
        ],
    }

    return Response(
        response=json.dumps(out),
        headers={
            "Content-Type": "application/jrd+json; charset=utf-8"
            if not app.debug
            else "application/json"
        },
    )


def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
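    # Embed the replies/likes/shares collections (using the pre-computed meta
    # counters) into a Create activity before it is served.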
    if raw_doc["activity"]["type"] != ActivityType.CREATE.value:
        return raw_doc

    raw_doc["activity"]["object"]["replies"] = embed_collection(
        raw_doc.get("meta", {}).get("count_direct_reply", 0),
        f'{raw_doc["remote_id"]}/replies',
    )

    raw_doc["activity"]["object"]["likes"] = embed_collection(
        raw_doc.get("meta", {}).get("count_like", 0), f'{raw_doc["remote_id"]}/likes'
    )

    raw_doc["activity"]["object"]["shares"] = embed_collection(
        raw_doc.get("meta", {}).get("count_boost", 0), f'{raw_doc["remote_id"]}/shares'
    )

    return raw_doc


def remove_context(activity: Dict[str, Any]) -> Dict[str, Any]:
    if "@context" in activity:
        del activity["@context"]
    return activity


def _add_answers_to_questions(raw_doc: Dict[str, Any]) -> None:
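    # For Question (poll) objects, attach the tallied answers to each choice
    # and mark the poll as closed once its endTime has passed.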
    activity = raw_doc["activity"]
    if (
        ap._has_type(activity["type"], ActivityType.CREATE)
        and "object" in activity
        and ap._has_type(activity["object"]["type"], ActivityType.QUESTION)
    ):
        for choice in activity["object"].get("oneOf", activity["object"].get("anyOf")):
            choice["replies"] = {
                "type": ActivityType.COLLECTION.value,
                "totalItems": raw_doc["meta"]
                .get("question_answers", {})
                .get(_answer_key(choice["name"]), 0),
            }
        now = datetime.now().astimezone()
        if format_datetime(now) > activity["object"]["endTime"]:
            activity["object"]["closed"] = activity["object"]["endTime"]


def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str, Any]:
    raw_doc = add_extra_collection(raw_doc)
    activity = clean_activity(raw_doc["activity"])

    # Handle Questions
    # TODO(tsileo): what about object embedded by ID/URL?
    _add_answers_to_questions(raw_doc)
    if embed:
        return remove_context(activity)
    return activity


@app.route("/outbox", methods=["GET", "POST"])
def outbox():
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        # TODO(tsileo): return the whole outbox if authenticated
        q = {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        }
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: activity_from_doc(doc, embed=True),
                col_name="outbox",
            )
        )

    # Handle POST request
    try:
        _api_required()
    except BadSignature:
        abort(401)

    data = request.get_json(force=True)
    print(data)
    activity = ap.parse_activity(data)
    activity_id = post_to_outbox(activity)

    return Response(status=201, headers={"Location": activity_id})


@app.route("/outbox/<item_id>")
def outbox_detail(item_id):
    doc = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(item_id)}
    )
    if not doc:
        abort(404)

    if doc["meta"].get("deleted", False):
        obj = ap.parse_activity(doc["activity"])
        resp = jsonify(**obj.get_tombstone().to_dict())
        resp.status_code = 410
        return resp
    return jsonify(**activity_from_doc(doc))


@app.route("/outbox/<item_id>/activity")
def outbox_activity(item_id):
    data = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(item_id)}
    )
    if not data:
        abort(404)

    obj = activity_from_doc(data)
    if data["meta"].get("deleted", False):
        obj = ap.parse_activity(data["activity"])
        resp = jsonify(**obj.get_object().get_tombstone().to_dict())
        resp.status_code = 410
        return resp

    if obj["type"] != ActivityType.CREATE.value:
        abort(404)
    return jsonify(**obj["object"])


@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
    if not is_api_request():
        abort(404)
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.deleted": False,
        }
    )
    if not data:
        abort(404)
    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    q = {
        "meta.deleted": False,
        "type": ActivityType.CREATE.value,
        "activity.object.inReplyTo": obj.get_object().id,
    }

    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name=f"outbox/{item_id}/replies",
            first_page=request.args.get("page") == "first",
        )
    )


@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
    if not is_api_request():
        abort(404)
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.deleted": False,
        }
    )
    if not data:
        abort(404)
    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    q = {
        "meta.undo": False,
        "type": ActivityType.LIKE.value,
        "$or": [
            {"activity.object.id": obj.get_object().id},
            {"activity.object": obj.get_object().id},
        ],
    }

    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name=f"outbox/{item_id}/likes",
            first_page=request.args.get("page") == "first",
        )
    )
|
2018-05-31 18:26:23 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>/shares")
|
2018-05-31 18:26:23 -05:00
|
|
|
def outbox_activity_shares(item_id):
|
|
|
|
if not is_api_request():
|
|
|
|
abort(404)
|
2018-06-29 15:16:26 -05:00
|
|
|
data = DB.activities.find_one(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"remote_id": back.activity_url(item_id),
|
|
|
|
"meta.deleted": False,
|
|
|
|
}
|
2018-06-18 15:01:21 -05:00
|
|
|
)
|
2018-05-31 18:26:23 -05:00
|
|
|
if not data:
|
|
|
|
abort(404)
|
2018-06-17 12:21:59 -05:00
|
|
|
obj = ap.parse_activity(data["activity"])
|
|
|
|
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
|
2018-05-31 18:26:23 -05:00
|
|
|
abort(404)
|
|
|
|
|
|
|
|
q = {
|
2018-06-16 15:02:10 -05:00
|
|
|
"meta.undo": False,
|
|
|
|
"type": ActivityType.ANNOUNCE.value,
|
|
|
|
"$or": [
|
|
|
|
{"activity.object.id": obj.get_object().id},
|
|
|
|
{"activity.object": obj.get_object().id},
|
|
|
|
],
|
2018-05-31 18:26:23 -05:00
|
|
|
}
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-06-29 15:16:26 -05:00
|
|
|
DB.activities,
|
2018-06-16 15:02:10 -05:00
|
|
|
q=q,
|
|
|
|
cursor=request.args.get("cursor"),
|
|
|
|
map_func=lambda doc: remove_context(doc["activity"]),
|
|
|
|
col_name=f"outbox/{item_id}/shares",
|
|
|
|
first_page=request.args.get("page") == "first",
|
|
|
|
)
|
|
|
|
)
|
2018-05-31 18:26:23 -05:00
|
|
|
|
|
|
|
|
2018-07-07 06:56:00 -05:00
|
|
|
@app.route("/admin", methods=["GET"])
|
2018-05-18 13:41:41 -05:00
|
|
|
@login_required
|
2018-07-07 06:56:00 -05:00
|
|
|
def admin():
|
2018-06-29 15:16:26 -05:00
|
|
|
q = {
|
|
|
|
"meta.deleted": False,
|
|
|
|
"meta.undo": False,
|
|
|
|
"type": ActivityType.LIKE.value,
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
}
|
|
|
|
col_liked = DB.activities.count(q)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
return render_template(
|
2018-06-16 15:02:10 -05:00
|
|
|
"admin.html",
|
|
|
|
instances=list(DB.instances.find()),
|
2018-06-29 15:16:26 -05:00
|
|
|
inbox_size=DB.activities.count({"box": Box.INBOX.value}),
|
|
|
|
outbox_size=DB.activities.count({"box": Box.OUTBOX.value}),
|
2018-06-16 15:02:10 -05:00
|
|
|
col_liked=col_liked,
|
2018-07-07 06:56:00 -05:00
|
|
|
col_followers=DB.activities.count(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ActivityType.FOLLOW.value,
|
|
|
|
"meta.undo": False,
|
|
|
|
}
|
|
|
|
),
|
|
|
|
col_following=DB.activities.count(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.FOLLOW.value,
|
|
|
|
"meta.undo": False,
|
|
|
|
}
|
|
|
|
),
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
2018-06-16 14:24:53 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-05-02 14:53:17 -05:00
|
|
|
@app.route("/admin/indieauth", methods=["GET"])
|
|
|
|
@login_required
|
|
|
|
def admin_indieauth():
|
|
|
|
return render_template(
|
|
|
|
"admin_indieauth.html",
|
|
|
|
indieauth_actions=DB.indieauth.find().sort("ts", -1).limit(100),
|
|
|
|
)
|
|
|
|
|
|
|
|
|
2019-04-07 05:27:48 -05:00
|
|
|
@app.route("/admin/tasks", methods=["GET"])
|
|
|
|
@login_required
|
|
|
|
def admin_tasks():
|
|
|
|
return render_template(
|
2019-04-08 09:41:09 -05:00
|
|
|
"admin_tasks.html",
|
2019-04-08 13:56:12 -05:00
|
|
|
success=p.get_success(),
|
2019-04-08 09:41:09 -05:00
|
|
|
dead=p.get_dead(),
|
|
|
|
waiting=p.get_waiting(),
|
2019-04-08 10:24:50 -05:00
|
|
|
cron=p.get_cron(),
|
2019-04-07 05:27:48 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2018-07-20 17:04:15 -05:00
|
|
|
@app.route("/admin/lookup", methods=["GET", "POST"])
|
|
|
|
@login_required
|
|
|
|
def admin_lookup():
|
|
|
|
data = None
|
2018-07-29 13:10:15 -05:00
|
|
|
meta = None
|
2018-07-20 17:04:15 -05:00
|
|
|
if request.method == "POST":
|
|
|
|
if request.form.get("url"):
|
|
|
|
data = lookup(request.form.get("url"))
|
2018-07-29 13:10:15 -05:00
|
|
|
if data.has_type(ActivityType.ANNOUNCE):
|
|
|
|
meta = dict(
|
|
|
|
object=data.get_object().to_dict(),
|
|
|
|
object_actor=data.get_object().get_actor().to_dict(),
|
|
|
|
actor=data.get_actor().to_dict(),
|
|
|
|
)
|
2018-07-20 17:04:15 -05:00
|
|
|
|
2018-09-02 12:43:09 -05:00
|
|
|
print(data)
|
2019-04-10 15:50:36 -05:00
|
|
|
app.logger.debug(data.to_dict())
|
2018-07-29 13:10:15 -05:00
|
|
|
return render_template(
|
|
|
|
"lookup.html", data=data, meta=meta, url=request.form.get("url")
|
|
|
|
)
|
2018-07-20 17:04:15 -05:00
|
|
|
|
|
|
|
|
2018-07-14 05:29:46 -05:00
|
|
|
@app.route("/admin/thread")
|
|
|
|
@login_required
|
|
|
|
def admin_thread():
|
|
|
|
data = DB.activities.find_one(
|
2019-04-13 03:00:56 -05:00
|
|
|
{
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"activity.object.id": request.args.get("oid"),
|
|
|
|
}
|
2018-07-14 05:29:46 -05:00
|
|
|
)
|
2019-04-11 12:24:28 -05:00
|
|
|
|
2018-07-14 05:29:46 -05:00
|
|
|
if not data:
|
|
|
|
abort(404)
|
|
|
|
if data["meta"].get("deleted", False):
|
|
|
|
abort(410)
|
|
|
|
thread = _build_thread(data)
|
|
|
|
|
2018-07-26 15:58:28 -05:00
|
|
|
tpl = "note.html"
|
|
|
|
if request.args.get("debug"):
|
|
|
|
tpl = "note_debug.html"
|
|
|
|
return render_template(tpl, thread=thread, note=data)
|
2018-07-14 05:29:46 -05:00
|
|
|
|
|
|
|
|
2018-07-06 17:08:44 -05:00
|
|
|
@app.route("/admin/new", methods=["GET"])
|
2018-05-18 13:41:41 -05:00
|
|
|
@login_required
|
2018-07-06 17:08:44 -05:00
|
|
|
def admin_new():
|
2018-05-18 13:41:41 -05:00
|
|
|
reply_id = None
|
2018-06-16 15:02:10 -05:00
|
|
|
content = ""
|
2018-06-03 14:28:06 -05:00
|
|
|
thread = []
|
2018-07-31 15:42:50 -05:00
|
|
|
print(request.args)
|
2018-06-16 15:02:10 -05:00
|
|
|
if request.args.get("reply"):
|
2018-06-29 15:16:26 -05:00
|
|
|
data = DB.activities.find_one({"activity.object.id": request.args.get("reply")})
|
2018-07-31 15:42:50 -05:00
|
|
|
if data:
|
|
|
|
reply = ap.parse_activity(data["activity"])
|
|
|
|
else:
|
|
|
|
data = dict(
|
|
|
|
meta={},
|
|
|
|
activity=dict(
|
|
|
|
object=get_backend().fetch_iri(request.args.get("reply"))
|
|
|
|
),
|
|
|
|
)
|
|
|
|
reply = ap.parse_activity(data["activity"]["object"])
|
2018-06-03 14:28:06 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
reply_id = reply.id
|
2018-06-17 12:21:59 -05:00
|
|
|
if reply.ACTIVITY_TYPE == ActivityType.CREATE:
|
2018-06-03 14:28:06 -05:00
|
|
|
reply_id = reply.get_object().id
|
2018-05-18 13:41:41 -05:00
|
|
|
actor = reply.get_actor()
|
|
|
|
domain = urlparse(actor.id).netloc
|
2018-06-03 14:28:06 -05:00
|
|
|
# FIXME(tsileo): if reply of reply, fetch all participants
|
2018-06-16 15:02:10 -05:00
|
|
|
content = f"@{actor.preferredUsername}@{domain} "
|
2018-06-29 15:16:26 -05:00
|
|
|
thread = _build_thread(data)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-04-13 03:00:56 -05:00
|
|
|
return render_template(
|
|
|
|
"new.html",
|
|
|
|
reply=reply_id,
|
|
|
|
content=content,
|
|
|
|
thread=thread,
|
|
|
|
emojis=EMOJIS.split(" "),
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-07-06 17:08:44 -05:00
|
|
|
@app.route("/admin/notifications")
|
2018-05-18 13:41:41 -05:00
|
|
|
@login_required
|
2018-07-06 17:08:44 -05:00
|
|
|
def admin_notifications():
|
2019-04-08 11:09:33 -05:00
|
|
|
# Setup the cron for deleting old activities
|
2019-04-09 01:40:48 -05:00
|
|
|
|
|
|
|
# FIXME(tsileo): put back to 12h
|
|
|
|
p.push({}, "/task/cleanup", schedule="@every 1h")
|
2019-04-08 11:09:33 -05:00
|
|
|
|
|
|
|
# Trigger a cleanup if asked
|
|
|
|
if request.args.get("cleanup"):
|
|
|
|
p.push({}, "/task/cleanup")
|
|
|
|
|
2018-06-29 15:16:26 -05:00
|
|
|
# FIXME(tsileo): show unfollow (performed by the current actor) and liked???
|
|
|
|
mentions_query = {
|
|
|
|
"type": ActivityType.CREATE.value,
|
2018-06-16 15:02:10 -05:00
|
|
|
"activity.object.tag.type": "Mention",
|
|
|
|
"activity.object.tag.name": f"@{USERNAME}@{DOMAIN}",
|
|
|
|
"meta.deleted": False,
|
2018-05-18 13:41:41 -05:00
|
|
|
}
|
2018-06-29 15:16:26 -05:00
|
|
|
replies_query = {
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"activity.object.inReplyTo": {"$regex": f"^{BASE_URL}"},
|
|
|
|
}
|
|
|
|
announced_query = {
|
|
|
|
"type": ActivityType.ANNOUNCE.value,
|
|
|
|
"activity.object": {"$regex": f"^{BASE_URL}"},
|
|
|
|
}
|
|
|
|
new_followers_query = {"type": ActivityType.FOLLOW.value}
|
2018-07-01 14:32:12 -05:00
|
|
|
unfollow_query = {
|
|
|
|
"type": ActivityType.UNDO.value,
|
|
|
|
"activity.object.type": ActivityType.FOLLOW.value,
|
|
|
|
}
|
2018-07-30 11:30:47 -05:00
|
|
|
likes_query = {
|
|
|
|
"type": ActivityType.LIKE.value,
|
|
|
|
"activity.object": {"$regex": f"^{BASE_URL}"},
|
|
|
|
}
|
2018-06-29 15:16:26 -05:00
|
|
|
followed_query = {"type": ActivityType.ACCEPT.value}
|
2018-06-16 15:02:10 -05:00
|
|
|
q = {
|
2018-06-29 15:16:26 -05:00
|
|
|
"box": Box.INBOX.value,
|
2018-06-16 15:02:10 -05:00
|
|
|
"$or": [
|
2018-06-29 15:16:26 -05:00
|
|
|
mentions_query,
|
|
|
|
announced_query,
|
|
|
|
replies_query,
|
|
|
|
new_followers_query,
|
|
|
|
followed_query,
|
2018-07-01 14:32:12 -05:00
|
|
|
unfollow_query,
|
2018-07-30 11:12:27 -05:00
|
|
|
likes_query,
|
2018-06-29 15:16:26 -05:00
|
|
|
],
|
2018-06-16 15:02:10 -05:00
|
|
|
}
|
2018-07-06 16:53:33 -05:00
|
|
|
inbox_data, older_than, newer_than = paginated_query(DB.activities, q)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-06 16:53:33 -05:00
|
|
|
return render_template(
|
|
|
|
"stream.html",
|
|
|
|
inbox_data=inbox_data,
|
|
|
|
older_than=older_than,
|
|
|
|
newer_than=newer_than,
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-05-28 12:46:23 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/key")
|
2018-05-29 14:36:05 -05:00
|
|
|
@login_required
|
|
|
|
def api_user_key():
|
|
|
|
return flask_jsonify(api_key=ADMIN_API_KEY)
|
|
|
|
|
|
|
|
|
2018-06-03 07:34:04 -05:00
|
|
|
def _user_api_arg(key: str, **kwargs):
|
2018-06-01 13:29:44 -05:00
|
|
|
"""Try to get the given key from the requests, try JSON body, form data and query arg."""
|
2018-05-29 15:16:09 -05:00
|
|
|
if request.is_json:
|
2018-06-01 13:29:44 -05:00
|
|
|
oid = request.json.get(key)
|
2018-05-29 14:36:05 -05:00
|
|
|
else:
|
2018-06-01 13:29:44 -05:00
|
|
|
oid = request.args.get(key) or request.form.get(key)
|
2018-05-29 14:36:05 -05:00
|
|
|
|
|
|
|
if not oid:
|
2018-06-16 15:02:10 -05:00
|
|
|
if "default" in kwargs:
|
2018-07-14 06:19:30 -05:00
|
|
|
app.logger.info(f'{key}={kwargs.get("default")}')
|
2018-06-16 15:02:10 -05:00
|
|
|
return kwargs.get("default")
|
2018-06-03 05:50:51 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
raise ValueError(f"missing {key}")
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-14 06:19:30 -05:00
|
|
|
app.logger.info(f"{key}={oid}")
|
2018-06-01 13:29:44 -05:00
|
|
|
return oid
|
|
|
|
|
|
|
|
|
|
|
|
def _user_api_get_note(from_outbox: bool = False):
|
2018-06-16 15:02:10 -05:00
|
|
|
oid = _user_api_arg("id")
|
2018-07-14 06:19:30 -05:00
|
|
|
app.logger.info(f"fetching {oid}")
|
2019-04-14 12:17:54 -05:00
|
|
|
note = ap.parse_activity(get_backend().fetch_iri(oid))
|
2018-06-01 13:29:44 -05:00
|
|
|
if from_outbox and not note.id.startswith(ID):
|
2018-06-16 15:02:10 -05:00
|
|
|
raise NotFromOutboxError(
|
2018-06-17 12:21:59 -05:00
|
|
|
f"cannot load {note.id}, id must be owned by the server"
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
2018-05-29 14:36:05 -05:00
|
|
|
|
2018-06-01 13:29:44 -05:00
|
|
|
return note
|
2018-05-29 14:36:05 -05:00
|
|
|
|
|
|
|
|
|
|
|
def _user_api_response(**kwargs):
|
2018-06-16 15:02:10 -05:00
|
|
|
_redirect = _user_api_arg("redirect", default=None)
|
2018-06-03 04:41:18 -05:00
|
|
|
if _redirect:
|
|
|
|
return redirect(_redirect)
|
2018-05-29 14:36:05 -05:00
|
|
|
|
|
|
|
resp = flask_jsonify(**kwargs)
|
|
|
|
resp.status_code = 201
|
|
|
|
return resp
|
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
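

# Illustrative delete call (authentication via @api_required omitted; the "id" can be
# passed as JSON, form data or a query argument, see _user_api_arg above):
#   POST /api/note/delete  with  id=<IRI of a note in this server's outbox>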
@app.route("/api/note/delete", methods=["POST"])
|
2018-05-28 15:38:48 -05:00
|
|
|
@api_required
|
|
|
|
def api_delete():
|
2018-05-29 14:36:05 -05:00
|
|
|
"""API endpoint to delete a Note activity."""
|
2018-06-01 13:29:44 -05:00
|
|
|
note = _user_api_get_note(from_outbox=True)
|
|
|
|
|
2018-07-29 10:19:06 -05:00
|
|
|
delete = ap.Delete(actor=ID, object=ap.Tombstone(id=note.id).to_dict(embed=True))
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
delete_id = post_to_outbox(delete)
|
2018-05-29 14:36:05 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=delete_id)
|
2018-05-29 14:36:05 -05:00
|
|
|
|
2018-05-28 15:38:48 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/boost", methods=["POST"])
|
2018-05-28 12:46:23 -05:00
|
|
|
@api_required
|
|
|
|
def api_boost():
|
2018-06-01 13:29:44 -05:00
|
|
|
note = _user_api_get_note()
|
|
|
|
|
2018-06-18 15:01:21 -05:00
|
|
|
announce = note.build_announce(MY_PERSON)
|
2019-04-05 04:35:48 -05:00
|
|
|
announce_id = post_to_outbox(announce)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=announce_id)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
|
|
|
|
2019-04-14 12:17:54 -05:00
|
|
|
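

# A vote is just a Note whose "name" carries the selected choice and whose inReplyTo
# points at the Question being voted on; api_vote below builds one and wraps it in a Create.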
@app.route("/api/vote", methods=["POST"])
|
|
|
|
@api_required
|
|
|
|
def api_vote():
|
|
|
|
oid = _user_api_arg("id")
|
|
|
|
app.logger.info(f"fetching {oid}")
|
|
|
|
note = ap.parse_activity(get_backend().fetch_iri(oid))
|
|
|
|
choice = _user_api_arg("choice")
|
|
|
|
|
|
|
|
raw_note = dict(
|
|
|
|
attributedTo=MY_PERSON.id,
|
|
|
|
cc=[],
|
|
|
|
to=note.get_actor().id,
|
|
|
|
name=choice,
|
|
|
|
tag=[],
|
|
|
|
inReplyTo=note.id,
|
|
|
|
)
|
|
|
|
|
|
|
|
note = ap.Note(**raw_note)
|
|
|
|
create = note.build_create()
|
|
|
|
create_id = post_to_outbox(create)
|
|
|
|
|
|
|
|
return _user_api_response(activity=create_id)
|
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/like", methods=["POST"])
|
2018-05-28 12:46:23 -05:00
|
|
|
@api_required
|
|
|
|
def api_like():
|
2018-06-01 13:29:44 -05:00
|
|
|
note = _user_api_get_note()
|
|
|
|
|
2018-06-18 15:01:21 -05:00
|
|
|
like = note.build_like(MY_PERSON)
|
2019-04-05 04:35:48 -05:00
|
|
|
like_id = post_to_outbox(like)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=like_id)
|
2018-05-28 12:46:23 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-22 15:22:30 -05:00
|
|
|
@app.route("/api/note/pin", methods=["POST"])
|
|
|
|
@api_required
|
|
|
|
def api_pin():
|
|
|
|
note = _user_api_get_note(from_outbox=True)
|
|
|
|
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
|
|
|
|
{"$set": {"meta.pinned": True}},
|
|
|
|
)
|
|
|
|
|
|
|
|
return _user_api_response(pinned=True)
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/api/note/unpin", methods=["POST"])
|
|
|
|
@api_required
|
|
|
|
def api_unpin():
|
|
|
|
note = _user_api_get_note(from_outbox=True)
|
|
|
|
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
|
|
|
|
{"$set": {"meta.pinned": False}},
|
|
|
|
)
|
|
|
|
|
|
|
|
return _user_api_response(pinned=False)
|
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/undo", methods=["POST"])
|
2018-05-27 07:21:06 -05:00
|
|
|
@api_required
|
|
|
|
def api_undo():
|
2018-06-16 15:02:10 -05:00
|
|
|
oid = _user_api_arg("id")
|
2018-06-29 15:16:26 -05:00
|
|
|
doc = DB.activities.find_one(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"$or": [{"remote_id": back.activity_url(oid)}, {"remote_id": oid}],
|
|
|
|
}
|
2018-06-18 15:01:21 -05:00
|
|
|
)
|
2018-06-01 13:29:44 -05:00
|
|
|
if not doc:
|
2018-06-16 15:02:10 -05:00
|
|
|
raise ActivityNotFoundError(f"cannot found {oid}")
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-06-17 12:21:59 -05:00
|
|
|
obj = ap.parse_activity(doc.get("activity"))
|
2018-06-01 13:29:44 -05:00
|
|
|
# FIXME(tsileo): detect already undo-ed and make this API call idempotent
|
|
|
|
undo = obj.build_undo()
|
2019-04-05 04:35:48 -05:00
|
|
|
undo_id = post_to_outbox(undo)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=undo_id)
|
2018-05-27 07:21:06 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-06 17:08:44 -05:00
|
|
|
@app.route("/admin/stream")
|
2018-05-18 13:41:41 -05:00
|
|
|
@login_required
|
2018-07-06 17:08:44 -05:00
|
|
|
def admin_stream():
|
2018-07-14 06:19:30 -05:00
|
|
|
q = {"meta.stream": True, "meta.deleted": False}
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-23 15:11:03 -05:00
|
|
|
tpl = "stream.html"
|
|
|
|
if request.args.get("debug"):
|
|
|
|
tpl = "stream_debug.html"
|
2018-07-23 15:25:51 -05:00
|
|
|
if request.args.get("debug_inbox"):
|
|
|
|
q = {}
|
|
|
|
|
2018-07-26 15:58:28 -05:00
|
|
|
inbox_data, older_than, newer_than = paginated_query(
|
|
|
|
DB.activities, q, limit=int(request.args.get("limit", 25))
|
|
|
|
)
|
2018-07-23 15:11:03 -05:00
|
|
|
|
2018-07-06 16:53:33 -05:00
|
|
|
return render_template(
|
2018-07-23 15:11:03 -05:00
|
|
|
tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
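

# Incoming activities are verified via HTTP signatures first; if that fails, the payload
# is re-fetched by its IRI as a fallback, and anything that still cannot be verified is
# stored in DB.trash for later analysis.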
@app.route("/inbox", methods=["GET", "POST"]) # noqa: C901
|
2018-06-16 14:24:53 -05:00
|
|
|
def inbox():
|
2018-06-16 15:02:10 -05:00
|
|
|
if request.method == "GET":
|
2018-06-16 14:24:53 -05:00
|
|
|
if not is_api_request():
|
|
|
|
abort(404)
|
2018-05-21 07:30:52 -05:00
|
|
|
try:
|
|
|
|
_api_required()
|
|
|
|
except BadSignature:
|
|
|
|
abort(404)
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-06-29 15:16:26 -05:00
|
|
|
DB.activities,
|
|
|
|
q={"meta.deleted": False, "box": Box.INBOX.value},
|
2018-06-16 15:02:10 -05:00
|
|
|
cursor=request.args.get("cursor"),
|
|
|
|
map_func=lambda doc: remove_context(doc["activity"]),
|
2018-07-31 16:23:20 -05:00
|
|
|
col_name="inbox",
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
|
|
|
)
|
2018-05-21 07:30:52 -05:00
|
|
|
|
2019-05-12 03:02:28 -05:00
|
|
|
try:
|
|
|
|
data = request.get_json(force=True)
|
|
|
|
except Exception:
|
|
|
|
return Response(
|
|
|
|
status=422,
|
|
|
|
headers={"Content-Type": "application/json"},
|
2019-05-12 03:06:26 -05:00
|
|
|
response=json.dumps({"error": "failed to decode request as JSON"}),
|
2019-05-12 03:02:28 -05:00
|
|
|
)
|
|
|
|
|
2019-04-17 16:36:28 -05:00
|
|
|
print(f"req_headers={request.headers}")
|
|
|
|
print(f"raw_data={data}")
|
2018-06-16 15:02:10 -05:00
|
|
|
logger.debug(f"req_headers={request.headers}")
|
|
|
|
logger.debug(f"raw_data={data}")
|
2018-06-20 16:42:12 -05:00
|
|
|
try:
|
|
|
|
if not verify_request(
|
|
|
|
request.method, request.path, request.headers, request.data
|
|
|
|
):
|
2018-06-16 15:02:10 -05:00
|
|
|
raise Exception("failed to verify request")
|
2018-06-02 02:07:57 -05:00
|
|
|
except Exception:
|
2018-06-16 15:02:10 -05:00
|
|
|
logger.exception(
|
|
|
|
"failed to verify request, trying to verify the payload by fetching the remote"
|
|
|
|
)
|
2018-05-21 07:41:47 -05:00
|
|
|
try:
|
2018-06-20 16:42:12 -05:00
|
|
|
data = get_backend().fetch_iri(data["id"])
|
2018-09-02 13:32:15 -05:00
|
|
|
except ActivityGoneError:
|
|
|
|
# XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
|
|
|
|
# appended, so an `ActivityGoneError` kind of ensure it's "legit"
|
2018-09-03 01:20:43 -05:00
|
|
|
if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
|
|
|
|
data["object"]
|
|
|
|
):
|
2019-05-12 03:02:28 -05:00
|
|
|
# If we're here, this means the key is not saved, so we cannot verify the object
|
|
|
|
logger.info(f"received a Delete for an unknown actor {data!r}, drop it")
|
2018-09-02 13:32:15 -05:00
|
|
|
|
2018-09-03 01:20:43 -05:00
|
|
|
return Response(status=201)
|
2018-05-21 07:41:47 -05:00
|
|
|
except Exception:
|
2019-05-12 03:06:26 -05:00
|
|
|
logger.exception(f"failed to fetch remote for payload {data!r}")
|
2019-05-12 03:02:28 -05:00
|
|
|
|
|
|
|
# Track/store the payload for analysis
|
|
|
|
ip, geoip = _get_ip()
|
|
|
|
|
2019-05-12 03:06:26 -05:00
|
|
|
DB.trash.insert(
|
|
|
|
{
|
|
|
|
"activity": data,
|
|
|
|
"meta": {
|
|
|
|
"ts": datetime.now().timestamp(),
|
|
|
|
"ip_address": ip,
|
|
|
|
"geoip": geoip,
|
|
|
|
"tb": traceback.format_exc(),
|
|
|
|
"headers": dict(request.headers),
|
|
|
|
},
|
|
|
|
}
|
|
|
|
)
|
2019-05-12 03:02:28 -05:00
|
|
|
|
2018-06-02 02:07:57 -05:00
|
|
|
return Response(
|
|
|
|
status=422,
|
2018-06-16 15:02:10 -05:00
|
|
|
headers={"Content-Type": "application/json"},
|
|
|
|
response=json.dumps(
|
|
|
|
{
|
|
|
|
"error": "failed to verify request (using HTTP signatures or fetching the IRI)"
|
|
|
|
}
|
|
|
|
),
|
2018-06-02 02:07:57 -05:00
|
|
|
)
|
2019-04-14 13:16:04 -05:00
|
|
|
print(data)
|
2018-06-17 12:21:59 -05:00
|
|
|
activity = ap.parse_activity(data)
|
2018-06-16 15:02:10 -05:00
|
|
|
logger.debug(f"inbox activity={activity}/{data}")
|
2019-04-05 04:35:48 -05:00
|
|
|
post_to_inbox(activity)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
return Response(status=201)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-18 15:01:21 -05:00
|
|
|


def without_id(l):
    out = []
    for d in l:
        if "_id" in d:
            del d["_id"]
        out.append(d)
    return out
@app.route("/api/debug", methods=["GET", "DELETE"])
|
2018-05-27 04:50:09 -05:00
|
|
|
@api_required
|
|
|
|
def api_debug():
|
|
|
|
"""Endpoint used/needed for testing, only works in DEBUG_MODE."""
|
|
|
|
if not DEBUG_MODE:
|
2018-06-16 15:02:10 -05:00
|
|
|
return flask_jsonify(message="DEBUG_MODE is off")
|
2018-05-27 04:50:09 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
if request.method == "DELETE":
|
2018-05-27 04:50:09 -05:00
|
|
|
_drop_db()
|
2018-06-16 15:02:10 -05:00
|
|
|
return flask_jsonify(message="DB dropped")
|
2018-05-27 04:50:09 -05:00
|
|
|
|
2018-06-18 15:01:21 -05:00
|
|
|
return flask_jsonify(
|
2018-06-29 15:16:26 -05:00
|
|
|
inbox=DB.activities.count({"box": Box.INBOX.value}),
|
|
|
|
outbox=DB.activities.count({"box": Box.OUTBOX.value}),
|
|
|
|
outbox_data=without_id(DB.activities.find({"box": Box.OUTBOX.value})),
|
2018-06-18 15:01:21 -05:00
|
|
|
)
|
2018-05-27 04:50:09 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
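

# Illustrative new-note request (auth via @api_required omitted; parameters mirror the
# ones read below): POST /api/new_note with form data
#   content=<markdown source>                   (required)
#   reply=<IRI of the note being replied to>    (optional)
# plus an optional "file" upload that is attached as a Document.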
@app.route("/api/new_note", methods=["POST"])
|
2018-06-16 14:24:53 -05:00
|
|
|
@api_required
|
|
|
|
def api_new_note():
|
2018-06-16 15:02:10 -05:00
|
|
|
source = _user_api_arg("content")
|
2018-05-26 02:50:59 -05:00
|
|
|
if not source:
|
2018-06-16 15:02:10 -05:00
|
|
|
raise ValueError("missing content")
|
2018-06-16 14:24:53 -05:00
|
|
|
|
2018-06-01 13:29:44 -05:00
|
|
|
_reply, reply = None, None
|
|
|
|
try:
|
2018-06-16 15:02:10 -05:00
|
|
|
_reply = _user_api_arg("reply")
|
2018-06-01 13:29:44 -05:00
|
|
|
except ValueError:
|
|
|
|
pass
|
|
|
|
|
2018-06-16 14:24:53 -05:00
|
|
|
content, tags = parse_markdown(source)
|
2018-06-16 15:02:10 -05:00
|
|
|
to = request.args.get("to")
|
|
|
|
cc = [ID + "/followers"]
|
2018-06-16 14:24:53 -05:00
|
|
|
|
2018-06-01 13:29:44 -05:00
|
|
|
if _reply:
|
2018-07-11 12:38:24 -05:00
|
|
|
reply = ap.fetch_remote_activity(_reply)
|
2018-05-28 17:12:44 -05:00
|
|
|
cc.append(reply.attributedTo)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
for tag in tags:
|
2018-06-16 15:02:10 -05:00
|
|
|
if tag["type"] == "Mention":
|
|
|
|
cc.append(tag["href"])
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-01 16:46:58 -05:00
|
|
|
raw_note = dict(
|
2018-06-17 14:54:16 -05:00
|
|
|
attributedTo=MY_PERSON.id,
|
2018-06-16 14:24:53 -05:00
|
|
|
cc=list(set(cc)),
|
2018-06-17 12:21:59 -05:00
|
|
|
to=[to if to else ap.AS_PUBLIC],
|
2018-06-01 13:29:44 -05:00
|
|
|
content=content,
|
2018-05-18 13:41:41 -05:00
|
|
|
tag=tags,
|
2018-06-16 15:02:10 -05:00
|
|
|
source={"mediaType": "text/markdown", "content": source},
|
|
|
|
inReplyTo=reply.id if reply else None,
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
2018-07-01 16:46:58 -05:00
|
|
|
|
2019-04-07 04:00:24 -05:00
|
|
|
if "file" in request.files and request.files["file"].filename:
|
2018-07-01 16:46:58 -05:00
|
|
|
file = request.files["file"]
|
|
|
|
rfilename = secure_filename(file.filename)
|
2018-07-06 16:15:49 -05:00
|
|
|
with BytesIO() as buf:
|
|
|
|
file.save(buf)
|
|
|
|
oid = MEDIA_CACHE.save_upload(buf, rfilename)
|
2018-07-01 16:46:58 -05:00
|
|
|
mtype = mimetypes.guess_type(rfilename)[0]
|
|
|
|
|
2018-07-03 16:29:55 -05:00
|
|
|
raw_note["attachment"] = [
|
2018-07-01 16:46:58 -05:00
|
|
|
{
|
|
|
|
"mediaType": mtype,
|
|
|
|
"name": rfilename,
|
|
|
|
"type": "Document",
|
2018-07-06 16:15:49 -05:00
|
|
|
"url": f"{BASE_URL}/uploads/{oid}/{rfilename}",
|
2018-07-01 16:46:58 -05:00
|
|
|
}
|
|
|
|
]
|
|
|
|
|
|
|
|
note = ap.Note(**raw_note)
|
2018-05-18 13:41:41 -05:00
|
|
|
create = note.build_create()
|
2019-04-05 04:35:48 -05:00
|
|
|
create_id = post_to_outbox(create)
|
2018-05-28 17:12:44 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=create_id)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-04-14 12:17:54 -05:00
|
|
|
@app.route("/api/new_question", methods=["POST"])
|
|
|
|
@api_required
|
|
|
|
def api_new_question():
|
|
|
|
source = _user_api_arg("content")
|
|
|
|
if not source:
|
|
|
|
raise ValueError("missing content")
|
|
|
|
|
|
|
|
content, tags = parse_markdown(source)
|
|
|
|
cc = [ID + "/followers"]
|
|
|
|
|
|
|
|
for tag in tags:
|
|
|
|
if tag["type"] == "Mention":
|
|
|
|
cc.append(tag["href"])
|
|
|
|
|
|
|
|
answers = []
|
|
|
|
for i in range(4):
|
|
|
|
a = _user_api_arg(f"answer{i}", default=None)
|
|
|
|
if not a:
|
|
|
|
break
|
|
|
|
answers.append({"type": ActivityType.NOTE.value, "name": a})
|
|
|
|
|
|
|
|
choices = {
|
|
|
|
"endTime": ap.format_datetime(
|
|
|
|
datetime.now().astimezone()
|
|
|
|
+ timedelta(minutes=int(_user_api_arg("open_for")))
|
|
|
|
)
|
|
|
|
}
|
|
|
|
of = _user_api_arg("of")
|
|
|
|
if of == "anyOf":
|
|
|
|
choices["anyOf"] = answers
|
|
|
|
else:
|
|
|
|
choices["oneOf"] = answers
|
|
|
|
|
|
|
|
raw_question = dict(
|
|
|
|
attributedTo=MY_PERSON.id,
|
|
|
|
cc=list(set(cc)),
|
|
|
|
to=[ap.AS_PUBLIC],
|
|
|
|
content=content,
|
|
|
|
tag=tags,
|
|
|
|
source={"mediaType": "text/markdown", "content": source},
|
|
|
|
inReplyTo=None,
|
|
|
|
**choices,
|
|
|
|
)
|
|
|
|
|
|
|
|
question = ap.Question(**raw_question)
|
|
|
|
create = question.build_create()
|
|
|
|
create_id = post_to_outbox(create)
|
|
|
|
|
|
|
|
return _user_api_response(activity=create_id)
|
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/stream")
|
2018-05-27 15:30:43 -05:00
|
|
|
@api_required
|
2018-05-18 13:41:41 -05:00
|
|
|
def api_stream():
|
|
|
|
return Response(
|
2018-06-16 15:02:10 -05:00
|
|
|
response=json.dumps(
|
|
|
|
activitypub.build_inbox_json_feed("/api/stream", request.args.get("cursor"))
|
|
|
|
),
|
|
|
|
headers={"Content-Type": "application/json"},
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
|
|
|
|
2018-05-28 17:12:44 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/block", methods=["POST"])
|
2018-05-28 17:12:44 -05:00
|
|
|
@api_required
|
|
|
|
def api_block():
|
2018-06-16 15:02:10 -05:00
|
|
|
actor = _user_api_arg("actor")
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-06-29 15:16:26 -05:00
|
|
|
existing = DB.activities.find_one(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.BLOCK.value,
|
|
|
|
"activity.object": actor,
|
|
|
|
"meta.undo": False,
|
|
|
|
}
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
2018-06-01 13:29:44 -05:00
|
|
|
if existing:
|
2018-06-16 15:02:10 -05:00
|
|
|
return _user_api_response(activity=existing["activity"]["id"])
|
2018-05-28 17:12:44 -05:00
|
|
|
|
2018-06-17 14:54:16 -05:00
|
|
|
block = ap.Block(actor=MY_PERSON.id, object=actor)
|
2019-04-05 04:35:48 -05:00
|
|
|
block_id = post_to_outbox(block)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=block_id)
|
2018-05-28 17:12:44 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/api/follow", methods=["POST"])
|
2018-05-18 13:41:41 -05:00
|
|
|
@api_required
|
|
|
|
def api_follow():
|
2018-06-16 15:02:10 -05:00
|
|
|
actor = _user_api_arg("actor")
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-07 06:56:00 -05:00
|
|
|
q = {
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.FOLLOW.value,
|
|
|
|
"meta.undo": False,
|
|
|
|
"activity.object": actor,
|
|
|
|
}
|
|
|
|
|
|
|
|
existing = DB.activities.find_one(q)
|
2018-06-01 13:29:44 -05:00
|
|
|
if existing:
|
2018-06-16 15:02:10 -05:00
|
|
|
return _user_api_response(activity=existing["activity"]["id"])
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-17 14:54:16 -05:00
|
|
|
follow = ap.Follow(actor=MY_PERSON.id, object=actor)
|
2019-04-05 04:35:48 -05:00
|
|
|
follow_id = post_to_outbox(follow)
|
2018-06-01 13:29:44 -05:00
|
|
|
|
2018-07-29 09:07:27 -05:00
|
|
|
return _user_api_response(activity=follow_id)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
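

# /followers and /following below answer both as HTML pages and, for API requests, as
# ActivityPub OrderedCollections built from the stored Follow activities.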
@app.route("/followers")
|
2018-06-16 14:24:53 -05:00
|
|
|
def followers():
|
2018-07-07 06:56:00 -05:00
|
|
|
q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
|
|
|
|
|
2018-06-16 14:24:53 -05:00
|
|
|
if is_api_request():
|
2018-05-18 13:41:41 -05:00
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-07-07 06:56:00 -05:00
|
|
|
DB.activities,
|
|
|
|
q=q,
|
2018-06-16 15:02:10 -05:00
|
|
|
cursor=request.args.get("cursor"),
|
2018-07-07 07:07:29 -05:00
|
|
|
map_func=lambda doc: doc["activity"]["actor"],
|
2018-07-31 16:23:20 -05:00
|
|
|
col_name="followers",
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
2018-06-16 14:24:53 -05:00
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-18 16:18:39 -05:00
|
|
|
raw_followers, older_than, newer_than = paginated_query(DB.activities, q)
|
2019-04-05 04:35:48 -05:00
|
|
|
followers = [
|
|
|
|
doc["meta"]["actor"] for doc in raw_followers if "actor" in doc.get("meta", {})
|
|
|
|
]
|
2018-07-07 06:56:00 -05:00
|
|
|
return render_template(
|
|
|
|
"followers.html",
|
|
|
|
followers_data=followers,
|
|
|
|
older_than=older_than,
|
|
|
|
newer_than=newer_than,
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/following")
|
2018-05-18 13:41:41 -05:00
|
|
|
def following():
|
2018-07-07 06:56:00 -05:00
|
|
|
q = {"box": Box.OUTBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
if is_api_request():
|
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-07-07 06:56:00 -05:00
|
|
|
DB.activities,
|
|
|
|
q=q,
|
2018-06-16 15:02:10 -05:00
|
|
|
cursor=request.args.get("cursor"),
|
2018-07-07 06:56:00 -05:00
|
|
|
map_func=lambda doc: doc["activity"]["object"],
|
2018-07-31 16:23:20 -05:00
|
|
|
col_name="following",
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
2018-06-16 14:24:53 -05:00
|
|
|
|
2018-07-18 16:18:39 -05:00
|
|
|
if config.HIDE_FOLLOWING and not session.get("logged_in", False):
|
2018-07-17 16:42:21 -05:00
|
|
|
abort(404)
|
|
|
|
|
2018-07-07 06:56:00 -05:00
|
|
|
following, older_than, newer_than = paginated_query(DB.activities, q)
|
2019-04-05 04:35:48 -05:00
|
|
|
following = [
|
|
|
|
(doc["remote_id"], doc["meta"]["object"])
|
|
|
|
for doc in following
|
|
|
|
if "remote_id" in doc and "object" in doc.get("meta", {})
|
|
|
|
]
|
2018-07-07 06:56:00 -05:00
|
|
|
return render_template(
|
|
|
|
"following.html",
|
|
|
|
following_data=following,
|
|
|
|
older_than=older_than,
|
|
|
|
newer_than=newer_than,
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/tags/<tag>")
|
2018-05-18 13:41:41 -05:00
|
|
|
def tags(tag):
|
2018-06-29 15:16:26 -05:00
|
|
|
if not DB.activities.count(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"activity.object.tag.type": "Hashtag",
|
|
|
|
"activity.object.tag.name": "#" + tag,
|
|
|
|
}
|
2018-06-16 15:02:10 -05:00
|
|
|
):
|
2018-05-18 13:41:41 -05:00
|
|
|
abort(404)
|
|
|
|
if not is_api_request():
|
|
|
|
return render_template(
|
2018-06-16 15:02:10 -05:00
|
|
|
"tags.html",
|
2018-05-18 13:41:41 -05:00
|
|
|
tag=tag,
|
2018-06-29 15:16:26 -05:00
|
|
|
outbox_data=DB.activities.find(
|
2018-06-16 15:02:10 -05:00
|
|
|
{
|
2018-06-29 15:16:26 -05:00
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.CREATE.value,
|
2018-06-16 15:02:10 -05:00
|
|
|
"meta.deleted": False,
|
|
|
|
"activity.object.tag.type": "Hashtag",
|
|
|
|
"activity.object.tag.name": "#" + tag,
|
|
|
|
}
|
|
|
|
),
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
|
|
|
q = {
|
2018-06-29 15:16:26 -05:00
|
|
|
"box": Box.OUTBOX.value,
|
2018-06-16 15:02:10 -05:00
|
|
|
"meta.deleted": False,
|
|
|
|
"meta.undo": False,
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"activity.object.tag.type": "Hashtag",
|
|
|
|
"activity.object.tag.name": "#" + tag,
|
2018-05-18 13:41:41 -05:00
|
|
|
}
|
2018-06-16 15:02:10 -05:00
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-06-29 15:16:26 -05:00
|
|
|
DB.activities,
|
2018-06-16 15:02:10 -05:00
|
|
|
q=q,
|
|
|
|
cursor=request.args.get("cursor"),
|
|
|
|
map_func=lambda doc: doc["activity"]["object"]["id"],
|
|
|
|
col_name=f"tags/{tag}",
|
|
|
|
)
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-07-22 14:54:24 -05:00
|
|
|
@app.route("/featured")
|
|
|
|
def featured():
|
|
|
|
if not is_api_request():
|
|
|
|
abort(404)
|
2018-07-22 15:22:30 -05:00
|
|
|
q = {
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"meta.deleted": False,
|
|
|
|
"meta.undo": False,
|
|
|
|
"meta.pinned": True,
|
|
|
|
}
|
|
|
|
data = [clean_activity(doc["activity"]["object"]) for doc in DB.activities.find(q)]
|
|
|
|
return jsonify(**activitypub.simple_build_ordered_collection("featured", data))
|
2018-07-22 14:54:24 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/liked")
|
2018-05-18 13:41:41 -05:00
|
|
|
def liked():
|
|
|
|
if not is_api_request():
|
2018-07-06 16:53:33 -05:00
|
|
|
q = {
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.LIKE.value,
|
|
|
|
"meta.deleted": False,
|
|
|
|
"meta.undo": False,
|
|
|
|
}
|
|
|
|
|
|
|
|
liked, older_than, newer_than = paginated_query(DB.activities, q)
|
|
|
|
|
2018-06-25 16:45:43 -05:00
|
|
|
return render_template(
|
2018-07-06 16:53:33 -05:00
|
|
|
"liked.html", liked=liked, older_than=older_than, newer_than=newer_than
|
2018-06-25 16:45:43 -05:00
|
|
|
)
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
|
|
|
|
return jsonify(
|
|
|
|
**activitypub.build_ordered_collection(
|
2018-06-29 15:16:26 -05:00
|
|
|
DB.activities,
|
2018-06-16 15:02:10 -05:00
|
|
|
q=q,
|
|
|
|
cursor=request.args.get("cursor"),
|
|
|
|
map_func=lambda doc: doc["activity"]["object"],
|
|
|
|
col_name="liked",
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|


#######
# IndieAuth
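

# Rough flow, as implemented below: GET /indieauth renders a consent form, POST
# /indieauth/flow stores the grant and redirects back with a short-lived code, and
# /token exchanges a verified code for a signed token.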
def build_auth_resp(payload):
    if request.headers.get("Accept") == "application/json":
        return Response(
            status=200,
            headers={"Content-Type": "application/json"},
            response=json.dumps(payload),
        )
    return Response(
        status=200,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        response=urlencode(payload),
    )


def _get_prop(props, name, default=None):
    if name in props:
        items = props.get(name)
        if isinstance(items, list):
            return items[0]
        return items
    return default


def get_client_id_data(url):
    # FIXME(tsileo): ensure not localhost via `little_boxes.urlutils.is_url_valid`
    data = mf2py.parse(url=url)
    for item in data["items"]:
        if "h-x-app" in item["type"] or "h-app" in item["type"]:
            props = item.get("properties", {})
            print(props)
            return dict(
                logo=_get_prop(props, "logo"),
                name=_get_prop(props, "name"),
                url=_get_prop(props, "url"),
            )
    return dict(logo=None, name=url, url=url)
@app.route("/indieauth/flow", methods=["POST"])
|
2018-06-16 14:24:53 -05:00
|
|
|
@login_required
|
|
|
|
def indieauth_flow():
|
|
|
|
auth = dict(
|
2018-06-16 15:02:10 -05:00
|
|
|
scope=" ".join(request.form.getlist("scopes")),
|
|
|
|
me=request.form.get("me"),
|
|
|
|
client_id=request.form.get("client_id"),
|
|
|
|
state=request.form.get("state"),
|
|
|
|
redirect_uri=request.form.get("redirect_uri"),
|
|
|
|
response_type=request.form.get("response_type"),
|
2019-05-01 17:49:45 -05:00
|
|
|
ts=datetime.now().timestamp(),
|
|
|
|
code=binascii.hexlify(os.urandom(8)).decode("utf-8"),
|
|
|
|
verified=False,
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
|
|
|
|
2019-05-01 17:49:45 -05:00
|
|
|
# XXX(tsileo): a whitelist for me values?
|
|
|
|
|
|
|
|
# TODO(tsileo): redirect_uri checks
|
2018-06-16 15:02:10 -05:00
|
|
|
if not auth["redirect_uri"]:
|
2019-05-01 17:49:45 -05:00
|
|
|
abort(400)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
DB.indieauth.insert_one(auth)
|
|
|
|
|
|
|
|
# FIXME(tsileo): fetch client ID and validate redirect_uri
|
2019-05-01 17:49:45 -05:00
|
|
|
red = f'{auth["redirect_uri"]}?code={auth["code"]}&state={auth["state"]}&me={auth["me"]}'
|
2018-05-18 13:41:41 -05:00
|
|
|
return redirect(red)
|
|
|
|
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
@app.route("/indieauth", methods=["GET", "POST"])
|
2018-06-16 14:24:53 -05:00
|
|
|
def indieauth_endpoint():
|
2018-06-16 15:02:10 -05:00
|
|
|
if request.method == "GET":
|
|
|
|
if not session.get("logged_in"):
|
2018-07-22 05:17:55 -05:00
|
|
|
return redirect(url_for("admin_login", next=request.url))
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
me = request.args.get("me")
|
2018-06-16 14:24:53 -05:00
|
|
|
# FIXME(tsileo): ensure me == ID
|
2018-06-16 15:02:10 -05:00
|
|
|
client_id = request.args.get("client_id")
|
|
|
|
redirect_uri = request.args.get("redirect_uri")
|
|
|
|
state = request.args.get("state", "")
|
|
|
|
response_type = request.args.get("response_type", "id")
|
|
|
|
scope = request.args.get("scope", "").split()
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
print("STATE", state)
|
2018-05-18 13:41:41 -05:00
|
|
|
return render_template(
|
2018-06-16 15:02:10 -05:00
|
|
|
"indieauth_flow.html",
|
2018-05-18 13:41:41 -05:00
|
|
|
client=get_client_id_data(client_id),
|
|
|
|
scopes=scope,
|
|
|
|
redirect_uri=redirect_uri,
|
|
|
|
state=state,
|
|
|
|
response_type=response_type,
|
|
|
|
client_id=client_id,
|
|
|
|
me=me,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Auth verification via POST
|
2018-06-16 15:02:10 -05:00
|
|
|
code = request.form.get("code")
|
|
|
|
redirect_uri = request.form.get("redirect_uri")
|
|
|
|
client_id = request.form.get("client_id")
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-05-08 06:19:31 -05:00
|
|
|
ip, geoip = _get_ip()
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
auth = DB.indieauth.find_one_and_update(
|
2018-06-16 15:02:10 -05:00
|
|
|
{
|
|
|
|
"code": code,
|
|
|
|
"redirect_uri": redirect_uri,
|
|
|
|
"client_id": client_id,
|
2019-05-01 17:49:45 -05:00
|
|
|
"verified": False,
|
|
|
|
},
|
2019-05-01 18:19:01 -05:00
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"verified": True,
|
2019-05-02 14:53:17 -05:00
|
|
|
"verified_by": "id",
|
2019-05-01 18:19:01 -05:00
|
|
|
"verified_at": datetime.now().timestamp(),
|
2019-05-08 06:19:31 -05:00
|
|
|
"ip_address": ip,
|
|
|
|
"geoip": geoip,
|
2019-05-01 18:19:01 -05:00
|
|
|
}
|
|
|
|
},
|
2018-05-18 13:41:41 -05:00
|
|
|
)
|
|
|
|
print(auth)
|
|
|
|
print(code, redirect_uri, client_id)
|
|
|
|
|
2019-05-01 17:49:45 -05:00
|
|
|
# Ensure the code is recent
|
2019-05-01 17:59:13 -05:00
|
|
|
if (datetime.now() - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5):
|
2019-05-01 17:49:45 -05:00
|
|
|
abort(400)
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
if not auth:
|
|
|
|
abort(403)
|
|
|
|
return
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
session["logged_in"] = True
|
|
|
|
me = auth["me"]
|
|
|
|
state = auth["state"]
|
2019-05-01 17:49:45 -05:00
|
|
|
scope = auth["scope"]
|
2018-06-16 15:02:10 -05:00
|
|
|
print("STATE", state)
|
|
|
|
return build_auth_resp({"me": me, "state": state, "scope": scope})
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
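

# Illustrative token exchange (placeholder values; the code must still be unverified
# and less than 5 minutes old):
#   POST /token
#   code=<code>&me=<me>&redirect_uri=<redirect_uri>&client_id=<client_id>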
@app.route("/token", methods=["GET", "POST"])
|
2018-05-18 13:41:41 -05:00
|
|
|
def token_endpoint():
|
2019-05-01 17:49:45 -05:00
|
|
|
# Generate a new token with the returned access code
|
2018-06-16 15:02:10 -05:00
|
|
|
if request.method == "POST":
|
|
|
|
code = request.form.get("code")
|
|
|
|
me = request.form.get("me")
|
|
|
|
redirect_uri = request.form.get("redirect_uri")
|
|
|
|
client_id = request.form.get("client_id")
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-05-01 18:19:01 -05:00
|
|
|
now = datetime.now()
|
2019-05-08 06:19:31 -05:00
|
|
|
ip, geoip = _get_ip()
|
2019-05-01 18:19:01 -05:00
|
|
|
|
|
|
|
# This query ensure code, client_id, redirect_uri and me are matching with the code request
|
2019-05-01 17:49:45 -05:00
|
|
|
auth = DB.indieauth.find_one_and_update(
|
2018-06-16 15:02:10 -05:00
|
|
|
{
|
|
|
|
"code": code,
|
|
|
|
"me": me,
|
|
|
|
"redirect_uri": redirect_uri,
|
|
|
|
"client_id": client_id,
|
2019-05-01 17:49:45 -05:00
|
|
|
"verified": False,
|
|
|
|
},
|
2019-05-01 18:19:01 -05:00
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"verified": True,
|
2019-05-02 14:53:17 -05:00
|
|
|
"verified_by": "code",
|
2019-05-01 18:19:01 -05:00
|
|
|
"verified_at": now.timestamp(),
|
2019-05-08 06:19:31 -05:00
|
|
|
"ip_address": ip,
|
|
|
|
"geoip": geoip,
|
2019-05-01 18:19:01 -05:00
|
|
|
}
|
|
|
|
},
|
2018-06-16 15:02:10 -05:00
|
|
|
)
|
2019-05-01 18:19:01 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
if not auth:
|
|
|
|
abort(403)
|
2019-05-01 17:49:45 -05:00
|
|
|
|
2019-05-01 18:19:01 -05:00
|
|
|
scope = auth["scope"].split()
|
|
|
|
|
|
|
|
# Ensure there's at least one scope
|
|
|
|
if not len(scope):
|
|
|
|
abort(400)
|
|
|
|
|
2019-05-01 17:49:45 -05:00
|
|
|
# Ensure the code is recent
|
2019-05-01 17:59:13 -05:00
|
|
|
if (now - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5):
|
2019-05-01 17:49:45 -05:00
|
|
|
abort(400)
|
|
|
|
|
|
|
|
payload = dict(me=me, client_id=client_id, scope=scope, ts=now.timestamp())
|
2018-06-16 15:02:10 -05:00
|
|
|
token = JWT.dumps(payload).decode("utf-8")
|
2019-05-01 17:49:45 -05:00
|
|
|
DB.indieauth.update_one(
|
|
|
|
{"_id": auth["_id"]},
|
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"token": token,
|
|
|
|
"token_expires": (now + timedelta(minutes=30)).timestamp(),
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-05-01 17:49:45 -05:00
|
|
|
return build_auth_resp(
|
|
|
|
{"me": me, "scope": auth["scope"], "access_token": token}
|
|
|
|
)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
# Token verification
|
2018-06-16 15:02:10 -05:00
|
|
|
token = request.headers.get("Authorization").replace("Bearer ", "")
|
2018-05-18 13:41:41 -05:00
|
|
|
try:
|
|
|
|
payload = JWT.loads(token)
|
|
|
|
except BadSignature:
|
|
|
|
abort(403)
|
|
|
|
|
2019-05-01 17:49:45 -05:00
|
|
|
# Check the token expritation (valid for 3 hours)
|
2019-05-01 18:19:01 -05:00
|
|
|
if (datetime.now() - datetime.fromtimestamp(payload["ts"])) > timedelta(
|
|
|
|
minutes=180
|
|
|
|
):
|
2019-05-01 17:49:45 -05:00
|
|
|
abort(401)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
return build_auth_resp(
|
|
|
|
{
|
|
|
|
"me": payload["me"],
|
2019-05-01 17:49:45 -05:00
|
|
|
"scope": " ".join(payload["scope"]),
|
2018-06-16 15:02:10 -05:00
|
|
|
"client_id": payload["client_id"],
|
|
|
|
}
|
|
|
|
)
|
2019-02-24 14:04:09 -06:00
|
|
|
|
|
|
|
|
2019-04-05 14:36:56 -05:00
|
|
|


#################
# Feeds


@app.route("/feed.json")
def json_feed():
    return Response(
        response=json.dumps(activitypub.json_feed("/feed.json")),
        headers={"Content-Type": "application/json"},
    )


@app.route("/feed.atom")
def atom_feed():
    return Response(
        response=activitypub.gen_feed().atom_str(),
        headers={"Content-Type": "application/atom+xml"},
    )


@app.route("/feed.rss")
def rss_feed():
    return Response(
        response=activitypub.gen_feed().rss_str(),
        headers={"Content-Type": "application/rss+xml"},
    )


###########
# Tasks
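

# The Tasks helpers below only enqueue work: each method pushes a payload onto the "p"
# task queue, which is expected to POST it back to the matching /task/* endpoint.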
class Tasks:
    @staticmethod
    def cache_object(iri: str) -> None:
        p.push(iri, "/task/cache_object")

    @staticmethod
    def cache_actor(iri: str, also_cache_attachments: bool = True) -> None:
        p.push(
            {"iri": iri, "also_cache_attachments": also_cache_attachments},
            "/task/cache_actor",
        )

    @staticmethod
    def post_to_remote_inbox(payload: str, recp: str) -> None:
        p.push({"payload": payload, "to": recp}, "/task/post_to_remote_inbox")

    @staticmethod
    def forward_activity(iri: str) -> None:
        p.push(iri, "/task/forward_activity")

    @staticmethod
    def fetch_og_meta(iri: str) -> None:
        p.push(iri, "/task/fetch_og_meta")

    @staticmethod
    def process_new_activity(iri: str) -> None:
        p.push(iri, "/task/process_new_activity")

    @staticmethod
    def cache_attachments(iri: str) -> None:
        p.push(iri, "/task/cache_attachments")

    @staticmethod
    def finish_post_to_inbox(iri: str) -> None:
        p.push(iri, "/task/finish_post_to_inbox")

    @staticmethod
    def finish_post_to_outbox(iri: str) -> None:
        p.push(iri, "/task/finish_post_to_outbox")
@app.route("/task/fetch_og_meta", methods=["POST"])
|
2019-04-05 08:14:57 -05:00
|
|
|
def task_fetch_og_meta():
|
2019-04-05 04:35:48 -05:00
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
if activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
note = activity.get_object()
|
|
|
|
links = opengraph.links_from_note(note.to_dict())
|
|
|
|
og_metadata = opengraph.fetch_og_metadata(USER_AGENT, links)
|
|
|
|
for og in og_metadata:
|
|
|
|
if not og.get("image"):
|
|
|
|
continue
|
2019-04-08 09:41:09 -05:00
|
|
|
MEDIA_CACHE.cache_og_image2(og["image"], iri)
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
app.logger.debug(f"OG metadata {og_metadata!r}")
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"remote_id": iri}, {"$set": {"meta.og_metadata": og_metadata}}
|
|
|
|
)
|
|
|
|
|
|
|
|
app.logger.info(f"OG metadata fetched for {iri}")
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError):
|
|
|
|
app.logger.exception(f"dropping activity {iri}, skip OG metedata")
|
|
|
|
return ""
|
|
|
|
except requests.exceptions.HTTPError as http_err:
|
|
|
|
if 400 <= http_err.response.status_code < 500:
|
|
|
|
app.logger.exception("bad request, no retry")
|
|
|
|
return ""
|
|
|
|
app.logger.exception("failed to fetch OG metadata")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from http_err
|
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to fetch OG metadata for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
@app.route("/task/cache_object", methods=["POST"])
|
|
|
|
def task_cache_object():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
2019-04-07 09:49:35 -05:00
|
|
|
obj = activity.get_object()
|
2019-04-05 04:35:48 -05:00
|
|
|
DB.activities.update_one(
|
|
|
|
{"remote_id": activity.id},
|
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"meta.object": obj.to_dict(embed=True),
|
|
|
|
"meta.object_actor": activitypub._actor_to_meta(obj.get_actor()),
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
|
|
|
|
DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}})
|
|
|
|
app.logger.exception(f"flagging activity {iri} as deleted, no object caching")
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to cache object for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-07 14:24:52 -05:00
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
@app.route("/task/finish_post_to_outbox", methods=["POST"]) # noqa:C901
|
|
|
|
def task_finish_post_to_outbox():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
|
|
|
|
recipients = activity.recipients()
|
|
|
|
|
|
|
|
if activity.has_type(ap.ActivityType.DELETE):
|
|
|
|
back.outbox_delete(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.UPDATE):
|
|
|
|
back.outbox_update(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
back.outbox_create(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.ANNOUNCE):
|
|
|
|
back.outbox_announce(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.LIKE):
|
|
|
|
back.outbox_like(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.UNDO):
|
|
|
|
obj = activity.get_object()
|
|
|
|
if obj.has_type(ap.ActivityType.LIKE):
|
|
|
|
back.outbox_undo_like(MY_PERSON, obj)
|
|
|
|
elif obj.has_type(ap.ActivityType.ANNOUNCE):
|
|
|
|
back.outbox_undo_announce(MY_PERSON, obj)
|
|
|
|
elif obj.has_type(ap.ActivityType.FOLLOW):
|
2019-04-08 13:55:03 -05:00
|
|
|
back.undo_new_following(MY_PERSON, obj)
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
app.logger.info(f"recipients={recipients}")
|
|
|
|
activity = ap.clean_activity(activity.to_dict())
|
|
|
|
|
|
|
|
DB.cache2.remove()
|
|
|
|
|
|
|
|
payload = json.dumps(activity)
|
|
|
|
for recp in recipients:
|
|
|
|
app.logger.debug(f"posting to {recp}")
|
|
|
|
Tasks.post_to_remote_inbox(payload, recp)
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError):
|
|
|
|
app.logger.exception(f"no retry")
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to post to remote inbox for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
@app.route("/task/finish_post_to_inbox", methods=["POST"]) # noqa: C901
|
|
|
|
def task_finish_post_to_inbox():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
|
|
|
|
if activity.has_type(ap.ActivityType.DELETE):
|
|
|
|
back.inbox_delete(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.UPDATE):
|
|
|
|
back.inbox_update(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
back.inbox_create(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.ANNOUNCE):
|
|
|
|
back.inbox_announce(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.LIKE):
|
|
|
|
back.inbox_like(MY_PERSON, activity)
|
|
|
|
elif activity.has_type(ap.ActivityType.FOLLOW):
|
|
|
|
# Reply to a Follow with an Accept
|
|
|
|
accept = ap.Accept(actor=ID, object=activity.to_dict(embed=True))
|
|
|
|
post_to_outbox(accept)
|
|
|
|
elif activity.has_type(ap.ActivityType.UNDO):
|
|
|
|
obj = activity.get_object()
|
|
|
|
if obj.has_type(ap.ActivityType.LIKE):
|
|
|
|
back.inbox_undo_like(MY_PERSON, obj)
|
|
|
|
elif obj.has_type(ap.ActivityType.ANNOUNCE):
|
|
|
|
back.inbox_undo_announce(MY_PERSON, obj)
|
|
|
|
elif obj.has_type(ap.ActivityType.FOLLOW):
|
2019-04-08 13:55:03 -05:00
|
|
|
back.undo_new_follower(MY_PERSON, obj)
|
2019-04-05 04:35:48 -05:00
|
|
|
try:
|
|
|
|
invalidate_cache(activity)
|
|
|
|
except Exception:
|
|
|
|
app.logger.exception("failed to invalidate cache")
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
|
|
|
|
app.logger.exception(f"no retry")
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to cache attachments for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
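

# post_to_outbox/post_to_inbox below do the synchronous part only (id assignment, save,
# block/duplicate checks); side effects and delivery happen in the /task/* handlers.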
def post_to_outbox(activity: ap.BaseActivity) -> str:
    if activity.has_type(ap.CREATE_TYPES):
        activity = activity.build_create()

    # Assign the activity a random ID
    obj_id = back.random_object_id()

    activity.set_id(back.activity_url(obj_id), obj_id)

    back.save(Box.OUTBOX, activity)
    Tasks.cache_actor(activity.id)
    Tasks.finish_post_to_outbox(activity.id)
    return activity.id


def post_to_inbox(activity: ap.BaseActivity) -> None:
    # Check for Block activity
    actor = activity.get_actor()
    if back.outbox_is_blocked(MY_PERSON, actor.id):
        app.logger.info(
            f"actor {actor!r} is blocked, dropping the received activity {activity!r}"
        )
        return

    if back.inbox_check_duplicate(MY_PERSON, activity.id):
        # The activity is already in the inbox, drop it
        app.logger.info(f"received duplicate activity {activity!r}, dropping it")
        return

    back.save(Box.INBOX, activity)
    Tasks.process_new_activity(activity.id)

    app.logger.info(f"spawning task for {activity!r}")
    Tasks.finish_post_to_inbox(activity.id)


def invalidate_cache(activity):
    if activity.has_type(ap.ActivityType.LIKE):
        if activity.get_object().id.startswith(BASE_URL):
            DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.ANNOUNCE):
        if activity.get_object().id.startswith(BASE_URL):
            DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.UNDO):
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.DELETE):
        # TODO(tsileo): only invalidate if it's a delete of a reply
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.UPDATE):
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.CREATE):
        note = activity.get_object()
        in_reply_to = note.get_in_reply_to()
        if not in_reply_to or in_reply_to.startswith(ID):
            DB.cache2.remove()
        # FIXME(tsileo): check if it's a reply of a reply
@app.route("/task/cache_attachments", methods=["POST"])
|
|
|
|
def task_cache_attachments():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
# Generates thumbnails for the actor's icon and the attachments if any
|
|
|
|
|
|
|
|
actor = activity.get_actor()
|
|
|
|
|
|
|
|
# Update the cached actor
|
|
|
|
DB.actors.update_one(
|
|
|
|
{"remote_id": iri},
|
|
|
|
{"$set": {"remote_id": iri, "data": actor.to_dict(embed=True)}},
|
|
|
|
upsert=True,
|
|
|
|
)
|
|
|
|
|
|
|
|
if actor.icon:
|
|
|
|
MEDIA_CACHE.cache(actor.icon["url"], Kind.ACTOR_ICON)
|
|
|
|
|
|
|
|
if activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
for attachment in activity.get_object()._data.get("attachment", []):
|
|
|
|
if (
|
|
|
|
attachment.get("mediaType", "").startswith("image/")
|
|
|
|
or attachment.get("type") == ap.ActivityType.IMAGE.value
|
|
|
|
):
|
|
|
|
try:
|
2019-04-08 09:41:09 -05:00
|
|
|
MEDIA_CACHE.cache_attachment2(attachment["url"], iri)
|
2019-04-05 04:35:48 -05:00
|
|
|
except ValueError:
|
|
|
|
app.logger.exception(f"failed to cache {attachment}")
|
|
|
|
|
|
|
|
app.logger.info(f"attachments cached for {iri}")
|
|
|
|
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
|
|
|
|
app.logger.exception(f"dropping activity {iri}, no attachment caching")
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to cache attachments for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
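
# How these /task/* routes are driven (an assumption based on p.parse(request)
# and the p.push() calls further below, not verified here): they are not
# user-facing. The poussetaches task queue POSTs the payload back to the
# matching route, and helpers like Tasks.cache_attachments(iri) presumably
# enqueue such a POST, roughly equivalent to p.push(iri, "/task/cache_attachments");
# a handler that raises TaskError is retried by the queue.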
@app.route("/task/cache_actor", methods=["POST"])
|
2019-04-05 08:14:57 -05:00
|
|
|
def task_cache_actor() -> str:
|
2019-04-05 04:35:48 -05:00
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri, also_cache_attachments = (
|
|
|
|
task.payload["iri"],
|
|
|
|
task.payload.get("also_cache_attachments", True),
|
|
|
|
)
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
|
|
|
|
if activity.has_type(ap.ActivityType.CREATE):
|
2019-04-05 08:14:57 -05:00
|
|
|
Tasks.fetch_og_meta(iri)
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
if activity.has_type([ap.ActivityType.LIKE, ap.ActivityType.ANNOUNCE]):
|
|
|
|
Tasks.cache_object(iri)
|
|
|
|
|
|
|
|
actor = activity.get_actor()
|
|
|
|
|
|
|
|
cache_actor_with_inbox = False
|
|
|
|
if activity.has_type(ap.ActivityType.FOLLOW):
|
|
|
|
if actor.id != ID:
|
|
|
|
# It's a Follow from the Inbox
|
|
|
|
cache_actor_with_inbox = True
|
|
|
|
else:
|
|
|
|
# It's a new following, cache the "object" (which is the actor we follow)
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"remote_id": iri},
|
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"meta.object": activitypub._actor_to_meta(
|
|
|
|
activity.get_object()
|
|
|
|
)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Cache the actor info
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"remote_id": iri},
|
|
|
|
{
|
|
|
|
"$set": {
|
|
|
|
"meta.actor": activitypub._actor_to_meta(
|
|
|
|
actor, cache_actor_with_inbox
|
|
|
|
)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
app.logger.info(f"actor cached for {iri}")
|
|
|
|
if also_cache_attachments and activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
Tasks.cache_attachments(iri)
|
|
|
|
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError):
|
|
|
|
DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}})
|
|
|
|
app.logger.exception(f"flagging activity {iri} as deleted, no actor caching")
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to cache actor for {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
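
# Task chaining visible above: caching an actor can fan out into further tasks
# depending on the activity type: Tasks.fetch_og_meta() for Create activities,
# Tasks.cache_object() for Like/Announce, and Tasks.cache_attachments() when
# the payload asked for attachment caching as well.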
@app.route("/task/process_new_activity", methods=["POST"]) # noqa:c901
|
|
|
|
def task_process_new_activity():
|
|
|
|
"""Process an activity received in the inbox"""
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
|
|
|
app.logger.info(f"activity={activity!r}")
|
|
|
|
|
|
|
|
# Is the activity expected?
|
|
|
|
# following = ap.get_backend().following()
|
|
|
|
should_forward = False
|
|
|
|
should_delete = False
|
2019-04-07 14:24:52 -05:00
|
|
|
should_keep = False
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
tag_stream = False
|
|
|
|
if activity.has_type(ap.ActivityType.ANNOUNCE):
|
2019-04-08 09:41:09 -05:00
|
|
|
# FIXME(tsileo): Ensure it's follower and store into a "dead activities" DB
|
2019-04-05 04:35:48 -05:00
|
|
|
try:
|
|
|
|
activity.get_object()
|
|
|
|
tag_stream = True
|
2019-04-07 14:24:52 -05:00
|
|
|
if activity.get_object_id().startswith(BASE_URL):
|
|
|
|
should_keep = True
|
2019-04-05 04:35:48 -05:00
|
|
|
except (NotAnActivityError, BadActivityError):
|
|
|
|
app.logger.exception(f"failed to get announce object for {activity!r}")
|
|
|
|
# Most likely on OStatus notice
|
|
|
|
tag_stream = False
|
|
|
|
should_delete = True
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError):
|
|
|
|
# The announced activity is deleted/gone, drop it
|
|
|
|
should_delete = True
|
|
|
|
|
2019-04-07 14:24:52 -05:00
|
|
|
elif activity.has_type(ap.ActivityType.FOLLOW):
|
|
|
|
# FIXME(tsileo): ensure it's a follow where the server is the object
|
|
|
|
should_keep = True
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
elif activity.has_type(ap.ActivityType.CREATE):
|
|
|
|
note = activity.get_object()
|
2019-04-16 15:54:08 -05:00
|
|
|
in_reply_to = note.get_in_reply_to()
|
2019-04-05 04:35:48 -05:00
|
|
|
# Make the note part of the stream if it's not a reply, or if it's a local reply
|
2019-04-16 15:54:08 -05:00
|
|
|
if not in_reply_to or in_reply_to.startswith(ID):
|
2019-04-05 04:35:48 -05:00
|
|
|
tag_stream = True
|
|
|
|
|
2019-04-14 12:17:54 -05:00
|
|
|
# FIXME(tsileo): check for direct addressing in the to, cc, bcc... fields
|
2019-04-16 16:00:15 -05:00
|
|
|
if (in_reply_to and in_reply_to.startswith(ID)) or note.has_mention(ID):
|
2019-04-07 14:24:52 -05:00
|
|
|
should_keep = True
|
|
|
|
|
2019-04-16 15:54:08 -05:00
|
|
|
if in_reply_to:
|
2019-04-05 04:35:48 -05:00
|
|
|
try:
|
2019-04-16 15:54:08 -05:00
|
|
|
reply = ap.fetch_remote_activity(note.get_in_reply_to())
|
2019-04-05 04:35:48 -05:00
|
|
|
if (
|
|
|
|
reply.id.startswith(ID) or reply.has_mention(ID)
|
|
|
|
) and activity.is_public():
|
|
|
|
# The reply is public "local reply", forward the reply (i.e. the original activity) to the
|
|
|
|
# original recipients
|
|
|
|
should_forward = True
|
2019-04-07 14:24:52 -05:00
|
|
|
should_keep = True
|
2019-04-05 04:35:48 -05:00
|
|
|
except NotAnActivityError:
|
|
|
|
# Most likely a reply to an OStatus notce
|
|
|
|
should_delete = True
|
|
|
|
|
|
|
|
# (partial) Ghost replies handling
|
|
|
|
# [X] This is the first time the server has seen this Activity.
|
|
|
|
should_forward = False
|
|
|
|
local_followers = ID + "/followers"
|
|
|
|
for field in ["to", "cc"]:
|
|
|
|
if field in activity._data:
|
|
|
|
if local_followers in activity._data[field]:
|
|
|
|
# [X] The values of to, cc, and/or audience contain a Collection owned by the server.
|
|
|
|
should_forward = True
|
|
|
|
|
|
|
|
# [X] The values of inReplyTo, object, target and/or tag are objects owned by the server
|
2019-04-16 15:54:08 -05:00
|
|
|
if not (in_reply_to and in_reply_to.startswith(ID)):
|
2019-04-05 04:35:48 -05:00
|
|
|
should_forward = False
|
|
|
|
|
|
|
|
elif activity.has_type(ap.ActivityType.DELETE):
|
|
|
|
note = DB.activities.find_one(
|
|
|
|
{"activity.object.id": activity.get_object().id}
|
|
|
|
)
|
|
|
|
if note and note["meta"].get("forwarded", False):
|
|
|
|
# If the activity was originally forwarded, forward the delete too
|
|
|
|
should_forward = True
|
|
|
|
|
|
|
|
elif activity.has_type(ap.ActivityType.LIKE):
|
2019-04-07 14:24:52 -05:00
|
|
|
if activity.get_object_id().startswith(BASE_URL):
|
|
|
|
should_keep = True
|
|
|
|
else:
|
2019-04-05 04:35:48 -05:00
|
|
|
# We only want to keep a like if it's a like for a local activity
|
|
|
|
# (Pleroma relay the likes it received, we don't want to store them)
|
|
|
|
should_delete = True
|
|
|
|
|
|
|
|
if should_forward:
|
|
|
|
app.logger.info(f"will forward {activity!r} to followers")
|
|
|
|
Tasks.forward_activity(activity.id)
|
|
|
|
|
|
|
|
if should_delete:
|
|
|
|
app.logger.info(f"will soft delete {activity!r}")
|
|
|
|
|
|
|
|
app.logger.info(f"{iri} tag_stream={tag_stream}")
|
|
|
|
DB.activities.update_one(
|
|
|
|
{"remote_id": activity.id},
|
|
|
|
{
|
|
|
|
"$set": {
|
2019-04-07 14:24:52 -05:00
|
|
|
"meta.keep": should_keep,
|
2019-04-05 04:35:48 -05:00
|
|
|
"meta.stream": tag_stream,
|
|
|
|
"meta.forwarded": should_forward,
|
|
|
|
"meta.deleted": should_delete,
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
app.logger.info(f"new activity {iri} processed")
|
|
|
|
if not should_delete and not activity.has_type(ap.ActivityType.DELETE):
|
|
|
|
Tasks.cache_actor(iri)
|
|
|
|
except (ActivityGoneError, ActivityNotFoundError):
|
2019-04-07 05:41:27 -05:00
|
|
|
app.logger.exception(f"dropping activity {iri}, skip processing")
|
|
|
|
return ""
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception(f"failed to process new activity {iri}")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
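
# Summary of the meta flags written above (semantics partly inferred):
# meta.keep shields an activity from the cleanup tasks below, meta.stream
# marks it for the local stream, meta.forwarded records that it was relayed to
# followers (so a matching Delete gets relayed too), and meta.deleted is a
# soft delete.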
@app.route("/task/forward_activity", methods=["POST"])
|
2019-04-05 04:35:48 -05:00
|
|
|
def task_forward_activity():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
activity = ap.fetch_remote_activity(iri)
|
2019-04-08 13:55:03 -05:00
|
|
|
recipients = back.followers_as_recipients()
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.debug(f"Forwarding {activity!r} to {recipients}")
|
|
|
|
activity = ap.clean_activity(activity.to_dict())
|
|
|
|
payload = json.dumps(activity)
|
|
|
|
for recp in recipients:
|
|
|
|
app.logger.debug(f"forwarding {activity!r} to {recp}")
|
|
|
|
Tasks.post_to_remote_inbox(payload, recp)
|
2019-04-07 07:37:05 -05:00
|
|
|
except Exception as err:
|
2019-04-05 04:35:48 -05:00
|
|
|
app.logger.exception("task failed")
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
return ""
|
|
|
|
|
2019-04-05 04:35:48 -05:00
|
|
|
|
2019-04-05 08:14:57 -05:00
|
|
|
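
# Delivery fan-out (as implemented above): the cleaned activity is serialized
# once and one post_to_remote_inbox task is enqueued per follower recipient,
# so a failing remote inbox should only retry its own delivery rather than the
# whole forward (an inference from the per-recipient tasks).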
@app.route("/task/post_to_remote_inbox", methods=["POST"])
|
2019-04-05 04:35:48 -05:00
|
|
|
def task_post_to_remote_inbox():
|
2019-04-05 14:36:56 -05:00
|
|
|
"""Post an activity to a remote inbox."""
|
2019-04-05 04:35:48 -05:00
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
payload, to = task.payload["payload"], task.payload["to"]
|
|
|
|
try:
|
|
|
|
app.logger.info("payload=%s", payload)
|
|
|
|
app.logger.info("generating sig")
|
|
|
|
signed_payload = json.loads(payload)
|
|
|
|
|
2019-04-22 02:50:53 -05:00
|
|
|
# XXX Disable JSON-LD signature crap for now (as HTTP signatures are enough for most implementations)
|
2019-04-05 04:35:48 -05:00
|
|
|
# Don't overwrite the signature if we're forwarding an activity
|
2019-04-22 02:50:53 -05:00
|
|
|
# if "signature" not in signed_payload:
|
|
|
|
# generate_signature(signed_payload, KEY)
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
app.logger.info("to=%s", to)
|
|
|
|
resp = requests.post(
|
|
|
|
to,
|
|
|
|
data=json.dumps(signed_payload),
|
|
|
|
auth=SIG_AUTH,
|
|
|
|
headers={
|
|
|
|
"Content-Type": HEADERS[1],
|
|
|
|
"Accept": HEADERS[1],
|
|
|
|
"User-Agent": USER_AGENT,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
app.logger.info("resp=%s", resp)
|
|
|
|
app.logger.info("resp_body=%s", resp.text)
|
|
|
|
resp.raise_for_status()
|
|
|
|
except HTTPError as err:
|
|
|
|
app.logger.exception("request failed")
|
|
|
|
if 400 >= err.response.status_code >= 499:
|
|
|
|
app.logger.info("client error, no retry")
|
|
|
|
return ""
|
|
|
|
|
2019-04-07 07:37:05 -05:00
|
|
|
raise TaskError() from err
|
|
|
|
except Exception as err:
|
|
|
|
app.logger.exception("task failed")
|
|
|
|
raise TaskError() from err
|
2019-04-05 08:14:57 -05:00
|
|
|
|
|
|
|
return ""
|
2019-04-08 10:24:50 -05:00
|
|
|
|
|
|
|
|
2019-04-15 15:23:55 -05:00
|
|
|
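
# Retry semantics (an interpretation of the handler above): returning "" with
# a 2xx status acknowledges the delivery task, raising TaskError asks the task
# queue to retry later, and a 4xx response from the remote inbox is treated as
# a permanent client error that is not retried.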
@app.route("/task/update_question", methods=["POST"])
|
|
|
|
def task_update_question():
|
|
|
|
"""Post an activity to a remote inbox."""
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
iri = task.payload
|
|
|
|
try:
|
|
|
|
app.logger.info(f"Updating question {iri}")
|
|
|
|
# TODO(tsileo): sends an Update with the question/iri as an actor, with the updated stats (LD sig will fail?)
|
|
|
|
# but to who? followers and people who voted? but this must not be visible right?
|
|
|
|
# also sends/trigger a notification when a poll I voted for ends like Mastodon?
|
|
|
|
except HTTPError as err:
|
|
|
|
app.logger.exception("request failed")
|
|
|
|
if 400 >= err.response.status_code >= 499:
|
|
|
|
app.logger.info("client error, no retry")
|
|
|
|
return ""
|
|
|
|
|
|
|
|
raise TaskError() from err
|
|
|
|
except Exception as err:
|
|
|
|
app.logger.exception("task failed")
|
|
|
|
raise TaskError() from err
|
|
|
|
|
|
|
|
return ""
|
|
|
|
|
|
|
|
|
2019-04-08 11:09:33 -05:00
|
|
|
@app.route("/task/cleanup", methods=["POST"])
|
|
|
|
def task_cleanup():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
p.push({}, "/task/cleanup_part_1")
|
2019-04-08 11:14:39 -05:00
|
|
|
return ""
|
2019-04-08 11:09:33 -05:00
|
|
|
|
|
|
|
|
2019-04-08 10:24:50 -05:00
|
|
|
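
# The cleanup work is split into a chain of tasks: /task/cleanup pushes
# part 1, which pushes part 2, which pushes part 3. Together they prune
# inbox/replies activities older than 15 days that are not flagged with
# meta.keep, along with their cached media.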
@app.route("/task/cleanup_part_1", methods=["POST"])
|
|
|
|
def task_cleanup_part_1():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
d = (datetime.utcnow() - timedelta(days=15)).strftime("%Y-%m-%d")
|
|
|
|
|
|
|
|
# (We keep Follow and Accept forever)
|
|
|
|
|
|
|
|
# Announce and Like cleanup
|
|
|
|
for ap_type in [ActivityType.ANNOUNCE, ActivityType.LIKE]:
|
|
|
|
# Migrate old (before meta.keep activities on the fly)
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ap_type.value,
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
"activity.object": {"$regex": f"^{BASE_URL}"},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": True}},
|
|
|
|
)
|
|
|
|
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ap_type.value,
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
"activity.object.id": {"$regex": f"^{BASE_URL}"},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": True}},
|
|
|
|
)
|
|
|
|
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ap_type.value,
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": False}},
|
|
|
|
)
|
|
|
|
# End of the migration
|
|
|
|
|
|
|
|
# Delete old activities
|
|
|
|
DB.activities.delete_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ap_type.value,
|
|
|
|
"meta.keep": False,
|
|
|
|
"activity.published": {"$lt": d},
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
# And delete the soft-deleted one
|
|
|
|
DB.activities.delete_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ap_type.value,
|
|
|
|
"meta.keep": False,
|
|
|
|
"meta.deleted": True,
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
# Create cleanup (more complicated)
|
|
|
|
# The one that mention our actor
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
"activity.object.tag.href": {"$regex": f"^{BASE_URL}"},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": True}},
|
|
|
|
)
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.REPLIES.value,
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
"activity.tag.href": {"$regex": f"^{BASE_URL}"},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": True}},
|
|
|
|
)
|
|
|
|
|
|
|
|
# The replies of the outbox
|
|
|
|
DB.activities.update_many(
|
|
|
|
{"meta.thread_root_parent": {"$regex": f"^{BASE_URL}"}},
|
|
|
|
{"$set": {"meta.keep": True}},
|
|
|
|
)
|
|
|
|
# Track all the threads we participated
|
|
|
|
keep_threads = []
|
|
|
|
for data in DB.activities.find(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"meta.thread_root_parent": {"$exists": True},
|
|
|
|
}
|
|
|
|
):
|
|
|
|
keep_threads.append(data["meta"]["thread_root_parent"])
|
|
|
|
|
|
|
|
for root_parent in set(keep_threads):
|
|
|
|
DB.activities.update_many(
|
|
|
|
{"meta.thread_root_parent": root_parent}, {"$set": {"meta.keep": True}}
|
|
|
|
)
|
|
|
|
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": {"$in": [Box.REPLIES.value, Box.INBOX.value]},
|
|
|
|
"meta.keep": {"$exists": False},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.keep": False}},
|
|
|
|
)
|
2019-04-14 12:17:54 -05:00
|
|
|
|
|
|
|
DB.activities.update_many(
|
|
|
|
{
|
|
|
|
"box": Box.OUTBOX.value,
|
2019-04-15 14:20:14 -05:00
|
|
|
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
|
2019-04-14 12:17:54 -05:00
|
|
|
"meta.public": {"$exists": False},
|
|
|
|
},
|
|
|
|
{"$set": {"meta.public": True}},
|
|
|
|
)
|
|
|
|
|
|
|
|
p.push({}, "/task/cleanup_part_2")
|
2019-04-08 10:24:50 -05:00
|
|
|
return "OK"
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/task/cleanup_part_2", methods=["POST"])
|
|
|
|
def task_cleanup_part_2():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
d = (datetime.utcnow() - timedelta(days=15)).strftime("%Y-%m-%d")
|
|
|
|
|
|
|
|
# Go over the old Create activities
|
|
|
|
for data in DB.activities.find(
|
|
|
|
{
|
|
|
|
"box": Box.INBOX.value,
|
|
|
|
"type": ActivityType.CREATE.value,
|
|
|
|
"meta.keep": False,
|
|
|
|
"activity.published": {"$lt": d},
|
|
|
|
}
|
2019-04-09 01:40:48 -05:00
|
|
|
).limit(5000):
|
2019-04-08 10:24:50 -05:00
|
|
|
# Delete the cached attachment/
|
|
|
|
for grid_item in MEDIA_CACHE.fs.find({"remote_id": data["remote_id"]}):
|
|
|
|
MEDIA_CACHE.fs.delete(grid_item._id)
|
2019-04-09 01:40:48 -05:00
|
|
|
DB.activities.delete_one({"_id": data["_id"]})
|
2019-04-08 10:24:50 -05:00
|
|
|
|
2019-04-14 12:17:54 -05:00
|
|
|
p.push({}, "/task/cleanup_part_3")
|
2019-04-08 10:24:50 -05:00
|
|
|
return "OK"
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/task/cleanup_part_3", methods=["POST"])
|
|
|
|
def task_cleanup_part_3():
|
|
|
|
task = p.parse(request)
|
|
|
|
app.logger.info(f"task={task!r}")
|
|
|
|
|
|
|
|
d = (datetime.utcnow() - timedelta(days=15)).strftime("%Y-%m-%d")
|
|
|
|
|
|
|
|
# Delete old replies we don't care about
|
|
|
|
DB.activities.delete_many(
|
|
|
|
{"box": Box.REPLIES.value, "meta.keep": False, "activity.published": {"$lt": d}}
|
|
|
|
)
|
|
|
|
|
|
|
|
# Remove all the attachments no tied to a remote_id (post celery migration)
|
|
|
|
for grid_item in MEDIA_CACHE.fs.find(
|
|
|
|
{"kind": {"$in": ["og", "attachment"]}, "remote_id": {"$exists": False}}
|
|
|
|
):
|
|
|
|
MEDIA_CACHE.fs.delete(grid_item._id)
|
|
|
|
|
|
|
|
# TODO(tsileo): iterator over "actor_icon" and look for unused one in a separate task
|
|
|
|
|
|
|
|
return "OK"
|