# microblog.pub/app.py

import binascii
import json
import logging
import mimetypes
import os
import traceback
import urllib
from datetime import datetime
from datetime import timezone
from functools import wraps
from io import BytesIO
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from urllib.parse import urlencode
from urllib.parse import urlparse

import bleach
import mf2py
import pymongo
import timeago
from bson.objectid import ObjectId
from dateutil import parser
from flask import Flask
from flask import make_response
from flask import Response
from flask import abort
from flask import jsonify as flask_jsonify
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from flask_wtf.csrf import CSRFProtect
from html2text import html2text
from itsdangerous import BadSignature
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import _to_list
from little_boxes.activitypub import clean_activity
from little_boxes.activitypub import get_backend
from little_boxes.content_helper import parse_markdown
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import ActivityNotFoundError
from little_boxes.errors import Error
from little_boxes.errors import NotFromOutboxError
from little_boxes.httpsig import HTTPSigAuth
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_actor_url
from little_boxes.webfinger import get_remote_follow_template
from passlib.hash import bcrypt
from u2flib_server import u2f
from werkzeug.utils import secure_filename

import activitypub
import config
import tasks
from activitypub import Box
from activitypub import embed_collection
from config import ADMIN_API_KEY
from config import BASE_URL
from config import DB
from config import DEBUG_MODE
from config import DOMAIN
from config import HEADERS
from config import ICON_URL
from config import ID
from config import JWT
from config import KEY
from config import ME
from config import MEDIA_CACHE
from config import PASS
from config import USERNAME
from config import VERSION
from config import _drop_db
from utils.key import get_secret_key
from utils.lookup import lookup
from utils.media import Kind

back = activitypub.MicroblogPubBackend()
ap.use_backend(back)
MY_PERSON = ap.Person(**ME)

app = Flask(__name__)
app.secret_key = get_secret_key("flask")
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf = CSRFProtect(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
else:
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)

SIG_AUTH = HTTPSigAuth(KEY)


def verify_pass(pwd):
    return bcrypt.verify(pwd, PASS)


@app.context_processor
def inject_config():
    q = {
        "type": "Create",
        "activity.object.type": "Note",
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
    }
    notes_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    q = {"type": "Create", "activity.object.type": "Note", "meta.deleted": False}
    with_replies_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    liked_count = DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "type": ActivityType.LIKE.value,
        }
    )
    followers_q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    following_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=session.get("logged_in", False),
        followers_count=DB.activities.count(followers_q),
        following_count=DB.activities.count(following_q),
        notes_count=notes_count,
        liked_count=liked_count,
        with_replies_count=with_replies_count,
        me=ME,
    )


@app.after_request
def set_x_powered_by(response):
    response.headers["X-Powered-By"] = "microblog.pub"
    return response


# HTML/templates helper
ALLOWED_TAGS = [
    "a",
    "abbr",
    "acronym",
    "b",
    "br",
    "blockquote",
    "code",
    "pre",
    "em",
    "i",
    "li",
    "ol",
    "strong",
    "ul",
    "span",
    "div",
    "p",
    "h1",
    "h2",
    "h3",
    "h4",
    "h5",
    "h6",
]


def clean_html(html):
    return bleach.clean(html, tags=ALLOWED_TAGS)
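
# Illustration (not in the original source): with bleach's defaults, markup
# outside ALLOWED_TAGS is escaped rather than dropped, e.g.
#   clean_html("<script>x</script><p>hi</p>")
#   -> "&lt;script&gt;x&lt;/script&gt;<p>hi</p>"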


_GRIDFS_CACHE: Dict[Tuple[Kind, str, Optional[int]], str] = {}


def _get_file_url(url, size, kind):
    k = (kind, url, size)
    cached = _GRIDFS_CACHE.get(k)
    if cached:
        return cached

    doc = MEDIA_CACHE.get_file(url, size, kind)
    if doc:
        u = f"/media/{str(doc._id)}"
        _GRIDFS_CACHE[k] = u
        return u

    # MEDIA_CACHE.cache(url, kind)
    app.logger.error(f"cache not available for {url}/{size}/{kind}")
    return url
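
# NB: _GRIDFS_CACHE is a plain per-process dict, so each worker warms its own
# copy and entries live until the process restarts.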


@app.template_filter()
def remove_mongo_id(dat):
    if isinstance(dat, list):
        return [remove_mongo_id(item) for item in dat]

    if "_id" in dat:
        dat["_id"] = str(dat["_id"])
    for k, v in dat.items():
        if isinstance(v, dict):
            dat[k] = remove_mongo_id(dat[k])
    return dat


@app.template_filter()
def get_video_link(data):
    for link in data:
        if link.get("mimeType", "").startswith("video/"):
            return link.get("href")
    return None


@app.template_filter()
def get_actor_icon_url(url, size):
    return _get_file_url(url, size, Kind.ACTOR_ICON)


@app.template_filter()
def get_attachment_url(url, size):
    return _get_file_url(url, size, Kind.ATTACHMENT)


@app.template_filter()
def get_og_image_url(url, size=100):
    try:
        return _get_file_url(url, size, Kind.OG_IMAGE)
    except Exception:
        return ""


@app.template_filter()
def permalink_id(val):
    return str(hash(val))
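
# NB: str hash() is randomized per process in Python 3, so these permalink
# fragments are only stable within a single run unless PYTHONHASHSEED is
# pinned.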


@app.template_filter()
def quote_plus(t):
    return urllib.parse.quote_plus(t)


@app.template_filter()
def is_from_outbox(t):
    return t.startswith(ID)


@app.template_filter()
def clean(html):
    return clean_html(html)


@app.template_filter()
def html2plaintext(body):
    return html2text(body)


@app.template_filter()
def domain(url):
    return urlparse(url).netloc


@app.template_filter()
def url_or_id(d):
    if "url" in d:
        return d["url"]
    return d["id"]


@app.template_filter()
def get_url(u):
    print(f"GET_URL({u!r})")
    if isinstance(u, list):
        for l in u:
            if l.get("mimeType") == "text/html":
                u = l
    if isinstance(u, dict):
        return u["href"]
    elif isinstance(u, str):
        return u
    else:
        return u


@app.template_filter()
def get_actor(url):
    if not url:
        return None
    if isinstance(url, list):
        url = url[0]
    if isinstance(url, dict):
        url = url.get("id")
    print(f"GET_ACTOR {url}")
    try:
        return get_backend().fetch_iri(url)
    except (ActivityNotFoundError, ActivityGoneError):
        return f"Deleted<{url}>"
    except Exception as exc:
        return f"Error<{url}/{exc!r}>"


@app.template_filter()
def format_time(val):
    if val:
        dt = parser.parse(val)
        # %I (12-hour clock) pairs correctly with the %p AM/PM marker
        return datetime.strftime(dt, "%B %d, %Y, %I:%M %p")
    return val


@app.template_filter()
def format_timeago(val):
    if val:
        dt = parser.parse(val)
        return timeago.format(dt, datetime.now(timezone.utc))
    return val


@app.template_filter()
def has_type(doc, _types):
    for _type in _to_list(_types):
        if _type in _to_list(doc["type"]):
            return True
    return False


@app.template_filter()
def has_actor_type(doc):
    for t in ap.ACTOR_TYPES:
        if has_type(doc, t.value):
            return True
    return False


def _is_img(filename):
    filename = filename.lower()
    return filename.endswith((".png", ".jpg", ".jpeg", ".gif", ".svg"))


@app.template_filter()
def not_only_imgs(attachment):
    for a in attachment:
        if not _is_img(a["url"]):
            return True
    return False


@app.template_filter()
def is_img(filename):
    return _is_img(filename)


def add_response_headers(headers={}):
    """This decorator adds the headers passed in to the response"""

    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            resp = make_response(f(*args, **kwargs))
            h = resp.headers
            for header, value in headers.items():
                h[header] = value
            return resp

        return decorated_function

    return decorator


def noindex(f):
    """This decorator passes X-Robots-Tag: noindex, nofollow"""
    return add_response_headers({"X-Robots-Tag": "noindex, nofollow"})(f)


def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session.get("logged_in"):
            return redirect(url_for("admin_login", next=request.url))
        return f(*args, **kwargs)

    return decorated_function


def _api_required():
    if session.get("logged_in"):
        if request.method not in ["GET", "HEAD"]:
            # If a standard API request is made with a "login session", it must have a CSRF token
            csrf.protect()
        return

    # Token verification
    token = request.headers.get("Authorization", "").replace("Bearer ", "")
    if not token:
        # IndieAuth token
        token = request.form.get("access_token", "")

    # Will raise a BadSignature on bad auth
    payload = JWT.loads(token)
    logger.info(f"api call by {payload}")


def api_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            _api_required()
        except BadSignature:
            abort(401)

        return f(*args, **kwargs)

    return decorated_function


def jsonify(**data):
    if "@context" not in data:
        data["@context"] = config.DEFAULT_CTX
    return Response(
        response=json.dumps(data),
        headers={
            "Content-Type": "application/json"
            if app.debug
            else "application/activity+json"
        },
    )
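
# When app.debug is set the payload is served as plain application/json (so
# browsers render it inline); otherwise it gets the ActivityPub media type,
# and config.DEFAULT_CTX is injected as "@context" if the caller didn't set
# one.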


def is_api_request():
    h = request.headers.get("Accept")
    if h is None:
        return False

    h = h.split(",")[0]
    if h in HEADERS or h == "application/json":
        return True
    return False
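
# e.g. a remote server fetching with "Accept: application/activity+json" (one
# of the ActivityPub media types listed in config.HEADERS) is treated as an
# API request and served JSON, while a browser's "text/html" gets the
# rendered templates.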


@app.errorhandler(ValueError)
def handle_value_error(error):
    logger.error(
        f"caught value error: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(message=error.args[0])
    response.status_code = 400
    return response


@app.errorhandler(Error)
def handle_activitypub_error(error):
    logger.error(
        f"caught activitypub error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(error.to_dict())
    response.status_code = error.status_code
    return response


# @app.errorhandler(Exception)
# def handle_other_error(error):
# logger.error(
# f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
# )
# response = flask_jsonify({})
# response.status_code = 500
# return response

# App routes

ROBOTS_TXT = """User-agent: *
Disallow: /login
Disallow: /admin/
Disallow: /static/
Disallow: /media/
Disallow: /uploads/"""


@app.route("/robots.txt")
def robots_txt():
return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"})
@app.route("/media/<media_id>")
2018-07-22 05:17:55 -05:00
@noindex
def serve_media(media_id):
f = MEDIA_CACHE.fs.get(ObjectId(media_id))
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
f = MEDIA_CACHE.fs.get(ObjectId(oid))
2018-07-04 18:02:51 -05:00
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
2018-05-18 13:41:41 -05:00
#######
# Login

@app.route("/admin/logout")
@login_required
def admin_logout():
    session["logged_in"] = False
    return redirect("/")


@app.route("/login", methods=["POST", "GET"])
@noindex
def admin_login():
    if session.get("logged_in") is True:
        return redirect(url_for("admin_notifications"))

    devices = [doc["device"] for doc in DB.u2f.find()]
    u2f_enabled = True if devices else False
    if request.method == "POST":
        csrf.protect()
        pwd = request.form.get("pass")
        if pwd and verify_pass(pwd):
            if devices:
                resp = json.loads(request.form.get("resp"))
                print(resp)
                try:
                    u2f.complete_authentication(session["challenge"], resp)
                except ValueError as exc:
                    print("failed", exc)
                    abort(401)
                    return
                finally:
                    session["challenge"] = None

            session["logged_in"] = True
            return redirect(
                request.args.get("redirect") or url_for("admin_notifications")
            )
        else:
            abort(401)

    payload = None
    if devices:
        payload = u2f.begin_authentication(ID, devices)
        session["challenge"] = payload

    return render_template("login.html", u2f_enabled=u2f_enabled, payload=payload)


@app.route("/remote_follow", methods=["GET", "POST"])
2018-05-18 13:41:41 -05:00
def remote_follow():
2018-06-16 15:02:10 -05:00
if request.method == "GET":
return render_template("remote_follow.html")
2018-05-18 13:41:41 -05:00
2018-06-04 10:59:38 -05:00
csrf.protect()
profile = request.form.get("profile")
if not profile.startswith("@"):
profile = f"@{profile}"
2018-06-16 15:02:10 -05:00
return redirect(
get_remote_follow_template(profile).format(uri=f"{USERNAME}@{DOMAIN}")
2018-06-16 15:02:10 -05:00
)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/authorize_follow", methods=["GET", "POST"])
2018-05-18 13:41:41 -05:00
@login_required
def authorize_follow():
2018-06-16 15:02:10 -05:00
if request.method == "GET":
return render_template(
"authorize_remote_follow.html", profile=request.args.get("profile")
)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
actor = get_actor_url(request.form.get("profile"))
2018-05-18 13:41:41 -05:00
if not actor:
abort(500)
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
"activity.object": actor,
}
if DB.activities.count(q) > 0:
2018-06-16 15:02:10 -05:00
return redirect("/following")
2018-05-18 13:41:41 -05:00
2018-06-24 12:22:40 -05:00
follow = ap.Follow(actor=MY_PERSON.id, object=actor)
2018-07-29 09:07:27 -05:00
tasks.post_to_outbox(follow)
2018-06-16 15:33:51 -05:00
2018-06-16 15:02:10 -05:00
return redirect("/following")
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/u2f/register", methods=["GET", "POST"])
2018-05-18 13:41:41 -05:00
@login_required
def u2f_register():
# TODO(tsileo): ensure no duplicates
2018-06-16 15:02:10 -05:00
if request.method == "GET":
2018-05-18 13:41:41 -05:00
payload = u2f.begin_registration(ID)
2018-06-16 15:02:10 -05:00
session["challenge"] = payload
return render_template("u2f.html", payload=payload)
2018-05-18 13:41:41 -05:00
else:
2018-06-16 15:02:10 -05:00
resp = json.loads(request.form.get("resp"))
device, device_cert = u2f.complete_registration(session["challenge"], resp)
session["challenge"] = None
DB.u2f.insert_one({"device": device, "cert": device_cert})
return ""
2018-05-18 13:41:41 -05:00
#######
# Activity pub routes


@app.route("/drop_cache")
@login_required
def drop_cache():
    DB.actors.drop()
    return "Done"


@app.route("/migration1_step1")
@login_required
def tmp_migrate():
for activity in DB.outbox.find():
activity["box"] = Box.OUTBOX.value
DB.activities.insert_one(activity)
for activity in DB.inbox.find():
activity["box"] = Box.INBOX.value
DB.activities.insert_one(activity)
for activity in DB.replies.find():
activity["box"] = Box.REPLIES.value
DB.activities.insert_one(activity)
return "Done"
@app.route("/migration1_step2")
@login_required
def tmp_migrate2():
2018-07-01 04:05:33 -05:00
# Remove buggy OStatus announce
2018-07-01 05:49:40 -05:00
DB.activities.remove(
{"activity.object": {"$regex": f"^tag:"}, "type": ActivityType.ANNOUNCE.value}
)
2018-07-01 04:05:33 -05:00
# Cache the object
2018-06-29 15:16:26 -05:00
for activity in DB.activities.find():
2018-07-01 04:05:33 -05:00
if (
activity["box"] == Box.OUTBOX.value
and activity["type"] == ActivityType.LIKE.value
):
like = ap.parse_activity(activity["activity"])
obj = like.get_object()
DB.activities.update_one(
{"remote_id": like.id},
{"$set": {"meta.object": obj.to_dict(embed=True)}},
)
elif activity["type"] == ActivityType.ANNOUNCE.value:
announce = ap.parse_activity(activity["activity"])
obj = announce.get_object()
DB.activities.update_one(
{"remote_id": announce.id},
{"$set": {"meta.object": obj.to_dict(embed=True)}},
)
2018-06-29 15:16:26 -05:00
return "Done"
@app.route("/migration2")
@login_required
def tmp_migrate3():
for activity in DB.activities.find():
try:
activity = ap.parse_activity(activity["activity"])
actor = activity.get_actor()
if actor.icon:
MEDIA_CACHE.cache(actor.icon["url"], Kind.ACTOR_ICON)
if activity.type == ActivityType.CREATE.value:
for attachment in activity.get_object()._data.get("attachment", []):
MEDIA_CACHE.cache(attachment["url"], Kind.ATTACHMENT)
except Exception:
app.logger.exception("failed")
return "Done"
@app.route("/migration3")
@login_required
def tmp_migrate4():
for activity in DB.activities.find(
{"box": Box.OUTBOX.value, "type": ActivityType.UNDO.value}
):
try:
activity = ap.parse_activity(activity["activity"])
if activity.get_object().type == ActivityType.FOLLOW.value:
DB.activities.update_one(
{"remote_id": activity.get_object().id},
{"$set": {"meta.undo": True}},
)
print(activity.get_object().to_dict())
except Exception:
app.logger.exception("failed")
for activity in DB.activities.find(
{"box": Box.INBOX.value, "type": ActivityType.UNDO.value}
):
try:
activity = ap.parse_activity(activity["activity"])
if activity.get_object().type == ActivityType.FOLLOW.value:
DB.activities.update_one(
{"remote_id": activity.get_object().id},
{"$set": {"meta.undo": True}},
)
print(activity.get_object().to_dict())
except Exception:
app.logger.exception("failed")
return "Done"
2018-07-19 18:12:02 -05:00
@app.route("/migration4")
@login_required
def tmp_migrate5():
for activity in DB.activities.find():
tasks.cache_actor.delay(activity["remote_id"], also_cache_attachments=False)
2018-07-20 03:56:39 -05:00
return "Done"
@app.route("/migration5")
@login_required
def tmp_migrate6():
for activity in DB.activities.find():
# tasks.cache_actor.delay(activity["remote_id"], also_cache_attachments=False)
2018-07-20 03:56:39 -05:00
try:
a = ap.parse_activity(activity["activity"])
if a.has_type([ActivityType.LIKE, ActivityType.FOLLOW]):
DB.activities.update_one(
{"remote_id": a.id},
{
"$set": {
"meta.object_actor": activitypub._actor_to_meta(
a.get_object().get_actor()
)
}
},
)
except Exception:
app.logger.exception(f"processing {activity} failed")
2018-07-19 18:12:02 -05:00
return "Done"
2018-07-06 16:54:41 -05:00
def paginated_query(db, q, limit=25, sort_key="_id"):
older_than = newer_than = None
query_sort = -1
2018-07-06 16:53:33 -05:00
first_page = not request.args.get("older_than") and not request.args.get(
"newer_than"
)
query_older_than = request.args.get("older_than")
query_newer_than = request.args.get("newer_than")
2018-07-06 16:53:33 -05:00
if query_older_than:
q["_id"] = {"$lt": ObjectId(query_older_than)}
elif query_newer_than:
q["_id"] = {"$gt": ObjectId(query_newer_than)}
query_sort = 1
2018-07-06 16:53:33 -05:00
outbox_data = list(db.find(q, limit=limit + 1).sort(sort_key, query_sort))
outbox_len = len(outbox_data)
2018-07-06 16:53:33 -05:00
outbox_data = sorted(
outbox_data[:limit], key=lambda x: str(x[sort_key]), reverse=True
)
if query_older_than:
newer_than = str(outbox_data[0]["_id"])
if outbox_len == limit + 1:
older_than = str(outbox_data[-1]["_id"])
elif query_newer_than:
older_than = str(outbox_data[-1]["_id"])
if outbox_len == limit + 1:
newer_than = str(outbox_data[0]["_id"])
elif first_page and outbox_len == limit + 1:
older_than = str(outbox_data[-1]["_id"])
2018-06-04 10:59:38 -05:00
2018-07-06 16:53:33 -05:00
return outbox_data, older_than, newer_than
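
# Illustration (not in the original source): cursor-style pagination keyed on
# the Mongo ObjectId; fetching limit+1 docs detects whether another page
# exists.
#   GET /                   -> newest 25, older_than set if a 26th doc exists
#   GET /?older_than=<oid>  -> docs with _id < ObjectId(<oid>)
#   GET /?newer_than=<oid>  -> docs with _id > ObjectId(<oid>)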


CACHING = True


def _get_cached(type_="html", arg=None):
    if not CACHING:
        return None

    logged_in = session.get("logged_in")
    if not logged_in:
        cached = DB.cache2.find_one({"path": request.path, "type": type_, "arg": arg})
        if cached:
            app.logger.info("from cache")
            return cached["response_data"]

    return None


def _cache(resp, type_="html", arg=None):
    if not CACHING:
        return None

    logged_in = session.get("logged_in")
    if not logged_in:
        DB.cache2.update_one(
            {"path": request.path, "type": type_, "arg": arg},
            {"$set": {"response_data": resp, "date": datetime.now(timezone.utc)}},
            upsert=True,
        )
    return None
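
# Usage pattern (as in index() below): pages are cached per path/arg for
# anonymous visitors only, so the logged-in admin always sees fresh data.
#   cached = _get_cached("html", arg)
#   if cached:
#       return cached
#   resp = render_template(...)
#   _cache(resp, "html", arg)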
@app.route("/")
def index():
if is_api_request():
return jsonify(**ME)
cache_arg = f"{request.args.get('older_than', '')}:{request.args.get('newer_than', '')}"
cached = _get_cached("html", cache_arg)
if cached:
return cached
2018-07-06 16:53:33 -05:00
q = {
"box": Box.OUTBOX.value,
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
"activity.object.inReplyTo": None,
"meta.deleted": False,
"meta.undo": False,
"$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
2018-07-06 16:53:33 -05:00
}
2018-07-22 15:25:28 -05:00
pinned = []
# Only fetch the pinned notes if we're on the first page
if not request.args.get("older_than") and not request.args.get("newer_than"):
q_pinned = {
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
"meta.deleted": False,
"meta.undo": False,
"meta.pinned": True,
}
pinned = list(DB.activities.find(q_pinned))
outbox_data, older_than, newer_than = paginated_query(
DB.activities, q, limit=25 - len(pinned)
)
2018-07-22 15:25:28 -05:00
2018-09-03 02:38:29 -05:00
resp = render_template(
"index.html",
outbox_data=outbox_data,
older_than=older_than,
newer_than=newer_than,
pinned=pinned,
)
2018-09-03 13:21:33 -05:00
_cache(resp, "html", cache_arg)
2018-09-03 02:38:29 -05:00
return resp


@app.route("/with_replies")
@login_required
def with_replies():
    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.deleted": False,
        "meta.undo": False,
    }
    outbox_data, older_than, newer_than = paginated_query(DB.activities, q)

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
    )


def _build_thread(data, include_children=True):
    data["_requested"] = True
    print(data)
    root_id = data["meta"].get("thread_root_parent", data["activity"]["object"]["id"])

    query = {
        "$or": [
            {"meta.thread_root_parent": root_id, "type": "Create"},
            {"activity.object.id": root_id},
        ]
    }
    if data["activity"]["object"].get("inReplyTo"):
        query["$or"].append(
            {"activity.object.id": data["activity"]["object"]["inReplyTo"]}
        )

    # Fetch the root replies, and the children
    replies = [data] + list(DB.activities.find(query))
    replies = sorted(replies, key=lambda d: d["activity"]["object"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = rep["activity"]["object"]["inReplyTo"]
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
            idx[node["activity"]["object"]["id"]]["_nodes"],
            key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    _flatten(idx[root_id])

    return thread
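
# Worked example (illustrative): for a root note A with replies B and C, and
# D replying to C, the depth-first flattening yields
#   [A (_level=0), B (_level=1), C (_level=1), D (_level=2)]
# with siblings ordered by their "published" timestamp.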


@app.route("/note/<note_id>")
def note_by_id(note_id):
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(note_id)}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    raw_likes = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.LIKE.value,
                "$or": [
                    # FIXME(tsileo): remove all the useless $or
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    likes = []
    for doc in raw_likes:
        try:
            likes.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"likes={likes!r}")

    raw_shares = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.ANNOUNCE.value,
                "$or": [
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    shares = []
    for doc in raw_shares:
        try:
            shares.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"shares={shares!r}")

    return render_template(
        "note.html", likes=likes, shares=shares, thread=thread, note=data
    )


@app.route("/nodeinfo")
2018-06-03 03:15:11 -05:00
def nodeinfo():
2018-09-03 13:21:33 -05:00
response = _get_cached("api")
cached = True
if not response:
cached = False
q = {
"box": Box.OUTBOX.value,
"meta.deleted": False, # TODO(tsileo): retrieve deleted and expose tombstone
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
}
response = json.dumps(
{
"version": "2.0",
"software": {
"name": "microblogpub",
"version": f"Microblog.pub {VERSION}",
},
"protocols": ["activitypub"],
"services": {"inbound": [], "outbound": []},
"openRegistrations": False,
"usage": {"users": {"total": 1}, "localPosts": DB.activities.count(q)},
"metadata": {
"sourceCode": "https://github.com/tsileo/microblog.pub",
"nodeName": f"@{USERNAME}@{DOMAIN}",
},
}
)
if not cached:
_cache(response, "api")
2018-06-03 03:15:11 -05:00
return Response(
2018-06-16 15:02:10 -05:00
headers={
"Content-Type": "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.0#"
},
2018-09-03 13:21:33 -05:00
response=response,
2018-06-03 03:15:11 -05:00
)
2018-06-16 15:02:10 -05:00
@app.route("/.well-known/nodeinfo")
2018-06-03 03:15:11 -05:00
def wellknown_nodeinfo():
return flask_jsonify(
links=[
{
2018-06-16 15:02:10 -05:00
"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
"href": f"{ID}/nodeinfo",
2018-06-03 03:15:11 -05:00
}
2018-06-16 15:02:10 -05:00
]
2018-06-03 03:15:11 -05:00
)
2018-06-16 15:02:10 -05:00
@app.route("/.well-known/webfinger")
2018-06-03 03:15:11 -05:00
def wellknown_webfinger():
2018-05-18 13:41:41 -05:00
"""Enable WebFinger support, required for Mastodon interopability."""
2018-07-03 17:40:23 -05:00
# TODO(tsileo): move this to little-boxes?
2018-06-16 15:02:10 -05:00
resource = request.args.get("resource")
if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
2018-05-18 13:41:41 -05:00
abort(404)
out = {
2018-06-16 15:02:10 -05:00
"subject": f"acct:{USERNAME}@{DOMAIN}",
2018-05-18 13:41:41 -05:00
"aliases": [ID],
"links": [
2018-06-16 15:02:10 -05:00
{
"rel": "http://webfinger.net/rel/profile-page",
"type": "text/html",
"href": BASE_URL,
},
2018-05-18 13:41:41 -05:00
{"rel": "self", "type": "application/activity+json", "href": ID},
2018-06-16 15:02:10 -05:00
{
"rel": "http://ostatus.org/schema/1.0/subscribe",
"template": BASE_URL + "/authorize_follow?profile={uri}",
},
2018-07-03 17:40:23 -05:00
{"rel": "magic-public-key", "href": KEY.to_magic_key()},
2018-07-04 14:08:45 -05:00
{
"href": ICON_URL,
"rel": "http://webfinger.net/rel/avatar",
"type": mimetypes.guess_type(ICON_URL)[0],
},
2018-05-18 13:41:41 -05:00
],
}
return Response(
response=json.dumps(out),
2018-06-16 15:02:10 -05:00
headers={
"Content-Type": "application/jrd+json; charset=utf-8"
if not app.debug
else "application/json"
},
2018-05-18 13:41:41 -05:00
)
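
# Example exchange (illustrative):
#   GET /.well-known/webfinger?resource=acct:alice@example.com
# returns the JRD above; its "self" link is what Mastodon dereferences to
# find this instance's ActivityPub actor document.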


def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
    if raw_doc["activity"]["type"] != ActivityType.CREATE.value:
        return raw_doc

    raw_doc["activity"]["object"]["replies"] = embed_collection(
        raw_doc.get("meta", {}).get("count_direct_reply", 0),
        f'{raw_doc["remote_id"]}/replies',
    )
    raw_doc["activity"]["object"]["likes"] = embed_collection(
        raw_doc.get("meta", {}).get("count_like", 0), f'{raw_doc["remote_id"]}/likes'
    )
    raw_doc["activity"]["object"]["shares"] = embed_collection(
        raw_doc.get("meta", {}).get("count_boost", 0), f'{raw_doc["remote_id"]}/shares'
    )

    return raw_doc


def remove_context(activity: Dict[str, Any]) -> Dict[str, Any]:
    if "@context" in activity:
        del activity["@context"]
    return activity


def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str, Any]:
    raw_doc = add_extra_collection(raw_doc)
    activity = clean_activity(raw_doc["activity"])
    if embed:
        return remove_context(activity)
    return activity


@app.route("/outbox", methods=["GET", "POST"])
2018-06-16 14:24:53 -05:00
def outbox():
2018-06-16 15:02:10 -05:00
if request.method == "GET":
2018-06-16 14:24:53 -05:00
if not is_api_request():
abort(404)
# TODO(tsileo): returns the whole outbox if authenticated
2018-05-18 13:41:41 -05:00
q = {
2018-06-29 15:16:26 -05:00
"box": Box.OUTBOX.value,
2018-07-09 17:49:52 -05:00
"meta.deleted": False,
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
2018-05-18 13:41:41 -05:00
}
2018-06-16 15:02:10 -05:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 15:16:26 -05:00
DB.activities,
2018-06-16 15:02:10 -05:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: activity_from_doc(doc, embed=True),
2018-07-31 16:23:20 -05:00
col_name="outbox",
2018-06-16 15:02:10 -05:00
)
)
2018-05-18 13:41:41 -05:00
# Handle POST request
2018-05-21 07:30:52 -05:00
try:
_api_required()
except BadSignature:
abort(401)
2018-06-16 14:24:53 -05:00
2018-05-18 13:41:41 -05:00
data = request.get_json(force=True)
print(data)
2018-06-17 12:21:59 -05:00
activity = ap.parse_activity(data)
2018-07-29 09:07:27 -05:00
activity_id = tasks.post_to_outbox(activity)
2018-07-29 09:07:27 -05:00
return Response(status=201, headers={"Location": activity_id})
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>")
2018-05-18 13:41:41 -05:00
def outbox_detail(item_id):
2018-06-29 15:16:26 -05:00
doc = DB.activities.find_one(
{"box": Box.OUTBOX.value, "remote_id": back.activity_url(item_id)}
)
2018-07-09 17:49:52 -05:00
if not doc:
abort(404)
2018-06-16 15:02:10 -05:00
if doc["meta"].get("deleted", False):
2018-06-17 12:21:59 -05:00
obj = ap.parse_activity(doc["activity"])
2018-07-09 17:49:52 -05:00
resp = jsonify(**obj.get_tombstone().to_dict())
resp.status_code = 410
return resp
2018-05-28 12:46:23 -05:00
return jsonify(**activity_from_doc(doc))
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/activity")
2018-05-18 13:41:41 -05:00
def outbox_activity(item_id):
2018-06-29 15:16:26 -05:00
data = DB.activities.find_one(
2018-07-09 17:49:52 -05:00
{"box": Box.OUTBOX.value, "remote_id": back.activity_url(item_id)}
2018-06-18 15:01:21 -05:00
)
2018-05-18 13:41:41 -05:00
if not data:
abort(404)
2018-05-28 12:46:23 -05:00
obj = activity_from_doc(data)
2018-07-09 17:49:52 -05:00
if data["meta"].get("deleted", False):
obj = ap.parse_activity(data["activity"])
resp = jsonify(**obj.get_object().get_tombstone().to_dict())
resp.status_code = 410
return resp
2018-06-16 15:02:10 -05:00
if obj["type"] != ActivityType.CREATE.value:
2018-05-18 13:41:41 -05:00
abort(404)
2018-06-16 15:02:10 -05:00
return jsonify(**obj["object"])
2018-05-28 12:46:23 -05:00
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
if not is_api_request():
abort(404)
2018-06-29 15:16:26 -05:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
}
2018-06-18 15:01:21 -05:00
)
if not data:
abort(404)
2018-06-17 12:21:59 -05:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 15:02:10 -05:00
"meta.deleted": False,
"type": ActivityType.CREATE.value,
"activity.object.inReplyTo": obj.get_object().id,
}
2018-06-16 15:02:10 -05:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 15:16:26 -05:00
DB.activities,
2018-06-16 15:02:10 -05:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name=f"outbox/{item_id}/replies",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
if not is_api_request():
abort(404)
2018-06-29 15:16:26 -05:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
}
2018-06-18 15:01:21 -05:00
)
if not data:
abort(404)
2018-06-17 12:21:59 -05:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 15:02:10 -05:00
"meta.undo": False,
"type": ActivityType.LIKE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
2018-06-16 15:02:10 -05:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 15:16:26 -05:00
DB.activities,
2018-06-16 15:02:10 -05:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/likes",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/shares")
def outbox_activity_shares(item_id):
if not is_api_request():
abort(404)
2018-06-29 15:16:26 -05:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
}
2018-06-18 15:01:21 -05:00
)
if not data:
abort(404)
2018-06-17 12:21:59 -05:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 15:02:10 -05:00
"meta.undo": False,
"type": ActivityType.ANNOUNCE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
2018-06-16 15:02:10 -05:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 15:16:26 -05:00
DB.activities,
2018-06-16 15:02:10 -05:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/shares",
first_page=request.args.get("page") == "first",
)
)
@app.route("/admin", methods=["GET"])
2018-05-18 13:41:41 -05:00
@login_required
def admin():
2018-06-29 15:16:26 -05:00
q = {
"meta.deleted": False,
"meta.undo": False,
"type": ActivityType.LIKE.value,
"box": Box.OUTBOX.value,
}
col_liked = DB.activities.count(q)
2018-05-18 13:41:41 -05:00
return render_template(
2018-06-16 15:02:10 -05:00
"admin.html",
instances=list(DB.instances.find()),
2018-06-29 15:16:26 -05:00
inbox_size=DB.activities.count({"box": Box.INBOX.value}),
outbox_size=DB.activities.count({"box": Box.OUTBOX.value}),
2018-06-16 15:02:10 -05:00
col_liked=col_liked,
col_followers=DB.activities.count(
{
"box": Box.INBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
}
),
col_following=DB.activities.count(
{
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
}
),
2018-05-18 13:41:41 -05:00
)
2018-06-16 14:24:53 -05:00
2018-05-18 13:41:41 -05:00
@app.route("/admin/lookup", methods=["GET", "POST"])
@login_required
def admin_lookup():
data = None
2018-07-29 13:10:15 -05:00
meta = None
if request.method == "POST":
if request.form.get("url"):
data = lookup(request.form.get("url"))
2018-07-29 13:10:15 -05:00
if data.has_type(ActivityType.ANNOUNCE):
meta = dict(
object=data.get_object().to_dict(),
object_actor=data.get_object().get_actor().to_dict(),
actor=data.get_actor().to_dict(),
)
2018-09-02 12:43:09 -05:00
print(data)
2018-07-29 13:10:15 -05:00
return render_template(
"lookup.html", data=data, meta=meta, url=request.form.get("url")
)
2018-07-14 05:29:46 -05:00
@app.route("/admin/thread")
@login_required
def admin_thread():
data = DB.activities.find_one(
2018-07-14 06:19:30 -05:00
{
"$or": [
{"remote_id": request.args.get("oid")},
{"activity.object.id": request.args.get("oid")},
]
}
2018-07-14 05:29:46 -05:00
)
if not data:
abort(404)
if data["meta"].get("deleted", False):
abort(410)
thread = _build_thread(data)
tpl = "note.html"
if request.args.get("debug"):
tpl = "note_debug.html"
return render_template(tpl, thread=thread, note=data)
2018-07-14 05:29:46 -05:00
2018-07-06 17:08:44 -05:00
@app.route("/admin/new", methods=["GET"])
2018-05-18 13:41:41 -05:00
@login_required
2018-07-06 17:08:44 -05:00
def admin_new():
2018-05-18 13:41:41 -05:00
reply_id = None
2018-06-16 15:02:10 -05:00
content = ""
2018-06-03 14:28:06 -05:00
thread = []
2018-07-31 15:42:50 -05:00
print(request.args)
2018-06-16 15:02:10 -05:00
if request.args.get("reply"):
2018-06-29 15:16:26 -05:00
data = DB.activities.find_one({"activity.object.id": request.args.get("reply")})
2018-07-31 15:42:50 -05:00
if data:
reply = ap.parse_activity(data["activity"])
else:
data = dict(
meta={},
activity=dict(
object=get_backend().fetch_iri(request.args.get("reply"))
),
)
reply = ap.parse_activity(data["activity"]["object"])
2018-06-03 14:28:06 -05:00
2018-05-18 13:41:41 -05:00
reply_id = reply.id
2018-06-17 12:21:59 -05:00
if reply.ACTIVITY_TYPE == ActivityType.CREATE:
2018-06-03 14:28:06 -05:00
reply_id = reply.get_object().id
2018-05-18 13:41:41 -05:00
actor = reply.get_actor()
domain = urlparse(actor.id).netloc
2018-06-03 14:28:06 -05:00
# FIXME(tsileo): if reply of reply, fetch all participants
2018-06-16 15:02:10 -05:00
content = f"@{actor.preferredUsername}@{domain} "
2018-06-29 15:16:26 -05:00
thread = _build_thread(data)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
return render_template("new.html", reply=reply_id, content=content, thread=thread)
2018-05-18 13:41:41 -05:00
2018-07-06 17:08:44 -05:00
@app.route("/admin/notifications")
2018-05-18 13:41:41 -05:00
@login_required
2018-07-06 17:08:44 -05:00
def admin_notifications():
2018-06-29 15:16:26 -05:00
# FIXME(tsileo): show unfollow (performed by the current actor) and liked???
mentions_query = {
"type": ActivityType.CREATE.value,
2018-06-16 15:02:10 -05:00
"activity.object.tag.type": "Mention",
"activity.object.tag.name": f"@{USERNAME}@{DOMAIN}",
"meta.deleted": False,
2018-05-18 13:41:41 -05:00
}
2018-06-29 15:16:26 -05:00
replies_query = {
"type": ActivityType.CREATE.value,
"activity.object.inReplyTo": {"$regex": f"^{BASE_URL}"},
}
announced_query = {
"type": ActivityType.ANNOUNCE.value,
"activity.object": {"$regex": f"^{BASE_URL}"},
}
new_followers_query = {"type": ActivityType.FOLLOW.value}
2018-07-01 14:32:12 -05:00
unfollow_query = {
"type": ActivityType.UNDO.value,
"activity.object.type": ActivityType.FOLLOW.value,
}
2018-07-30 11:30:47 -05:00
likes_query = {
"type": ActivityType.LIKE.value,
"activity.object": {"$regex": f"^{BASE_URL}"},
}
2018-06-29 15:16:26 -05:00
followed_query = {"type": ActivityType.ACCEPT.value}
2018-06-16 15:02:10 -05:00
q = {
2018-06-29 15:16:26 -05:00
"box": Box.INBOX.value,
2018-06-16 15:02:10 -05:00
"$or": [
2018-06-29 15:16:26 -05:00
mentions_query,
announced_query,
replies_query,
new_followers_query,
followed_query,
2018-07-01 14:32:12 -05:00
unfollow_query,
2018-07-30 11:12:27 -05:00
likes_query,
2018-06-29 15:16:26 -05:00
],
2018-06-16 15:02:10 -05:00
}
2018-07-06 16:53:33 -05:00
inbox_data, older_than, newer_than = paginated_query(DB.activities, q)
2018-05-18 13:41:41 -05:00
2018-07-06 16:53:33 -05:00
return render_template(
"stream.html",
inbox_data=inbox_data,
older_than=older_than,
newer_than=newer_than,
)
2018-05-18 13:41:41 -05:00
2018-05-28 12:46:23 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/api/key")
2018-05-29 14:36:05 -05:00
@login_required
def api_user_key():
return flask_jsonify(api_key=ADMIN_API_KEY)
def _user_api_arg(key: str, **kwargs):
2018-06-01 13:29:44 -05:00
"""Try to get the given key from the requests, try JSON body, form data and query arg."""
2018-05-29 15:16:09 -05:00
if request.is_json:
2018-06-01 13:29:44 -05:00
oid = request.json.get(key)
2018-05-29 14:36:05 -05:00
else:
2018-06-01 13:29:44 -05:00
oid = request.args.get(key) or request.form.get(key)
2018-05-29 14:36:05 -05:00
if not oid:
2018-06-16 15:02:10 -05:00
if "default" in kwargs:
2018-07-14 06:19:30 -05:00
app.logger.info(f'{key}={kwargs.get("default")}')
2018-06-16 15:02:10 -05:00
return kwargs.get("default")
2018-06-03 05:50:51 -05:00
2018-06-16 15:02:10 -05:00
raise ValueError(f"missing {key}")
2018-06-01 13:29:44 -05:00
2018-07-14 06:19:30 -05:00
app.logger.info(f"{key}={oid}")
2018-06-01 13:29:44 -05:00
return oid
def _user_api_get_note(from_outbox: bool = False):
2018-06-16 15:02:10 -05:00
oid = _user_api_arg("id")
2018-07-14 06:19:30 -05:00
app.logger.info(f"fetching {oid}")
note = ap.parse_activity(get_backend().fetch_iri(oid), expected=ActivityType.NOTE)
2018-06-01 13:29:44 -05:00
if from_outbox and not note.id.startswith(ID):
2018-06-16 15:02:10 -05:00
raise NotFromOutboxError(
2018-06-17 12:21:59 -05:00
f"cannot load {note.id}, id must be owned by the server"
2018-06-16 15:02:10 -05:00
)
2018-05-29 14:36:05 -05:00
2018-06-01 13:29:44 -05:00
return note
2018-05-29 14:36:05 -05:00
def _user_api_response(**kwargs):
2018-06-16 15:02:10 -05:00
_redirect = _user_api_arg("redirect", default=None)
if _redirect:
return redirect(_redirect)
2018-05-29 14:36:05 -05:00
resp = flask_jsonify(**kwargs)
resp.status_code = 201
return resp
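
# Illustrative client call against the endpoints below (the bearer token is
# the one exposed by /api/key; domain and note id are placeholders):
#   curl -X POST https://yourdomain.tld/api/boost \
#        -H "Authorization: Bearer <api_key>" \
#        -d "id=https://remote.tld/notes/123"
# On success the response is HTTP 201 with {"activity": "<new activity id>"}.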


@app.route("/api/note/delete", methods=["POST"])
@api_required
def api_delete():
    """API endpoint to delete a Note activity."""
    note = _user_api_get_note(from_outbox=True)

    delete = ap.Delete(actor=ID, object=ap.Tombstone(id=note.id).to_dict(embed=True))
    delete_id = tasks.post_to_outbox(delete)

    return _user_api_response(activity=delete_id)


@app.route("/api/boost", methods=["POST"])
2018-05-28 12:46:23 -05:00
@api_required
def api_boost():
2018-06-01 13:29:44 -05:00
note = _user_api_get_note()
2018-06-18 15:01:21 -05:00
announce = note.build_announce(MY_PERSON)
2018-07-29 09:07:27 -05:00
announce_id = tasks.post_to_outbox(announce)
2018-05-18 13:41:41 -05:00
2018-07-29 09:07:27 -05:00
return _user_api_response(activity=announce_id)
2018-06-01 13:29:44 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/api/like", methods=["POST"])
2018-05-28 12:46:23 -05:00
@api_required
def api_like():
2018-06-01 13:29:44 -05:00
note = _user_api_get_note()
2018-06-18 15:01:21 -05:00
like = note.build_like(MY_PERSON)
2018-07-29 09:07:27 -05:00
like_id = tasks.post_to_outbox(like)
2018-06-01 13:29:44 -05:00
2018-07-29 09:07:27 -05:00
return _user_api_response(activity=like_id)
2018-05-28 12:46:23 -05:00
2018-05-18 13:41:41 -05:00
@app.route("/api/note/pin", methods=["POST"])
@api_required
def api_pin():
note = _user_api_get_note(from_outbox=True)
DB.activities.update_one(
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
{"$set": {"meta.pinned": True}},
)
return _user_api_response(pinned=True)
@app.route("/api/note/unpin", methods=["POST"])
@api_required
def api_unpin():
note = _user_api_get_note(from_outbox=True)
DB.activities.update_one(
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
{"$set": {"meta.pinned": False}},
)
return _user_api_response(pinned=False)
2018-06-16 15:02:10 -05:00
@app.route("/api/undo", methods=["POST"])
2018-05-27 07:21:06 -05:00
@api_required
def api_undo():
2018-06-16 15:02:10 -05:00
oid = _user_api_arg("id")
2018-06-29 15:16:26 -05:00
doc = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"$or": [{"remote_id": back.activity_url(oid)}, {"remote_id": oid}],
}
2018-06-18 15:01:21 -05:00
)
2018-06-01 13:29:44 -05:00
if not doc:
2018-06-16 15:02:10 -05:00
raise ActivityNotFoundError(f"cannot found {oid}")
2018-06-01 13:29:44 -05:00
2018-06-17 12:21:59 -05:00
obj = ap.parse_activity(doc.get("activity"))
2018-06-01 13:29:44 -05:00
# FIXME(tsileo): detect already undo-ed and make this API call idempotent
undo = obj.build_undo()
2018-07-29 09:07:27 -05:00
undo_id = tasks.post_to_outbox(undo)
2018-06-01 13:29:44 -05:00
2018-07-29 09:07:27 -05:00
return _user_api_response(activity=undo_id)
2018-05-27 07:21:06 -05:00
2018-05-18 13:41:41 -05:00
2018-07-06 17:08:44 -05:00
@app.route("/admin/stream")
2018-05-18 13:41:41 -05:00
@login_required
2018-07-06 17:08:44 -05:00
def admin_stream():
2018-07-14 06:19:30 -05:00
q = {"meta.stream": True, "meta.deleted": False}
2018-05-18 13:41:41 -05:00
2018-07-23 15:11:03 -05:00
tpl = "stream.html"
if request.args.get("debug"):
tpl = "stream_debug.html"
2018-07-23 15:25:51 -05:00
if request.args.get("debug_inbox"):
q = {}
inbox_data, older_than, newer_than = paginated_query(
DB.activities, q, limit=int(request.args.get("limit", 25))
)
2018-07-23 15:11:03 -05:00
2018-07-06 16:53:33 -05:00
return render_template(
2018-07-23 15:11:03 -05:00
tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than
2018-06-16 15:02:10 -05:00
)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/inbox", methods=["GET", "POST"])
2018-06-16 14:24:53 -05:00
def inbox():
2018-06-16 15:02:10 -05:00
if request.method == "GET":
2018-06-16 14:24:53 -05:00
if not is_api_request():
abort(404)
2018-05-21 07:30:52 -05:00
try:
_api_required()
except BadSignature:
abort(404)
2018-06-16 15:02:10 -05:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 15:16:26 -05:00
DB.activities,
q={"meta.deleted": False, "box": Box.INBOX.value},
2018-06-16 15:02:10 -05:00
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
2018-07-31 16:23:20 -05:00
col_name="inbox",
2018-06-16 15:02:10 -05:00
)
)
2018-05-21 07:30:52 -05:00
2018-06-16 14:24:53 -05:00
data = request.get_json(force=True)
2018-06-16 15:02:10 -05:00
logger.debug(f"req_headers={request.headers}")
logger.debug(f"raw_data={data}")
2018-06-20 16:42:12 -05:00
try:
if not verify_request(
request.method, request.path, request.headers, request.data
):
2018-06-16 15:02:10 -05:00
raise Exception("failed to verify request")
2018-06-02 02:07:57 -05:00
except Exception:
2018-06-16 15:02:10 -05:00
logger.exception(
"failed to verify request, trying to verify the payload by fetching the remote"
)
try:
2018-06-20 16:42:12 -05:00
data = get_backend().fetch_iri(data["id"])
2018-09-02 13:32:15 -05:00
except ActivityGoneError:
# XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
# appended, so an `ActivityGoneError` kind of ensure it's "legit"
2018-09-03 01:20:43 -05:00
if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
data["object"]
):
2018-09-02 13:32:15 -05:00
logger.info(f"received a Delete for an actor {data!r}")
if get_backend().inbox_check_duplicate(MY_PERSON, data["id"]):
# The activity is already in the inbox
logger.info(f"received duplicate activity {data!r}, dropping it")
2018-09-03 01:20:43 -05:00
DB.activities.insert_one(
{
"box": Box.INBOX.value,
"activity": data,
"type": _to_list(data["type"]),
"remote_id": data["id"],
"meta": {"undo": False, "deleted": False},
}
)
# TODO(tsileo): write the callback the the delete external actor event
return Response(status=201)
except Exception:
2018-05-28 12:46:23 -05:00
logger.exception(f'failed to fetch remote id at {data["id"]}')
2018-06-02 02:07:57 -05:00
return Response(
status=422,
2018-06-16 15:02:10 -05:00
headers={"Content-Type": "application/json"},
response=json.dumps(
{
"error": "failed to verify request (using HTTP signatures or fetching the IRI)"
}
),
2018-06-02 02:07:57 -05:00
)
2018-06-17 12:21:59 -05:00
activity = ap.parse_activity(data)
2018-06-16 15:02:10 -05:00
logger.debug(f"inbox activity={activity}/{data}")
2018-07-29 09:07:27 -05:00
tasks.post_to_inbox(activity)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
return Response(status=201)
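
# Inbox verification, summarized: an incoming POST is accepted if its HTTP
# signature verifies; otherwise the activity id is re-fetched from its origin,
# so only payloads that dereference back to their claimed source are stored.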


def without_id(l):
    out = []
    for d in l:
        if "_id" in d:
            del d["_id"]
        out.append(d)
    return out


2018-06-16 15:02:10 -05:00
@app.route("/api/debug", methods=["GET", "DELETE"])
@api_required
def api_debug():
"""Endpoint used/needed for testing, only works in DEBUG_MODE."""
if not DEBUG_MODE:
2018-06-16 15:02:10 -05:00
return flask_jsonify(message="DEBUG_MODE is off")
2018-06-16 15:02:10 -05:00
if request.method == "DELETE":
_drop_db()
2018-06-16 15:02:10 -05:00
return flask_jsonify(message="DB dropped")
return flask_jsonify(
inbox=DB.activities.count({"box": Box.INBOX.value}),
outbox=DB.activities.count({"box": Box.OUTBOX.value}),
outbox_data=without_id(DB.activities.find({"box": Box.OUTBOX.value})),
)


@app.route("/api/new_note", methods=["POST"])
@api_required
def api_new_note():
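    """Creates a new Note from the posted markdown `content` (optionally as a
    reply, and optionally with a file attachment) and posts the wrapping
    Create activity to the outbox.

    A minimal invocation sketch (assuming `@api_required` accepts the admin
    API key as a bearer token):

        curl -X POST https://yourdomain.tld/api/new_note -H "Authorization: Bearer $ADMIN_API_KEY" -d content="Hello"
    """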
source = _user_api_arg("content")
if not source:
raise ValueError("missing content")
_reply, reply = None, None
try:
_reply = _user_api_arg("reply")
except ValueError:
pass
content, tags = parse_markdown(source)
to = request.args.get("to")
cc = [ID + "/followers"]
if _reply:
reply = ap.fetch_remote_activity(_reply)
cc.append(reply.attributedTo)
for tag in tags:
if tag["type"] == "Mention":
cc.append(tag["href"])
raw_note = dict(
attributedTo=MY_PERSON.id,
cc=list(set(cc)),
to=[to if to else ap.AS_PUBLIC],
content=content,
tag=tags,
source={"mediaType": "text/markdown", "content": source},
inReplyTo=reply.id if reply else None,
)
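    # The note built above ends up looking roughly like this for a public,
    # non-reply note with one mention (illustrative values only):
    #   {"attributedTo": "<actor IRI>", "to": [ap.AS_PUBLIC],
    #    "cc": ["<ID>/followers", "<mentioned actor IRI>"],
    #    "content": "<rendered HTML>", "tag": [...],
    #    "source": {"mediaType": "text/markdown", "content": "<raw markdown>"},
    #    "inReplyTo": None}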
if "file" in request.files:
file = request.files["file"]
rfilename = secure_filename(file.filename)
with BytesIO() as buf:
file.save(buf)
oid = MEDIA_CACHE.save_upload(buf, rfilename)
        # Guess the media type from the filename; fall back to a generic type
        mtype = mimetypes.guess_type(rfilename)[0] or "application/octet-stream"
raw_note["attachment"] = [
{
"mediaType": mtype,
"name": rfilename,
"type": "Document",
"url": f"{BASE_URL}/uploads/{oid}/{rfilename}",
}
]
note = ap.Note(**raw_note)
create = note.build_create()
create_id = tasks.post_to_outbox(create)
return _user_api_response(activity=create_id)


@app.route("/api/stream")
@api_required
def api_stream():
return Response(
response=json.dumps(
activitypub.build_inbox_json_feed("/api/stream", request.args.get("cursor"))
),
headers={"Content-Type": "application/json"},
)


@app.route("/api/block", methods=["POST"])
@api_required
def api_block():
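    """Blocks an actor, unless an active Block for it is already in the
    outbox."""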
actor = _user_api_arg("actor")
existing = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"type": ActivityType.BLOCK.value,
"activity.object": actor,
"meta.undo": False,
}
)
if existing:
return _user_api_response(activity=existing["activity"]["id"])
block = ap.Block(actor=MY_PERSON.id, object=actor)
block_id = tasks.post_to_outbox(block)
return _user_api_response(activity=block_id)


@app.route("/api/follow", methods=["POST"])
@api_required
def api_follow():
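    """Follows an actor (an IRI is expected), unless an active Follow is
    already in the outbox."""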
actor = _user_api_arg("actor")
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
"activity.object": actor,
}
existing = DB.activities.find_one(q)
if existing:
return _user_api_response(activity=existing["activity"]["id"])
follow = ap.Follow(actor=MY_PERSON.id, object=actor)
follow_id = tasks.post_to_outbox(follow)
return _user_api_response(activity=follow_id)


@app.route("/followers")
def followers():
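    """Returns the followers, as an ActivityPub OrderedCollection for API
    requests and as a paginated HTML page otherwise."""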
q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
if is_api_request():
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["actor"],
col_name="followers",
)
)
raw_followers, older_than, newer_than = paginated_query(DB.activities, q)
followers = []
for doc in raw_followers:
try:
followers.append(doc["meta"]["actor"])
except Exception:
pass
return render_template(
"followers.html",
followers_data=followers,
older_than=older_than,
newer_than=newer_than,
)


@app.route("/following")
def following():
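    """Returns the following collection; the HTML version can be hidden from
    anonymous visitors via config.HIDE_FOLLOWING."""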
q = {"box": Box.OUTBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
if is_api_request():
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name="following",
)
)
if config.HIDE_FOLLOWING and not session.get("logged_in", False):
abort(404)
following, older_than, newer_than = paginated_query(DB.activities, q)
following = [(doc["remote_id"], doc["meta"]["object"]) for doc in following]
return render_template(
"following.html",
following_data=following,
older_than=older_than,
newer_than=newer_than,
)


@app.route("/tags/<tag>")
def tags(tag):
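    """Returns the public notes carrying the given hashtag, and 404s if the
    tag was never used."""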
if not DB.activities.count(
{
"box": Box.OUTBOX.value,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
}
):
abort(404)
if not is_api_request():
return render_template(
"tags.html",
tag=tag,
outbox_data=DB.activities.find(
{
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
"meta.deleted": False,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
}
),
)
q = {
"box": Box.OUTBOX.value,
"meta.deleted": False,
"meta.undo": False,
"type": ActivityType.CREATE.value,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
2018-05-18 13:41:41 -05:00
}
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"]["id"],
col_name=f"tags/{tag}",
)
)


@app.route("/featured")
def featured():
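    """Returns the pinned notes as an OrderedCollection (API requests only)."""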
if not is_api_request():
abort(404)
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
"meta.deleted": False,
"meta.undo": False,
"meta.pinned": True,
}
data = [clean_activity(doc["activity"]["object"]) for doc in DB.activities.find(q)]
return jsonify(**activitypub.simple_build_ordered_collection("featured", data))


@app.route("/liked")
def liked():
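    """Returns the liked objects, as a paginated HTML page or as an
    ActivityPub OrderedCollection."""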
if not is_api_request():
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.LIKE.value,
"meta.deleted": False,
"meta.undo": False,
}
liked, older_than, newer_than = paginated_query(DB.activities, q)
return render_template(
"liked.html", liked=liked, older_than=older_than, newer_than=newer_than
)
q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name="liked",
)
)


#######
# IndieAuth
def build_auth_resp(payload):
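    """Renders an IndieAuth response as JSON or form-encoded data, depending
    on the request's Accept header."""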
if request.headers.get("Accept") == "application/json":
return Response(
status=200,
headers={"Content-Type": "application/json"},
response=json.dumps(payload),
)
return Response(
status=200,
headers={"Content-Type": "application/x-www-form-urlencoded"},
response=urlencode(payload),
)
def _get_prop(props, name, default=None):
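    """Returns the first value of a microformats2 property (mf2 values are
    always lists).

    e.g. _get_prop({"name": ["My App"]}, "name") == "My App"
    """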
if name in props:
items = props.get(name)
if isinstance(items, list):
return items[0]
return items
return default


def get_client_id_data(url):
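    """Fetches the IndieAuth client_id URL and extracts the h-app/h-x-app
    microformats metadata about the client."""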
data = mf2py.parse(url=url)
for item in data["items"]:
if "h-x-app" in item["type"] or "h-app" in item["type"]:
props = item.get("properties", {})
2018-05-18 13:41:41 -05:00
print(props)
return dict(
logo=_get_prop(props, "logo"),
name=_get_prop(props, "name"),
url=_get_prop(props, "url"),
)
return dict(logo=None, name=url, url=url)


@app.route("/indieauth/flow", methods=["POST"])
@login_required
def indieauth_flow():
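    """Handles the IndieAuth consent form submission: stores the pending
    authorization and redirects back to the client with a one-time code."""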
auth = dict(
scope=" ".join(request.form.getlist("scopes")),
me=request.form.get("me"),
client_id=request.form.get("client_id"),
state=request.form.get("state"),
redirect_uri=request.form.get("redirect_uri"),
response_type=request.form.get("response_type"),
)
code = binascii.hexlify(os.urandom(8)).decode("utf-8")
auth.update(code=code, verified=False)
2018-05-18 13:41:41 -05:00
print(auth)
if not auth["redirect_uri"]:
abort(500)
DB.indieauth.insert_one(auth)
# FIXME(tsileo): fetch client ID and validate redirect_uri
red = f'{auth["redirect_uri"]}?code={code}&state={auth["state"]}&me={auth["me"]}'
return redirect(red)


# @app.route('/indieauth', methods=['GET', 'POST'])
def indieauth_endpoint():
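    """IndieAuth authorization endpoint (the route is currently commented
    out): GET renders the consent form, POST verifies an authorization
    code."""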
if request.method == "GET":
if not session.get("logged_in"):
return redirect(url_for("admin_login", next=request.url))
me = request.args.get("me")
# FIXME(tsileo): ensure me == ID
client_id = request.args.get("client_id")
redirect_uri = request.args.get("redirect_uri")
state = request.args.get("state", "")
response_type = request.args.get("response_type", "id")
scope = request.args.get("scope", "").split()
        logger.debug(f"state={state}")
return render_template(
"indieauth_flow.html",
client=get_client_id_data(client_id),
scopes=scope,
redirect_uri=redirect_uri,
state=state,
response_type=response_type,
client_id=client_id,
me=me,
)
# Auth verification via POST
code = request.form.get("code")
redirect_uri = request.form.get("redirect_uri")
client_id = request.form.get("client_id")
auth = DB.indieauth.find_one_and_update(
{
"code": code,
"redirect_uri": redirect_uri,
"client_id": client_id,
        },  # NB: the 'verified': False filter is currently disabled
{"$set": {"verified": True}},
sort=[("_id", pymongo.DESCENDING)],
)
    logger.debug(f"auth={auth!r}")
    logger.debug(f"code={code}, redirect_uri={redirect_uri}, client_id={client_id}")
    if not auth:
        abort(403)
session["logged_in"] = True
me = auth["me"]
state = auth["state"]
scope = " ".join(auth["scope"])
print("STATE", state)
return build_auth_resp({"me": me, "state": state, "scope": scope})


@app.route("/token", methods=["GET", "POST"])
def token_endpoint():
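    """IndieAuth token endpoint: POST exchanges an authorization code for a
    JWT-based access token, GET verifies the token from the Authorization
    header."""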
if request.method == "POST":
code = request.form.get("code")
me = request.form.get("me")
redirect_uri = request.form.get("redirect_uri")
client_id = request.form.get("client_id")
auth = DB.indieauth.find_one(
{
"code": code,
"me": me,
"redirect_uri": redirect_uri,
"client_id": client_id,
}
)
if not auth:
abort(403)
scope = " ".join(auth["scope"])
payload = dict(
me=me, client_id=client_id, scope=scope, ts=datetime.now().timestamp()
)
token = JWT.dumps(payload).decode("utf-8")
return build_auth_resp({"me": me, "scope": scope, "access_token": token})
# Token verification
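    # e.g. (sketch with a hypothetical token value):
    #   curl https://yourdomain.tld/token -H "Authorization: Bearer <access_token>"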
    token = request.headers.get("Authorization", "").replace("Bearer ", "")
try:
payload = JWT.loads(token)
except BadSignature:
abort(403)
# TODO(tsileo): handle expiration
return build_auth_resp(
{
"me": payload["me"],
"scope": payload["scope"],
"client_id": payload["client_id"],
}
)