microblog.pub/app.py

1006 lines
29 KiB
Python
Raw Normal View History

2018-05-18 13:41:41 -05:00
import json
import logging
2018-06-16 15:02:10 -05:00
import os
2018-07-17 17:20:32 -05:00
import traceback
2018-05-18 13:41:41 -05:00
from datetime import datetime
2019-08-18 04:40:57 -05:00
from typing import Any
2019-08-18 04:48:18 -05:00
from urllib.parse import urlparse
2019-08-05 15:40:24 -05:00
from uuid import uuid4
2018-05-18 13:41:41 -05:00
2019-08-18 04:40:57 -05:00
import requests
2019-08-06 15:12:05 -05:00
from bson.errors import InvalidId
2018-05-18 13:41:41 -05:00
from bson.objectid import ObjectId
from flask import Flask
2018-06-16 15:02:10 -05:00
from flask import Response
2018-05-18 13:41:41 -05:00
from flask import abort
2019-08-05 15:40:24 -05:00
from flask import g
2018-05-18 13:41:41 -05:00
from flask import redirect
from flask import render_template
2018-06-16 15:02:10 -05:00
from flask import request
2018-05-18 13:41:41 -05:00
from flask import session
from flask import url_for
2019-08-06 15:12:05 -05:00
from gridfs.errors import NoFile
2018-05-18 13:41:41 -05:00
from itsdangerous import BadSignature
2018-07-11 16:22:47 -05:00
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import clean_activity
from little_boxes.activitypub import get_backend
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import Error
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_remote_follow_template
2018-05-18 13:41:41 -05:00
2019-08-01 12:55:30 -05:00
import blueprints.admin
2019-08-01 13:56:32 -05:00
import blueprints.indieauth
2019-08-01 12:55:30 -05:00
import blueprints.tasks
import blueprints.well_known
2018-05-18 13:41:41 -05:00
import config
2019-08-01 12:55:30 -05:00
from blueprints.api import _api_required
2019-08-13 17:06:58 -05:00
from blueprints.api import api_required
2019-08-01 15:25:58 -05:00
from blueprints.tasks import TaskError
2018-05-18 13:41:41 -05:00
from config import DB
2018-06-16 15:02:10 -05:00
from config import ID
from config import ME
from config import MEDIA_CACHE
from config import VERSION
2019-08-01 15:00:26 -05:00
from core import activitypub
from core import feed
2019-08-17 14:02:30 -05:00
from core import jsonld
from core.activitypub import activity_from_doc
from core.activitypub import activity_url
from core.activitypub import post_to_inbox
from core.activitypub import post_to_outbox
from core.activitypub import remove_context
2019-08-01 15:24:18 -05:00
from core.db import find_one_activity
2019-08-01 15:25:58 -05:00
from core.meta import Box
2019-08-01 15:00:26 -05:00
from core.meta import MetaKey
from core.meta import _meta
2019-09-01 13:58:51 -05:00
from core.meta import by_hashtag
2019-09-02 16:44:38 -05:00
from core.meta import by_object_id
2019-08-01 15:24:18 -05:00
from core.meta import by_remote_id
2019-08-05 15:40:24 -05:00
from core.meta import by_type
2019-09-01 13:58:51 -05:00
from core.meta import by_visibility
2019-09-02 16:44:38 -05:00
from core.meta import in_inbox
2019-08-01 15:24:18 -05:00
from core.meta import in_outbox
2019-08-01 15:25:58 -05:00
from core.meta import is_public
2019-09-01 13:58:51 -05:00
from core.meta import not_deleted
2019-09-02 16:44:38 -05:00
from core.meta import not_poll_answer
2019-08-05 15:40:24 -05:00
from core.meta import not_undo
2019-09-01 14:38:38 -05:00
from core.meta import pinned
2019-08-01 15:00:26 -05:00
from core.shared import _build_thread
from core.shared import _get_ip
from core.shared import activitypubify
2019-08-01 15:00:26 -05:00
from core.shared import csrf
from core.shared import htmlify
from core.shared import is_api_request
2019-08-24 03:58:35 -05:00
from core.shared import jsonify
2019-08-01 15:00:26 -05:00
from core.shared import login_required
from core.shared import noindex
from core.shared import paginated_query
2019-08-05 15:40:24 -05:00
from utils.blacklist import is_blacklisted
2019-08-20 15:16:47 -05:00
from utils.emojis import EMOJIS
2019-08-24 17:16:39 -05:00
from utils.highlight import HIGHLIGHT_CSS
2018-06-16 15:02:10 -05:00
from utils.key import get_secret_key
2019-07-30 15:12:20 -05:00
from utils.template_filters import filters
2018-06-17 13:51:23 -05:00
2018-05-18 13:41:41 -05:00
app = Flask(__name__)
app.secret_key = get_secret_key("flask")

# Register the template filters and the blueprints that make up the app
app.register_blueprint(filters)
app.register_blueprint(blueprints.admin.blueprint)
app.register_blueprint(blueprints.api.blueprint, url_prefix="/api")
app.register_blueprint(blueprints.indieauth.blueprint)
app.register_blueprint(blueprints.tasks.blueprint)
app.register_blueprint(blueprints.well_known.blueprint)

# CSRF checks are opt-in: views that need protection call csrf.protect()
# explicitly (see remote_follow below)
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf.init_app(app)
2018-05-18 13:41:41 -05:00
2018-05-28 12:46:23 -05:00
logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    # Local/debug run: everything at DEBUG, reuse Flask's own handlers
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
    root_logger.handlers = app.logger.handlers
else:
    # Production: route everything through gunicorn's error logger
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)
2018-05-22 17:41:37 -05:00
2018-05-18 13:41:41 -05:00
@app.context_processor
def inject_config():
    """Expose the config object and various activity counters to all templates."""
    # Public notes: Create activities (public, not deleted) plus Announces
    public_notes_q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
            },
            {**by_type(ActivityType.ANNOUNCE), **not_undo()},
        ],
    }
    notes_count = DB.activities.count(public_notes_q)

    # FIXME(tsileo): rename to all_count, and remove poll answers from it
    with_replies_q = {
        **in_outbox(),
        **by_type([ActivityType.CREATE, ActivityType.ANNOUNCE]),
        **not_deleted(),
        **not_undo(),
        **not_poll_answer(),
    }
    likes_q = {
        **in_outbox(),
        **by_type(ActivityType.LIKE),
        **not_undo(),
        **not_deleted(),
    }
    followers_q = {
        **in_inbox(),
        **by_type(ActivityType.FOLLOW),
        **not_undo(),
        **not_deleted(),
    }
    following_q = {
        **in_outbox(),
        **by_type(ActivityType.FOLLOW),
        **not_undo(),
        **not_deleted(),
    }
    unread_notifications_q = {_meta(MetaKey.NOTIFICATION_UNREAD): True}

    logged_in = session.get("logged_in", False)

    def _admin_count(q):
        # Some counters are only shown to the admin; skip the query otherwise
        return DB.activities.count(q) if logged_in else 0

    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=logged_in,
        followers_count=DB.activities.count(followers_q),
        following_count=_admin_count(following_q),
        notes_count=notes_count,
        liked_count=_admin_count(likes_q),
        with_replies_count=_admin_count(with_replies_q),
        unread_notifications_count=_admin_count(unread_notifications_q),
        me=ME,
        base_url=config.BASE_URL,
        highlight_css=HIGHLIGHT_CSS,
    )
2018-05-18 13:41:41 -05:00
2019-08-05 15:40:24 -05:00
@app.before_request
def generate_request_id():
    # Attach a unique ID to every request; it is echoed back in the
    # X-Request-ID header and included in error payloads/log lines
    g.request_id = uuid4().hex
2018-05-18 13:41:41 -05:00
@app.after_request
def set_x_powered_by(response):
    # Branding header + per-request ID (set in generate_request_id) on
    # every response
    response.headers["X-Powered-By"] = "microblog.pub"
    response.headers["X-Request-ID"] = g.request_id
    return response
2018-06-16 15:02:10 -05:00
2018-05-29 14:36:05 -05:00
@app.errorhandler(ValueError)
def handle_value_error(error):
    """Turn an uncaught ValueError into a JSON 400 response."""
    logger.error(
        f"caught value error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    payload = {"message": error.args[0], "request_id": g.request_id}
    response = jsonify(payload)
    response.status_code = 400
    return response
@app.errorhandler(Error)
def handle_activitypub_error(error):
    """Turn a little_boxes `Error` into its JSON representation."""
    logger.error(
        f"caught activitypub error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    payload = {**error.to_dict(), "request_id": g.request_id}
    response = jsonify(payload)
    response.status_code = error.status_code
    return response
2019-04-07 07:37:05 -05:00
@app.errorhandler(TaskError)
def handle_task_error(error):
    """Turn a failed background task into a JSON 500 response."""
    # NOTE: the message used to say "caught activitypub error" (copy-paste
    # from the handler above), which made task failures hard to grep for
    logger.error(
        f"caught task error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = jsonify({"traceback": error.message, "request_id": g.request_id})
    response.status_code = 500
    return response
2018-07-22 05:42:36 -05:00
# @app.errorhandler(Exception)
# def handle_other_error(error):
# logger.error(
# f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
# )
# response = flask_jsonify({})
# response.status_code = 500
# return response
2018-07-22 05:25:56 -05:00
2019-08-08 17:07:08 -05:00
def _log_sig():
    """Best-effort logging of the HTTP Signature on authenticated fetches.

    Never raises: verification failures are logged and swallowed.
    """
    sig = request.headers.get("Signature")
    if not sig:
        return

    app.logger.info(f"received an authenticated fetch: {sig}")
    try:
        req_verified, actor_id = verify_request(
            request.method, request.path, request.headers, None
        )
        app.logger.info(
            f"authenticated fetch: {req_verified}: {actor_id} {request.headers}"
        )
    except Exception:
        app.logger.exception("failed to verify authenticated fetch")
2019-08-08 17:07:08 -05:00
2018-06-16 14:24:53 -05:00
# App routes
2018-05-18 13:41:41 -05:00
# Keep crawlers out of the admin/login pages, static assets, the media
# proxy and uploads
ROBOTS_TXT = """User-agent: *
Disallow: /login
Disallow: /admin/
Disallow: /static/
Disallow: /media/
Disallow: /p/
Disallow: /uploads/"""


@app.route("/robots.txt")
def robots_txt():
    # Plain-text robots rules (see ROBOTS_TXT above)
    return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"})
2019-08-20 15:16:47 -05:00
@app.route("/microblogpub-0.1.jsonld")
def microblogpub_jsonld():
    """Returns our AP context (embedded in activities @context)."""
    body = json.dumps(jsonld.MICROBLOGPUB)
    return Response(
        response=body,
        headers={"Content-Type": "application/ld+json"},
    )
2019-08-18 04:59:02 -05:00
@app.route("/p/<scheme>/<path:url>")
@noindex
def proxy(scheme: str, url: str) -> Any:
    """Stream a remote resource back to the client (media proxy).

    Only a whitelist of response headers is forwarded, and the Host/Cookie
    request headers are never sent upstream.
    """
    # Only proxy HTTP(S) resources; any other scheme is a crafted request
    # (previously the scheme was concatenated unchecked into the fetched URL)
    if scheme not in ["http", "https"]:
        abort(404)

    url = f"{scheme}://{url}"
    req_headers = {
        k: v
        for k, v in dict(request.headers).items()
        if k.lower() not in ["host", "cookie"]
    }
    req_headers["Host"] = urlparse(url).netloc
    resp = requests.get(url, stream=True, headers=req_headers)
    app.logger.info(f"proxied req {url} {req_headers}: {resp!r}")

    def data():
        # Stream the raw (still-encoded) payload chunk by chunk
        for chunk in resp.raw.stream(decode_content=False):
            yield chunk

    resp_headers = {
        k: v
        for k, v in dict(resp.raw.headers).items()
        if k.lower()
        in [
            "content-length",
            "content-type",
            "etag",
            "cache-control",
            "expires",
            "date",
            "last-modified",
        ]
    }
    return Response(data(), headers=resp_headers, status=resp.status_code)
2019-08-18 04:40:57 -05:00
@app.route("/media/<media_id>")
@noindex
def serve_media(media_id):
    """Serve a cached (gzipped) media file stored in GridFS."""
    try:
        f = MEDIA_CACHE.fs.get(ObjectId(media_id))
    except (InvalidId, NoFile):
        abort(404)

    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    # Immutable caching: a media file never changes once stored
    extra_headers = {
        "Content-Length": f.length,
        "ETag": f.md5,
        "Last-Modified": f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT"),
        "Cache-Control": "public,max-age=31536000,immutable",
        "Content-Encoding": "gzip",
    }
    for header, value in extra_headers.items():
        resp.headers.set(header, value)
    return resp
@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
    """Serve an uploaded (gzipped) attachment stored in GridFS."""
    try:
        f = MEDIA_CACHE.fs.get(ObjectId(oid))
    except (InvalidId, NoFile):
        abort(404)

    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    # Immutable caching: an upload never changes once stored
    extra_headers = {
        "Content-Length": f.length,
        "ETag": f.md5,
        "Last-Modified": f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT"),
        "Cache-Control": "public,max-age=31536000,immutable",
        "Content-Encoding": "gzip",
    }
    for header, value in extra_headers.items():
        resp.headers.set(header, value)
    return resp
2018-06-16 15:02:10 -05:00
@app.route("/remote_follow", methods=["GET", "POST"])
def remote_follow():
    """Form to allow visitor to perform the remote follow dance."""
    if request.method == "GET":
        return htmlify(render_template("remote_follow.html"))

    csrf.protect()
    # Default to "" so a missing form field yields a 400 instead of an
    # AttributeError (None.startswith) and a 500
    profile = request.form.get("profile", "")
    if not profile:
        abort(400)
    if not profile.startswith("@"):
        profile = f"@{profile}"

    return redirect(get_remote_follow_template(profile).format(uri=ID))
2018-05-18 13:41:41 -05:00
#######
# Activity pub routes
2018-06-16 15:02:10 -05:00
2018-09-03 13:21:33 -05:00
@app.route("/")
def index():
    """Homepage: the AP actor for API requests, the public outbox as HTML."""
    if is_api_request():
        _log_sig()
        return activitypubify(**ME)

    # Public Create activities (excluding the pinned ones, fetched separately
    # below) plus Announces
    q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
                "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
            },
            {**by_type(ActivityType.ANNOUNCE), **not_undo()},
        ],
    }

    pinned_notes = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get("newer_than"):
        pinned_q = {
            **in_outbox(),
            **by_type(ActivityType.CREATE),
            **not_deleted(),
            **pinned(),
            **by_visibility(ap.Visibility.PUBLIC),
        }
        pinned_notes = list(DB.activities.find(pinned_q))

    # Shrink the page so pinned + regular notes still add up to 25 entries
    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned_notes)
    )

    return htmlify(
        render_template(
            "index.html",
            outbox_data=outbox_data,
            older_than=older_than,
            newer_than=newer_than,
            pinned=pinned_notes,
        )
    )
2018-06-04 10:59:38 -05:00
2019-07-12 17:38:51 -05:00
@app.route("/all")
@login_required
def all():
    """Admin-only timeline of every outbox Create/Announce activity."""
    # NOTE: shadows the `all` builtin, but the function name doubles as the
    # Flask endpoint name so it is kept as-is
    q = {
        **in_outbox(),
        **by_type([ActivityType.CREATE, ActivityType.ANNOUNCE]),
        **not_deleted(),
        **not_undo(),
        **not_poll_answer(),
    }
    page_data, older_than, newer_than = paginated_query(DB.activities, q)

    return htmlify(
        render_template(
            "index.html",
            outbox_data=page_data,
            older_than=older_than,
            newer_than=newer_than,
        )
    )
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/note/<note_id>")
def note_by_id(note_id):
    """Render a single note with its thread, likes and shares."""
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one(
        {**in_outbox(), **by_remote_id(activity_url(note_id))}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    def _collect_actors(activity_type):
        # Actors of the non-deleted, non-undone activities of the given type
        # that target this note
        docs = DB.activities.find(
            {
                **not_undo(),
                **not_deleted(),
                **by_type(activity_type),
                **by_object_id(data["activity"]["object"]["id"]),
            }
        )
        actors = []
        for doc in docs:
            try:
                actors.append(doc["meta"]["actor"])
            except Exception:
                app.logger.exception(f"invalid doc: {doc!r}")
        return actors

    likes = _collect_actors(ActivityType.LIKE)
    app.logger.info(f"likes={likes!r}")

    shares = _collect_actors(ActivityType.ANNOUNCE)
    app.logger.info(f"shares={shares!r}")

    return htmlify(
        render_template(
            "note.html", likes=likes, shares=shares, thread=thread, note=data
        )
    )
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox", methods=["GET", "POST"])
def outbox():
    """ActivityPub outbox: GET serves the collection, POST is the C2S API."""
    if request.method == "POST":
        # Handle POST request aka C2S API
        try:
            _api_required()
        except BadSignature:
            abort(401)

        payload = request.get_json(force=True)
        activity = ap.parse_activity(payload)
        activity_id = post_to_outbox(activity)
        return Response(status=201, headers={"Location": activity_id})

    # GET: only the AP JSON representation is served
    if not is_api_request():
        abort(404)
    _log_sig()
    # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
    q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
            },
            {**by_type(ActivityType.ANNOUNCE), **not_undo()},
        ],
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: activity_from_doc(doc, embed=True),
            col_name="outbox",
        )
    )
2018-05-18 13:41:41 -05:00
2019-08-20 15:16:47 -05:00
@app.route("/emoji/<name>")
def ap_emoji(name):
    """Serve a custom emoji as an AP object."""
    if name not in EMOJIS:
        abort(404)
    return activitypubify(**{**EMOJIS[name], "@context": config.DEFAULT_CTX})
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>")
def outbox_detail(item_id):
    """Serve a single public outbox activity as an AP object."""
    query = {
        **in_outbox(),
        **by_remote_id(activity_url(item_id)),
        **not_deleted(),
        **is_public(),
    }
    doc = DB.activities.find_one(query)
    if not doc:
        abort(404)

    _log_sig()
    # Belt-and-braces: the query already filters deleted activities
    if doc["meta"].get("deleted", False):
        abort(404)
    return activitypubify(**activity_from_doc(doc))
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/activity")
def outbox_activity(item_id):
    """Serve the inner object of a public outbox Create activity."""
    data = find_one_activity(
        {**in_outbox(), **by_remote_id(activity_url(item_id)), **is_public()}
    )
    if not data:
        abort(404)

    _log_sig()
    obj = activity_from_doc(data)
    # 404 for deleted activities and for anything that is not a Create
    if data["meta"].get("deleted", False) or obj["type"] != ActivityType.CREATE.value:
        abort(404)

    return activitypubify(**obj["object"])
2018-05-28 12:46:23 -05:00
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
    """AP collection of the public replies to an outbox Create activity."""
    if not is_api_request():
        abort(404)
    _log_sig()

    parent = DB.activities.find_one(
        {
            **in_outbox(),
            **by_remote_id(activity_url(item_id)),
            **not_deleted(),
            **is_public(),
        }
    )
    if not parent:
        abort(404)

    obj = ap.parse_activity(parent["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    replies_q = {
        **is_public(),
        **not_deleted(),
        **by_type(ActivityType.CREATE),
        "activity.object.inReplyTo": obj.get_object().id,
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=replies_q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name=f"outbox/{item_id}/replies",
            first_page=request.args.get("page") == "first",
        )
    )
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
    """AP collection of the Like activities received for an outbox Create."""
    if not is_api_request():
        abort(404)
    _log_sig()

    # Consistency: use the core.meta query helpers like the sibling
    # /replies endpoint instead of hand-written keys (same query semantics)
    data = DB.activities.find_one(
        {
            **in_outbox(),
            **by_remote_id(activity_url(item_id)),
            **not_deleted(),
            **is_public(),
        }
    )
    if not data:
        abort(404)

    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Match Likes targeting the object, whether embedded or by reference
    q = {
        **not_undo(),
        **by_type(ActivityType.LIKE),
        "$or": [
            {"activity.object.id": obj.get_object().id},
            {"activity.object": obj.get_object().id},
        ],
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name=f"outbox/{item_id}/likes",
            first_page=request.args.get("page") == "first",
        )
    )
2018-06-16 15:02:10 -05:00
@app.route("/outbox/<item_id>/shares")
def outbox_activity_shares(item_id):
    """AP collection of the Announce activities received for an outbox Create."""
    if not is_api_request():
        abort(404)
    # Consistency: log the signature before the lookup, like the sibling
    # /likes endpoint (previously only logged after a successful lookup)
    _log_sig()

    # Consistency: use the core.meta query helpers instead of hand-written
    # keys (same query semantics; note this endpoint never filtered on
    # meta.public, so is_public() is deliberately not added)
    data = DB.activities.find_one(
        {
            **in_outbox(),
            **by_remote_id(activity_url(item_id)),
            **not_deleted(),
        }
    )
    if not data:
        abort(404)

    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Match Announces targeting the object, whether embedded or by reference
    q = {
        **not_undo(),
        **by_type(ActivityType.ANNOUNCE),
        "$or": [
            {"activity.object.id": obj.get_object().id},
            {"activity.object": obj.get_object().id},
        ],
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name=f"outbox/{item_id}/shares",
            first_page=request.args.get("page") == "first",
        )
    )
2019-08-01 12:55:30 -05:00
@app.route("/inbox", methods=["GET", "POST"])  # noqa: C901
def inbox():
    """ActivityPub inbox: GET serves the collection (admin only), POST is S2S delivery.

    POST control flow: decode JSON -> blacklist check -> HTTP Signature
    verification; on verification failure, fall back to re-fetching the
    activity by its IRI, and store unverifiable payloads in the trash
    collection for later analysis.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST/ inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        # Unparseable body: reject with a 422 and echo the request ID
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps(
                {
                    "error": "failed to decode request body as JSON",
                    "request_id": g.request_id,
                }
            ),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        # 201 on purpose: don't let the blacklisted host know it was dropped
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")
    try:
        # Primary path: verify the HTTP Signature of the delivery
        req_verified, actor_id = verify_request(
            request.method, request.path, request.headers, request.data
        )
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        # Fallback path: try to dereference the activity by its IRI instead
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(f"received a Delete for an unknown actor {data!r}, drop it")
                return Response(status=201)
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                                "request_id": g.request_id,
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis

            # Track/store the payload for analysis
            ip, geoip = _get_ip()

            DB.trash.insert(
                {
                    "activity": data,
                    "meta": {
                        "ts": datetime.now().timestamp(),
                        "ip_address": ip,
                        "geoip": geoip,
                        "tb": traceback.format_exc(),
                        "headers": dict(request.headers),
                        "request_id": g.request_id,
                    },
                }
            )

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                        "request_id": g.request_id,
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data

    # The payload is either signature-verified or re-fetched at this point
    activity = ap.parse_activity(data)
    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)

    return Response(status=201)
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/followers")
def followers():
    """Followers: AP collection for API requests, HTML page otherwise."""
    q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}

    if is_api_request():
        _log_sig()
        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["actor"],
                col_name="followers",
            )
        )

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    # Only keep entries whose actor metadata was successfully cached
    followers = [doc["meta"] for doc in docs if "actor" in doc.get("meta", {})]
    return htmlify(
        render_template(
            "followers.html",
            followers_data=followers,
            older_than=older_than,
            newer_than=newer_than,
        )
    )
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/following")
def following():
    """Following: AP collection or HTML page (can be hidden via config)."""
    q = {**in_outbox(), **by_type(ActivityType.FOLLOW), **not_undo()}

    if is_api_request():
        _log_sig()
        if config.HIDE_FOLLOWING:
            # Advertise an empty collection when the list is hidden
            return activitypubify(
                **activitypub.simple_build_ordered_collection("following", [])
            )
        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["object"],
                col_name="following",
            )
        )

    # The HTML page is admin-only when the list is hidden
    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    # Only keep entries whose object metadata was successfully cached
    following = [
        (doc["remote_id"], doc["meta"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    lists = list(DB.lists.find())
    return htmlify(
        render_template(
            "following.html",
            following_data=following,
            older_than=older_than,
            newer_than=newer_than,
            lists=lists,
        )
    )
2018-05-18 13:41:41 -05:00
2018-06-16 15:02:10 -05:00
@app.route("/tags/<tag>")
def tags(tag):
    """Public notes carrying the given hashtag (HTML page or AP collection)."""
    # The same filter is used for the existence check, the HTML page and the
    # AP collection, so build it once
    q = {
        **in_outbox(),
        **by_hashtag(tag),
        **by_visibility(ap.Visibility.PUBLIC),
        **not_deleted(),
    }
    if not DB.activities.count(q):
        abort(404)

    if not is_api_request():
        return htmlify(
            render_template(
                "tags.html",
                tag=tag,
                outbox_data=DB.activities.find(q),
            )
        )

    _log_sig()
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"]["id"],
            col_name=f"tags/{tag}",
        )
    )
2018-05-18 13:41:41 -05:00
@app.route("/featured")
def featured():
    """AP collection of the pinned notes (API-only endpoint)."""
    if not is_api_request():
        abort(404)

    _log_sig()
    pinned_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.CREATE.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.pinned": True,
    }
    items = [
        clean_activity(doc["activity"]["object"])
        for doc in DB.activities.find(pinned_q)
    ]
    return activitypubify(
        **activitypub.simple_build_ordered_collection("featured", items)
    )
2018-06-16 15:02:10 -05:00
@app.route("/liked")
@api_required
def liked():
    """Liked collection: HTML page for the admin, AP collection otherwise."""
    if not is_api_request():
        page_q = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.LIKE.value,
            "meta.deleted": False,
            "meta.undo": False,
        }
        liked_page, older_than, newer_than = paginated_query(DB.activities, page_q)
        return htmlify(
            render_template(
                "liked.html",
                liked=liked_page,
                older_than=older_than,
                newer_than=newer_than,
            )
        )

    q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="liked",
        )
    )
2018-05-18 13:41:41 -05:00
2019-04-05 14:36:56 -05:00
#################
# Feeds
@app.route("/feed.json")
def json_feed():
    """JSON Feed rendering of the outbox."""
    body = json.dumps(feed.json_feed("/feed.json"))
    return Response(
        response=body,
        headers={"Content-Type": "application/json"},
    )
@app.route("/feed.atom")
def atom_feed():
    """Atom rendering of the outbox."""
    body = feed.gen_feed().atom_str()
    return Response(
        response=body,
        headers={"Content-Type": "application/atom+xml"},
    )
@app.route("/feed.rss")
def rss_feed():
    """RSS rendering of the outbox."""
    body = feed.gen_feed().rss_str()
    return Response(
        response=body,
        headers={"Content-Type": "application/rss+xml"},
    )