2018-05-18 13:41:41 -05:00
|
|
|
import json
|
|
|
|
import logging
|
2018-06-16 15:02:10 -05:00
|
|
|
import os
|
2018-07-17 17:20:32 -05:00
|
|
|
import traceback
|
2018-05-18 13:41:41 -05:00
|
|
|
from datetime import datetime
|
2018-06-16 15:02:10 -05:00
|
|
|
from typing import Any
|
|
|
|
from typing import Dict
|
|
|
|
from urllib.parse import urlparse
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
from bson.objectid import ObjectId
|
|
|
|
from flask import Flask
|
2018-06-16 15:02:10 -05:00
|
|
|
from flask import Response
|
2018-05-18 13:41:41 -05:00
|
|
|
from flask import abort
|
2018-06-16 15:02:10 -05:00
|
|
|
from flask import jsonify as flask_jsonify
|
2018-05-18 13:41:41 -05:00
|
|
|
from flask import redirect
|
|
|
|
from flask import render_template
|
2018-06-16 15:02:10 -05:00
|
|
|
from flask import request
|
2018-05-18 13:41:41 -05:00
|
|
|
from flask import session
|
|
|
|
from flask import url_for
|
|
|
|
from itsdangerous import BadSignature
|
2018-07-11 16:22:47 -05:00
|
|
|
from little_boxes import activitypub as ap
|
|
|
|
from little_boxes.activitypub import ActivityType
|
|
|
|
from little_boxes.activitypub import clean_activity
|
|
|
|
from little_boxes.activitypub import get_backend
|
|
|
|
from little_boxes.errors import ActivityGoneError
|
|
|
|
from little_boxes.errors import Error
|
|
|
|
from little_boxes.httpsig import HTTPSigAuth
|
|
|
|
from little_boxes.httpsig import verify_request
|
|
|
|
from little_boxes.webfinger import get_actor_url
|
|
|
|
from little_boxes.webfinger import get_remote_follow_template
|
2018-05-18 13:41:41 -05:00
|
|
|
from u2flib_server import u2f
|
|
|
|
|
2019-08-01 12:55:30 -05:00
|
|
|
import blueprints.admin
|
2019-08-01 13:56:32 -05:00
|
|
|
import blueprints.indieauth
|
2019-08-01 12:55:30 -05:00
|
|
|
import blueprints.tasks
|
|
|
|
import blueprints.well_known
|
2018-05-18 13:41:41 -05:00
|
|
|
import config
|
2019-08-01 15:00:26 -05:00
|
|
|
from core.meta import Box
|
|
|
|
from core.activitypub import embed_collection
|
2019-08-01 12:55:30 -05:00
|
|
|
from blueprints.api import _api_required
|
2018-06-16 15:02:10 -05:00
|
|
|
from config import ADMIN_API_KEY
|
2019-07-08 16:18:33 -05:00
|
|
|
from config import BLACKLIST
|
2018-05-18 13:41:41 -05:00
|
|
|
from config import DB
|
2018-06-16 15:02:10 -05:00
|
|
|
from config import HEADERS
|
|
|
|
from config import ID
|
|
|
|
from config import KEY
|
|
|
|
from config import ME
|
2018-07-06 16:15:49 -05:00
|
|
|
from config import MEDIA_CACHE
|
2018-05-21 10:04:53 -05:00
|
|
|
from config import VERSION
|
2019-08-01 15:00:26 -05:00
|
|
|
from core import activitypub
|
|
|
|
from core.meta import MetaKey
|
|
|
|
from core.meta import _meta
|
|
|
|
from core.shared import MY_PERSON
|
|
|
|
from core.shared import _add_answers_to_question
|
|
|
|
from core.shared import _build_thread
|
|
|
|
from core.shared import _get_ip
|
|
|
|
from core.shared import back
|
|
|
|
from core.shared import csrf
|
|
|
|
from core.shared import login_required
|
|
|
|
from core.shared import noindex
|
|
|
|
from core.shared import paginated_query
|
|
|
|
from core.shared import post_to_outbox
|
|
|
|
from core.tasks import Tasks
|
2019-07-29 12:36:22 -05:00
|
|
|
from utils import now
|
2018-06-16 15:02:10 -05:00
|
|
|
from utils.key import get_secret_key
|
2019-07-30 15:12:20 -05:00
|
|
|
from utils.template_filters import filters
|
2018-06-17 13:51:23 -05:00
|
|
|
|
2019-04-10 15:50:36 -05:00
|
|
|
# p = PousseTaches("http://localhost:7991", "http://localhost:5000")
|
2019-04-08 10:24:50 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
# Flask application setup: secret key, template filters, blueprints,
# CSRF and logging wiring.
app = Flask(__name__)
app.secret_key = get_secret_key("flask")
app.register_blueprint(filters)
app.register_blueprint(blueprints.admin.blueprint)
app.register_blueprint(blueprints.api.blueprint, url_prefix="/api")
app.register_blueprint(blueprints.indieauth.blueprint)
app.register_blueprint(blueprints.tasks.blueprint)
app.register_blueprint(blueprints.well_known.blueprint)
# CSRF checks are opt-in per view (see the explicit csrf.protect() calls)
# so that unauthenticated ActivityPub POSTs (e.g. /inbox) are not rejected.
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf.init_app(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
    root_logger.handlers = app.logger.handlers
else:
    # In production, reuse gunicorn's handlers and level so everything
    # ends up in the same log stream.
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)

# HTTP-signature helper built from this instance's key, used to sign
# outgoing ActivityPub requests.
SIG_AUTH = HTTPSigAuth(KEY)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2019-07-08 16:18:33 -05:00
|
|
|
def is_blacklisted(url: str) -> bool:
    """Return True when the host part of *url* is in the configured BLACKLIST.

    Any parsing error is logged and treated as "not blacklisted" so a
    malformed URL never blocks request processing.
    """
    try:
        host = urlparse(url).netloc
    except Exception:
        logger.exception(f"failed to blacklist for {url}")
        return False
    return host in BLACKLIST
|
2019-07-08 16:18:33 -05:00
|
|
|
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
@app.context_processor
def inject_config():
    """Inject config values and cached counters into every template render."""
    # Public, top-level "Create" notes; combined with Announces below to
    # produce the visible notes counter.
    q = {
        "type": "Create",
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.public": True,
    }
    notes_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    # FIXME(tsileo): rename to all_count, and remove poll answers from it
    all_q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.undo": False,
        "meta.deleted": False,
        "meta.poll_answer": False,
    }
    liked_count = DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "type": ActivityType.LIKE.value,
        }
    )
    # Followers live in the inbox (their Follow activities), followees in
    # the outbox (our own Follow activities).
    followers_q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    following_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    unread_notifications_q = {_meta(MetaKey.NOTIFICATION_UNREAD): True}

    logged_in = session.get("logged_in", False)

    # Admin-only counters are skipped (0) for anonymous visitors to avoid
    # extra DB queries.
    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=logged_in,
        followers_count=DB.activities.count(followers_q),
        following_count=DB.activities.count(following_q) if logged_in else 0,
        notes_count=notes_count,
        liked_count=liked_count,
        with_replies_count=DB.activities.count(all_q) if logged_in else 0,
        unread_notifications_count=DB.activities.count(unread_notifications_q)
        if logged_in
        else 0,
        me=ME,
        base_url=config.BASE_URL,
    )
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
@app.after_request
def set_x_powered_by(response):
    """Tag every outgoing response with the server software name."""
    headers = response.headers
    headers["X-Powered-By"] = "microblog.pub"
    return response
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
def jsonify(**data):
    """Serialize *data* as an ActivityPub JSON response.

    Adds the default JSON-LD @context when missing. In debug mode the
    plain JSON content type is used instead of activity+json.
    """
    data.setdefault("@context", config.DEFAULT_CTX)
    if app.debug:
        content_type = "application/json"
    else:
        content_type = "application/activity+json"
    return Response(
        response=json.dumps(data), headers={"Content-Type": content_type}
    )
|
|
|
|
|
|
|
|
|
|
|
|
def is_api_request():
    """Return True when the request's Accept header asks for an AP/JSON payload."""
    accept = request.headers.get("Accept")
    if accept is None:
        return False
    # Only the first (preferred) media type is considered.
    preferred = accept.split(",")[0]
    return preferred in HEADERS or preferred == "application/json"
|
|
|
|
|
2018-05-29 14:36:05 -05:00
|
|
|
|
|
|
|
@app.errorhandler(ValueError)
def handle_value_error(error):
    """Turn an uncaught ValueError into a 400 JSON response (and log it)."""
    logger.error(
        f"caught value error: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    resp = flask_jsonify(message=error.args[0])
    resp.status_code = 400
    return resp
|
|
|
|
|
|
|
|
|
|
|
|
@app.errorhandler(Error)
def handle_activitypub_error(error):
    """Serialize a little_boxes Error into its own JSON payload/status code."""
    logger.error(
        f"caught activitypub error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    resp = flask_jsonify(error.to_dict())
    resp.status_code = error.status_code
    return resp
|
|
|
|
|
|
|
|
|
2019-04-07 07:37:05 -05:00
|
|
|
class TaskError(Exception):
    """Raised to log the error for poussetaches."""

    def __init__(self):
        super().__init__()
        # Snapshot the formatted traceback of the exception currently being
        # handled; the error handler returns it in the 500 response body.
        self.message = traceback.format_exc()
|
|
|
|
|
|
|
|
|
|
|
|
@app.errorhandler(TaskError)
def handle_task_error(error):
    """Return a failed task's captured traceback as a 500 JSON response.

    Poussetaches keeps the response body, so the traceback stays visible
    in the task manager.
    """
    # Fixed copy/paste from handle_activitypub_error: this is a task error.
    logger.error(
        f"caught task error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify({"traceback": error.message})
    response.status_code = 500
    return response
|
|
|
|
|
|
|
|
|
2018-07-22 05:42:36 -05:00
|
|
|
# @app.errorhandler(Exception)
|
|
|
|
# def handle_other_error(error):
|
|
|
|
# logger.error(
|
|
|
|
# f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
|
|
|
|
# )
|
|
|
|
# response = flask_jsonify({})
|
|
|
|
# response.status_code = 500
|
|
|
|
# return response
|
2018-07-22 05:25:56 -05:00
|
|
|
|
|
|
|
|
2018-06-16 14:24:53 -05:00
|
|
|
# App routes
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-07-06 16:15:49 -05:00
|
|
|
ROBOTS_TXT = """User-agent: *
|
2018-07-06 17:08:44 -05:00
|
|
|
Disallow: /login
|
2018-07-06 16:15:49 -05:00
|
|
|
Disallow: /admin/
|
|
|
|
Disallow: /static/
|
|
|
|
Disallow: /media/
|
|
|
|
Disallow: /uploads/"""
|
|
|
|
|
2018-07-04 18:02:51 -05:00
|
|
|
|
2018-07-06 16:15:49 -05:00
|
|
|
@app.route("/robots.txt")
def robots_txt():
    """Serve the static robots.txt payload as plain text."""
    headers = {"Content-Type": "text/plain"}
    return Response(response=ROBOTS_TXT, headers=headers)
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/media/<media_id>")
@noindex
def serve_media(media_id):
    # Stream a cached media file out of the GridFS-backed media cache,
    # identified by its ObjectId.
    f = MEDIA_CACHE.fs.get(ObjectId(media_id))
    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    resp.headers.set("Content-Length", f.length)
    resp.headers.set("ETag", f.md5)
    resp.headers.set(
        "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
    )
    resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
    # NOTE(review): assumes every cached file is stored gzip-compressed —
    # confirm against the media cache's write path.
    resp.headers.set("Content-Encoding", "gzip")
    return resp
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
    # Stream an uploaded file out of the GridFS cache. The lookup is by
    # ObjectId only; fname is unused and just makes the URL human-readable.
    f = MEDIA_CACHE.fs.get(ObjectId(oid))
    resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
    resp.headers.set("Content-Length", f.length)
    resp.headers.set("ETag", f.md5)
    resp.headers.set(
        "Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
    )
    resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
    # NOTE(review): assumes every cached file is stored gzip-compressed —
    # confirm against the media cache's write path.
    resp.headers.set("Content-Encoding", "gzip")
    return resp
|
|
|
|
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
#######
|
|
|
|
# Login
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
|
|
|
|
@app.route("/remote_follow", methods=["GET", "POST"])
def remote_follow():
    """Let a visitor follow this actor from their own instance.

    GET renders the form; POST resolves the submitted handle through the
    remote instance's webfinger follow template and redirects there.
    """
    if request.method == "GET":
        return render_template("remote_follow.html")

    csrf.protect()
    profile = request.form.get("profile")
    # Guard against a missing/empty handle; previously this crashed with an
    # AttributeError (None.startswith) and returned a 500.
    if not profile:
        abort(400)
    if not profile.startswith("@"):
        profile = f"@{profile}"
    return redirect(get_remote_follow_template(profile).format(uri=ID))
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/authorize_follow", methods=["GET", "POST"])
@login_required
def authorize_follow():
    """Let the logged-in user follow a remote actor given a profile handle."""
    if request.method == "GET":
        return render_template(
            "authorize_remote_follow.html", profile=request.args.get("profile")
        )

    # Resolve the handle to an actor IRI via webfinger.
    actor = get_actor_url(request.form.get("profile"))
    if not actor:
        abort(500)

    # Already following? Skip creating a duplicate Follow activity.
    q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
        "activity.object": actor,
    }
    if DB.activities.count(q) > 0:
        return redirect("/following")

    follow = ap.Follow(
        actor=MY_PERSON.id, object=actor, to=[actor], cc=[ap.AS_PUBLIC], published=now()
    )
    post_to_outbox(follow)

    return redirect("/following")
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/u2f/register", methods=["GET", "POST"])
@login_required
def u2f_register():
    """Register a U2F security key for the admin login."""
    # TODO(tsileo): ensure no duplicates
    if request.method == "GET":
        payload = u2f.begin_registration(ID)
        # The challenge must survive in the session until the POST below.
        session["challenge"] = payload
        return render_template("u2f.html", payload=payload)
    else:
        resp = json.loads(request.form.get("resp"))
        device, device_cert = u2f.complete_registration(session["challenge"], resp)
        session["challenge"] = None
        DB.u2f.insert_one({"device": device, "cert": device_cert})
        # Log the user out and send them back to the login page.
        session["logged_in"] = False
        return redirect("/login")
|
2018-06-16 15:02:10 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
#######
|
|
|
|
# Activity pub routes
|
2018-07-16 15:24:14 -05:00
|
|
|
@app.route("/drop_cache")
@login_required
def drop_cache():
    """Admin endpoint: drop the cached remote actors collection."""
    DB.actors.drop()
    return "Done"
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
|
2018-09-03 13:21:33 -05:00
|
|
|
@app.route("/")
def index():
    """Render the homepage, or the actor document for ActivityPub requests."""
    if is_api_request():
        return jsonify(**ME)

    # Public, live, top-level notes and boosts; pinned ones are excluded
    # here and fetched separately below.
    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
    }
    # NOTE: removed a leftover debug print that materialized the entire
    # query result on every homepage render.

    pinned = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get("newer_than"):
        q_pinned = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.CREATE.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "meta.pinned": True,
        }
        pinned = list(DB.activities.find(q_pinned))

    # Leave room for the pinned notes on the first page.
    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned)
    )

    resp = render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=pinned,
    )
    return resp
|
2018-06-04 10:59:38 -05:00
|
|
|
|
|
|
|
|
2019-07-12 17:38:51 -05:00
|
|
|
@app.route("/all")
@login_required
def all():
    """Render the full timeline of outbox notes and boosts.

    Unlike the index, replies and non-public activities are included; the
    view is login-protected.
    """
    # NOTE: shadows the builtin all(); the name is kept because it also
    # serves as the Flask endpoint name.
    query = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.deleted": False,
        "meta.undo": False,
        "meta.poll_answer": False,
    }
    outbox_data, older_than, newer_than = paginated_query(DB.activities, query)

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
    )
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
def _meta_actors(docs):
    """Collect the cached actor object from each activity doc.

    Docs missing meta.actor are logged and skipped instead of breaking the
    whole page.
    """
    actors = []
    for doc in docs:
        try:
            actors.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    return actors


@app.route("/note/<note_id>")
def note_by_id(note_id):
    """Render the permalink page of a local note (thread, likes, shares)."""
    if is_api_request():
        # AP clients get the raw activity instead of HTML.
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(note_id)}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    raw_likes = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.LIKE.value,
                "$or": [
                    # FIXME(tsileo): remove all the useless $or
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    # The likes/shares actor-extraction loops were duplicated; both now use
    # the shared _meta_actors helper.
    likes = _meta_actors(raw_likes)
    app.logger.info(f"likes={likes!r}")

    raw_shares = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.ANNOUNCE.value,
                "$or": [
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    shares = _meta_actors(raw_shares)
    app.logger.info(f"shares={shares!r}")

    return render_template(
        "note.html", likes=likes, shares=shares, thread=thread, note=data
    )
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-05-28 12:46:23 -05:00
|
|
|
def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
    """Embed the replies/likes/shares collections into a Create activity.

    Mutates *raw_doc* in place and returns it; non-Create docs pass
    through untouched.
    """
    if raw_doc["activity"]["type"] != ActivityType.CREATE.value:
        return raw_doc

    meta = raw_doc.get("meta", {})
    remote_id = raw_doc["remote_id"]
    obj = raw_doc["activity"]["object"]

    obj["replies"] = embed_collection(
        meta.get("count_direct_reply", 0), f"{remote_id}/replies"
    )
    obj["likes"] = embed_collection(meta.get("count_like", 0), f"{remote_id}/likes")
    obj["shares"] = embed_collection(
        meta.get("count_boost", 0), f"{remote_id}/shares"
    )
    return raw_doc
|
|
|
|
|
|
|
|
|
2018-06-04 12:10:04 -05:00
|
|
|
def remove_context(activity: Dict[str, Any]) -> Dict[str, Any]:
    """Strip the JSON-LD @context key (in place) before embedding an activity."""
    activity.pop("@context", None)
    return activity
|
|
|
|
|
|
|
|
|
|
|
|
def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str, Any]:
    """Build the public ActivityPub representation of a stored activity doc.

    When *embed* is True the @context is stripped so the activity can be
    nested inside another JSON-LD document.
    """
    # add_extra_collection mutates and returns the same doc.
    activity = clean_activity(add_extra_collection(raw_doc)["activity"])

    # Handle Questions
    # TODO(tsileo): what about object embedded by ID/URL?
    _add_answers_to_question(raw_doc)

    return remove_context(activity) if embed else activity
|
|
|
|
|
2018-05-28 12:46:23 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox", methods=["GET", "POST"])
def outbox():
    """ActivityPub outbox: GET serves the collection, POST accepts a new activity."""
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
        # Only public, live Create/Announce activities are exposed.
        q = {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        }
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: activity_from_doc(doc, embed=True),
                col_name="outbox",
            )
        )

    # Handle POST request
    # Posting requires the admin API key (signed token).
    try:
        _api_required()
    except BadSignature:
        abort(401)

    data = request.get_json(force=True)
    # NOTE(review): debug print left in place.
    print(data)
    activity = ap.parse_activity(data)
    activity_id = post_to_outbox(activity)

    return Response(status=201, headers={"Location": activity_id})
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>")
def outbox_detail(item_id):
    """Serve a single public outbox activity as ActivityPub JSON."""
    doc = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.public": True,
        }
    )
    if not doc:
        abort(404)

    if doc["meta"].get("deleted", False):
        # Deleted activities are served as a Tombstone with 410 Gone.
        obj = ap.parse_activity(doc["activity"])
        resp = jsonify(**obj.get_tombstone().to_dict())
        resp.status_code = 410
        return resp
    return jsonify(**activity_from_doc(doc))
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>/activity")
def outbox_activity(item_id):
    """Serve the inner object of a public outbox Create activity."""
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.public": True,
        }
    )
    if not data:
        abort(404)

    obj = activity_from_doc(data)
    if data["meta"].get("deleted", False):
        # Deleted: serve the object's Tombstone with 410 Gone instead.
        obj = ap.parse_activity(data["activity"])
        resp = jsonify(**obj.get_object().get_tombstone().to_dict())
        resp.status_code = 410
        return resp

    # Only Create activities carry a servable inner object here.
    if obj["type"] != ActivityType.CREATE.value:
        abort(404)
    return jsonify(**obj["object"])
|
2018-05-28 12:46:23 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
    """Serve the replies collection of a public outbox Create activity."""
    if not is_api_request():
        abort(404)
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.deleted": False,
            "meta.public": True,
        }
    )
    if not data:
        abort(404)
    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Replies are public Create activities whose object points back at
    # this note via inReplyTo.
    q = {
        "meta.deleted": False,
        "meta.public": True,
        "type": ActivityType.CREATE.value,
        "activity.object.inReplyTo": obj.get_object().id,
    }

    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name=f"outbox/{item_id}/replies",
            first_page=request.args.get("page") == "first",
        )
    )
|
2018-05-31 18:26:23 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
    """Serve the likes collection of a public outbox Create activity."""
    if not is_api_request():
        abort(404)
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.deleted": False,
            "meta.public": True,
        }
    )
    if not data:
        abort(404)
    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Likes may reference the object either embedded (object.id) or by IRI.
    # NOTE(review): unlike the note page's likes query, deleted likes are
    # not filtered out here — confirm whether that is intentional.
    q = {
        "meta.undo": False,
        "type": ActivityType.LIKE.value,
        "$or": [
            {"activity.object.id": obj.get_object().id},
            {"activity.object": obj.get_object().id},
        ],
    }

    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name=f"outbox/{item_id}/likes",
            first_page=request.args.get("page") == "first",
        )
    )
|
2018-05-31 18:26:23 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/outbox/<item_id>/shares")
def outbox_activity_shares(item_id):
    """Serve the shares (Announce) collection of an outbox Create activity."""
    if not is_api_request():
        abort(404)
    # NOTE(review): unlike the likes/replies endpoints, this lookup does not
    # require meta.public — confirm whether that is intentional.
    data = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": back.activity_url(item_id),
            "meta.deleted": False,
        }
    )
    if not data:
        abort(404)
    obj = ap.parse_activity(data["activity"])
    if obj.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Shares may reference the object either embedded (object.id) or by IRI.
    q = {
        "meta.undo": False,
        "type": ActivityType.ANNOUNCE.value,
        "$or": [
            {"activity.object.id": obj.get_object().id},
            {"activity.object": obj.get_object().id},
        ],
    }

    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name=f"outbox/{item_id}/shares",
            first_page=request.args.get("page") == "first",
        )
    )
|
2018-05-31 18:26:23 -05:00
|
|
|
|
|
|
|
|
2019-08-01 12:55:30 -05:00
|
|
|
@app.route("/api/key")
@login_required
def api_user_key():
    """Return the admin API key as JSON (login required)."""
    key = ADMIN_API_KEY
    return flask_jsonify(api_key=key)
|
2018-06-16 14:24:53 -05:00
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-08-01 12:55:30 -05:00
|
|
|
@app.route("/inbox", methods=["GET", "POST"])  # noqa: C901
def inbox():
    """ActivityPub inbox endpoint.

    GET (API requests only, authenticated): returns the inbox as an
    ActivityPub ordered collection.

    POST: accepts an incoming activity. The payload is rejected (422) when
    it is not a JSON object, dropped (201) when it comes from a blacklisted
    host, and verified via HTTP signatures — falling back to re-fetching the
    activity by IRI. Unverifiable payloads are stored in the trash
    collection for later analysis and rejected with a 422.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST/ inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps({"error": "failed to decode request as JSON"}),
        )

    # Check the blacklist now to see if we can return super early
    # (explicit parens: each clause checks the activity id, an embedded
    # object's id, or an object referenced by IRI)
    if (
        ("id" in data and is_blacklisted(data["id"]))
        or (
            "object" in data
            and isinstance(data["object"], dict)
            and "id" in data["object"]
            and is_blacklisted(data["object"]["id"])
        )
        or (
            "object" in data
            and isinstance(data["object"], str)
            and is_blacklisted(data["object"])
        )
    ):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        return Response(status=201)

    logger.debug(f"req_headers={request.headers}")
    logger.debug(f"raw_data={data}")
    try:
        if not verify_request(
            request.method, request.path, request.headers, request.data
        ):
            raise Exception("failed to verify request")
    except Exception:
        logger.exception(
            "failed to verify request, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(f"received a Delete for an unknown actor {data!r}, drop it")

                return Response(status=201)
            # NOTE(review): if the Gone activity is NOT such a Delete, control
            # falls through to `data = remote_data` below with `remote_data`
            # unbound (NameError -> 500) — confirm whether this path should
            # instead be trashed like other fetch failures
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)"
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis

            # Track/store the payload for analysis
            ip, geoip = _get_ip()

            # insert_one instead of the deprecated Collection.insert
            DB.trash.insert_one(
                {
                    "activity": data,
                    "meta": {
                        "ts": datetime.now().timestamp(),
                        "ip_address": ip,
                        "geoip": geoip,
                        "tb": traceback.format_exc(),
                        "headers": dict(request.headers),
                    },
                }
            )

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)"
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data
    activity = ap.parse_activity(data)
    logger.debug(f"inbox activity={activity}/{data}")
    post_to_inbox(activity)

    return Response(status=201)
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/followers")
def followers():
    """Shows this instance's followers: an ActivityPub ordered collection
    for API requests, a paginated HTML page otherwise."""
    q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}

    if is_api_request():
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["actor"],
                col_name="followers",
            )
        )

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    followers_data = []
    for doc in docs:
        meta = doc.get("meta", {})
        # Only show follows whose actor metadata has been resolved
        if "actor" in meta:
            followers_data.append(meta["actor"])
    return render_template(
        "followers.html",
        followers_data=followers_data,
        older_than=older_than,
        newer_than=newer_than,
    )
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/following")
def following():
    """Shows who this instance follows: an ActivityPub ordered collection
    for API requests, a paginated HTML page otherwise."""
    q = {"box": Box.OUTBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}

    if is_api_request():
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["object"],
                col_name="following",
            )
        )

    # NOTE(review): HIDE_FOLLOWING only gates the HTML view; the API branch
    # above is still served regardless — confirm this is intentional
    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    following_data = [
        (doc["remote_id"], doc["meta"]["object"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    return render_template(
        "following.html",
        following_data=following_data,
        older_than=older_than,
        newer_than=newer_than,
        lists=list(DB.lists.find()),
    )
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/tags/<tag>")
def tags(tag):
    """Displays the outbox posts carrying a given hashtag: an HTML page for
    regular requests, an ActivityPub collection of object IDs for API
    requests. 404s when no outbox activity carries the tag."""
    hashtag = "#" + tag
    if not DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "activity.object.tag.type": "Hashtag",
            "activity.object.tag.name": hashtag,
        }
    ):
        abort(404)

    if not is_api_request():
        return render_template(
            "tags.html",
            tag=tag,
            outbox_data=DB.activities.find(
                {
                    "box": Box.OUTBOX.value,
                    "type": ActivityType.CREATE.value,
                    "meta.deleted": False,
                    "activity.object.tag.type": "Hashtag",
                    "activity.object.tag.name": hashtag,
                }
            ),
        )

    q = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.CREATE.value,
        "activity.object.tag.type": "Hashtag",
        "activity.object.tag.name": hashtag,
    }
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"]["id"],
            col_name=f"tags/{tag}",
        )
    )
|
2018-05-18 13:41:41 -05:00
|
|
|
|
|
|
|
|
2018-07-22 14:54:24 -05:00
|
|
|
@app.route("/featured")
def featured():
    """ActivityPub collection of the pinned (featured) outbox objects.

    API requests only; everything else gets a 404.
    """
    if not is_api_request():
        abort(404)

    pinned_query = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.CREATE.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.pinned": True,
    }
    items = []
    for doc in DB.activities.find(pinned_query):
        items.append(clean_activity(doc["activity"]["object"]))
    return jsonify(**activitypub.simple_build_ordered_collection("featured", items))
|
2018-07-22 14:54:24 -05:00
|
|
|
|
|
|
|
|
2018-06-16 15:02:10 -05:00
|
|
|
@app.route("/liked")
def liked():
    """Shows the Like activities: a paginated HTML page for regular
    requests, an ActivityPub ordered collection for API requests."""
    if not is_api_request():
        html_q = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.LIKE.value,
            "meta.deleted": False,
            "meta.undo": False,
        }
        liked, older_than, newer_than = paginated_query(DB.activities, html_q)
        return render_template(
            "liked.html", liked=liked, older_than=older_than, newer_than=newer_than
        )

    # NOTE(review): unlike the HTML view, the API query is not restricted to
    # the outbox box — confirm this is intentional
    api_q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=api_q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="liked",
        )
    )
|
|
|
|
|
2018-05-18 13:41:41 -05:00
|
|
|
|
2019-04-05 14:36:56 -05:00
|
|
|
#################
|
|
|
|
# Feeds
|
|
|
|
|
2019-04-07 14:24:52 -05:00
|
|
|
|
2019-02-24 14:04:09 -06:00
|
|
|
@app.route("/feed.json")
def json_feed():
    """Serves the blog as a JSON Feed."""
    body = json.dumps(activitypub.json_feed("/feed.json"))
    return Response(
        headers={"Content-Type": "application/json"},
        response=body,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/feed.atom")
def atom_feed():
    """Serves the blog as an Atom feed."""
    body = activitypub.gen_feed().atom_str()
    return Response(
        headers={"Content-Type": "application/atom+xml"},
        response=body,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/feed.rss")
def rss_feed():
    """Serves the blog as an RSS feed."""
    body = activitypub.gen_feed().rss_str()
    return Response(
        headers={"Content-Type": "application/rss+xml"},
        response=body,
    )
|
2019-04-05 04:35:48 -05:00
|
|
|
|
|
|
|
|
|
|
|
def post_to_inbox(activity: ap.BaseActivity) -> None:
    """Persists an incoming activity in the inbox and schedules its processing.

    Activities from blocked actors, and activities already present in the
    inbox, are logged and silently dropped (the checks run in that order).
    """
    # Check for Block activity
    actor = activity.get_actor()
    if back.outbox_is_blocked(MY_PERSON, actor.id):
        app.logger.info(
            f"actor {actor!r} is blocked, dropping the received activity {activity!r}"
        )
        return

    if back.inbox_check_duplicate(MY_PERSON, activity.id):
        # The activity is already in the inbox
        app.logger.info(f"received duplicate activity {activity!r}, dropping it")
        return

    # Store the activity, then hand it off to the async task pipeline
    back.save(Box.INBOX, activity)
    Tasks.process_new_activity(activity.id)

    app.logger.info(f"spawning task for {activity!r}")
    Tasks.finish_post_to_inbox(activity.id)
|