Start migration to little_boxes
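This change swaps several home-grown helpers for their counterparts in the little_boxes ActivityPub library: `strtobool` is now imported from `little_boxes.utils`, the local RSA `Key` class in `utils/key.py` is replaced by `little_boxes.key.Key` behind a new `get_key()` helper, the actor's `publicKey` in `config.py` is produced by `KEY.to_dict()`, the in-tree copies of the Little Boxes utilities (errors, URL validation, collection parsing) are deleted, and `git+https://github.com/tsileo/little-boxes.git` is added to the requirements.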

activitypub.py (1328 lines changed)
File diff suppressed because it is too large

app.py (174 lines changed)

@@ -67,7 +67,7 @@ from utils.errors import ActivityNotFoundError

from typing import Dict, Any

app = Flask(__name__)
app.secret_key = get_secret_key('flask')
app.config.update(

@@ -137,23 +137,23 @@ def clean_html(html):
    return bleach.clean(html, tags=ALLOWED_TAGS)


@app.template_filter()
def quote_plus(t):
    return urllib.parse.quote_plus(t)


@app.template_filter()
def is_from_outbox(t):
    return t.startswith(ID)


@app.template_filter()
def clean(html):
    return clean_html(html)


@app.template_filter()
def html2plaintext(body):
    return html2text(body)

@@ -183,7 +183,7 @@ def format_timeago(val):
            return timeago.format(datetime.strptime(val, '%Y-%m-%dT%H:%M:%SZ'), datetime.utcnow())
        except:
            return timeago.format(datetime.strptime(val, '%Y-%m-%dT%H:%M:%S.%fZ'), datetime.utcnow())

    return val


def _is_img(filename):

@@ -279,7 +279,7 @@ def handle_activitypub_error(error):
    return response


# App routes

#######
# Login

@@ -487,7 +487,7 @@ def _build_thread(data, include_children=True):
    def _flatten(node, level=0):
        node['_level'] = level
        thread.append(node)

        for snode in sorted(idx[node['activity']['object']['id']]['_nodes'], key=lambda d: d['activity']['object']['published']):
            _flatten(snode, level=level+1)
    _flatten(idx[root_id])

@@ -495,10 +495,10 @@ def _build_thread(data, include_children=True):
    return thread


@app.route('/note/<note_id>')
def note_by_id(note_id):
    data = DB.outbox.find_one({'id': note_id})
    if not data:
        abort(404)
    if data['meta'].get('deleted', False):
        abort(410)

@@ -511,7 +511,7 @@ def note_by_id(note_id):
        '$or': [{'activity.object.id': data['activity']['object']['id']},
                {'activity.object': data['activity']['object']['id']}],
    }))
    likes = [ACTOR_SERVICE.get(doc['activity']['actor']) for doc in likes]

    shares = list(DB.inbox.find({
        'meta.undo': False,

@@ -519,7 +519,7 @@ def note_by_id(note_id):
        '$or': [{'activity.object.id': data['activity']['object']['id']},
                {'activity.object': data['activity']['object']['id']}],
    }))
    shares = [ACTOR_SERVICE.get(doc['activity']['actor']) for doc in shares]

    return render_template('note.html', likes=likes, shares=shares, me=ME, thread=thread, note=data)

@@ -536,7 +536,7 @@ def nodeinfo():
            'openRegistrations': False,
            'usage': {'users': {'total': 1}, 'localPosts': DB.outbox.count()},
            'metadata': {
                'sourceCode': 'https://github.com/tsileo/microblog.pub',
                'nodeName': f'@{USERNAME}@{DOMAIN}',
            },
        }),

@@ -551,7 +551,7 @@ def wellknown_nodeinfo():
                'rel': 'http://nodeinfo.diaspora.software/ns/schema/2.0',
                'href': f'{ID}/nodeinfo',
            }

        ],
    )

@@ -616,11 +616,11 @@ def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str,


@app.route('/outbox', methods=['GET', 'POST'])
def outbox():
    if request.method == 'GET':
        if not is_api_request():
            abort(404)
        # TODO(tsileo): filter the outbox if not authenticated
        # FIXME(tsileo): filter deleted, add query support for build_ordered_collection
        q = {

@@ -639,7 +639,7 @@ def outbox():
        _api_required()
    except BadSignature:
        abort(401)

    data = request.get_json(force=True)
    print(data)
    activity = activitypub.parse_activity(data)

@@ -785,7 +785,7 @@ def admin():
        col_followers=DB.followers.count(),
        col_following=DB.following.count(),
    )


@app.route('/new', methods=['GET'])
@login_required

@@ -833,7 +833,7 @@ def notifications():
        'meta.deleted': False,
    }
    # TODO(tsileo): also include replies via regex on Create replyTo
    q = {'$or': [q, {'type': 'Follow'}, {'type': 'Accept'}, {'type': 'Undo', 'activity.object.type': 'Follow'},
                 {'type': 'Announce', 'activity.object': {'$regex': f'^{BASE_URL}'}},
                 {'type': 'Create', 'activity.object.inReplyTo': {'$regex': f'^{BASE_URL}'}},
                 ]}

@@ -1004,27 +1004,27 @@ def stream():
    )


@app.route('/inbox', methods=['GET', 'POST'])
def inbox():
    if request.method == 'GET':
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return jsonify(**activitypub.build_ordered_collection(
            DB.inbox,
            q={'meta.deleted': False},
            cursor=request.args.get('cursor'),
            map_func=lambda doc: remove_context(doc['activity']),
        ))

    data = request.get_json(force=True)
    logger.debug(f'req_headers={request.headers}')
    logger.debug(f'raw_data={data}')
    try:
        if not verify_request(ACTOR_SERVICE):
            raise Exception('failed to verify request')
    except Exception:

@@ -1039,13 +1039,13 @@ def inbox():
            response=json.dumps({'error': 'failed to verify request (using HTTP signatures or fetching the IRI)'}),
        )

    activity = activitypub.parse_activity(data)
    logger.debug(f'inbox activity={activity}/{data}')
    activity.process_from_inbox()

    return Response(
        status=201,
    )


@app.route('/api/debug', methods=['GET', 'DELETE'])

@@ -1082,17 +1082,17 @@ def api_upload():
    print('upload OK')
    print(filename)
    attachment = [
        {'mediaType': mtype,
         'name': rfilename,
         'type': 'Document',
         'url': BASE_URL + f'/static/media/{filename}'
         },
    ]
    print(attachment)
    content = request.args.get('content')
    to = request.args.get('to')
    note = activitypub.Note(
        cc=[ID+'/followers'],
        to=[to if to else config.AS_PUBLIC],
        content=content,  # TODO(tsileo): handle markdown
        attachment=attachment,

@@ -1104,30 +1104,30 @@ def api_upload():
    print(create.to_dict())
    create.post_to_outbox()
    print('posted')

    return Response(
        status=201,
        response='OK',
    )


@app.route('/api/new_note', methods=['POST'])
@api_required
def api_new_note():
    source = _user_api_arg('content')
    if not source:
        raise ValueError('missing content')

    _reply, reply = None, None
    try:
        _reply = _user_api_arg('reply')
    except ValueError:
        pass

    content, tags = parse_markdown(source)
    to = request.args.get('to')
    cc = [ID+'/followers']

    if _reply:
        reply = activitypub.parse_activity(OBJECT_SERVICE.get(_reply))
        cc.append(reply.attributedTo)

@@ -1136,8 +1136,8 @@ def api_new_note():
        if tag['type'] == 'Mention':
            cc.append(tag['href'])

    note = activitypub.Note(
        cc=list(set(cc)),
        to=[to if to else config.AS_PUBLIC],
        content=content,
        tag=tags,

@@ -1193,20 +1193,20 @@ def api_follow():
    return _user_api_response(activity=follow.id)


@app.route('/followers')
def followers():
    if is_api_request():
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.followers,
                cursor=request.args.get('cursor'),
                map_func=lambda doc: doc['remote_actor'],
            )
        )

    followers = [ACTOR_SERVICE.get(doc['remote_actor']) for doc in DB.followers.find(limit=50)]
    return render_template(
        'followers.html',
        me=ME,
        notes=DB.inbox.find({'object.object.type': 'Note'}).count(),
        followers=DB.followers.count(),

@@ -1225,7 +1225,7 @@ def following():
                map_func=lambda doc: doc['remote_actor'],
            ),
        )

    following = [ACTOR_SERVICE.get(doc['remote_actor']) for doc in DB.following.find(limit=50)]
    return render_template(
        'following.html',

@@ -1327,13 +1327,13 @@ def get_client_id_data(url):


@app.route('/indieauth/flow', methods=['POST'])
@login_required
def indieauth_flow():
    auth = dict(
        scope=' '.join(request.form.getlist('scopes')),
        me=request.form.get('me'),
        client_id=request.form.get('client_id'),
        state=request.form.get('state'),
        redirect_uri=request.form.get('redirect_uri'),
        response_type=request.form.get('response_type'),
    )

@@ -1354,14 +1354,14 @@ def indieauth_flow():
    return redirect(red)


# @app.route('/indieauth', methods=['GET', 'POST'])
def indieauth_endpoint():
    if request.method == 'GET':
        if not session.get('logged_in'):
            return redirect(url_for('login', next=request.url))

        me = request.args.get('me')
        # FIXME(tsileo): ensure me == ID
        client_id = request.args.get('client_id')
        redirect_uri = request.args.get('redirect_uri')
        state = request.args.get('state', '')

@@ -1397,7 +1397,7 @@ def indieauth_endpoint():
        abort(403)
        return

    session['logged_in'] = True
    me = auth['me']
    state = auth['state']
    scope = ' '.join(auth['scope'])

config.py (28 lines changed)

@@ -6,10 +6,9 @@ import requests
from itsdangerous import JSONWebSignatureSerializer
from datetime import datetime

from utils import strtobool
from utils.key import Key, KEY_DIR, get_secret_key
from utils.actor_service import ActorService
from utils.object_service import ObjectService
from little_boxes.utils import strtobool
from utils.key import KEY_DIR, get_key, get_secret_key


def noop():
    pass

@@ -17,7 +16,7 @@ def noop():

CUSTOM_CACHE_HOOKS = False
try:
    from cache_hooks import purge as custom_cache_purge_hook
except ModuleNotFoundError:
    custom_cache_purge_hook = noop

@@ -58,8 +57,6 @@ USER_AGENT = (
    f'(microblog.pub/{VERSION}; +{BASE_URL})'
)

# TODO(tsileo): use 'mongo:27017;
# mongo_client = MongoClient(host=['mongo:27017'])
mongo_client = MongoClient(
    host=[os.getenv('MICROBLOGPUB_MONGODB_HOST', 'localhost:27017')],
)

@@ -67,23 +64,26 @@ mongo_client = MongoClient(
DB_NAME = '{}_{}'.format(USERNAME, DOMAIN.replace('.', '_'))
DB = mongo_client[DB_NAME]


def _drop_db():
    if not DEBUG_MODE:
        return

    mongo_client.drop_database(DB_NAME)


KEY = Key(USERNAME, DOMAIN, create=True)
KEY = get_key(ID, USERNAME, DOMAIN)


JWT_SECRET = get_secret_key('jwt')
JWT = JSONWebSignatureSerializer(JWT_SECRET)


def _admin_jwt_token() -> str:
    return JWT.dumps({'me': 'ADMIN', 'ts': datetime.now().timestamp()}).decode('utf-8')  # type: ignore


ADMIN_API_KEY = get_secret_key('admin_api_key', _admin_jwt_token)


ME = {
    "@context": [

@@ -107,13 +107,5 @@ ME = {
        "type": "Image",
        "url": ICON_URL,
    },
    "publicKey": {
        "id": ID+"#main-key",
        "owner": ID,
        "publicKeyPem": KEY.pubkey_pem,
    },
    "publicKey": KEY.to_dict(),
}
print(ME)

ACTOR_SERVICE = ActorService(USER_AGENT, DB.actors_cache, ID, ME, DB.instances)
OBJECT_SERVICE = ObjectService(USER_AGENT, DB.objects_cache, DB.inbox, DB.outbox, DB.instances)

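The key handling is the most visible part of the migration: instead of assembling the actor's `publicKey` dict by hand from the old local `Key` class, `config.py` now asks `get_key()` for a `little_boxes.key.Key` and embeds `KEY.to_dict()`. A minimal sketch of the new wiring, assuming `to_dict()` exposes the same `id`/`owner`/`publicKeyPem` fields the old literal spelled out (the values below are placeholders, not the real config):

```python
from utils.key import get_key

# Placeholder values standing in for the real USERNAME/DOMAIN/ID constants.
USERNAME, DOMAIN = "dev", "example.com"
ID = f"https://{DOMAIN}"

KEY = get_key(ID, USERNAME, DOMAIN)  # loads config/key_dev_example_com.pem, or generates it

ME = {
    "type": "Person",
    "id": ID,
    # Assumed to be equivalent to the old hand-written dict:
    # {"id": ID + "#main-key", "owner": ID, "publicKeyPem": ...}
    "publicKey": KEY.to_dict(),
}
```
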
@@ -1,37 +0,0 @@
# Little Boxes

Tiny ActivityPub framework written in Python, both database and server agnostic.

## Getting Started

```python
from little_boxes import activitypub as ap

from mydb import db_client


class MyBackend(BaseBackend):

    def __init__(self, db_connection):
        self.db_connection = db_connection

    def inbox_new(self, as_actor, activity):
        # Save activity as "as_actor"
        # [...]

    def post_to_remote_inbox(self, as_actor, payload, recipient):
        # Send the activity to the remote actor
        # [...]


db_con = db_client()
my_backend = MyBackend(db_con)

ap.use_backend(my_backend)

me = ap.Person({})  # Init an actor
outbox = ap.Outbox(me)

follow = ap.Follow(actor=me, object='http://iri-i-want-follow')
outbox.post(follow)
```

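This deleted README describes the pattern the rest of the commit is adopting: the application only supplies a backend (persistence via hooks such as `inbox_new`, delivery via `post_to_remote_inbox`) and registers it with `ap.use_backend(...)`; higher-level objects like `Outbox` and `Follow` then stay database- and server-agnostic.
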
@@ -1,12 +0,0 @@
import logging

logger = logging.getLogger(__name__)


def strtobool(s: str) -> bool:
    if s in ['y', 'yes', 'true', 'on', '1']:
        return True
    if s in ['n', 'no', 'false', 'off', '0']:
        return False

    raise ValueError(f'cannot convert {s} to bool')

File diff suppressed because it is too large
@@ -1,55 +0,0 @@
"""Errors raised by this package."""
from typing import Optional
from typing import Dict
from typing import Any


class Error(Exception):
    """HTTP-friendly base error, with a status code, a message and an optional payload."""
    status_code = 400

    def __init__(
        self, message: str,
        status_code: Optional[int] = None,
        payload: Optional[Dict[str, Any]] = None,
    ) -> None:
        Exception.__init__(self)
        self.message = message
        if status_code is not None:
            self.status_code = status_code
        self.payload = payload

    def to_dict(self) -> Dict[str, Any]:
        rv = dict(self.payload or ())
        rv['message'] = self.message
        return rv

    def __repr__(self) -> str:
        return (
            f'{self.__class__.__qualname__}({self.message!r}, payload={self.payload!r}, status_code={self.status_code})'
        )


class ActorBlockedError(Error):
    """Raised when an activity from a blocked actor is received."""


class NotFromOutboxError(Error):
    """Raised when an activity targets an object from the inbox when an object from the outbox was expected."""


class ActivityNotFoundError(Error):
    """Raised when an activity is not found."""
    status_code = 404


class BadActivityError(Error):
    """Raised when an activity could not be parsed/initialized."""


class RecursionLimitExceededError(BadActivityError):
    """Raised when the recursion limit for fetching remote objects was exceeded (likely a collection)."""


class UnexpectedActivityTypeError(BadActivityError):
    """Raised when another activity type was expected."""

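These exceptions are built to map directly onto HTTP responses (a `status_code` plus a JSON-friendly `to_dict()`), which is what `handle_activitypub_error` in app.py relies on. A minimal sketch of such a handler, using only the `Error` API shown above; the wiring is illustrative, not a copy of the real app.py handler, and the import path is assumed to be the `utils.errors` module app.py pulls its error types from:

```python
import json

from flask import Flask, Response

from utils.errors import Error  # path assumed; adjust to wherever Error lives after the migration

app = Flask(__name__)


@app.errorhandler(Error)
def handle_activitypub_error(error: Error) -> Response:
    # Reuse the exception's HTTP status code and serialize its payload + message.
    return Response(
        status=error.status_code,
        headers={'Content-Type': 'application/json'},
        response=json.dumps(error.to_dict()),
    )
```
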
@@ -1,47 +0,0 @@
import logging
import os
import socket
import ipaddress
from urllib.parse import urlparse

from . import strtobool
from .errors import Error

logger = logging.getLogger(__name__)


class InvalidURLError(Error):
    pass


def is_url_valid(url: str) -> bool:
    parsed = urlparse(url)
    if parsed.scheme not in ['http', 'https']:
        return False

    # XXX in debug mode, we want to allow requests to localhost to test the federation with local instances
    debug_mode = strtobool(os.getenv('MICROBLOGPUB_DEBUG', 'false'))
    if debug_mode:
        return True

    if parsed.hostname in ['localhost']:
        return False

    try:
        ip_address = socket.getaddrinfo(parsed.hostname, parsed.port or 80)[0][4][0]
    except socket.gaierror:
        logger.exception(f'failed to lookup url {url}')
        return False

    if ipaddress.ip_address(ip_address).is_private:
        logger.info(f'rejecting private URL {url}')
        return False

    return True


def check_url(url: str) -> None:
    if not is_url_valid(url):
        raise InvalidURLError(f'"{url}" is invalid')

    return None

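The point of `check_url` is to refuse outgoing fetches to unsafe targets: non-HTTP schemes, localhost outside of debug mode, and hostnames that resolve to private addresses. A small sketch of the intended call pattern; the import path and the `fetch_remote_object` helper are illustrative, not part of the codebase:

```python
import requests

from utils.urlutils import InvalidURLError, check_url  # module path assumed


def fetch_remote_object(url: str) -> dict:
    # Validate the target before making any network request.
    check_url(url)  # raises InvalidURLError for private or otherwise unsafe URLs
    resp = requests.get(url, headers={'Accept': 'application/activity+json'})
    resp.raise_for_status()
    return resp.json()


try:
    fetch_remote_object('http://10.0.0.1/actor')
except InvalidURLError as err:
    print(err)
```
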
@@ -1,60 +0,0 @@
"""Contains some ActivityPub related utils."""
from typing import Optional
from typing import Callable
from typing import Dict
from typing import List
from typing import Any

from .errors import RecursionLimitExceededError
from .errors import UnexpectedActivityTypeError


def parse_collection(
    payload: Optional[Dict[str, Any]] = None,
    url: Optional[str] = None,
    level: int = 0,
    fetcher: Optional[Callable[[str], Dict[str, Any]]] = None,
) -> List[Any]:
    """Resolve/fetch a `Collection`/`OrderedCollection`."""
    if not fetcher:
        raise Exception('must provide a fetcher')
    if level > 3:
        raise RecursionLimitExceededError('recursion limit exceeded')

    # Go through all the pages
    out: List[Any] = []
    if url:
        payload = fetcher(url)
    if not payload:
        raise ValueError('must at least provide a payload or an URL')

    if payload['type'] in ['Collection', 'OrderedCollection']:
        if 'orderedItems' in payload:
            return payload['orderedItems']
        if 'items' in payload:
            return payload['items']
        if 'first' in payload:
            if 'orderedItems' in payload['first']:
                out.extend(payload['first']['orderedItems'])
            if 'items' in payload['first']:
                out.extend(payload['first']['items'])
            n = payload['first'].get('next')
            if n:
                out.extend(parse_collection(url=n, level=level+1, fetcher=fetcher))
        return out

    while payload:
        if payload['type'] in ['CollectionPage', 'OrderedCollectionPage']:
            if 'orderedItems' in payload:
                out.extend(payload['orderedItems'])
            if 'items' in payload:
                out.extend(payload['items'])
            n = payload.get('next')
            if n is None:
                break
            payload = fetcher(n)
        else:
            raise UnexpectedActivityTypeError('unexpected activity type {}'.format(payload['type']))

    return out

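The only thing a caller has to provide is the `fetcher` callback, a plain `str -> dict` function that dereferences an IRI. A minimal sketch of resolving a remote collection with `parse_collection` as defined above; the `requests`-based fetcher and the example URL are illustrative:

```python
from typing import Any, Dict

import requests


def fetch(url: str) -> Dict[str, Any]:
    # Any callable taking a URL and returning decoded JSON works as the fetcher.
    resp = requests.get(url, headers={'Accept': 'application/activity+json'})
    resp.raise_for_status()
    return resp.json()


# parse_collection walks `first`/`next` pages (up to the recursion limit)
# and returns the flattened `items`/`orderedItems`.
followers = parse_collection(url='https://example.com/users/alice/followers', fetcher=fetch)
print(len(followers))
```
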
@@ -2,21 +2,19 @@ libsass
gunicorn
piexif
requests
markdown
python-u2flib-server
Flask
Flask-WTF
Celery
pymongo
pyld
timeago
bleach
pycryptodome
html2text
feedgen
itsdangerous
bcrypt
mf2py
passlib
pyyaml
git+https://github.com/erikriver/opengraph.git
git+https://github.com/tsileo/little-boxes.git

@@ -1,26 +0,0 @@
from little_boxes.activitypub import use_backend
from little_boxes.activitypub import BaseBackend
from little_boxes.activitypub import Outbox
from little_boxes.activitypub import Person
from little_boxes.activitypub import Follow


def test_little_boxes_follow():
    back = BaseBackend()
    use_backend(back)

    me = back.setup_actor('Thomas', 'tom')

    other = back.setup_actor('Thomas', 'tom2')

    outbox = Outbox(me)
    f = Follow(
        actor=me.id,
        object=other.id,
    )

    outbox.post(f)
    assert back.followers(other) == [me.id]
    assert back.following(other) == []

    assert back.followers(me) == []
    assert back.following(me) == [other.id]

utils/key.py (51 lines changed)

@@ -1,22 +1,23 @@
import os
import binascii

from Crypto.PublicKey import RSA
from typing import Callable

KEY_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), '..', 'config'
)
from little_boxes.key import Key

KEY_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "config")


def _new_key() -> str:
    return binascii.hexlify(os.urandom(32)).decode('utf-8')
    return binascii.hexlify(os.urandom(32)).decode("utf-8")


def get_secret_key(name: str, new_key: Callable[[], str] = _new_key) -> str:
    key_path = os.path.join(KEY_DIR, f'{name}.key')
    """Loads or generates a cryptographic key."""
    key_path = os.path.join(KEY_DIR, f"{name}.key")
    if not os.path.exists(key_path):
        k = new_key()
        with open(key_path, 'w+') as f:
        with open(key_path, "w+") as f:
            f.write(k)
        return k

@@ -24,23 +25,19 @@ def get_secret_key(name: str, new_key: Callable[[], str] = _new_key) -> str:
        return f.read()


class Key(object):
    DEFAULT_KEY_SIZE = 2048

    def __init__(self, user: str, domain: str, create: bool = True) -> None:
        user = user.replace('.', '_')
        domain = domain.replace('.', '_')
        key_path = os.path.join(KEY_DIR, f'key_{user}_{domain}.pem')
        if os.path.isfile(key_path):
            with open(key_path) as f:
                self.privkey_pem = f.read()
            self.privkey = RSA.importKey(self.privkey_pem)
            self.pubkey_pem = self.privkey.publickey().exportKey('PEM').decode('utf-8')
        else:
            if not create:
                raise Exception('must init private key first')
            k = RSA.generate(self.DEFAULT_KEY_SIZE)
            self.privkey_pem = k.exportKey('PEM').decode('utf-8')
            self.pubkey_pem = k.publickey().exportKey('PEM').decode('utf-8')
            with open(key_path, 'w') as f:
                f.write(self.privkey_pem)
            self.privkey = k


def get_key(owner: str, user: str, domain: str) -> Key:
    """Loads or generates an RSA key."""
    k = Key(owner)
    user = user.replace(".", "_")
    domain = domain.replace(".", "_")
    key_path = os.path.join(KEY_DIR, f"key_{user}_{domain}.pem")
    if os.path.isfile(key_path):
        with open(key_path) as f:
            privkey_pem = f.read()
        k.load(privkey_pem)
    else:
        k.new()
        with open(key_path, "w") as f:
            f.write(k.privkey_pem)

    return k
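
The `new_key` callback is what lets callers control how a secret is generated the first time it is requested; afterwards the value is simply read back from the key file. A small sketch of the two patterns app.py and config.py use (`get_secret_key('flask')` and the admin JWT default); the names below are placeholders so as not to touch real key files:

```python
from utils.key import get_secret_key

# First call writes config/demo_flask.key with a random hex string; later calls read it back.
flask_secret = get_secret_key('demo_flask')

# A custom generator can be supplied, the same pattern ADMIN_API_KEY uses
# with _admin_jwt_token in config.py.
admin_key = get_secret_key('demo_admin', lambda: 'not-a-real-jwt')

print(flask_secret, admin_key)
```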