Compare commits
2 commits
54eddbcf23
...
b97e79b61b
Author | SHA1 | Date | |
---|---|---|---|
b97e79b61b | |||
1389ba47fb |
3 changed files with 125 additions and 3 deletions
73
app/boxes.py
73
app/boxes.py
|
@ -1,9 +1,26 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from app import models
|
from app import models
|
||||||
from app.database import AsyncSession
|
from app.database import AsyncSession
|
||||||
|
from app.activitypub import ME
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
from app.config import MANUALLY_APPROVES_FOLLOWERS
|
||||||
|
from app.config import AP_CONTENT_TYPE
|
||||||
|
from app.config import BASE_URL
|
||||||
|
from app.config import USER_AGENT
|
||||||
|
from app.httpsig import auth
|
||||||
|
|
||||||
|
import app.activitypub as ap
|
||||||
|
import uuid
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
|
||||||
|
def allocate_outbox_id() -> str:
    """Generate a unique identifier (random UUID4) for a new outbox object."""
    new_id = uuid.uuid4()
    return str(new_id)
|
||||||
|
|
||||||
|
|
||||||
|
def build_object_id(id) -> str:
    """Build the absolute ActivityPub URL for a locally allocated object id."""
    # NOTE: `id` shadows the builtin of the same name; kept so keyword
    # callers (build_object_id(id=...)) are not broken.
    return "{}/tail/{}".format(BASE_URL, id)
|
||||||
|
|
||||||
async def save_incoming(
|
async def save_incoming(
|
||||||
db_session: AsyncSession,
|
db_session: AsyncSession,
|
||||||
|
@ -23,7 +40,63 @@ async def save_incoming(
|
||||||
ap_id=ap_id,
|
ap_id=ap_id,
|
||||||
ap_object=payload,
|
ap_object=payload,
|
||||||
)
|
)
|
||||||
|
await process_incoming(db_session, payload)
|
||||||
db_session.add(incoming_activity)
|
db_session.add(incoming_activity)
|
||||||
await db_session.commit()
|
await db_session.commit()
|
||||||
await db_session.refresh(incoming_activity)
|
await db_session.refresh(incoming_activity)
|
||||||
return incoming_activity
|
return incoming_activity
|
||||||
|
|
||||||
|
|
||||||
|
async def process_incoming(
    db_session: AsyncSession,
    ap_object: dict,
) -> bool:
    """Dispatch a stored incoming activity to its type-specific handler.

    Only ``Follow`` activities are handled at the moment; anything else is
    accepted without further processing.
    """
    activity_type = ap_object["type"]
    if activity_type == "Follow":
        await _handle_follow(db_session, ap_object)
    # NOTE(review): unhandled activity types fall through and still report
    # success — confirm this matches the caller's expectations.
    return True
|
||||||
|
|
||||||
|
|
||||||
|
async def _handle_follow(
    db_session: AsyncSession,
    ap_object: dict,
) -> None:
    """Handle an incoming Follow activity, auto-accepting when allowed.

    The follow is ignored (with a warning) when it does not target this
    server's actor, and left pending when manual approval is configured.
    """
    if ME["id"] != ap_object["object"]:
        # The Follow targets some other actor; nothing for us to do.
        logger.warning("no match follow object!" + ap_object["object"])
        return

    if MANUALLY_APPROVES_FOLLOWERS:
        # TODO: queue the follow request for manual review instead of
        # silently dropping it.
        return

    await _send_accept(db_session, ap_object)
|
||||||
|
|
||||||
|
|
||||||
|
async def _send_accept(
    db_session: AsyncSession,
    ap_object: dict,
) -> None:
    """Build an Accept activity for *ap_object* (a Follow) and POST it to
    the follower's inbox.

    ``db_session`` is currently unused but kept for signature consistency
    with the other handlers. Raises ``httpx.HTTPStatusError`` when the
    remote inbox rejects the delivery.
    """
    reply_id = allocate_outbox_id()
    out = {
        "@context": ap.AS_CTX,
        "id": build_object_id(reply_id),
        "type": "Accept",
        "actor": ME["id"],
        "object": ap_object["id"],
    }

    # NOTE(review): this assumes the remote inbox lives at <actor>/inbox;
    # the actor document should really be fetched and its `inbox` field
    # used instead — confirm against the federation targets.
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            ap_object["actor"] + "/inbox",
            headers={
                "User-Agent": USER_AGENT,
                "Content-Type": AP_CONTENT_TYPE,
            },
            json=out,
            auth=auth,
        )

        resp.raise_for_status()
        logger.info(resp)
|
||||||
|
|
|
@ -49,3 +49,5 @@ KEY_PATH = (ROOT_DIR / "data" / "key.pem")
|
||||||
|
|
||||||
# HTTP User-Agent sent with outgoing federation requests.
USER_AGENT = "Fediverse Application/Foxhole-0.0.1"
# Content type used for ActivityPub request/response payloads.
AP_CONTENT_TYPE = "application/activity+json"

# When true, incoming Follow activities are not auto-accepted.
MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers
|
||||||
|
|
|
@ -1,11 +1,14 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import base64
|
import base64
|
||||||
|
from urllib.request import HTTPSHandler
|
||||||
import httpx
|
import httpx
|
||||||
import json
|
import json
|
||||||
import fastapi
|
import fastapi
|
||||||
|
import hashlib
|
||||||
|
from datetime import datetime
|
||||||
from typing import Literal, TypedDict, cast, Any
|
from typing import Literal, TypedDict, cast, Any
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from app.config import AP_CONTENT_TYPE, USER_AGENT
|
from app.config import AP_CONTENT_TYPE, USER_AGENT, KEY_PATH, ID
|
||||||
|
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
|
||||||
|
@ -118,8 +121,8 @@ class HttpSignature:
|
||||||
cls,
|
cls,
|
||||||
method : str,
|
method : str,
|
||||||
path : str,
|
path : str,
|
||||||
signed_headers : dict,
|
signed_headers : list,
|
||||||
body_digest : str,
|
body_digest : str | None,
|
||||||
headers,
|
headers,
|
||||||
) -> str :
|
) -> str :
|
||||||
signed_string = []
|
signed_string = []
|
||||||
|
@ -131,3 +134,47 @@ class HttpSignature:
|
||||||
else:
|
else:
|
||||||
signed_string.append(signed_header + ": " + headers[signed_header])
|
signed_string.append(signed_header + ": " + headers[signed_header])
|
||||||
return "\n".join(signed_string)
|
return "\n".join(signed_string)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPXSigAuth(httpx.Auth):
    """httpx auth hook that signs outgoing requests with HTTP Signatures.

    Adds ``Date``, ``digest`` (when there is a body) and ``signature``
    headers to each request before it is sent.
    """

    def __init__(self, key) -> None:
        # RSA private key used to produce the rsa-sha256 signature.
        self.key = key

    def auth_flow(
        self, r: httpx.Request
    ):
        """Sign *r* in place and yield it back to httpx."""
        bodydigest = None
        if r.content:
            bh = hashlib.new("sha256")
            bh.update(r.content)
            bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8")

        # RFC 7231 IMF-fixdate, e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
        # The previous "%Y %m %d %H:%M:%S GMT" format is not a valid HTTP
        # date, so peers verifying the signed Date header would reject it.
        date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
        r.headers["Date"] = date
        if bodydigest:
            r.headers["digest"] = bodydigest
            sigheaders = "(request-target) user-agent host date digest content-type"
        else:
            sigheaders = "(request-target) user-agent host date accept"

        logger.warning(r.headers)
        to_be_signed = HttpSignature.build_signature_string(
            r.method, r.url.path, sigheaders.split(), bodydigest, r.headers
        )
        if not self.key:
            raise ValueError("Should never happen")
        signer = PKCS1_v1_5.new(self.key)
        digest = SHA256.new()
        digest.update(to_be_signed.encode("utf-8"))
        sig = base64.b64encode(signer.sign(digest)).decode()

        key_id = f"{ID}#main-key"
        sig_value = f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"'  # noqa: E501
        logger.debug(f"signed request {sig_value=}")
        r.headers["signature"] = sig_value
        yield r
|
||||||
|
|
||||||
|
# Load the actor's RSA private key once at import time and build the
# shared httpx auth instance used for all outgoing signed requests.
k = KEY_PATH.read_text()
k = RSA.importKey(k)
auth = HTTPXSigAuth(k)
|
||||||
|
|
Loading…
Reference in a new issue