commit 2c62f0f948 (parent 572eb05019)
19 changed files with 266 additions and 302 deletions
@@ -72,9 +72,10 @@ ME = {
         "owner": config.ID,
         "publicKeyPem": get_pubkey_as_pem(config.KEY_PATH),
     },
-    "tag": []  # TODO tag support
+    "tag": [],  # TODO tag support
 }


 class BaseActor:
     def __init__(self, ap_actor: RawObject, **_) -> None:
         if (ap_type := ap_actor.get("type")) not in ACTOR_TYPES:

@@ -101,8 +102,7 @@ class BaseActor:

     @property
     def share_inbox_url(self) -> str:
-        return self.ap_actor.get("endpoints", {}).get("sharedInbox") \
-            or self.inbox_url
+        return self.ap_actor.get("endpoints", {}).get("sharedInbox") or self.inbox_url


 class VisibilityEnum(str, enum.Enum):

@@ -125,7 +125,6 @@ def handle_visibility(ap_object: dict) -> VisibilityEnum:

 def wrap_ap_object(ap_object: dict) -> dict:
     if ap_object["type"] in ["Note"]:
-
         if "@context" in ap_object:
             del ap_object["@context"]
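Note on the share_inbox_url change above: the property prefers a remote actor's sharedInbox endpoint and falls back to the personal inbox. A standalone sketch of that fallback, with made-up actor documents and URLs (the real property returns self.inbox_url; the raw "inbox" field stands in for it here):

    def shared_inbox(ap_actor: dict) -> str:
        # Prefer the server-wide shared inbox; fall back to the actor's own inbox.
        return ap_actor.get("endpoints", {}).get("sharedInbox") or ap_actor["inbox"]

    with_shared = {
        "inbox": "https://remote.example/users/bob/inbox",
        "endpoints": {"sharedInbox": "https://remote.example/inbox"},
    }
    without_shared = {"inbox": "https://remote.example/users/bob/inbox"}

    assert shared_inbox(with_shared) == "https://remote.example/inbox"
    assert shared_inbox(without_shared) == "https://remote.example/users/bob/inbox"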
app/actor.py (19 changes)

@@ -24,9 +24,7 @@ async def fetch_actor(
     in db."""
     exist_actor = (
         await db_session.scalars(
-            select(models.Actor).where(
-                models.Actor.ap_id == actor_id
-            )
+            select(models.Actor).where(models.Actor.ap_id == actor_id)
         )
     ).one_or_none()

@@ -38,10 +36,7 @@ async def fetch_actor(
     return exist_actor


-async def save_actor(
-    ap_object: dict,
-    db_session: AsyncSession
-) -> "ActorModel":
+async def save_actor(ap_object: dict, db_session: AsyncSession) -> "ActorModel":
     """Save actor to db."""
     logger.info("save actor " + ap_object["id"])
     actor = models.Actor(

@@ -64,20 +59,16 @@ def _handle(
     if not ap_id.hostname:
         raise ValueError(f"Invalid actor ID {ap_id}")

-    handle = '@' + ap_object["preferredUsername"] + '@' + ap_id.hostname
+    handle = "@" + ap_object["preferredUsername"] + "@" + ap_id.hostname

     return handle


-async def get_public_key(
-    db_session: AsyncSession,
-    key_id: str
-) -> str:
+async def get_public_key(db_session: AsyncSession, key_id: str) -> str:
     """Give key id and reutrn public key."""
     existing_actor = (
         await db_session.scalars(
-            select(models.Actor).where(
-                models.Actor.ap_id == key_id.split("#")[0])
+            select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
         )
     ).one_or_none()
     public_key = existing_actor.ap_actor["publicKey"]["publicKeyPem"]
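The _handle hunk above builds a fediverse handle from the actor document. A small sketch of the same derivation with an invented actor id and username (in app/actor.py, ap_id is presumably the already-parsed actor id):

    from urllib.parse import urlparse

    ap_object = {"id": "https://example.com/users/alice", "preferredUsername": "alice"}
    ap_id = urlparse(ap_object["id"])
    handle = "@" + ap_object["preferredUsername"] + "@" + ap_id.hostname
    assert handle == "@alice@example.com"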
app/boxes.py (42 changes)

@@ -29,7 +29,6 @@ from uuid import uuid4
 from datetime import datetime

-

 def allocate_outbox_id() -> str:
     return str(uuid.uuid4())

@@ -122,8 +121,7 @@ async def process_incoming(
             ap_object["object"]["id"],
         )
         relates_to_inbox_object = await get_inbox_object(
-            db_session,
-            ap_object["object"]["id"]
+            db_session, ap_object["object"]["id"]
         )
     else:
         if ap_object["object"].startswith(BASE_URL):

@@ -132,11 +130,9 @@ async def process_incoming(
             ap_object["object"],
         )
         relates_to_inbox_object = await get_inbox_object(
-            db_session,
-            ap_object["object"]
+            db_session, ap_object["object"]
         )

     def build_object(
         object,
         relates_to_inbox_object=None,

@@ -181,7 +177,9 @@ async def process_incoming(
     if isinstance(follow_id, dict):
         follow_id = follow_id["id"]

-    relate_following_object = (await db_session.execute(
+    relate_following_object = (
+        (
+            await db_session.execute(
                 select(models.OutboxObject)
                 .where(models.OutboxObject.ap_id == follow_id)
                 .options(

@@ -190,7 +188,10 @@ async def process_incoming(
                 )
             )
-        )
-    ).unique().scalar_one_or_none()
+        )
+        .unique()
+        .scalar_one_or_none()
+    )

     if "Accept" == ap_object["type"]:
         try:

@@ -312,17 +313,14 @@ async def _send_accept(
         logger.error(e)


-async def _handle_undo(
-    db_session: AsyncSession,
-    inbox_object: dict
-) -> bool:
+async def _handle_undo(db_session: AsyncSession, inbox_object: dict) -> bool:
     if inbox_object["object"]["object"] != ME["id"]:
-        logger.warning("Wrong undo object! "
-                       + inbox_object["object"]["actor"])
+        logger.warning("Wrong undo object! " + inbox_object["object"]["actor"])
         return False

     if "Follow" == inbox_object["object"]["type"]:
-        relate_object = (await db_session.execute(
+        relate_object = (
+            await db_session.execute(
             select(models.InboxObject)
             .where(models.InboxObject.ap_id == inbox_object["object"]["id"])
             .options(

@@ -348,10 +346,7 @@ async def _handle_undo(
     return False


-async def send_follow(
-    db_session : AsyncSession,
-    acct : str
-):
+async def send_follow(db_session: AsyncSession, acct: str):
     await _send_follow(db_session, acct)
     await db_session.commit()
     await db_session.flush()

@@ -409,7 +404,7 @@ async def _send_follow(
         follow_id,
         out,
         relates_to_actor_id=actor.id,  # type: ignore
-        activity_object_ap_id=actor.ap_id
+        activity_object_ap_id=actor.ap_id,
     )

     await post(

@@ -485,7 +480,6 @@ async def _send_create(
     except Exception as e:
         logger.error(e)

-
     return True

@@ -493,10 +487,7 @@ async def _compute_recipients(
     db_session: AsyncSession,
     ap_object: dict,
 ) -> set[str]:

-async def process_collection(
-    db_session,
-    url) -> list[Actor]:
+async def process_collection(db_session, url) -> list[Actor]:
     if url == BASE_URL + "/followers":
         followers = (
             (

@@ -542,7 +533,6 @@ async def save_to_inbox(
     relates_to_outbox_object_id: int | None = None,
     relates_to_actor_id: int | None = None,
 ) -> InboxObject:
-
     ap_type = ap_object["type"]
     ap_id = ap_object["id"]
     visibility = handle_visibility(ap_object)
@@ -35,28 +35,30 @@ class Config(pydantic.BaseModel):
 ROOT_DIR = Path().parent.resolve()
 _CONFIG_FILE = os.getenv("FOXHOLE_CONFIG_FILE", "config.toml")


 def load_config() -> Config:
     try:
         return Config.parse_obj(
             tomli.loads((ROOT_DIR / "data" / _CONFIG_FILE).read_text())
         )
     except FileNotFoundError:
-        raise ValueError(
-            f"{_CONFIG_FILE} is missing"
-        )
+        raise ValueError(f"{_CONFIG_FILE} is missing")


 def get_version_commit() -> str:
     import subprocess

     try:
         return (
-            '+' +
-            subprocess.check_output(["git", "rev-parse", "--short=8", "HEAD"])
+            "+"
+            + subprocess.check_output(["git", "rev-parse", "--short=8", "HEAD"])
             .split()[0]
             .decode()
         )
     except Exception:
         return "+dev"


 CONFIG = load_config()
 DOMAIN = CONFIG.domain

@@ -67,7 +69,7 @@ _SCHEME = "https" if CONFIG.https else "http"
 ID = f"{_SCHEME}://{DOMAIN}"
 BASE_URL = ID
 USERNAME = CONFIG.username
-KEY_PATH = (ROOT_DIR / os.getenv("FOXHOLE_KEY_PATH", "data/key.pem"))
+KEY_PATH = ROOT_DIR / os.getenv("FOXHOLE_KEY_PATH", "data/key.pem")

 VERSION = "Foxhole-" + MAIN_VERSION + get_version_commit()
 USER_AGENT = "Fediverse Application/" + VERSION
@@ -25,11 +25,14 @@ async_engine = create_async_engine(
 )
 async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False)


 class Base(DeclarativeBase):
     pass


 metadata_obj = MetaData()


 async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
     async with async_session() as session:  # type: ignore
         try:
@@ -12,26 +12,20 @@ from Crypto.Signature import PKCS1_v1_5
 from Crypto.PublicKey import RSA


 class HttpSignature:
     """
     calculation and verification of HTTP signatures
     """

     @classmethod
-    def calculation_digest(
-        cls,
-        body : bytes,
-        algorithm : str ="sha-256"
-    )-> str :
+    def calculation_digest(cls, body: bytes, algorithm: str = "sha-256") -> str:
         """
         Calculates the digest header value for a given HTTP body
         """
         if "sha-256" == algorithm:
             h = SHA256.new()
             h.update(body)
-            return "SHA-256=" + \
-                base64.b64encode(h.digest()).decode("utf-8")
+            return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")
         else:
             raise ValueError(f"No support algorithm {algorithm}")

@@ -87,9 +81,7 @@ class HTTPXSigAuth(httpx.Auth):
     def __init__(self, key) -> None:
         self.key = key

-    def auth_flow(
-        self, r: httpx.Request
-    ):
+    def auth_flow(self, r: httpx.Request):
         bodydigest = None
         if r.content:
             bh = hashlib.new("sha256")

@@ -122,6 +114,7 @@ class HTTPXSigAuth(httpx.Auth):
     r.headers["signature"] = sig_value
     yield r


 k = KEY_PATH.read_text()
 k = RSA.importKey(k)
 auth = HTTPXSigAuth(k)
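calculation_digest above builds the Digest header attached to signed outgoing requests. The same value can be reproduced with the standard library when checking a request by hand; the body below is an arbitrary example:

    import base64
    import hashlib

    body = b'{"type": "Note", "content": "hello"}'
    digest = "SHA-256=" + base64.b64encode(hashlib.sha256(body).digest()).decode("utf-8")
    # The value is "SHA-256=" followed by the base64 of the 32-byte hash
    # (44 characters ending in "=").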
@@ -16,9 +16,9 @@ from app.database import AsyncSession
 from app.actor import get_public_key


 requests_loader = pyld.documentloader.requests.requests_document_loader()  # type: ignore


 def _loader(url, options):
     if options is None:
         options = {}

@@ -80,7 +80,9 @@ async def verify_signature(
     signer = PKCS1_v1_5.new(pubkey)
     digest = SHA256.new()
     digest.update(to_be_signed.encode("utf-8"))
-    return signer.verify(digest, base64.b64decode(signature))  # pylint: disable=not-callable
+    return signer.verify(
+        digest, base64.b64decode(signature)
+    )  # pylint: disable=not-callable


 def generate_signature(doc: ap.RawObject, key) -> None:
app/main.py (14 changes)

@@ -35,14 +35,12 @@ from app.hysql import get_index_status

 def _check_0rtt_early_data(request: Request) -> None:
     """Disable TLS1.3 0-RTT requests for non-GET."""
-    if request.headers.get("Early-Data", None) == "1" \
-            and request.method != "GET":
+    if request.headers.get("Early-Data", None) == "1" and request.method != "GET":
         raise fastapi.HTTPException(status_code=425, detail="Too early")


 app = FastAPI(
-    docs_url=None, redoc_url=None,
-    dependencies=[Depends(_check_0rtt_early_data)]
+    docs_url=None, redoc_url=None, dependencies=[Depends(_check_0rtt_early_data)]
 )

 templates = Jinja2Templates(directory="templates")

@@ -55,6 +53,7 @@ logger.add("output.log", level="DEBUG")
 # pylint: disable=too-few-public-methods
 class ActivityPubResponse(JSONResponse):
     """Simple wrap JSONresponse return ActivityPub response."""
+
     media_type = "application/activity+json"


@@ -137,12 +136,7 @@ async def outbox(
     logger.info("True token")
     note_content = to_html(payload["content"]).replace("\n", "")

-    await _send_create(
-        db_session,
-        "Note",
-        note_content,
-        payload["visibility"]
-    )
+    await _send_create(db_session, "Note", note_content, payload["visibility"])
     return Response(status_code=200)
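_check_0rtt_early_data is registered as an application-wide dependency, so any non-GET request replayed as TLS 1.3 early data is rejected with 425. A minimal sketch of that behaviour in isolation (the /echo route and TestClient setup are illustrative, not part of this commit):

    import fastapi
    from fastapi import Depends, FastAPI, Request
    from fastapi.testclient import TestClient

    def _check_0rtt_early_data(request: Request) -> None:
        # Reject non-GET requests sent as TLS 1.3 0-RTT early data.
        if request.headers.get("Early-Data", None) == "1" and request.method != "GET":
            raise fastapi.HTTPException(status_code=425, detail="Too early")

    app = FastAPI(dependencies=[Depends(_check_0rtt_early_data)])

    @app.post("/echo")
    def echo() -> dict:
        return {"ok": True}

    client = TestClient(app)
    assert client.post("/echo", headers={"Early-Data": "1"}).status_code == 425
    assert client.post("/echo").status_code == 200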
@@ -42,8 +42,12 @@ class Actor(Base, BaseActor):

     handle = mapped_column(String, nullable=True, index=True)

-    is_blocked = mapped_column(Boolean, nullable=False, default=False, server_default="0")
-    is_deleted = mapped_column(Boolean, nullable=False, default=False, server_default="0")
+    is_blocked = mapped_column(
+        Boolean, nullable=False, default=False, server_default="0"
+    )
+    is_deleted = mapped_column(
+        Boolean, nullable=False, default=False, server_default="0"
+    )


 class InboxObject(Base, BaseObject):

@@ -179,7 +183,9 @@ class Follower(Base):
     created_at = mapped_column(DateTime(timezone=True), nullable=False, default=now)
     updated_at = mapped_column(DateTime(timezone=True), nullable=False, default=now)

-    actor_id = mapped_column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
+    actor_id = mapped_column(
+        Integer, ForeignKey("actor.id"), nullable=False, unique=True
+    )
     actor: Mapped[Actor] = relationship(Actor, uselist=False)

     inbox_object_id = mapped_column(Integer, ForeignKey("inbox.id"), nullable=False)

@@ -195,7 +201,9 @@ class Following(Base):
     created_at = mapped_column(DateTime(timezone=True), nullable=False, default=now)
     updated_at = mapped_column(DateTime(timezone=True), nullable=False, default=now)

-    actor_id = mapped_column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
+    actor_id = mapped_column(
+        Integer, ForeignKey("actor.id"), nullable=False, unique=True
+    )
     actor: Mapped[Actor] = relationship(Actor, uselist=False)

     outbox_object_id = mapped_column(Integer, ForeignKey("outbox.id"), nullable=False)
@@ -36,7 +36,8 @@ LIST_STATUS_REGEXP = re.compile(r"\[( |X|-)\]\s")
 LIST_LEVEL_REGEXP = re.compile(r"(\s*)(.+)$")

 HEADLINE_REGEXP = re.compile(
-    r"^(\*+)(?:\s+(.+?))?(?:\s+\[#(.+)\])?(\s+.*?)(?:\s+:(.+):)?$")
+    r"^(\*+)(?:\s+(.+?))?(?:\s+\[#(.+)\])?(\s+.*?)(?:\s+:(.+):)?$"
+)
 KEYWORD_REGEXP = re.compile(r"^(\s*)#\+([^:]+):(\s+(.*)|$)")
 COMMENT_REGEXP = re.compile(r"^(\s*)#(.*)")
 ATTRIBUTE_REGEXP = re.compile(r"(?:^|\s+)(:[-\w]+)\s+(.*)$")

@@ -260,7 +261,7 @@ class Parser(object):

     def __str__(self):
         str_children = [str(child) for child in self.children]
-        return self.__class__.__name__ + '(' + ','.join(str_children) + ')'
+        return self.__class__.__name__ + "(" + ",".join(str_children) + ")"

     def __repr__(self):
         return self.__str__()

@@ -274,7 +275,8 @@ class Headline(Parser):
         keyword=None,
         priority=None,
         tags=[],
-        todo_keywords=TODO_KEYWORDS):
+        todo_keywords=TODO_KEYWORDS,
+    ):
         super(Headline, self).__init__()
         self.title = title
         self.stars = stars

@@ -311,7 +313,7 @@ class Headline(Parser):
         )

     def id(self):
-        hid = 'org-{0}'.format(sha1(self.title.encode()).hexdigest()[:10])
+        hid = "org-{0}".format(sha1(self.title.encode()).hexdigest()[:10])
         if self.properties:
             return self.properties.get("CUSTOM_ID", hid)
         return hid

@@ -319,18 +321,18 @@ class Headline(Parser):
     def toc(self):
         b = ""
         if self.keyword:
-            b = b + "<span class=\"todo\">{0}</span>".format(self.keyword)
+            b = b + '<span class="todo">{0}</span>'.format(self.keyword)
         if self.priority:
-            b = b + "<span class=\"priority\">{0}</span>".format(self.priority)
+            b = b + '<span class="priority">{0}</span>'.format(self.priority)

         b = b + self.inlinetext(self.title).to_html()

         for tag in self.tags:
-            b = b + "<span class=\"tag\">{0}</span>".format(tag)
+            b = b + '<span class="tag">{0}</span>'.format(tag)
         return b.strip()

     def to_html(self):
-        b = "<h{0} id=\"{1}\">{2}</h{0}>".format(
+        b = '<h{0} id="{1}">{2}</h{0}>'.format(
             self.stars,
             self.id(),
             self.toc(),

@@ -413,13 +415,13 @@ class Block(Parser):
 class Center(Block):
     def __init__(self, params=""):
         super(Center, self).__init__("center", params)
-        self.element = "<div style=\"text-align: center;\">\n{0}\n</div>"
+        self.element = '<div style="text-align: center;">\n{0}\n</div>'


 class Verse(Block):
     def __init__(self, params=""):
         super(Verse, self).__init__("verse", params)
-        self.element = "<p class=\"verse\">\n{0}\n</p>"
+        self.element = '<p class="verse">\n{0}\n</p>'

     def add_child(self, node):
         self.children.append(node)

@@ -453,7 +455,7 @@ class Src(Block):
         super(Src, self).__init__("src", params)
         self.language = language
         self.highlight_code = highlight
-        self.element = "<pre class=\"src src-{0}\">\n{1}\n</pre>"
+        self.element = '<pre class="src src-{0}">\n{1}\n</pre>'
         self.needparse = False
         self.escape = False
         self.parsed_nodes = ()

@@ -482,7 +484,7 @@ class Example(Src):
 class BlockResult(Parser):
     def __init__(self):
         super(BlockResult, self).__init__()
-        self.element = "<pre class=\"example\">\n{0}\n</pre>"
+        self.element = '<pre class="example">\n{0}\n</pre>'

     @classmethod
     def match(cls, line):

@@ -524,8 +526,7 @@ class ListItem(Parser):

         if self.checkbox == "HTML":
             if self.status == "X":
-                node = self.inlinetext(
-                    '<input type="checkbox" checked="checked" />')
+                node = self.inlinetext('<input type="checkbox" checked="checked" />')
             else:
                 node = self.inlinetext('<input type="checkbox" />')
             node.needparse = False

@@ -777,7 +778,7 @@ class Section(Parser):

     def to_html(self):
         text = "<li>"
-        text += "<a href=\"#{0}\">{1}</a>".format(
+        text += '<a href="#{0}">{1}</a>'.format(
             self.headline.id(),
             self.headline.toc(),
         )

@@ -785,7 +786,8 @@ class Section(Parser):
         return text + "</li>"

     text += "\n<ul>\n{0}\n</ul>\n</li>".format(
-        "\n".join([child.to_html() for child in self.children]))
+        "\n".join([child.to_html() for child in self.children])
+    )
     return text

@@ -794,9 +796,10 @@ class Toc(Parser):
         super(Toc, self).__init__()
         self.element = (
             '<div id="table-of-contents">'
-            '<h2>Table of Contents</h2>'
+            "<h2>Table of Contents</h2>"
             '<div id="text-table-of-contents">'
-            '\n<ul>\n{0}\n</ul>\n</div></div>')
+            "\n<ul>\n{0}\n</ul>\n</div></div>"
+        )

     def add_child(self, node):
         last = self.last_child()
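For reference, the checkbox-status pattern visible in the hunk context above is what drives the ListItem change: a matched "X" becomes a checked checkbox input. A quick check with a made-up list item:

    import re

    LIST_STATUS_REGEXP = re.compile(r"\[( |X|-)\]\s")

    m = LIST_STATUS_REGEXP.match("[X] write tests")
    assert m is not None and m.group(1) == "X"
    # A status of "X" is what the ListItem hunk renders as
    # '<input type="checkbox" checked="checked" />'.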
@@ -16,83 +16,73 @@ import os
 # _inline_regexp = r"(^|.*?(?<![/\\])){0}(.+?(?<![/\\])){0}(.*?|$)"
 _inline_regexp = r"(^|.*?(?<![/\\])){0}(.+?(?<![/\\])){0}(.*?|$)"

-BOLD_REGEXP = re.compile(_inline_regexp.format('\\*'))
-CODE_REGEXP = re.compile(_inline_regexp.format('(?:\\=|`)'))
-ITALIC_REGEXP = re.compile(_inline_regexp.format('(?:\\*\\*|\\/)'))
-DELETE_REGEXP = re.compile(_inline_regexp.format('\\+'))
-VERBATIM_REGEXP = re.compile(_inline_regexp.format('~'))
-UNDERLINE_REGEXP = re.compile(_inline_regexp.format('_'))
+BOLD_REGEXP = re.compile(_inline_regexp.format("\\*"))
+CODE_REGEXP = re.compile(_inline_regexp.format("(?:\\=|`)"))
+ITALIC_REGEXP = re.compile(_inline_regexp.format("(?:\\*\\*|\\/)"))
+DELETE_REGEXP = re.compile(_inline_regexp.format("\\+"))
+VERBATIM_REGEXP = re.compile(_inline_regexp.format("~"))
+UNDERLINE_REGEXP = re.compile(_inline_regexp.format("_"))

 PERCENT_REGEXP = re.compile(r"\[(\d+/\d+|\d+%)\]")

 HR_REGEXP = re.compile(r"^\s*\-{5,}\s*")
 FN_REGEXP = re.compile(r"(^|.*?(?<![/\\]))(\[fn:(.+?)\])(.*?|$)")
 IMG_REGEXP = re.compile(r"^[.](png|gif|jpe?g|svg|tiff?)$")
-LINK_REGEXP = re.compile(r'\[\[(.+?)\](?:\[(.+?)\])?\]')
+LINK_REGEXP = re.compile(r"\[\[(.+?)\](?:\[(.+?)\])?\]")
 VIDEO_REGEXP = re.compile(r"^[.](webm|mp4)$")

 NEWLINE_REGEXP = re.compile(r"(^|.*?(?<![/\\]))(\\\\(\s*)$)")
 BLANKLINE_REGEXP = re.compile(r"^(\s*)$")

 TIMESTAMP_REGEXP = re.compile(
-    r"^<(\d{4}-\d{2}-\d{2})( [A-Za-z]+)?( \d{2}:\d{2})?( \+\d+[dwmy])?>")
+    r"^<(\d{4}-\d{2}-\d{2})( [A-Za-z]+)?( \d{2}:\d{2})?( \+\d+[dwmy])?>"
+)

 _html_escape = (
     ("&", "&amp;"),
     ("'", "&#39;"),
     ("<", "&lt;"),
     (">", "&gt;"),
-    ("\"", "&quot;"),
+    ('"', "&quot;"),
 )

 # https://github.com/tsroten/zhon/blob/develop/zhon/hanzi.py
 _chinese_non_stops = (
     # Fullwidth ASCII variants
-    '\uFF02\uFF03\uFF04\uFF05\uFF06\uFF07\uFF08\uFF09\uFF0A\uFF0B\uFF0C\uFF0D'
-    '\uFF0F\uFF1A\uFF1B\uFF1C\uFF1D\uFF1E\uFF20\uFF3B\uFF3C\uFF3D\uFF3E\uFF3F'
-    '\uFF40\uFF5B\uFF5C\uFF5D\uFF5E\uFF5F\uFF60'
-
+    "\uFF02\uFF03\uFF04\uFF05\uFF06\uFF07\uFF08\uFF09\uFF0A\uFF0B\uFF0C\uFF0D"
+    "\uFF0F\uFF1A\uFF1B\uFF1C\uFF1D\uFF1E\uFF20\uFF3B\uFF3C\uFF3D\uFF3E\uFF3F"
+    "\uFF40\uFF5B\uFF5C\uFF5D\uFF5E\uFF5F\uFF60"
     # Halfwidth CJK punctuation
-    '\uFF62\uFF63\uFF64'
-
+    "\uFF62\uFF63\uFF64"
     # CJK symbols and punctuation
-    '\u3000\u3001\u3003'
-
+    "\u3000\u3001\u3003"
    # CJK angle and corner brackets
-    '\u3008\u3009\u300A\u300B\u300C\u300D\u300E\u300F\u3010\u3011'
-
+    "\u3008\u3009\u300A\u300B\u300C\u300D\u300E\u300F\u3010\u3011"
    # CJK brackets and symbols/punctuation
-    '\u3014\u3015\u3016\u3017\u3018\u3019\u301A\u301B\u301C\u301D\u301E\u301F'
-
+    "\u3014\u3015\u3016\u3017\u3018\u3019\u301A\u301B\u301C\u301D\u301E\u301F"
    # Other CJK symbols
-    '\u3030'
-
+    "\u3030"
    # Special CJK indicators
-    '\u303E\u303F'
-
+    "\u303E\u303F"
    # Dashes
-    '\u2013\u2014'
-
+    "\u2013\u2014"
    # Quotation marks and apostrophe
-    '\u2018\u2019\u201B\u201C\u201D\u201E\u201F'
-
+    "\u2018\u2019\u201B\u201C\u201D\u201E\u201F"
    # General punctuation
-    '\u2026\u2027'
-
+    "\u2026\u2027"
    # Overscores and underscores
-    '\uFE4F'
-
+    "\uFE4F"
    # Small form variants
-    '\uFE51\uFE54'
-
+    "\uFE51\uFE54"
    # Latin punctuation
-    '\u00B7')
+    "\u00B7"
+)

 _chinese_stops = (
-    '\uFF01'  # Fullwidth exclamation mark
-    '\uFF1F'  # Fullwidth question mark
-    '\uFF61'  # Halfwidth ideographic full stop
-    '\u3002'  # Ideographic full stop
+    "\uFF01"  # Fullwidth exclamation mark
+    "\uFF1F"  # Fullwidth question mark
+    "\uFF61"  # Halfwidth ideographic full stop
+    "\u3002"  # Ideographic full stop
 )

@@ -103,7 +93,7 @@ def html_escape(text):

 def match_chinese(ch):
-    if '\u4e00' <= ch <= '\u9fff':
+    if "\u4e00" <= ch <= "\u9fff":
         return True
     if ch in _chinese_stops:
         return True

@@ -192,8 +182,7 @@ class InlineParser(object):
         c2 = len(char[0]) == 2 and char[0] == double_char

         if c1 or c2:
-            node, num = getattr(self, "parse_" + char_map[char[0]])(
-                index, lines)
+            node, num = getattr(self, "parse_" + char_map[char[0]])(index, lines)
             if node:
                 return node, num

@@ -232,7 +221,7 @@ class InlineParser(object):
         return text

     def __str__(self):
-        return '{}({})'.format(self.__class__.__name__, self.content.strip())
+        return "{}({})".format(self.__class__.__name__, self.content.strip())

     def __repr__(self):
         return self.__str__()

@@ -308,7 +297,7 @@ class Verbatim(InlineParser):
 class Underline(InlineParser):
     def __init__(self, content):
         super(Underline, self).__init__(content)
-        self.element = "<span style=\"text-decoration:underline\">{0}</span>"
+        self.element = '<span style="text-decoration:underline">{0}</span>'

     @classmethod
     def match(cls, line, index):

@@ -350,9 +339,9 @@ class Link(InlineParser):

     def to_html(self):
         if self.is_img():
-            return "<img src=\"{0}\"/>".format(self.content)
+            return '<img src="{0}"/>'.format(self.content)
         if self.is_vedio():
-            return "<video src=\"{0}\">{0}</video>".format(self.content)
+            return '<video src="{0}">{0}</video>'.format(self.content)
         if self.desc:
             return '<a href="{0}">{1}</a>'.format(self.content, self.desc)
         return '<a href="{0}">{1}</a>'.format(self.content, self.content)

@@ -361,7 +350,9 @@ class Link(InlineParser):
 class Fn(InlineParser):
     def __init__(self, content):
         super(Fn, self).__init__(content)
-        self.element = '<sup><a id="fnr:{0}" class="footref" href="#fn.{0}">{0}</a></sup>'
+        self.element = (
+            '<sup><a id="fnr:{0}" class="footref" href="#fn.{0}">{0}</a></sup>'
+        )

     @classmethod
     def match(cls, line, index):
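The TIMESTAMP_REGEXP reformatted above accepts org-mode timestamps with an optional day name, time, and repeater. A quick check against an arbitrary example value:

    import re

    TIMESTAMP_REGEXP = re.compile(
        r"^<(\d{4}-\d{2}-\d{2})( [A-Za-z]+)?( \d{2}:\d{2})?( \+\d+[dwmy])?>"
    )

    m = TIMESTAMP_REGEXP.match("<2023-05-01 Mon 10:00 +1w>")
    assert m is not None
    assert m.group(1) == "2023-05-01"  # date
    assert m.group(2) == " Mon"        # optional day name (leading space included)
    assert m.group(3) == " 10:00"      # optional time
    assert m.group(4) == " +1w"        # optional repeater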
@@ -7,6 +7,8 @@ from typing import TypeVar
 from loguru import logger

 T = TypeVar("T")


 class Worker(Generic[T]):
     def __init__(self) -> None:
         self._loop = asyncio.get_event_loop()
@@ -14,7 +14,6 @@ from app.database import SessionLocal
 from sqlalchemy import orm

-

 @pytest_asyncio.fixture
 async def async_db_session():
     async with async_session() as session:  # type: ignore
@@ -64,7 +64,6 @@ class ActorFactory(factory.alchemy.SQLAlchemyModelFactory):
     ap_id = "stub"

-

 async def inbox_prechecker(
     request: fastapi.Request,
 ) -> bool:
@@ -1,4 +1,3 @@
-
 domain = "test.foxhole.me"
 username = "testfox"
 name = "test233"
@@ -18,7 +18,6 @@ from app import models
 from sqlalchemy import select

-

 def test_inbox_follow_request(
     db: Session,
     client: TestClient,
|
|||
assert response.json() == ap.ME
|
||||
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_follow_only_status(
|
||||
client: TestClient,
|
||||
|
@ -43,15 +42,14 @@ async def test_follow_only_status(
|
|||
|
||||
# mock request
|
||||
respx_mock.get(remote_ap_id).mock(
|
||||
return_value=httpx.Response(200,json=ra.ap_actor))
|
||||
respx_mock.post(remote_ap_id + "/inbox").mock(
|
||||
return_value=httpx.Response(202))
|
||||
|
||||
return_value=httpx.Response(200, json=ra.ap_actor)
|
||||
)
|
||||
respx_mock.post(remote_ap_id + "/inbox").mock(return_value=httpx.Response(202))
|
||||
|
||||
response = client.post(
|
||||
"/outbox",
|
||||
headers={"Authorization": "Basic test-token"},
|
||||
content='{"visibility": "followers-only","content": "note content"}'
|
||||
content='{"visibility": "followers-only","content": "note content"}',
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
|
|
@@ -31,9 +31,9 @@ async def test_outbox_send_follow_request(

     # mock request
     respx_mock.get(remote_ap_id).mock(
-        return_value=httpx.Response(200,json=ra.ap_actor))
-    respx_mock.post(remote_ap_id + "/inbox").mock(
-        return_value=httpx.Response(202))
+        return_value=httpx.Response(200, json=ra.ap_actor)
+    )
+    respx_mock.post(remote_ap_id + "/inbox").mock(return_value=httpx.Response(202))

     from app.boxes import send_follow

@@ -81,9 +81,9 @@ async def test_outbox_send_follow_request_nest_accept(

     # mock request
     respx_mock.get(remote_ap_id).mock(
-        return_value=httpx.Response(200,json=ra.ap_actor))
-    respx_mock.post(remote_ap_id + "/inbox").mock(
-        return_value=httpx.Response(202))
+        return_value=httpx.Response(200, json=ra.ap_actor)
+    )
+    respx_mock.post(remote_ap_id + "/inbox").mock(return_value=httpx.Response(202))

     from app.boxes import send_follow

@@ -132,9 +132,9 @@ async def test_outbox_send_create_activity(

     # mock request
     respx_mock.get(remote_ap_id).mock(
-        return_value=httpx.Response(200,json=ra.ap_actor))
-    respx_mock.post(remote_ap_id + "/inbox").mock(
-        return_value=httpx.Response(202))
+        return_value=httpx.Response(200, json=ra.ap_actor)
+    )
+    respx_mock.post(remote_ap_id + "/inbox").mock(return_value=httpx.Response(202))

     from app.boxes import _send_create
     from app.activitypub import VisibilityEnum

@@ -145,12 +145,7 @@
     content = "*Blod Text* =code Text= \n"
     content = to_html(content)

-    await _send_create(
-        db_session,
-        "Note",
-        content,
-        VisibilityEnum.PUBLIC
-    )
+    await _send_create(db_session, "Note", content, VisibilityEnum.PUBLIC)

     # And the Follow activity was created in the outbox
     outbox_object = db.execute(select(models.OutboxObject)).scalar_one()

@@ -171,17 +166,16 @@ async def test_outbox_send_unlisted_note(

     # mock request
     respx_mock.get(remote_ap_id).mock(
-        return_value=httpx.Response(200,json=ra.ap_actor))
-    respx_mock.post(remote_ap_id + "/inbox").mock(
-        return_value=httpx.Response(202))
+        return_value=httpx.Response(200, json=ra.ap_actor)
+    )
+    respx_mock.post(remote_ap_id + "/inbox").mock(return_value=httpx.Response(202))

     from app.activitypub import VisibilityEnum

     response = client.post(
         "/outbox",
         headers={"Authorization": "Basic test-token"},
-        content='{"visibility": "unlisted","content": "note content"}'
+        content='{"visibility": "unlisted","content": "note content"}',
     )

     assert response.status_code == 200
@@ -4,7 +4,6 @@ from app.main import app
 from app.utils import precheck

-

 def build_remote_actor():
     ra = factories.RemoteActorFactory(
         base_url="https://example.com",

@@ -12,7 +11,6 @@ def build_remote_actor():
         public_key="pk",
     )

-    app.dependency_overrides[precheck.inbox_prechecker] = \
-        factories.inbox_prechecker
+    app.dependency_overrides[precheck.inbox_prechecker] = factories.inbox_prechecker

     return ra