Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Create ActivityPub note for Collection #399

Merged
merged 3 commits into from
Nov 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions boofilsic/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@
SSL_ONLY=(bool, False),
NEODB_SENTRY_DSN=(str, ""),
NEODB_FANOUT_LIMIT_DAYS=(int, 9),
NEODB_FORCE_CLASSIC_REPOST=(bool, False),
)

# ====== End of user configuration variables ======
Expand Down Expand Up @@ -231,6 +232,7 @@

DISABLE_CRON = env("NEODB_DISABLE_CRON")
FANOUT_LIMIT_DAYS = env("NEODB_FANOUT_LIMIT_DAYS")
FORCE_CLASSIC_REPOST = env("NEODB_FORCE_CLASSIC_REPOST")
# ====== USER CONFIGURATION END ======

DATABASE_ROUTERS = ["takahe.db_routes.TakaheRouter"]
Expand Down
7 changes: 5 additions & 2 deletions journal/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

from catalog.common.models import *
from common.api import *
from mastodon.api import boost_toot_later
from mastodon.api import boost_toot_later, share_review

from .models import Mark, Review, ShelfType, TagManager, q_item_in_category

Expand Down Expand Up @@ -204,7 +204,10 @@ def review_item(request, item_uuid: str, review: ReviewInSchema):
created_time=review.created_time,
)
if post and review.post_to_fediverse:
boost_toot_later(request.user, post.url)
if settings.FORCE_CLASSIC_REPOST:
share_review(review)
else:
boost_toot_later(request.user, post.url)
return 200, {"message": "OK"}


Expand Down
17 changes: 17 additions & 0 deletions journal/models/collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ def get_progress(self, owner: APIdentity):
)

def save(self, *args, **kwargs):
from takahe.utils import Takahe

if getattr(self, "catalog_item", None) is None:
self.catalog_item = CatalogCollection()
if (
Expand All @@ -91,6 +93,21 @@ def save(self, *args, **kwargs):
self.catalog_item.cover = self.cover # type: ignore
self.catalog_item.save()
super().save(*args, **kwargs)
Takahe.post_collection(self)

@property
def ap_object(self):
    """Serialize this collection as an ActivityPub-style JSON-LD object.

    Returns a plain dict describing the collection (type "Collection")
    for federation; `content` carries the brief as markdown, and both
    `id` and `href` point at the collection's canonical URL.
    """
    return {
        "id": self.absolute_url,
        "type": "Collection",
        "name": self.title,
        # Brief is stored as markdown, hence the explicit mediaType.
        "content": self.brief,
        "mediaType": "text/markdown",
        "published": self.created_time.isoformat(),
        "updated": self.edited_time.isoformat(),
        # NOTE(review): assumes self.owner is an APIdentity exposing actor_uri — confirm.
        "attributedTo": self.owner.actor_uri,
        "href": self.absolute_url,
    }


class FeaturedCollection(Piece):
Expand Down
7 changes: 5 additions & 2 deletions journal/models/mark.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from catalog.common import jsondata
from catalog.common.models import Item, ItemCategory
from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path
from mastodon.api import boost_toot_later
from mastodon.api import boost_toot_later, share_mark
from takahe.utils import Takahe
from users.models import APIdentity

Expand Down Expand Up @@ -236,7 +236,10 @@ def update(
post = Takahe.post_mark(self, post_as_new) # this will update linked post
# async boost to mastodon
if post and share_to_mastodon:
boost_toot_later(self.owner, post.url)
if settings.FORCE_CLASSIC_REPOST:
share_mark(self)
else:
boost_toot_later(self.owner, post.url)
return True

def delete(self):
Expand Down
24 changes: 14 additions & 10 deletions journal/views/collection.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied
from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect
Expand All @@ -8,7 +9,7 @@

from catalog.models import Item
from common.utils import AuthedHttpRequest, get_uuid_or_404
from mastodon.api import share_collection
from mastodon.api import boost_toot_later, share_collection
from users.models import User
from users.models.apidentity import APIdentity
from users.views import render_user_blocked, render_user_not_found
Expand Down Expand Up @@ -120,22 +121,25 @@ def collection_remove_featured(request: AuthedHttpRequest, collection_uuid):

@login_required
def collection_share(request: AuthedHttpRequest, collection_uuid):
collection = (
get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid))
if collection_uuid
else None
collection = get_object_or_404(
Collection, uid=get_uuid_or_404(collection_uuid) if collection_uuid else None
)
if collection and not collection.is_visible_to(request.user):
raise PermissionDenied()
if request.method == "GET":
return render(request, "collection_share.html", {"collection": collection})
elif request.method == "POST":
visibility = int(request.POST.get("visibility", default=0))
comment = request.POST.get("comment")
if share_collection(collection, comment, request.user, visibility):
return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
if settings.FORCE_CLASSIC_REPOST:
visibility = int(request.POST.get("visibility", default=0))
comment = request.POST.get("comment")
if share_collection(collection, comment, request.user, visibility):
return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
else:
return render_relogin(request)
else:
return render_relogin(request)
if collection.latest_post:
boost_toot_later(request.user, collection.latest_post)
return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
else:
raise BadRequest()

Expand Down
7 changes: 5 additions & 2 deletions journal/views/review.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from catalog.models import *
from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404
from journal.models.renderers import convert_leading_space_in_md, render_md
from mastodon.api import boost_toot_later
from mastodon.api import boost_toot_later, share_review
from users.models import User
from users.models.apidentity import APIdentity

Expand Down Expand Up @@ -85,7 +85,10 @@ def review_edit(request: AuthedHttpRequest, item_uuid, review_uuid=None):
if not review:
raise BadRequest()
if form.cleaned_data["share_to_mastodon"] and post:
boost_toot_later(request.user, post.url)
if settings.FORCE_CLASSIC_REPOST:
share_review(review)
else:
boost_toot_later(request.user, post.url)
return redirect(reverse("journal:review_retrieve", args=[review.uuid]))
else:
raise BadRequest()
Expand Down
5 changes: 3 additions & 2 deletions takahe/ap_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,9 @@ def post_fetched(pk, obj):
if not post.type_data:
logger.warning(f"Post {post} has no type_data")
return
items = _parse_items(post.type_data["object"]["tag"])
pieces = _parse_piece_objects(post.type_data["object"]["relatedWith"])
ap_object = post.type_data.get("object", {})
items = _parse_items(ap_object.get("tag"))
pieces = _parse_piece_objects(ap_object.get("relatedWith"))
logger.info(f"Post {post} has items {items} and pieces {pieces}")
if len(items) == 0:
logger.warning(f"Post {post} has no remote items")
Expand Down
100 changes: 88 additions & 12 deletions takahe/management/commands/backfill_takahe.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from django.core.paginator import Paginator
from django.db.models import Count, F
from loguru import logger
from tqdm import tqdm
Expand All @@ -9,10 +10,15 @@
from catalog.common.models import *
from catalog.models import *
from journal.models import *
from takahe.models import Identity as TakaheIdentity
from takahe.models import Post as TakahePost
from takahe.models import TimelineEvent, set_disable_timeline
from takahe.utils import *
from users.models import APIdentity
from users.models import User as NeoUser

BATCH_SIZE = 1000


def content_type_id(cls):
return ContentType.objects.get(app_label="journal", model=cls.__name__.lower()).pk
Expand All @@ -28,6 +34,10 @@ def add_arguments(self, parser):
"--post",
action="store_true",
)
parser.add_argument(
"--timeline",
action="store_true",
)
parser.add_argument(
"--like",
action="store_true",
Expand All @@ -40,7 +50,8 @@ def add_arguments(self, parser):
parser.add_argument("--count", default=0, action="store")

def process_post(self):
logger.info(f"Processing posts...")
logger.info(f"Generating posts...")
set_disable_timeline(True)
qs = Piece.objects.filter(
polymorphic_ctype__in=[
content_type_id(ShelfMember),
Expand All @@ -50,17 +61,79 @@ def process_post(self):
).order_by("id")
if self.starting_id:
qs = qs.filter(id__gte=self.starting_id)
tracker = tqdm(qs.iterator(), total=self.count_est or qs.count())
for p in tracker:
tracker.set_postfix_str(f"{p.id}")
if p.__class__ == ShelfMember:
mark = Mark(p.owner, p.item)
Takahe.post_mark(mark, self.post_new)
elif p.__class__ == Comment:
if p.item.__class__ in [PodcastEpisode, TVEpisode]:
Takahe.post_comment(p, self.post_new)
elif p.__class__ == Review:
Takahe.post_review(p, self.post_new)
pg = Paginator(qs, BATCH_SIZE)
tracker = tqdm(pg.page_range)
for page in tracker:
with transaction.atomic(using="default"):
with transaction.atomic(using="takahe"):
for p in pg.page(page):
tracker.set_postfix_str(f"{p.id}")
if p.__class__ == ShelfMember:
mark = Mark(p.owner, p.item)
Takahe.post_mark(mark, self.post_new)
elif p.__class__ == Comment:
if p.item.__class__ in [PodcastEpisode, TVEpisode]:
Takahe.post_comment(p, self.post_new)
elif p.__class__ == Review:
Takahe.post_review(p, self.post_new)
elif p.__class__ == Collection:
Takahe.post_collection(p)
set_disable_timeline(False)

def process_timeline(self):
    """Backfill TimelineEvent rows for every local post.

    For each local Takahe post (oldest first), creates a "post" timeline
    event for the author, and — unless the post is visibility 3
    (mentioned-only) — one event per follower of the author. Events are
    bulk-inserted per page inside a transaction on the "takahe" database;
    ignore_conflicts makes reruns idempotent.
    """
    # NOTE(review): relies on `transaction` being imported at module level
    # (e.g. `from django.db import transaction`) — not visible here, confirm.
    logger.info("Generating cache for timeline...")
    # Map each local identity pk to its follower pks up front so the per-post
    # loop does no extra queries; inactive identities fan out to nobody.
    followers = {
        apid.pk: apid.followers if apid.is_active else []
        for apid in APIdentity.objects.filter(local=True)
    }
    qs = TakahePost.objects.filter(local=True).order_by("published")
    pg = Paginator(qs, BATCH_SIZE)
    logger.info("Generating timeline...")
    for page_num in tqdm(pg.page_range):
        with transaction.atomic(using="takahe"):
            events = []
            for post in pg.page(page_num):
                # The author always sees their own post.
                events.append(
                    TimelineEvent(
                        identity_id=post.author_id,
                        type="post",
                        subject_post_id=post.pk,
                        subject_identity_id=post.author_id,
                        published=post.published,
                    )
                )
                # Visibility 3 (mentioned-only) posts are not fanned out.
                # .get() guards against a local post whose author has no
                # APIdentity row, which would otherwise raise KeyError.
                if post.visibility != 3:
                    for follower_id in followers.get(post.author_id, []):
                        events.append(
                            TimelineEvent(
                                identity_id=follower_id,
                                type="post",
                                subject_post_id=post.pk,
                                subject_identity_id=post.author_id,
                                published=post.published,
                            )
                        )
            # ignore_conflicts: safe to rerun over already-backfilled ranges.
            TimelineEvent.objects.bulk_create(events, ignore_conflicts=True)

def process_like(self):
logger.info(f"Processing likes...")
Expand All @@ -82,6 +155,9 @@ def handle(self, *args, **options):
if options["post"]:
self.process_post()

if options["timeline"]:
self.process_timeline()

if options["like"]:
self.process_like()

Expand Down
Loading