# thread.py
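# Thread model and manager for the boards app: a thread groups posts, carries
# tags, tracks its bump/archive status and can notify websocket clients about
# new posts.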
import logging

from adjacent import Client

from django.db.models import Count, Sum, QuerySet, Q
from django.utils import timezone
from django.db import models, transaction

from boards.models import STATUS_BUMPLIMIT, STATUS_ACTIVE, STATUS_ARCHIVE

from boards import settings
import boards
from boards.utils import cached_result, datetime_to_epoch
from boards.models.post import Post
from boards.models.tag import Tag

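# Sentinel "last seen post id" for a favorite thread that should not be
# checked for updates (see ThreadManager.get_new_posts).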
FAV_THREAD_NO_UPDATES = -1

__author__ = 'neko259'

logger = logging.getLogger(__name__)

WS_NOTIFICATION_TYPE_NEW_POST = 'new_post'
WS_NOTIFICATION_TYPE = 'notification_type'
WS_CHANNEL_THREAD = "thread:"

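# Thread statuses: ACTIVE threads can be bumped, BUMPLIMIT is set once the
# post limit is reached, and ARCHIVE is set when old threads are rotated out.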
STATUS_CHOICES = (
    (STATUS_ACTIVE, STATUS_ACTIVE),
    (STATUS_BUMPLIMIT, STATUS_BUMPLIMIT),
    (STATUS_ARCHIVE, STATUS_ARCHIVE),
)


class ThreadManager(models.Manager):
    def process_oldest_threads(self):
        """
        Preserves the maximum thread count. If there are too many threads,
        archive or delete the oldest ones.
        """
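        # Threads are ordered newest-first by bump time, so the tail of the
        # queryset below holds the least recently bumped ones.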
        threads = Thread.objects.exclude(status=STATUS_ARCHIVE).order_by('-bump_time')
        thread_count = threads.count()

        max_thread_count = settings.get_int('Messages', 'MaxThreadCount')
        if thread_count > max_thread_count:
            num_threads_to_delete = thread_count - max_thread_count

            old_threads = threads[thread_count - num_threads_to_delete:]
            for thread in old_threads:
                if settings.get_bool('Storage', 'ArchiveThreads'):
                    self._archive_thread(thread)
                else:
                    thread.delete()

            logger.info('Processed %d old threads' % num_threads_to_delete)

    def _archive_thread(self, thread):
        thread.status = STATUS_ARCHIVE
        thread.last_edit_time = timezone.now()
        thread.update_posts_time()
        thread.save(update_fields=['last_edit_time', 'status'])

    def get_new_posts(self, datas):
        query = None
        # TODO Use classes instead of dicts
        for data in datas:
            if data['last_id'] != FAV_THREAD_NO_UPDATES:
                q = (Q(id=data['op'].get_thread_id())
                     & Q(multi_replies__id__gt=data['last_id']))
                if query is None:
                    query = q
                else:
                    query = query | q
        if query is not None:
            return self.filter(query).annotate(
                new_post_count=Count('multi_replies'))

    def get_new_post_count(self, datas):
        new_posts = self.get_new_posts(datas)
        return new_posts.aggregate(total_count=Count('multi_replies'))\
            ['total_count'] if new_posts else 0
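    # Both methods above expect `datas` to be an iterable of dicts carrying an
    # 'op' key (the opening post of a watched thread) and a 'last_id' key (the
    # last post id the user has seen, or FAV_THREAD_NO_UPDATES). A hypothetical
    # record would look like {'op': opening_post, 'last_id': 100}.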


def get_thread_max_posts():
    return settings.get_int('Messages', 'MaxPostsPerThread')


class Thread(models.Model):
    objects = ThreadManager()

    class Meta:
        app_label = 'boards'

    tags = models.ManyToManyField('Tag', related_name='thread_tags')
    bump_time = models.DateTimeField(db_index=True)
    last_edit_time = models.DateTimeField()
    max_posts = models.IntegerField(default=get_thread_max_posts)
    status = models.CharField(max_length=50, default=STATUS_ACTIVE,
                              choices=STATUS_CHOICES)
    monochrome = models.BooleanField(default=False)

    def get_tags(self) -> QuerySet:
        """
        Gets a sorted tag list.
        """
        return self.tags.order_by('name')

    def bump(self):
        """
        Bumps (moves up) the thread if possible.
        """
        if self.can_bump():
            self.bump_time = self.last_edit_time

            self.update_bump_status()

            logger.info('Bumped thread %d' % self.id)

    def has_post_limit(self) -> bool:
        return self.max_posts > 0

    def update_bump_status(self, exclude_posts=None):
        if self.has_post_limit() and self.get_reply_count() >= self.max_posts:
            self.status = STATUS_BUMPLIMIT
            self.update_posts_time(exclude_posts=exclude_posts)

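    # Cache key for the @cached_result methods below: based on the thread's
    # last edit time, so cached counts are refreshed after the thread changes.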
    def _get_cache_key(self):
        return [datetime_to_epoch(self.last_edit_time)]

    @cached_result(key_method=_get_cache_key)
    def get_reply_count(self) -> int:
        return self.get_replies().count()

    @cached_result(key_method=_get_cache_key)
    def get_images_count(self) -> int:
        return self.get_replies().annotate(images_count=Count(
            'images')).aggregate(Sum('images_count'))['images_count__sum']

    def can_bump(self) -> bool:
        """
        Checks if the thread can be bumped by replying to it.
        """
        return self.get_status() == STATUS_ACTIVE

    def get_last_replies(self) -> QuerySet:
        """
        Gets the last several replies, not including the opening post.
        """
        last_replies_count = settings.get_int('View', 'LastRepliesCount')

        if last_replies_count > 0:
            reply_count = self.get_reply_count()

            if reply_count > 0:
                reply_count_to_show = min(last_replies_count,
                                          reply_count - 1)
                replies = self.get_replies()
                last_replies = replies[reply_count - reply_count_to_show:]

                return last_replies

    def get_skipped_replies_count(self) -> int:
        """
        Gets the number of posts between the opening post and the last replies.
        """
        reply_count = self.get_reply_count()
        last_replies_count = min(settings.get_int('View', 'LastRepliesCount'),
                                 reply_count - 1)
        return reply_count - last_replies_count - 1

    def get_replies(self, view_fields_only=False) -> QuerySet:
        """
        Gets sorted thread posts.
        """
        query = self.multi_replies.order_by('pub_time').prefetch_related(
            'images', 'thread', 'attachments')
        if view_fields_only:
            query = query.defer('poster_ip')
        return query

    def get_top_level_replies(self) -> QuerySet:
        return self.get_replies().exclude(refposts__threads__in=[self])

    def get_replies_with_images(self, view_fields_only=False) -> QuerySet:
        """
        Gets replies that have at least one image attached.
        """
        return self.get_replies(view_fields_only).annotate(images_count=Count(
            'images')).filter(images_count__gt=0)

    def get_opening_post(self, only_id=False) -> Post:
        """
        Gets the first post of the thread.
        """
        query = self.get_replies().filter(opening=True)
        if only_id:
            query = query.only('id')
        opening_post = query.first()
        return opening_post

    @cached_result()
    def get_opening_post_id(self) -> int:
        """
        Gets the ID of the first thread post.
        """
        return self.get_opening_post(only_id=True).id

    def get_pub_time(self):
        """
        Gets the opening post's pub time because the thread does not have its
        own.
        """
        return self.get_opening_post().pub_time

    def __str__(self):
        return 'T#{}/{}'.format(self.id, self.get_opening_post_id())

    def get_tag_url_list(self) -> list:
        return boards.models.Tag.objects.get_tag_url_list(self.get_tags())

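    # Stamps this thread's last_edit_time onto its posts (except the excluded
    # ones) and onto every thread those posts belong to.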
    def update_posts_time(self, exclude_posts=None):
        last_edit_time = self.last_edit_time

        for post in self.multi_replies.all():
            if exclude_posts is None or post not in exclude_posts:
                # Manual update is required because uids are generated on save
                post.last_edit_time = last_edit_time
                post.save(update_fields=['last_edit_time'])

                post.get_threads().update(last_edit_time=last_edit_time)

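    # Publishes a "new post" notification to this thread's websocket channel,
    # if websockets are enabled in the settings.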
    def notify_clients(self):
        if not settings.get_bool('External', 'WebsocketsEnabled'):
            return

        client = Client()

        channel_name = WS_CHANNEL_THREAD + str(self.get_opening_post_id())
        client.publish(channel_name, {
            WS_NOTIFICATION_TYPE: WS_NOTIFICATION_TYPE_NEW_POST,
        })

        client.send()

    def get_absolute_url(self):
        return self.get_opening_post().get_absolute_url()

    def get_required_tags(self):
        return self.get_tags().filter(required=True)

    def get_replies_newer(self, post_id):
        return self.get_replies().filter(id__gt=post_id)

    def is_archived(self):
        return self.get_status() == STATUS_ARCHIVE

    def get_status(self):
        return self.status

    def is_monochrome(self):
        return self.monochrome

    # If tags have a parent, add the parents to the tag list
    @transaction.atomic
    def refresh_tags(self):
        for tag in self.get_tags().all():
            parents = tag.get_all_parents()
            if len(parents) > 0:
                self.tags.add(*parents)

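    # Builds a flat list of (depth, post) tuples in display order: a reply
    # that references no earlier post of this thread starts a branch at depth
    # 0, while a reply that references earlier posts is inserted at the end of
    # each referenced parent's subtree with depth = parent depth + 1 (so a
    # post answering several posts appears once under each of them).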
    def get_reply_tree(self):
        replies = self.get_replies().prefetch_related('refposts')
        tree = []
        for reply in replies:
            parents = reply.refposts.all()

            found_parent = False
            searching_for_index = False

            if len(parents) > 0:
                index = 0
                parent_depth = 0

                indexes_to_insert = []

                for depth, element in tree:
                    index += 1

                    # If this element is next after parent on the same level,
                    # insert child before it
                    if searching_for_index and depth <= parent_depth:
                        indexes_to_insert.append((index - 1, parent_depth))
                        searching_for_index = False

                    if element in parents:
                        found_parent = True
                        searching_for_index = True
                        parent_depth = depth

            if not found_parent:
                tree.append((0, reply))
            else:
                if searching_for_index:
                    tree.append((parent_depth + 1, reply))

                offset = 0
                for last_index, parent_depth in indexes_to_insert:
                    tree.insert(last_index + offset, (parent_depth + 1, reply))
                    offset += 1

        return tree