import logging

from adjacent import Client
from django.db.models import Count, Sum, QuerySet, Q
from django.utils import timezone
from django.db import models, transaction

from boards.models.attachment import FILE_TYPES_IMAGE
from boards.models import STATUS_BUMPLIMIT, STATUS_ACTIVE, STATUS_ARCHIVE
from boards import settings
import boards
from boards.utils import cached_result, datetime_to_epoch
from boards.models.post import Post
from boards.models.tag import Tag
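
# Sentinel "last_id" value for a favorite thread that has no updates to
# check (see ThreadManager.get_new_posts()).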
FAV_THREAD_NO_UPDATES = -1

__author__ = 'neko259'

logger = logging.getLogger(__name__)

WS_NOTIFICATION_TYPE_NEW_POST = 'new_post'
WS_NOTIFICATION_TYPE = 'notification_type'
WS_CHANNEL_THREAD = "thread:"

STATUS_CHOICES = (
    (STATUS_ACTIVE, STATUS_ACTIVE),
    (STATUS_BUMPLIMIT, STATUS_BUMPLIMIT),
    (STATUS_ARCHIVE, STATUS_ARCHIVE),
)


class ThreadManager(models.Manager):
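    """
    Thread manager: archives or deletes the oldest threads when the thread
    limit is exceeded, and builds "new posts" queries for favorite threads.
    """
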
    def process_oldest_threads(self):
        """
        Preserves the maximum thread count. If there are too many threads,
        archives or deletes the oldest ones.
        """
        threads = Thread.objects.exclude(status=STATUS_ARCHIVE).order_by('-bump_time')
        thread_count = threads.count()
        max_thread_count = settings.get_int('Messages', 'MaxThreadCount')

        if thread_count > max_thread_count:
            num_threads_to_delete = thread_count - max_thread_count
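            # ``threads`` is ordered by '-bump_time', so the tail of the
            # queryset holds the least recently bumped (oldest) threads.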
            old_threads = threads[thread_count - num_threads_to_delete:]

            for thread in old_threads:
                if settings.get_bool('Storage', 'ArchiveThreads'):
                    self._archive_thread(thread)
                else:
                    thread.delete()

            logger.info('Processed %d old threads' % num_threads_to_delete)

    def _archive_thread(self, thread):
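        """
        Marks the thread as archived and touches its last_edit_time (and the
        posts' edit times) so cached values and clients see the change.
        """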
        thread.status = STATUS_ARCHIVE
        thread.last_edit_time = timezone.now()
        thread.update_posts_time()
        thread.save(update_fields=['last_edit_time', 'status'])

    def get_new_posts(self, datas):
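        """
        Gets threads that have posts newer than the ids the client has seen.

        Each item of ``datas`` is expected to be a dict carrying an 'op'
        (opening post) and a 'last_id' (the last seen post id, or
        FAV_THREAD_NO_UPDATES when there is nothing to check).
        """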
        query = None

        # TODO Use classes instead of dicts
        for data in datas:
            if data['last_id'] != FAV_THREAD_NO_UPDATES:
                q = (Q(id=data['op'].get_thread_id())
                     & Q(multi_replies__id__gt=data['last_id']))
                if query is None:
                    query = q
                else:
                    query = query | q
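
        # If every thread was marked FAV_THREAD_NO_UPDATES, ``query`` stays
        # None and the method implicitly returns None; get_new_post_count()
        # below handles that case.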
        if query is not None:
            return self.filter(query).annotate(
                new_post_count=Count('multi_replies'))

    def get_new_post_count(self, datas):
        new_posts = self.get_new_posts(datas)
        return new_posts.aggregate(total_count=Count('multi_replies'))\
            ['total_count'] if new_posts else 0
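

# Default callable for Thread.max_posts: reading the setting here means the
# limit is evaluated each time a thread is created, not once at class
# definition time.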
def get_thread_max_posts():
    return settings.get_int('Messages', 'MaxPostsPerThread')


class Thread(models.Model):
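    """
    A message thread. Posts are attached through the ``multi_replies``
    relation; the opening post (``opening=True``) provides the thread's URL
    and publication time.
    """
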
    objects = ThreadManager()

    class Meta:
        app_label = 'boards'

    tags = models.ManyToManyField('Tag', related_name='thread_tags')
    bump_time = models.DateTimeField(db_index=True)
    last_edit_time = models.DateTimeField()
    max_posts = models.IntegerField(default=get_thread_max_posts)
    status = models.CharField(max_length=50, default=STATUS_ACTIVE,
                              choices=STATUS_CHOICES)
    monochrome = models.BooleanField(default=False)

    def get_tags(self) -> QuerySet:
        """
        Gets a sorted tag list.
        """
        return self.tags.order_by('name')

    def bump(self):
        """
        Bumps (moves up) the thread if possible.
        """
        if self.can_bump():
            self.bump_time = self.last_edit_time

            self.update_bump_status()

            logger.info('Bumped thread %d' % self.id)

    def has_post_limit(self) -> bool:
        return self.max_posts > 0

    def update_bump_status(self, exclude_posts=None):
        if self.has_post_limit() and self.get_reply_count() >= self.max_posts:
            self.status = STATUS_BUMPLIMIT
            self.update_posts_time(exclude_posts=exclude_posts)

    def _get_cache_key(self):
        return [datetime_to_epoch(self.last_edit_time)]
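
    # The cache key includes last_edit_time, so the cached counters below get
    # a fresh key (and are recomputed) after every edit of the thread.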
    @cached_result(key_method=_get_cache_key)
    def get_reply_count(self) -> int:
        return self.get_replies().count()

    @cached_result(key_method=_get_cache_key)
    def get_images_count(self) -> int:
        return self.get_replies().filter(
            attachments__mimetype__in=FILE_TYPES_IMAGE)\
            .annotate(images_count=Count('attachments'))\
            .aggregate(Sum('images_count'))['images_count__sum'] or 0

    def can_bump(self) -> bool:
        """
        Checks if the thread can be bumped by replying to it.
        """
        return self.get_status() == STATUS_ACTIVE

    def get_last_replies(self) -> QuerySet:
        """
        Gets the last several replies, not including the opening post.
        """
        last_replies_count = settings.get_int('View', 'LastRepliesCount')

        if last_replies_count > 0:
            reply_count = self.get_reply_count()

            if reply_count > 0:
                # The opening post is excluded, hence "reply_count - 1"
                reply_count_to_show = min(last_replies_count,
                                          reply_count - 1)
                replies = self.get_replies()
                last_replies = replies[reply_count - reply_count_to_show:]

                return last_replies

    def get_skipped_replies_count(self) -> int:
        """
        Gets the number of posts between the opening post and the last replies.
        """
        reply_count = self.get_reply_count()
        last_replies_count = min(settings.get_int('View', 'LastRepliesCount'),
                                 reply_count - 1)

        return reply_count - last_replies_count - 1

    def get_replies(self, view_fields_only=False) -> QuerySet:
        """
        Gets the thread's posts sorted by publication time.
        """
        query = self.multi_replies.order_by('pub_time').prefetch_related(
            'thread', 'attachments')
        if view_fields_only:
            query = query.defer('poster_ip')
        return query
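
    # A "top level" reply is one that does not reference another post of this
    # thread (it has no refposts pointing into the same thread).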
    def get_top_level_replies(self) -> QuerySet:
        return self.get_replies().exclude(refposts__threads__in=[self])

    def get_replies_with_images(self, view_fields_only=False) -> QuerySet:
        """
        Gets replies that have at least one image attached.
        """
        return self.get_replies(view_fields_only).filter(
            attachments__mimetype__in=FILE_TYPES_IMAGE).annotate(
                images_count=Count('attachments')).filter(images_count__gt=0)

    def get_opening_post(self, only_id=False) -> Post:
        """
        Gets the first post of the thread.
        """
        query = self.get_replies().filter(opening=True)
        if only_id:
            query = query.only('id')
        opening_post = query.first()

        return opening_post
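
    # Cached without a key method: the opening post of a thread never changes,
    # so the cached id should never need to be invalidated.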
    @cached_result()
    def get_opening_post_id(self) -> int:
        """
        Gets the ID of the first thread post.
        """
        return self.get_opening_post(only_id=True).id

    def get_pub_time(self):
        """
        Gets the opening post's publication time, because the thread does not
        have one of its own.
        """
        return self.get_opening_post().pub_time

    def __str__(self):
        return 'T#{}/{}'.format(self.id, self.get_opening_post_id())

    def get_tag_url_list(self) -> list:
        return boards.models.Tag.objects.get_tag_url_list(self.get_tags())

    def update_posts_time(self, exclude_posts=None):
        last_edit_time = self.last_edit_time

        for post in self.multi_replies.all():
            if exclude_posts is None or post not in exclude_posts:
                # Manual update is required because uids are generated on save
                post.last_edit_time = last_edit_time
                post.save(update_fields=['last_edit_time'])

                post.get_threads().update(last_edit_time=last_edit_time)
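
    # Publishes a "new post" notification on the thread's websocket channel
    # through the ``adjacent`` client, if websockets are enabled in settings.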
    def notify_clients(self):
        if not settings.get_bool('External', 'WebsocketsEnabled'):
            return

        client = Client()

        channel_name = WS_CHANNEL_THREAD + str(self.get_opening_post_id())
        client.publish(channel_name, {
            WS_NOTIFICATION_TYPE: WS_NOTIFICATION_TYPE_NEW_POST,
        })
        client.send()

    def get_absolute_url(self):
        return self.get_opening_post().get_absolute_url()

    def get_required_tags(self):
        return self.get_tags().filter(required=True)

    def get_replies_newer(self, post_id):
        return self.get_replies().filter(id__gt=post_id)

    def is_archived(self):
        return self.get_status() == STATUS_ARCHIVE

    def get_status(self):
        return self.status

    def is_monochrome(self):
        return self.monochrome

    @transaction.atomic
    def refresh_tags(self):
        """
        If any of the thread's tags have parents, adds those parents to the
        thread's tag list as well.
        """
        for tag in self.get_tags().all():
            parents = tag.get_all_parents()
            if len(parents) > 0:
                self.tags.add(*parents)

    def get_reply_tree(self):
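        """
        Builds a flattened reply tree: a list of ``(depth, post)`` tuples in
        display order, where depth 0 marks a post with no parent in this
        thread.
        """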
        replies = self.get_replies().prefetch_related('refposts')

        tree = []
        for reply in replies:
            parents = reply.refposts.all()

            found_parent = False
            searching_for_index = False

            if len(parents) > 0:
                index = 0
                parent_depth = 0
                indexes_to_insert = []

                for depth, element in tree:
                    index += 1

                    # If this element is next after parent on the same level,
                    # insert child before it
                    if searching_for_index and depth <= parent_depth:
                        indexes_to_insert.append((index - 1, parent_depth))
                        searching_for_index = False

                    if element in parents:
                        found_parent = True
                        searching_for_index = True
                        parent_depth = depth
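
            # Place the reply: at depth 0 if no parent was found in the tree,
            # otherwise right below its parent(s).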
            if not found_parent:
                tree.append((0, reply))
            else:
                # Still searching after the scan: the parent's subtree runs to
                # the end of the tree, so the child goes at the end as well.
                if searching_for_index:
                    tree.append((parent_depth + 1, reply))

                offset = 0
                for last_index, parent_depth in indexes_to_insert:
                    # Each insertion shifts later indexes by one, hence the
                    # growing offset.
                    tree.insert(last_index + offset, (parent_depth + 1, reply))
                    offset += 1

        return tree