@@ -1,306 +1,322 @@
  1 |   1 | import logging
  2 |   2 | from adjacent import Client
  3 |   3 |
  4 |   4 | from django.db.models import Count, Sum, QuerySet, Q
  5 |   5 | from django.utils import timezone
  6 |   6 | from django.db import models, transaction
  7 |   7 |
  8 |   8 | from boards.models import STATUS_BUMPLIMIT, STATUS_ACTIVE, STATUS_ARCHIVE
  9 |   9 |
 10 |  10 | from boards import settings
 11 |  11 | import boards
 12 |  12 | from boards.utils import cached_result, datetime_to_epoch
 13 |  13 | from boards.models.post import Post
 14 |  14 | from boards.models.tag import Tag
 15 |  15 |
 16 |  16 | FAV_THREAD_NO_UPDATES = -1
 17 |  17 |
 18 |  18 |
 19 |  19 | __author__ = 'neko259'
 20 |  20 |
 21 |  21 |
 22 |  22 | logger = logging.getLogger(__name__)
 23 |  23 |
 24 |  24 |
 25 |  25 | WS_NOTIFICATION_TYPE_NEW_POST = 'new_post'
 26 |  26 | WS_NOTIFICATION_TYPE = 'notification_type'
 27 |  27 |
 28 |  28 | WS_CHANNEL_THREAD = "thread:"
 29 |  29 |
 30 |  30 | STATUS_CHOICES = (
 31 |  31 |     (STATUS_ACTIVE, STATUS_ACTIVE),
 32 |  32 |     (STATUS_BUMPLIMIT, STATUS_BUMPLIMIT),
 33 |  33 |     (STATUS_ARCHIVE, STATUS_ARCHIVE),
 34 |  34 | )
 35 |  35 |
 36 |  36 |
 37 |  37 | class ThreadManager(models.Manager):
 38 |  38 |     def process_oldest_threads(self):
 39 |  39 |         """
 40 |  40 |         Preserves maximum thread count. If there are too many threads,
 41 |  41 |         archive or delete the old ones.
 42 |  42 |         """
 43 |  43 |
 44 |  44 |         threads = Thread.objects.exclude(status=STATUS_ARCHIVE).order_by('-bump_time')
 45 |  45 |         thread_count = threads.count()
 46 |  46 |
 47 |  47 |         max_thread_count = settings.get_int('Messages', 'MaxThreadCount')
 48 |  48 |         if thread_count > max_thread_count:
 49 |  49 |             num_threads_to_delete = thread_count - max_thread_count
 50 |  50 |             old_threads = threads[thread_count - num_threads_to_delete:]
 51 |  51 |
 52 |  52 |             for thread in old_threads:
 53 |  53 |                 if settings.get_bool('Storage', 'ArchiveThreads'):
 54 |  54 |                     self._archive_thread(thread)
 55 |  55 |                 else:
 56 |  56 |                     thread.delete()
 57 |  57 |
 58 |  58 |             logger.info('Processed %d old threads' % num_threads_to_delete)
 59 |  59 |
 60 |  60 |     def _archive_thread(self, thread):
 61 |  61 |         thread.status = STATUS_ARCHIVE
 62 |  62 |         thread.last_edit_time = timezone.now()
 63 |  63 |         thread.update_posts_time()
 64 |  64 |         thread.save(update_fields=['last_edit_time', 'status'])
 65 |  65 |
 66 |  66 |     def get_new_posts(self, datas):
 67 |  67 |         query = None
 68 |  68 |         # TODO Use classes instead of dicts
 69 |  69 |         for data in datas:
 70 |  70 |             if data['last_id'] != FAV_THREAD_NO_UPDATES:
 71 |  71 |                 q = (Q(id=data['op'].get_thread_id())
 72 |  72 |                      & Q(multi_replies__id__gt=data['last_id']))
 73 |  73 |                 if query is None:
 74 |  74 |                     query = q
 75 |  75 |                 else:
 76 |  76 |                     query = query | q
 77 |  77 |         if query is not None:
 78 |  78 |             return self.filter(query).annotate(
 79 |  79 |                 new_post_count=Count('multi_replies'))
 80 |  80 |
 81 |  81 |     def get_new_post_count(self, datas):
 82 |  82 |         new_posts = self.get_new_posts(datas)
 83 |  83 |         return new_posts.aggregate(total_count=Count('multi_replies'))\
 84 |  84 |             ['total_count'] if new_posts else 0
 85 |  85 |
 86 |  86 |
 87 |  87 | def get_thread_max_posts():
 88 |  88 |     return settings.get_int('Messages', 'MaxPostsPerThread')
 89 |  89 |
 90 |  90 |
 91 |  91 | class Thread(models.Model):
 92 |  92 |     objects = ThreadManager()
 93 |  93 |
 94 |  94 |     class Meta:
 95 |  95 |         app_label = 'boards'
 96 |  96 |
 97 |  97 |     tags = models.ManyToManyField('Tag', related_name='thread_tags')
 98 |  98 |     bump_time = models.DateTimeField(db_index=True)
 99 |  99 |     last_edit_time = models.DateTimeField()
100 | 100 |     max_posts = models.IntegerField(default=get_thread_max_posts)
101 | 101 |     status = models.CharField(max_length=50, default=STATUS_ACTIVE,
102 | 102 |                               choices=STATUS_CHOICES)
103 | 103 |     monochrome = models.BooleanField(default=False)
104 | 104 |
105 | 105 |     def get_tags(self) -> QuerySet:
106 | 106 |         """
107 | 107 |         Gets a sorted tag list.
108 | 108 |         """
109 | 109 |
110 | 110 |         return self.tags.order_by('name')
111 | 111 |
112 | 112 |     def bump(self):
113 | 113 |         """
114 | 114 |         Bumps (moves to up) thread if possible.
115 | 115 |         """
116 | 116 |
117 | 117 |         if self.can_bump():
118 | 118 |             self.bump_time = self.last_edit_time
119 | 119 |
120 | 120 |             self.update_bump_status()
121 | 121 |
122 | 122 |             logger.info('Bumped thread %d' % self.id)
123 | 123 |
124 | 124 |     def has_post_limit(self) -> bool:
125 | 125 |         return self.max_posts > 0
126 | 126 |
127 | 127 |     def update_bump_status(self, exclude_posts=None):
128 | 128 |         if self.has_post_limit() and self.get_reply_count() >= self.max_posts:
129 | 129 |             self.status = STATUS_BUMPLIMIT
130 | 130 |             self.update_posts_time(exclude_posts=exclude_posts)
131 | 131 |
132 | 132 |     def _get_cache_key(self):
133 | 133 |         return [datetime_to_epoch(self.last_edit_time)]
134 | 134 |
135 | 135 |     @cached_result(key_method=_get_cache_key)
136 | 136 |     def get_reply_count(self) -> int:
137 | 137 |         return self.get_replies().count()
138 | 138 |
139 | 139 |     @cached_result(key_method=_get_cache_key)
140 | 140 |     def get_images_count(self) -> int:
141 | 141 |         return self.get_replies().annotate(images_count=Count(
142 | 142 |             'images')).aggregate(Sum('images_count'))['images_count__sum']
143 | 143 |
144 | 144 |     def can_bump(self) -> bool:
145 | 145 |         """
146 | 146 |         Checks if the thread can be bumped by replying to it.
147 | 147 |         """
148 | 148 |
149 | 149 |         return self.get_status() == STATUS_ACTIVE
150 | 150 |
151 | 151 |     def get_last_replies(self) -> QuerySet:
152 | 152 |         """
153 | 153 |         Gets several last replies, not including opening post
154 | 154 |         """
155 | 155 |
156 | 156 |         last_replies_count = settings.get_int('View', 'LastRepliesCount')
157 | 157 |
158 | 158 |         if last_replies_count > 0:
159 | 159 |             reply_count = self.get_reply_count()
160 | 160 |
161 | 161 |             if reply_count > 0:
162 | 162 |                 reply_count_to_show = min(last_replies_count,
163 | 163 |                                           reply_count - 1)
164 | 164 |                 replies = self.get_replies()
165 | 165 |                 last_replies = replies[reply_count - reply_count_to_show:]
166 | 166 |
167 | 167 |                 return last_replies
168 | 168 |
169 | 169 |     def get_skipped_replies_count(self) -> int:
170 | 170 |         """
171 | 171 |         Gets number of posts between opening post and last replies.
172 | 172 |         """
173 | 173 |         reply_count = self.get_reply_count()
174 | 174 |         last_replies_count = min(settings.get_int('View', 'LastRepliesCount'),
175 | 175 |                                  reply_count - 1)
176 | 176 |         return reply_count - last_replies_count - 1
177 | 177 |
178 | 178 |     def get_replies(self, view_fields_only=False) -> QuerySet:
179 | 179 |         """
180 | 180 |         Gets sorted thread posts
181 | 181 |         """
182 | 182 |
183 | 183 |         query = self.multi_replies.order_by('pub_time').prefetch_related(
184 | 184 |             'images', 'thread', 'threads', 'attachments')
185 | 185 |         if view_fields_only:
186 | 186 |             query = query.defer('poster_ip')
187 | 187 |         return query.all()
188 | 188 |
189 | 189 |     def get_top_level_replies(self) -> QuerySet:
190 | 190 |         return self.get_replies().exclude(refposts__threads__in=[self])
191 | 191 |
192 | 192 |     def get_replies_with_images(self, view_fields_only=False) -> QuerySet:
193 | 193 |         """
194 | 194 |         Gets replies that have at least one image attached
195 | 195 |         """
196 | 196 |
197 | 197 |         return self.get_replies(view_fields_only).annotate(images_count=Count(
198 | 198 |             'images')).filter(images_count__gt=0)
199 | 199 |
200 | 200 |     def get_opening_post(self, only_id=False) -> Post:
201 | 201 |         """
202 | 202 |         Gets the first post of the thread
203 | 203 |         """
204 | 204 |
205 | 205 |         query = self.get_replies().filter(opening=True)
206 | 206 |         if only_id:
207 | 207 |             query = query.only('id')
208 | 208 |         opening_post = query.first()
209 | 209 |
210 | 210 |         return opening_post
211 | 211 |
212 | 212 |     @cached_result()
213 | 213 |     def get_opening_post_id(self) -> int:
214 | 214 |         """
215 | 215 |         Gets ID of the first thread post.
216 | 216 |         """
217 | 217 |
218 | 218 |         return self.get_opening_post(only_id=True).id
219 | 219 |
220 | 220 |     def get_pub_time(self):
221 | 221 |         """
222 | 222 |         Gets opening post's pub time because thread does not have its own one.
223 | 223 |         """
224 | 224 |
225 | 225 |         return self.get_opening_post().pub_time
226 | 226 |
227 | 227 |     def __str__(self):
228 | 228 |         return 'T#{}/{}'.format(self.id, self.get_opening_post_id())
229 | 229 |
230 | 230 |     def get_tag_url_list(self) -> list:
231 | 231 |         return boards.models.Tag.objects.get_tag_url_list(self.get_tags())
232 | 232 |
233 | 233 |     def update_posts_time(self, exclude_posts=None):
234 | 234 |         last_edit_time = self.last_edit_time
235 | 235 |
236 | 236 |         for post in self.multi_replies.all():
237 | 237 |             if exclude_posts is None or post not in exclude_posts:
238 | 238 |                 # Manual update is required because uids are generated on save
239 | 239 |                 post.last_edit_time = last_edit_time
240 | 240 |                 post.save(update_fields=['last_edit_time'])
241 | 241 |
242 | 242 |             post.get_threads().update(last_edit_time=last_edit_time)
243 | 243 |
244 | 244 |     def notify_clients(self):
245 | 245 |         if not settings.get_bool('External', 'WebsocketsEnabled'):
246 | 246 |             return
247 | 247 |
248 | 248 |         client = Client()
249 | 249 |
250 | 250 |         channel_name = WS_CHANNEL_THREAD + str(self.get_opening_post_id())
251 | 251 |         client.publish(channel_name, {
252 | 252 |             WS_NOTIFICATION_TYPE: WS_NOTIFICATION_TYPE_NEW_POST,
253 | 253 |         })
254 | 254 |         client.send()
255 | 255 |
256 | 256 |     def get_absolute_url(self):
257 | 257 |         return self.get_opening_post().get_absolute_url()
258 | 258 |
259 | 259 |     def get_required_tags(self):
260 | 260 |         return self.get_tags().filter(required=True)
261 | 261 |
262 | 262 |     def get_replies_newer(self, post_id):
263 | 263 |         return self.get_replies().filter(id__gt=post_id)
264 | 264 |
265 | 265 |     def is_archived(self):
266 | 266 |         return self.get_status() == STATUS_ARCHIVE
267 | 267 |
268 | 268 |     def get_status(self):
269 | 269 |         return self.status
270 | 270 |
271 | 271 |     def is_monochrome(self):
272 | 272 |         return self.monochrome
273 | 273 |
274 | 274 |     # If tags have parent, add them to the tag list
275 | 275 |     @transaction.atomic
276 | 276 |     def refresh_tags(self):
277 | 277 |         for tag in self.get_tags().all():
278 | 278 |             parents = tag.get_all_parents()
279 | 279 |             if len(parents) > 0:
280 | 280 |                 self.tags.add(*parents)
281 | 281 |
282 | 282 |     def get_reply_tree(self):
283 | 283 |         replies = self.get_replies().prefetch_related('refposts')
284 | 284 |         tree = []
285 |     |         parent_positions = {}
286 | 285 |         for reply in replies:
287 | 286 |             parents = reply.refposts.all()
    | 287 |
288 | 288 |             found_parent = False
    | 289 |             searching_for_index = False
    | 290 |
289 | 291 |             if len(parents) > 0:
290 | 292 |                 index = 0
    | 293 |                 parent_depth = 0
    | 294 |
    | 295 |                 indexes_to_insert = []
    | 296 |
291 | 297 |                 for depth, element in tree:
292 | 298 |                     index += 1
    | 299 |
    | 300 |                     # If this element is next after parent on the same level,
    | 301 |                     # insert child before it
    | 302 |                     if searching_for_index and depth <= parent_depth:
    | 303 |                         indexes_to_insert.append((index - 1, parent_depth))
    | 304 |                         searching_for_index = False
    | 305 |
293 | 306 |                     if element in parents:
294 | 307 |                         found_parent = True
295 |     |
296 |     |                         offset_under_parent = parent_positions.get(element, 1)
297 |     |                         tree.insert(index + offset_under_parent - 1, (depth + 1, reply))
298 |     |
299 |     |                         # Move whole parent tree by 1 added element
300 |     |                         for parent in parents:
301 |     |                             parent_positions[parent] = parent_positions.get(parent, 1) + 1
    | 308 |                         searching_for_index = True
    | 309 |                         parent_depth = depth
302 | 310 |
303 | 311 |             if not found_parent:
304 | 312 |                 tree.append((0, reply))
    | 313 |             else:
    | 314 |                 if searching_for_index:
    | 315 |                     tree.append((parent_depth + 1, reply))
    | 316 |
    | 317 |                 offset = 0
    | 318 |                 for last_index, parent_depth in indexes_to_insert:
    | 319 |                     tree.insert(last_index + offset, (parent_depth + 1, reply))
    | 320 |                     offset += 1
305 | 321 |
306 | 322 |         return tree
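
Below is a condensed, standalone sketch of the reworked get_reply_tree() insertion logic for reviewers who want to trace it outside Django. build_reply_tree() and the sample data are illustrative stand-ins (plain reply ids in place of Post objects and their refposts querysets), not part of the codebase. A reply is placed at its parent's depth plus one, immediately after the parent's existing subtree; a reply whose parents are not yet in the tree stays at the top level.

# Illustrative sketch only: replies are (reply_id, parent_ids) pairs in posting
# order, and the result is a list of (depth, reply_id) pairs, mirroring the
# (depth, reply) tuples built by Thread.get_reply_tree().
def build_reply_tree(replies):
    tree = []
    for reply_id, parent_ids in replies:
        found_parent = False
        searching_for_index = False
        parent_depth = 0
        indexes_to_insert = []

        if parent_ids:
            index = 0
            for depth, element in tree:
                index += 1
                # The first element at or above the parent's depth closes the
                # parent's subtree; remember where to insert the child.
                if searching_for_index and depth <= parent_depth:
                    indexes_to_insert.append((index - 1, parent_depth))
                    searching_for_index = False
                if element in parent_ids:
                    found_parent = True
                    searching_for_index = True
                    parent_depth = depth

        if not found_parent:
            tree.append((0, reply_id))
        else:
            if searching_for_index:
                # The parent's subtree runs to the end of the tree.
                tree.append((parent_depth + 1, reply_id))
            offset = 0
            for last_index, parent_depth in indexes_to_insert:
                tree.insert(last_index + offset, (parent_depth + 1, reply_id))
                offset += 1
    return tree

# Posts 2 and 3 answer post 1, post 4 answers post 2.
print(build_reply_tree([(1, []), (2, [1]), (3, [1]), (4, [2])]))
# -> [(0, 1), (1, 2), (2, 4), (1, 3)]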