mail: only bother admins when that really is the intention - not when there just happen to be no other recipients
Mads Kiilerich
r4332:609e06b6 default
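
The point of the change: `send_email` (and `NotificationModel.create`) used to fall back to mailing all admins whenever the recipient list came out empty, even when that was just an accident of the data. After this change only an explicit `recipients=None` triggers the admin fallback, while an empty list is reported as an error. A minimal sketch of that rule, as an illustration only and not the Kallithea code itself (the helper name and addresses are made up):

```python
# Illustration of the recipient rule this change introduces; not Kallithea code.

def resolve_recipients(recipients, email_to, admin_emails):
    """Return the effective recipient list, or None if nothing should be sent."""
    if recipients is None:
        # the caller explicitly asked for the fallback: configured address + all admins
        return [email_to] + list(admin_emails)
    if not recipients:
        # an empty list is a caller error, not a request to notify the admins
        return None
    return list(recipients)


print(resolve_recipients(None, 'root@example.com', ['admin@example.com']))
# -> ['root@example.com', 'admin@example.com']
print(resolve_recipients([], 'root@example.com', ['admin@example.com']))
# -> None (log an error, send nothing)
print(resolve_recipients(['dev@example.com'], 'root@example.com', ['admin@example.com']))
# -> ['dev@example.com']
```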
kallithea/lib/celerylib/tasks.py
@@ -1,501 +1,506 @@
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.lib.celerylib.tasks
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Kallithea task modules, containing all task that suppose to be run
19 19 by celery daemon
20 20
21 21 This file was forked by the Kallithea project in July 2014.
22 22 Original author and date, and relevant copyright and licensing information is below:
23 23 :created_on: Oct 6, 2010
24 24 :author: marcink
25 25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 26 :license: GPLv3, see LICENSE.md for more details.
27 27 """
28 28
29 29 from celery.decorators import task
30 30
31 31 import os
32 32 import traceback
33 33 import logging
34 34 from os.path import join as jn
35 35
36 36 from time import mktime
37 37 from operator import itemgetter
38 38 from string import lower
39 39
40 40 from pylons import config, url
41 41 from pylons.i18n.translation import _
42 42
43 43 from kallithea.lib.vcs import get_backend
44 44
45 45 from kallithea import CELERY_ON, CELERY_EAGER
46 46 from kallithea.lib.utils2 import safe_str
47 47 from kallithea.lib.celerylib import run_task, locked_task, dbsession, \
48 48 str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
49 49 from kallithea.lib.helpers import person
50 50 from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
51 51 from kallithea.lib.utils import add_cache, action_logger
52 52 from kallithea.lib.compat import json, OrderedDict
53 53 from kallithea.lib.hooks import log_create_repository
54 54
55 55 from kallithea.model.db import Statistics, Repository, User
56 56 from kallithea.model.scm import ScmModel
57 57
58 58
59 59 add_cache(config) # pragma: no cover
60 60
61 61 __all__ = ['whoosh_index', 'get_commits_stats',
62 62 'reset_user_password', 'send_email']
63 63
64 64
65 65 def get_logger(cls):
66 66 if CELERY_ON:
67 67 try:
68 68 log = cls.get_logger()
69 69 except Exception:
70 70 log = logging.getLogger(__name__)
71 71 else:
72 72 log = logging.getLogger(__name__)
73 73
74 74 return log
75 75
76 76
77 77 @task(ignore_result=True)
78 78 @locked_task
79 79 @dbsession
80 80 def whoosh_index(repo_location, full_index):
81 81 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
82 82 log = get_logger(whoosh_index)
83 83 DBS = get_session()
84 84
85 85 index_location = config['index_dir']
86 86 WhooshIndexingDaemon(index_location=index_location,
87 87 repo_location=repo_location, sa=DBS)\
88 88 .run(full_index=full_index)
89 89
90 90
91 91 @task(ignore_result=True)
92 92 @dbsession
93 93 def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
94 94 log = get_logger(get_commits_stats)
95 95 DBS = get_session()
96 96 lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
97 97 ts_max_y)
98 98 lockkey_path = config['app_conf']['cache_dir']
99 99
100 100 log.info('running task with lockkey %s' % lockkey)
101 101
102 102 try:
103 103 lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
104 104
105 105 # for js data compatibility cleans the key for person from '
106 106 akc = lambda k: person(k).replace('"', "")
107 107
108 108 co_day_auth_aggr = {}
109 109 commits_by_day_aggregate = {}
110 110 repo = Repository.get_by_repo_name(repo_name)
111 111 if repo is None:
112 112 return True
113 113
114 114 repo = repo.scm_instance
115 115 repo_size = repo.count()
116 116 # return if repo have no revisions
117 117 if repo_size < 1:
118 118 lock.release()
119 119 return True
120 120
121 121 skip_date_limit = True
122 122 parse_limit = int(config['app_conf'].get('commit_parse_limit'))
123 123 last_rev = None
124 124 last_cs = None
125 125 timegetter = itemgetter('time')
126 126
127 127 dbrepo = DBS.query(Repository)\
128 128 .filter(Repository.repo_name == repo_name).scalar()
129 129 cur_stats = DBS.query(Statistics)\
130 130 .filter(Statistics.repository == dbrepo).scalar()
131 131
132 132 if cur_stats is not None:
133 133 last_rev = cur_stats.stat_on_revision
134 134
135 135 if last_rev == repo.get_changeset().revision and repo_size > 1:
136 136 # pass silently without any work if we're not on first revision or
137 137 # current state of parsing revision(from db marker) is the
138 138 # last revision
139 139 lock.release()
140 140 return True
141 141
142 142 if cur_stats:
143 143 commits_by_day_aggregate = OrderedDict(json.loads(
144 144 cur_stats.commit_activity_combined))
145 145 co_day_auth_aggr = json.loads(cur_stats.commit_activity)
146 146
147 147 log.debug('starting parsing %s' % parse_limit)
148 148 lmktime = mktime
149 149
150 150 last_rev = last_rev + 1 if last_rev >= 0 else 0
151 151 log.debug('Getting revisions from %s to %s' % (
152 152 last_rev, last_rev + parse_limit)
153 153 )
154 154 for cs in repo[last_rev:last_rev + parse_limit]:
155 155 log.debug('parsing %s' % cs)
156 156 last_cs = cs # remember last parsed changeset
157 157 k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
158 158 cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
159 159
160 160 if akc(cs.author) in co_day_auth_aggr:
161 161 try:
162 162 l = [timegetter(x) for x in
163 163 co_day_auth_aggr[akc(cs.author)]['data']]
164 164 time_pos = l.index(k)
165 165 except ValueError:
166 166 time_pos = None
167 167
168 168 if time_pos >= 0 and time_pos is not None:
169 169
170 170 datadict = \
171 171 co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
172 172
173 173 datadict["commits"] += 1
174 174 datadict["added"] += len(cs.added)
175 175 datadict["changed"] += len(cs.changed)
176 176 datadict["removed"] += len(cs.removed)
177 177
178 178 else:
179 179 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
180 180
181 181 datadict = {"time": k,
182 182 "commits": 1,
183 183 "added": len(cs.added),
184 184 "changed": len(cs.changed),
185 185 "removed": len(cs.removed),
186 186 }
187 187 co_day_auth_aggr[akc(cs.author)]['data']\
188 188 .append(datadict)
189 189
190 190 else:
191 191 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
192 192 co_day_auth_aggr[akc(cs.author)] = {
193 193 "label": akc(cs.author),
194 194 "data": [{"time":k,
195 195 "commits":1,
196 196 "added":len(cs.added),
197 197 "changed":len(cs.changed),
198 198 "removed":len(cs.removed),
199 199 }],
200 200 "schema": ["commits"],
201 201 }
202 202
203 203 #gather all data by day
204 204 if k in commits_by_day_aggregate:
205 205 commits_by_day_aggregate[k] += 1
206 206 else:
207 207 commits_by_day_aggregate[k] = 1
208 208
209 209 overview_data = sorted(commits_by_day_aggregate.items(),
210 210 key=itemgetter(0))
211 211
212 212 if not co_day_auth_aggr:
213 213 co_day_auth_aggr[akc(repo.contact)] = {
214 214 "label": akc(repo.contact),
215 215 "data": [0, 1],
216 216 "schema": ["commits"],
217 217 }
218 218
219 219 stats = cur_stats if cur_stats else Statistics()
220 220 stats.commit_activity = json.dumps(co_day_auth_aggr)
221 221 stats.commit_activity_combined = json.dumps(overview_data)
222 222
223 223 log.debug('last revison %s' % last_rev)
224 224 leftovers = len(repo.revisions[last_rev:])
225 225 log.debug('revisions to parse %s' % leftovers)
226 226
227 227 if last_rev == 0 or leftovers < parse_limit:
228 228 log.debug('getting code trending stats')
229 229 stats.languages = json.dumps(__get_codes_stats(repo_name))
230 230
231 231 try:
232 232 stats.repository = dbrepo
233 233 stats.stat_on_revision = last_cs.revision if last_cs else 0
234 234 DBS.add(stats)
235 235 DBS.commit()
236 236 except:
237 237 log.error(traceback.format_exc())
238 238 DBS.rollback()
239 239 lock.release()
240 240 return False
241 241
242 242 # final release
243 243 lock.release()
244 244
245 245 # execute another task if celery is enabled
246 246 if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
247 247 recurse_limit -= 1
248 248 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
249 249 recurse_limit)
250 250 if recurse_limit <= 0:
251 251 log.debug('Breaking recursive mode due to reach of recurse limit')
252 252 return True
253 253 except LockHeld:
254 254 log.info('LockHeld')
255 255 return 'Task with key %s already running' % lockkey
256 256
257 257
258 258 @task(ignore_result=True)
259 259 @dbsession
260 260 def send_email(recipients, subject, body='', html_body=''):
261 261 """
262 262 Sends an email with defined parameters from the .ini files.
263 263
264 :param recipients: list of recipients, it this is empty the defined email
265 address from field 'email_to' is used instead
264 :param recipients: list of recipients, if this is None, the defined email
265 address from field 'email_to' and all admins is used instead
266 266 :param subject: subject of the mail
267 267 :param body: body of the mail
268 268 :param html_body: html version of body
269 269 """
270 270 log = get_logger(send_email)
271 271 DBS = get_session()
272 272 assert isinstance(recipients, list), recipients
273 273
274 274 email_config = config
275 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
276 if not recipients:
275 email_prefix = email_config.get('email_prefix', '')
276 if email_prefix:
277 subject = "%s %s" % (email_prefix, subject)
278 if recipients is None:
277 279 # if recipients are not defined we send to email_config + all admins
278 280 admins = [u.email for u in User.query()
279 281 .filter(User.admin == True).all()]
280 282 recipients = [email_config.get('email_to')] + admins
281 283 log.warning("recipients not specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
284 elif not recipients:
285 log.error("No recipients specified")
286 return False
282 287
283 288 mail_from = email_config.get('app_email_from', 'Kallithea')
284 289 user = email_config.get('smtp_username')
285 290 passwd = email_config.get('smtp_password')
286 291 mail_server = email_config.get('smtp_server')
287 292 mail_port = email_config.get('smtp_port')
288 293 tls = str2bool(email_config.get('smtp_use_tls'))
289 294 ssl = str2bool(email_config.get('smtp_use_ssl'))
290 295 debug = str2bool(email_config.get('debug'))
291 296 smtp_auth = email_config.get('smtp_auth')
292 297
293 298 if not mail_server:
294 299 log.error("SMTP mail server not configured - cannot send mail '%s' to %s", subject, ' '.join(recipients))
295 300 log.warning("body:\n%s", body)
296 301 log.warning("html:\n%s", html_body)
297 302 return False
298 303
299 304 try:
300 305 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
301 306 mail_port, ssl, tls, debug=debug)
302 307 m.send(recipients, subject, body, html_body)
303 308 except:
304 309 log.error('Mail sending failed')
305 310 log.error(traceback.format_exc())
306 311 return False
307 312 return True
308 313
309 314 @task(ignore_result=False)
310 315 @dbsession
311 316 def create_repo(form_data, cur_user):
312 317 from kallithea.model.repo import RepoModel
313 318 from kallithea.model.user import UserModel
314 319 from kallithea.model.db import Setting
315 320
316 321 log = get_logger(create_repo)
317 322 DBS = get_session()
318 323
319 324 cur_user = UserModel(DBS)._get_user(cur_user)
320 325
321 326 owner = cur_user
322 327 repo_name = form_data['repo_name']
323 328 repo_name_full = form_data['repo_name_full']
324 329 repo_type = form_data['repo_type']
325 330 description = form_data['repo_description']
326 331 private = form_data['repo_private']
327 332 clone_uri = form_data.get('clone_uri')
328 333 repo_group = form_data['repo_group']
329 334 landing_rev = form_data['repo_landing_rev']
330 335 copy_fork_permissions = form_data.get('copy_permissions')
331 336 copy_group_permissions = form_data.get('repo_copy_permissions')
332 337 fork_of = form_data.get('fork_parent_id')
333 338 state = form_data.get('repo_state', Repository.STATE_PENDING)
334 339
335 340 # repo creation defaults, private and repo_type are filled in form
336 341 defs = Setting.get_default_repo_settings(strip_prefix=True)
337 342 enable_statistics = defs.get('repo_enable_statistics')
338 343 enable_locking = defs.get('repo_enable_locking')
339 344 enable_downloads = defs.get('repo_enable_downloads')
340 345
341 346 try:
342 347 repo = RepoModel(DBS)._create_repo(
343 348 repo_name=repo_name_full,
344 349 repo_type=repo_type,
345 350 description=description,
346 351 owner=owner,
347 352 private=private,
348 353 clone_uri=clone_uri,
349 354 repo_group=repo_group,
350 355 landing_rev=landing_rev,
351 356 fork_of=fork_of,
352 357 copy_fork_permissions=copy_fork_permissions,
353 358 copy_group_permissions=copy_group_permissions,
354 359 enable_statistics=enable_statistics,
355 360 enable_locking=enable_locking,
356 361 enable_downloads=enable_downloads,
357 362 state=state
358 363 )
359 364
360 365 action_logger(cur_user, 'user_created_repo',
361 366 form_data['repo_name_full'], '', DBS)
362 367
363 368 DBS.commit()
364 369 # now create this repo on Filesystem
365 370 RepoModel(DBS)._create_filesystem_repo(
366 371 repo_name=repo_name,
367 372 repo_type=repo_type,
368 373 repo_group=RepoModel(DBS)._get_repo_group(repo_group),
369 374 clone_uri=clone_uri,
370 375 )
371 376 repo = Repository.get_by_repo_name(repo_name_full)
372 377 log_create_repository(repo.get_dict(), created_by=owner.username)
373 378
374 379 # update repo changeset caches initially
375 380 repo.update_changeset_cache()
376 381
377 382 # set new created state
378 383 repo.set_state(Repository.STATE_CREATED)
379 384 DBS.commit()
380 385 except Exception, e:
381 386 log.warning('Exception %s occured when forking repository, '
382 387 'doing cleanup...' % e)
383 388 # rollback things manually !
384 389 repo = Repository.get_by_repo_name(repo_name_full)
385 390 if repo:
386 391 Repository.delete(repo.repo_id)
387 392 DBS.commit()
388 393 RepoModel(DBS)._delete_filesystem_repo(repo)
389 394 raise
390 395
391 396 # it's an odd fix to make celery fail task when exception occurs
392 397 def on_failure(self, *args, **kwargs):
393 398 pass
394 399
395 400 return True
396 401
397 402
398 403 @task(ignore_result=False)
399 404 @dbsession
400 405 def create_repo_fork(form_data, cur_user):
401 406 """
402 407 Creates a fork of repository using interval VCS methods
403 408
404 409 :param form_data:
405 410 :param cur_user:
406 411 """
407 412 from kallithea.model.repo import RepoModel
408 413 from kallithea.model.user import UserModel
409 414
410 415 log = get_logger(create_repo_fork)
411 416 DBS = get_session()
412 417
413 418 base_path = Repository.base_path()
414 419 cur_user = UserModel(DBS)._get_user(cur_user)
415 420
416 421 repo_name = form_data['repo_name'] # fork in this case
417 422 repo_name_full = form_data['repo_name_full']
418 423
419 424 repo_type = form_data['repo_type']
420 425 owner = cur_user
421 426 private = form_data['private']
422 427 clone_uri = form_data.get('clone_uri')
423 428 repo_group = form_data['repo_group']
424 429 landing_rev = form_data['landing_rev']
425 430 copy_fork_permissions = form_data.get('copy_permissions')
426 431
427 432 try:
428 433 fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))
429 434
430 435 fork_repo = RepoModel(DBS)._create_repo(
431 436 repo_name=repo_name_full,
432 437 repo_type=repo_type,
433 438 description=form_data['description'],
434 439 owner=owner,
435 440 private=private,
436 441 clone_uri=clone_uri,
437 442 repo_group=repo_group,
438 443 landing_rev=landing_rev,
439 444 fork_of=fork_of,
440 445 copy_fork_permissions=copy_fork_permissions
441 446 )
442 447 action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
443 448 fork_of.repo_name, '', DBS)
444 449 DBS.commit()
445 450
446 451 update_after_clone = form_data['update_after_clone']
447 452 source_repo_path = os.path.join(base_path, fork_of.repo_name)
448 453
449 454 # now create this repo on Filesystem
450 455 RepoModel(DBS)._create_filesystem_repo(
451 456 repo_name=repo_name,
452 457 repo_type=repo_type,
453 458 repo_group=RepoModel(DBS)._get_repo_group(repo_group),
454 459 clone_uri=source_repo_path,
455 460 )
456 461 repo = Repository.get_by_repo_name(repo_name_full)
457 462 log_create_repository(repo.get_dict(), created_by=owner.username)
458 463
459 464 # update repo changeset caches initially
460 465 repo.update_changeset_cache()
461 466
462 467 # set new created state
463 468 repo.set_state(Repository.STATE_CREATED)
464 469 DBS.commit()
465 470 except Exception, e:
466 471 log.warning('Exception %s occured when forking repository, '
467 472 'doing cleanup...' % e)
468 473 #rollback things manually !
469 474 repo = Repository.get_by_repo_name(repo_name_full)
470 475 if repo:
471 476 Repository.delete(repo.repo_id)
472 477 DBS.commit()
473 478 RepoModel(DBS)._delete_filesystem_repo(repo)
474 479 raise
475 480
476 481 # it's an odd fix to make celery fail task when exception occurs
477 482 def on_failure(self, *args, **kwargs):
478 483 pass
479 484
480 485 return True
481 486
482 487
483 488 def __get_codes_stats(repo_name):
484 489 from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
485 490 repo = Repository.get_by_repo_name(repo_name).scm_instance
486 491
487 492 tip = repo.get_changeset()
488 493 code_stats = {}
489 494
490 495 def aggregate(cs):
491 496 for f in cs[2]:
492 497 ext = lower(f.extension)
493 498 if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
494 499 if ext in code_stats:
495 500 code_stats[ext] += 1
496 501 else:
497 502 code_stats[ext] = 1
498 503
499 504 map(aggregate, tip.walk('/'))
500 505
501 506 return code_stats or {}
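
Beyond the mail change, most of tasks.py above is the statistics machinery: `get_commits_stats` serializes itself with a file-based `DaemonLock` keyed on its arguments and, under celery, re-queues itself until all revisions are parsed or `recurse_limit` is exhausted. A condensed sketch of that locking pattern, reusing the names imported at the top of the file (`DaemonLock`, `LockHeld`, `__get_lockkey`) with a simplified body of my own, and assuming `DaemonLock` raises `LockHeld` when the lock is already held, as the task above implies:

```python
from os.path import join as jn

# mirrors the imports at the top of tasks.py
from kallithea.lib.celerylib import DaemonLock, LockHeld, __get_lockkey


def run_exclusive(cache_dir, task_name, work, *args):
    """Run work(*args) at most once at a time per (task_name, args) combination."""
    lockkey = __get_lockkey(task_name, *args)
    try:
        lock = DaemonLock(file_=jn(cache_dir, lockkey))
    except LockHeld:
        # another worker already holds the lock for these arguments; give up quietly
        return 'Task with key %s already running' % lockkey
    try:
        work(*args)
    finally:
        # always release so the next scheduled run can proceed
        lock.release()
    return True
```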
kallithea/model/notification.py
@@ -1,289 +1,291 @@
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.model.notification
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Model for notifications
19 19
20 20
21 21 This file was forked by the Kallithea project in July 2014.
22 22 Original author and date, and relevant copyright and licensing information is below:
23 23 :created_on: Nov 20, 2011
24 24 :author: marcink
25 25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 26 :license: GPLv3, see LICENSE.md for more details.
27 27 """
28 28
29 29
30 30 import os
31 31 import logging
32 32 import traceback
33 33
34 34 from pylons import tmpl_context as c
35 35 from pylons.i18n.translation import _
36 36
37 37 import kallithea
38 38 from kallithea.lib import helpers as h
39 39 from kallithea.model import BaseModel
40 40 from kallithea.model.db import Notification, User, UserNotification
41 41 from kallithea.model.meta import Session
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class NotificationModel(BaseModel):
47 47
48 48 cls = Notification
49 49
50 50 def __get_notification(self, notification):
51 51 if isinstance(notification, Notification):
52 52 return notification
53 53 elif isinstance(notification, (int, long)):
54 54 return Notification.get(notification)
55 55 else:
56 56 if notification:
57 57 raise Exception('notification must be int, long or Instance'
58 58 ' of Notification got %s' % type(notification))
59 59
60 60 def create(self, created_by, subject, body, recipients=None,
61 61 type_=Notification.TYPE_MESSAGE, with_email=True,
62 62 email_kwargs={}, email_subject=None):
63 63 """
64 64
65 65 Creates notification of given type
66 66
67 67 :param created_by: int, str or User instance. User who created this
68 68 notification
69 69 :param subject:
70 70 :param body:
71 71 :param recipients: list of int, str or User objects, when None
72 72 is given send to all admins
73 73 :param type_: type of notification
74 74 :param with_email: send email with this notification
75 75 :param email_kwargs: additional dict to pass as args to email template
76 76 :param email_subject: use given subject as email subject
77 77 """
78 78 from kallithea.lib.celerylib import tasks, run_task
79 79
80 80 if recipients and not getattr(recipients, '__iter__', False):
81 81 raise Exception('recipients must be a list or iterable')
82 82
83 83 created_by_obj = self._get_user(created_by)
84 84
85 recipients_objs = []
85 86 if recipients:
86 recipients_objs = []
87 87 for u in recipients:
88 88 obj = self._get_user(u)
89 89 if obj:
90 90 recipients_objs.append(obj)
91 91 else:
92 92 # TODO: inform user that requested operation couldn't be completed
93 93 log.error('cannot email unknown user %r', u)
94 94 recipients_objs = set(recipients_objs)
95 95 log.debug('sending notifications %s to %s' % (
96 96 type_, recipients_objs)
97 97 )
98 else:
98 elif recipients is None:
99 99 # empty recipients means to all admins
100 100 recipients_objs = User.query().filter(User.admin == True).all()
101 101 log.debug('sending notifications %s to admins: %s' % (
102 102 type_, recipients_objs)
103 103 )
104 #else: silently skip notification mails?
105
104 106 # TODO: inform user who are notified
105 107 notif = Notification.create(
106 108 created_by=created_by_obj, subject=subject,
107 109 body=body, recipients=recipients_objs, type_=type_
108 110 )
109 111
110 112 if not with_email:
111 113 return notif
112 114
113 115 #don't send email to person who created this comment
114 116 rec_objs = set(recipients_objs).difference(set([created_by_obj]))
115 117
116 118 # send email with notification to all other participants
117 119 for rec in rec_objs:
118 120 if not email_subject:
119 121 email_subject = NotificationModel()\
120 122 .make_description(notif, show_age=False)
121 123 type_ = type_
122 124 email_body = None # we set body to none, we just send HTML emails
123 125 ## this is passed into template
124 126 kwargs = {'subject': subject, 'body': h.rst_w_mentions(body)}
125 127 kwargs.update(email_kwargs)
126 128 email_body_html = EmailNotificationModel()\
127 129 .get_email_tmpl(type_, **kwargs)
128 130
129 131 run_task(tasks.send_email, [rec.email], email_subject, email_body,
130 132 email_body_html)
131 133
132 134 return notif
133 135
134 136 def delete(self, user, notification):
135 137 # we don't want to remove actual notification just the assignment
136 138 try:
137 139 notification = self.__get_notification(notification)
138 140 user = self._get_user(user)
139 141 if notification and user:
140 142 obj = UserNotification.query()\
141 143 .filter(UserNotification.user == user)\
142 144 .filter(UserNotification.notification
143 145 == notification)\
144 146 .one()
145 147 Session().delete(obj)
146 148 return True
147 149 except Exception:
148 150 log.error(traceback.format_exc())
149 151 raise
150 152
151 153 def get_for_user(self, user, filter_=None):
152 154 """
153 155 Get notifications for given user, filter them if filter dict is given
154 156
155 157 :param user:
156 158 :param filter:
157 159 """
158 160 user = self._get_user(user)
159 161
160 162 q = UserNotification.query()\
161 163 .filter(UserNotification.user == user)\
162 164 .join((Notification, UserNotification.notification_id ==
163 165 Notification.notification_id))
164 166
165 167 if filter_:
166 168 q = q.filter(Notification.type_.in_(filter_))
167 169
168 170 return q.all()
169 171
170 172 def mark_read(self, user, notification):
171 173 try:
172 174 notification = self.__get_notification(notification)
173 175 user = self._get_user(user)
174 176 if notification and user:
175 177 obj = UserNotification.query()\
176 178 .filter(UserNotification.user == user)\
177 179 .filter(UserNotification.notification
178 180 == notification)\
179 181 .one()
180 182 obj.read = True
181 183 Session().add(obj)
182 184 return True
183 185 except Exception:
184 186 log.error(traceback.format_exc())
185 187 raise
186 188
187 189 def mark_all_read_for_user(self, user, filter_=None):
188 190 user = self._get_user(user)
189 191 q = UserNotification.query()\
190 192 .filter(UserNotification.user == user)\
191 193 .filter(UserNotification.read == False)\
192 194 .join((Notification, UserNotification.notification_id ==
193 195 Notification.notification_id))
194 196 if filter_:
195 197 q = q.filter(Notification.type_.in_(filter_))
196 198
197 199 # this is a little inefficient but sqlalchemy doesn't support
198 200 # update on joined tables :(
199 201 for obj in q.all():
200 202 obj.read = True
201 203 Session().add(obj)
202 204
203 205 def get_unread_cnt_for_user(self, user):
204 206 user = self._get_user(user)
205 207 return UserNotification.query()\
206 208 .filter(UserNotification.read == False)\
207 209 .filter(UserNotification.user == user).count()
208 210
209 211 def get_unread_for_user(self, user):
210 212 user = self._get_user(user)
211 213 return [x.notification for x in UserNotification.query()\
212 214 .filter(UserNotification.read == False)\
213 215 .filter(UserNotification.user == user).all()]
214 216
215 217 def get_user_notification(self, user, notification):
216 218 user = self._get_user(user)
217 219 notification = self.__get_notification(notification)
218 220
219 221 return UserNotification.query()\
220 222 .filter(UserNotification.notification == notification)\
221 223 .filter(UserNotification.user == user).scalar()
222 224
223 225 def make_description(self, notification, show_age=True):
224 226 """
225 227 Creates a human readable description based on properties
226 228 of notification object
227 229 """
228 230 #alias
229 231 _n = notification
230 232 _map = {
231 233 _n.TYPE_CHANGESET_COMMENT: _('%(user)s commented on changeset at %(when)s'),
232 234 _n.TYPE_MESSAGE: _('%(user)s sent message at %(when)s'),
233 235 _n.TYPE_MENTION: _('%(user)s mentioned you at %(when)s'),
234 236 _n.TYPE_REGISTRATION: _('%(user)s registered in Kallithea at %(when)s'),
235 237 _n.TYPE_PULL_REQUEST: _('%(user)s opened new pull request at %(when)s'),
236 238 _n.TYPE_PULL_REQUEST_COMMENT: _('%(user)s commented on pull request at %(when)s')
237 239 }
238 240 tmpl = _map[notification.type_]
239 241
240 242 if show_age:
241 243 when = h.age(notification.created_on)
242 244 else:
243 245 when = h.fmt_date(notification.created_on)
244 246
245 247 return tmpl % dict(
246 248 user=notification.created_by_user.username,
247 249 when=when,
248 250 )
249 251
250 252
251 253 class EmailNotificationModel(BaseModel):
252 254
253 255 TYPE_CHANGESET_COMMENT = Notification.TYPE_CHANGESET_COMMENT
254 256 TYPE_PASSWORD_RESET = 'password_link'
255 257 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
256 258 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
257 259 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
258 260 TYPE_DEFAULT = 'default'
259 261
260 262 def __init__(self):
261 263 super(EmailNotificationModel, self).__init__()
262 264 self._template_root = kallithea.CONFIG['pylons.paths']['templates'][0]
263 265 self._tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
264 266 self.email_types = {
265 267 self.TYPE_CHANGESET_COMMENT: 'email_templates/changeset_comment.html',
266 268 self.TYPE_PASSWORD_RESET: 'email_templates/password_reset.html',
267 269 self.TYPE_REGISTRATION: 'email_templates/registration.html',
268 270 self.TYPE_DEFAULT: 'email_templates/default.html',
269 271 self.TYPE_PULL_REQUEST: 'email_templates/pull_request.html',
270 272 self.TYPE_PULL_REQUEST_COMMENT: 'email_templates/pull_request_comment.html',
271 273 }
272 274
273 275
274 276 def get_email_tmpl(self, type_, **kwargs):
275 277 """
276 278 return generated template for email based on given type
277 279
278 280 :param type_:
279 281 """
280 282
281 283 base = self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT])
282 284 email_template = self._tmpl_lookup.get_template(base)
283 285 # translator and helpers inject
284 286 _kwargs = {'_': _,
285 287 'h': h,
286 288 'c': c}
287 289 _kwargs.update(kwargs)
288 290 log.debug('rendering tmpl %s with kwargs %s' % (base, _kwargs))
289 291 return email_template.render(**_kwargs)
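
On the model side, `NotificationModel.create` now draws the same distinction as the task: an explicit list notifies exactly those users, `recipients=None` deliberately targets all admins, and an empty list creates the notification with no recipients and sends no mail. A hedged usage sketch, assuming a configured Kallithea environment; the user ids, subjects and bodies are placeholders:

```python
from kallithea.model.db import Notification
from kallithea.model.notification import NotificationModel

model = NotificationModel()

# explicit recipients: notify (and email) exactly these users
model.create(created_by=1, subject='New pull request', body='please review',
             recipients=[2, 3], type_=Notification.TYPE_PULL_REQUEST)

# recipients=None: deliberately route the notification to all admins,
# e.g. for a new account registration
model.create(created_by=1, subject='New user registered', body='details ...',
             recipients=None, type_=Notification.TYPE_REGISTRATION)

# recipients=[]: after this change no one is assigned the notification and
# no mail goes out, instead of the admins being mailed by accident
model.create(created_by=1, subject='Nothing to announce', body='...',
             recipients=[])
```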