@@ -1,848 +1,845 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | import datetime |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | import collections |
|
29 | 29 | |
|
30 | 30 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
31 | 31 | from sqlalchemy.sql.expression import null |
|
32 | 32 | from sqlalchemy.sql.functions import coalesce |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils |
|
35 | 35 | from rhodecode.lib import audit_logger |
|
36 | 36 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
37 | 37 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int |
|
38 | 38 | from rhodecode.model import BaseModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | false, |
|
41 | 41 | ChangesetComment, |
|
42 | 42 | User, |
|
43 | 43 | Notification, |
|
44 | 44 | PullRequest, |
|
45 | 45 | AttributeDict, |
|
46 | 46 | ChangesetCommentHistory, |
|
47 | 47 | ) |
|
48 | 48 | from rhodecode.model.notification import NotificationModel |
|
49 | 49 | from rhodecode.model.meta import Session |
|
50 | 50 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 51 | from rhodecode.model.notification import EmailNotificationModel |
|
52 | 52 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class CommentsModel(BaseModel): |
|
59 | 59 | |
|
60 | 60 | cls = ChangesetComment |
|
61 | 61 | |
|
62 | 62 | DIFF_CONTEXT_BEFORE = 3 |
|
63 | 63 | DIFF_CONTEXT_AFTER = 3 |
|
64 | 64 | |
|
65 | 65 | def __get_commit_comment(self, changeset_comment): |
|
66 | 66 | return self._get_instance(ChangesetComment, changeset_comment) |
|
67 | 67 | |
|
68 | 68 | def __get_pull_request(self, pull_request): |
|
69 | 69 | return self._get_instance(PullRequest, pull_request) |
|
70 | 70 | |
|
71 | 71 | def _extract_mentions(self, s): |
|
72 | 72 | user_objects = [] |
|
73 | 73 | for username in extract_mentioned_users(s): |
|
74 | 74 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
75 | 75 | if user_obj: |
|
76 | 76 | user_objects.append(user_obj) |
|
77 | 77 | return user_objects |
|
78 | 78 | |
|
79 | 79 | def _get_renderer(self, global_renderer='rst', request=None): |
|
80 | 80 | request = request or get_current_request() |
|
81 | 81 | |
|
82 | 82 | try: |
|
83 | 83 | global_renderer = request.call_context.visual.default_renderer |
|
84 | 84 | except AttributeError: |
|
85 | 85 | log.debug("Renderer not set, falling back " |
|
86 | 86 | "to default renderer '%s'", global_renderer) |
|
87 | 87 | except Exception: |
|
88 | 88 | log.error(traceback.format_exc()) |
|
89 | 89 | return global_renderer |
|
90 | 90 | |
|
91 | 91 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
92 | 92 | # group by versions, and count until, and display objects |
|
93 | 93 | |
|
94 | 94 | comment_groups = collections.defaultdict(list) |
|
95 | 95 | [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments] |
|
96 | 96 | |
|
97 | 97 | def yield_comments(pos): |
|
98 | 98 | for co in comment_groups[pos]: |
|
99 | 99 | yield co |
|
100 | 100 | |
|
101 | 101 | comment_versions = collections.defaultdict( |
|
102 | 102 | lambda: collections.defaultdict(list)) |
|
103 | 103 | prev_prvid = -1 |
|
104 | 104 | # fake last entry with None, to aggregate on "latest" version which |
|
105 | 105 | # doesn't have a pull_request_version_id |
|
106 | 106 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
107 | 107 | prvid = ver.pull_request_version_id |
|
108 | 108 | if prev_prvid == -1: |
|
109 | 109 | prev_prvid = prvid |
|
110 | 110 | |
|
111 | 111 | for co in yield_comments(prvid): |
|
112 | 112 | comment_versions[prvid]['at'].append(co) |
|
113 | 113 | |
|
114 | 114 | # save until |
|
115 | 115 | current = comment_versions[prvid]['at'] |
|
116 | 116 | prev_until = comment_versions[prev_prvid]['until'] |
|
117 | 117 | cur_until = prev_until + current |
|
118 | 118 | comment_versions[prvid]['until'].extend(cur_until) |
|
119 | 119 | |
|
120 | 120 | # save outdated |
|
121 | 121 | if inline: |
|
122 | 122 | outdated = [x for x in cur_until |
|
123 | 123 | if x.outdated_at_version(show_version)] |
|
124 | 124 | else: |
|
125 | 125 | outdated = [x for x in cur_until |
|
126 | 126 | if x.older_than_version(show_version)] |
|
127 | 127 | display = [x for x in cur_until if x not in outdated] |
|
128 | 128 | |
|
129 | 129 | comment_versions[prvid]['outdated'] = outdated |
|
130 | 130 | comment_versions[prvid]['display'] = display |
|
131 | 131 | |
|
132 | 132 | prev_prvid = prvid |
|
133 | 133 | |
|
134 | 134 | return comment_versions |
|
135 | 135 | |
|
136 | 136 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
137 | 137 | qry = Session().query(ChangesetComment) \ |
|
138 | 138 | .filter(ChangesetComment.repo == repo) |
|
139 | 139 | |
|
140 | 140 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
141 | 141 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
142 | 142 | |
|
143 | 143 | if user: |
|
144 | 144 | user = self._get_user(user) |
|
145 | 145 | if user: |
|
146 | 146 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
147 | 147 | |
|
148 | 148 | if commit_id: |
|
149 | 149 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
150 | 150 | |
|
151 | 151 | qry = qry.order_by(ChangesetComment.created_on) |
|
152 | 152 | return qry.all() |
|
153 | 153 | |
|
154 | 154 | def get_repository_unresolved_todos(self, repo): |
|
155 | 155 | todos = Session().query(ChangesetComment) \ |
|
156 | 156 | .filter(ChangesetComment.repo == repo) \ |
|
157 | 157 | .filter(ChangesetComment.resolved_by == None) \ |
|
158 | 158 | .filter(ChangesetComment.comment_type |
|
159 | 159 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
160 | 160 | todos = todos.all() |
|
161 | 161 | |
|
162 | 162 | return todos |
|
163 | 163 | |
|
164 | 164 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True): |
|
165 | 165 | |
|
166 | 166 | todos = Session().query(ChangesetComment) \ |
|
167 | 167 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
168 | 168 | .filter(ChangesetComment.resolved_by == None) \ |
|
169 | 169 | .filter(ChangesetComment.comment_type |
|
170 | 170 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
171 | 171 | |
|
172 | 172 | if not include_drafts: |
|
173 | 173 | todos = todos.filter(ChangesetComment.draft == false()) |
|
174 | 174 | |
|
175 | 175 | if not show_outdated: |
|
176 | 176 | todos = todos.filter( |
|
177 | 177 | coalesce(ChangesetComment.display_state, '') != |
|
178 | 178 | ChangesetComment.COMMENT_OUTDATED) |
|
179 | 179 | |
|
180 | 180 | todos = todos.all() |
|
181 | 181 | |
|
182 | 182 | return todos |
|
183 | 183 | |
|
184 | 184 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True): |
|
185 | 185 | |
|
186 | 186 | todos = Session().query(ChangesetComment) \ |
|
187 | 187 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
188 | 188 | .filter(ChangesetComment.resolved_by != None) \ |
|
189 | 189 | .filter(ChangesetComment.comment_type |
|
190 | 190 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
191 | 191 | |
|
192 | 192 | if not include_drafts: |
|
193 | 193 | todos = todos.filter(ChangesetComment.draft == false()) |
|
194 | 194 | |
|
195 | 195 | if not show_outdated: |
|
196 | 196 | todos = todos.filter( |
|
197 | 197 | coalesce(ChangesetComment.display_state, '') != |
|
198 | 198 | ChangesetComment.COMMENT_OUTDATED) |
|
199 | 199 | |
|
200 | 200 | todos = todos.all() |
|
201 | 201 | |
|
202 | 202 | return todos |
|
203 | 203 | |
|
204 | 204 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True): |
|
205 | 205 | |
|
206 | 206 | todos = Session().query(ChangesetComment) \ |
|
207 | 207 | .filter(ChangesetComment.revision == commit_id) \ |
|
208 | 208 | .filter(ChangesetComment.resolved_by == None) \ |
|
209 | 209 | .filter(ChangesetComment.comment_type |
|
210 | 210 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
211 | 211 | |
|
212 | 212 | if not include_drafts: |
|
213 | 213 | todos = todos.filter(ChangesetComment.draft == false()) |
|
214 | 214 | |
|
215 | 215 | if not show_outdated: |
|
216 | 216 | todos = todos.filter( |
|
217 | 217 | coalesce(ChangesetComment.display_state, '') != |
|
218 | 218 | ChangesetComment.COMMENT_OUTDATED) |
|
219 | 219 | |
|
220 | 220 | todos = todos.all() |
|
221 | 221 | |
|
222 | 222 | return todos |
|
223 | 223 | |
|
224 | 224 | def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True): |
|
225 | 225 | |
|
226 | 226 | todos = Session().query(ChangesetComment) \ |
|
227 | 227 | .filter(ChangesetComment.revision == commit_id) \ |
|
228 | 228 | .filter(ChangesetComment.resolved_by != None) \ |
|
229 | 229 | .filter(ChangesetComment.comment_type |
|
230 | 230 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
231 | 231 | |
|
232 | 232 | if not include_drafts: |
|
233 | 233 | todos = todos.filter(ChangesetComment.draft == false()) |
|
234 | 234 | |
|
235 | 235 | if not show_outdated: |
|
236 | 236 | todos = todos.filter( |
|
237 | 237 | coalesce(ChangesetComment.display_state, '') != |
|
238 | 238 | ChangesetComment.COMMENT_OUTDATED) |
|
239 | 239 | |
|
240 | 240 | todos = todos.all() |
|
241 | 241 | |
|
242 | 242 | return todos |
|
243 | 243 | |
|
244 | 244 | def get_commit_inline_comments(self, commit_id, include_drafts=True): |
|
245 | 245 | inline_comments = Session().query(ChangesetComment) \ |
|
246 | 246 | .filter(ChangesetComment.line_no != None) \ |
|
247 | 247 | .filter(ChangesetComment.f_path != None) \ |
|
248 | 248 | .filter(ChangesetComment.revision == commit_id) |
|
249 | 249 | |
|
250 | 250 | if not include_drafts: |
|
251 | 251 | inline_comments = inline_comments.filter(ChangesetComment.draft == false()) |
|
252 | 252 | |
|
253 | 253 | inline_comments = inline_comments.all() |
|
254 | 254 | return inline_comments |
|
255 | 255 | |
|
256 | 256 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
257 | 257 | audit_logger.store( |
|
258 | 258 | action=action, |
|
259 | 259 | action_data=action_data, |
|
260 | 260 | user=auth_user, |
|
261 | 261 | repo=comment.repo) |
|
262 | 262 | |
|
263 | 263 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
264 | 264 | f_path=None, line_no=None, status_change=None, |
|
265 | 265 | status_change_type=None, comment_type=None, is_draft=False, |
|
266 | 266 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
267 | 267 | renderer=None, auth_user=None, extra_recipients=None): |
|
268 | 268 | """ |
|
269 | 269 | Creates new comment for commit or pull request. |
|
270 | 270 | If status_change is not None, this comment is associated with a |
|
271 | 271 | status change of commit or commit associated with pull request |
|
272 | 272 | |
|
273 | 273 | :param text: |
|
274 | 274 | :param repo: |
|
275 | 275 | :param user: |
|
276 | 276 | :param commit_id: |
|
277 | 277 | :param pull_request: |
|
278 | 278 | :param f_path: |
|
279 | 279 | :param line_no: |
|
280 | 280 | :param status_change: Label for status change |
|
281 | 281 | :param comment_type: Type of comment |
|
282 | 282 | :param is_draft: is comment a draft only |
|
283 | 283 | :param resolves_comment_id: id of comment which this one will resolve |
|
284 | 284 | :param status_change_type: type of status change |
|
285 | 285 | :param closing_pr: |
|
286 | 286 | :param send_email: |
|
287 | 287 | :param renderer: pick renderer for this comment |
|
288 | 288 | :param auth_user: current authenticated user calling this method |
|
289 | 289 | :param extra_recipients: list of extra users to be added to recipients |
|
290 | 290 | """ |
|
291 | 291 | |
|
292 | 292 | if not text: |
|
293 | 293 | log.warning('Missing text for comment, skipping...') |
|
294 | 294 | return |
|
295 | 295 | request = get_current_request() |
|
296 | 296 | _ = request.translate |
|
297 | 297 | |
|
298 | 298 | if not renderer: |
|
299 | 299 | renderer = self._get_renderer(request=request) |
|
300 | 300 | |
|
301 | 301 | repo = self._get_repo(repo) |
|
302 | 302 | user = self._get_user(user) |
|
303 | 303 | auth_user = auth_user or user |
|
304 | 304 | |
|
305 | 305 | schema = comment_schema.CommentSchema() |
|
306 | 306 | validated_kwargs = schema.deserialize(dict( |
|
307 | 307 | comment_body=text, |
|
308 | 308 | comment_type=comment_type, |
|
309 | 309 | is_draft=is_draft, |
|
310 | 310 | comment_file=f_path, |
|
311 | 311 | comment_line=line_no, |
|
312 | 312 | renderer_type=renderer, |
|
313 | 313 | status_change=status_change_type, |
|
314 | 314 | resolves_comment_id=resolves_comment_id, |
|
315 | 315 | repo=repo.repo_id, |
|
316 | 316 | user=user.user_id, |
|
317 | 317 | )) |
|
318 | 318 | is_draft = validated_kwargs['is_draft'] |
|
319 | 319 | |
|
320 | 320 | comment = ChangesetComment() |
|
321 | 321 | comment.renderer = validated_kwargs['renderer_type'] |
|
322 | 322 | comment.text = validated_kwargs['comment_body'] |
|
323 | 323 | comment.f_path = validated_kwargs['comment_file'] |
|
324 | 324 | comment.line_no = validated_kwargs['comment_line'] |
|
325 | 325 | comment.comment_type = validated_kwargs['comment_type'] |
|
326 | 326 | comment.draft = is_draft |
|
327 | 327 | |
|
328 | 328 | comment.repo = repo |
|
329 | 329 | comment.author = user |
|
330 | 330 | resolved_comment = self.__get_commit_comment( |
|
331 | 331 | validated_kwargs['resolves_comment_id']) |
|
332 | 332 | # check if the comment actually belongs to this PR |
|
333 | 333 | if resolved_comment and resolved_comment.pull_request and \ |
|
334 | 334 | resolved_comment.pull_request != pull_request: |
|
335 | 335 | log.warning('Comment tried to resolve unrelated todo comment: %s', |
|
336 | 336 | resolved_comment) |
|
337 | 337 | # comment not bound to this pull request, forbid |
|
338 | 338 | resolved_comment = None |
|
339 | 339 | |
|
340 | 340 | elif resolved_comment and resolved_comment.repo and \ |
|
341 | 341 | resolved_comment.repo != repo: |
|
342 | 342 | log.warning('Comment tried to resolve unrelated todo comment: %s', |
|
343 | 343 | resolved_comment) |
|
344 | 344 | # comment not bound to this repo, forbid |
|
345 | 345 | resolved_comment = None |
|
346 | 346 | |
|
347 | 347 | comment.resolved_comment = resolved_comment |
|
348 | 348 | |
|
349 | 349 | pull_request_id = pull_request |
|
350 | 350 | |
|
351 | 351 | commit_obj = None |
|
352 | 352 | pull_request_obj = None |
|
353 | 353 | |
|
354 | 354 | if commit_id: |
|
355 | 355 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
356 | 356 | # do a lookup, so we don't pass something bad here |
|
357 | 357 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
358 | 358 | comment.revision = commit_obj.raw_id |
|
359 | 359 | |
|
360 | 360 | elif pull_request_id: |
|
361 | 361 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
362 | 362 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
363 | 363 | comment.pull_request = pull_request_obj |
|
364 | 364 | else: |
|
365 | 365 | raise Exception('Please specify commit or pull_request_id') |
|
366 | 366 | |
|
367 | 367 | Session().add(comment) |
|
368 | 368 | Session().flush() |
|
369 | 369 | kwargs = { |
|
370 | 370 | 'user': user, |
|
371 | 371 | 'renderer_type': renderer, |
|
372 | 372 | 'repo_name': repo.repo_name, |
|
373 | 373 | 'status_change': status_change, |
|
374 | 374 | 'status_change_type': status_change_type, |
|
375 | 375 | 'comment_body': text, |
|
376 | 376 | 'comment_file': f_path, |
|
377 | 377 | 'comment_line': line_no, |
|
378 | 378 | 'comment_type': comment_type or 'note', |
|
379 | 379 | 'comment_id': comment.comment_id |
|
380 | 380 | } |
|
381 | 381 | |
|
382 | 382 | if commit_obj: |
|
383 | 383 | recipients = ChangesetComment.get_users( |
|
384 | 384 | revision=commit_obj.raw_id) |
|
385 | 385 | # add commit author if it's in RhodeCode system |
|
386 | 386 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
387 | 387 | if not cs_author: |
|
388 | 388 | # use repo owner if we cannot extract the author correctly |
|
389 | 389 | cs_author = repo.user |
|
390 | 390 | recipients += [cs_author] |
|
391 | 391 | |
|
392 | 392 | commit_comment_url = self.get_url(comment, request=request) |
|
393 | 393 | commit_comment_reply_url = self.get_url( |
|
394 | 394 | comment, request=request, |
|
395 | 395 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
396 | 396 | |
|
397 | 397 | target_repo_url = h.link_to( |
|
398 | 398 | repo.repo_name, |
|
399 | 399 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
400 | 400 | |
|
401 | 401 | commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, |
|
402 | 402 | commit_id=commit_id) |
|
403 | 403 | |
|
404 | 404 | # commit specifics |
|
405 | 405 | kwargs.update({ |
|
406 | 406 | 'commit': commit_obj, |
|
407 | 407 | 'commit_message': commit_obj.message, |
|
408 | 408 | 'commit_target_repo_url': target_repo_url, |
|
409 | 409 | 'commit_comment_url': commit_comment_url, |
|
410 | 410 | 'commit_comment_reply_url': commit_comment_reply_url, |
|
411 | 411 | 'commit_url': commit_url, |
|
412 | 412 | 'thread_ids': [commit_url, commit_comment_url], |
|
413 | 413 | }) |
|
414 | 414 | |
|
415 | 415 | elif pull_request_obj: |
|
416 | 416 | # get the current participants of this pull request |
|
417 | 417 | recipients = ChangesetComment.get_users( |
|
418 | 418 | pull_request_id=pull_request_obj.pull_request_id) |
|
419 | 419 | # add pull request author |
|
420 | 420 | recipients += [pull_request_obj.author] |
|
421 | 421 | |
|
422 | 422 | # add the reviewers to notification |
|
423 | 423 | recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()] |
|
424 | 424 | |
|
425 | 425 | pr_target_repo = pull_request_obj.target_repo |
|
426 | 426 | pr_source_repo = pull_request_obj.source_repo |
|
427 | 427 | |
|
428 | 428 | pr_comment_url = self.get_url(comment, request=request) |
|
429 | 429 | pr_comment_reply_url = self.get_url( |
|
430 | 430 | comment, request=request, |
|
431 | 431 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
432 | 432 | |
|
433 | 433 | pr_url = h.route_url( |
|
434 | 434 | 'pullrequest_show', |
|
435 | 435 | repo_name=pr_target_repo.repo_name, |
|
436 | 436 | pull_request_id=pull_request_obj.pull_request_id, ) |
|
437 | 437 | |
|
438 | 438 | # set some variables for email notification |
|
439 | 439 | pr_target_repo_url = h.route_url( |
|
440 | 440 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
441 | 441 | |
|
442 | 442 | pr_source_repo_url = h.route_url( |
|
443 | 443 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
444 | 444 | |
|
445 | 445 | # pull request specifics |
|
446 | 446 | kwargs.update({ |
|
447 | 447 | 'pull_request': pull_request_obj, |
|
448 | 448 | 'pr_id': pull_request_obj.pull_request_id, |
|
449 | 449 | 'pull_request_url': pr_url, |
|
450 | 450 | 'pull_request_target_repo': pr_target_repo, |
|
451 | 451 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
452 | 452 | 'pull_request_source_repo': pr_source_repo, |
|
453 | 453 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
454 | 454 | 'pr_comment_url': pr_comment_url, |
|
455 | 455 | 'pr_comment_reply_url': pr_comment_reply_url, |
|
456 | 456 | 'pr_closing': closing_pr, |
|
457 | 457 | 'thread_ids': [pr_url, pr_comment_url], |
|
458 | 458 | }) |
|
459 | 459 | |
|
460 | 460 | if send_email: |
|
461 | 461 | recipients += [self._get_user(u) for u in (extra_recipients or [])] |
|
462 | # pre-generate the subject for notification itself | |
|
463 | (subject, _e, body_plaintext) = EmailNotificationModel().render_email( | |
|
464 | notification_type, **kwargs) | |
|
465 | 462 | |
|
466 | 463 | mention_recipients = set( |
|
467 | 464 | self._extract_mentions(text)).difference(recipients) |
|
468 | 465 | |
|
469 | 466 | # create notification objects, and emails |
|
470 | 467 | NotificationModel().create( |
|
471 | 468 | created_by=user, |
|
472 | notification_subject=subject, | 
|
473 | notification_body=body_plaintext, | 
|
469 | notification_subject='', # Filled in based on the notification_type | |
|
470 | notification_body='', # Filled in based on the notification_type | |
|
474 | 471 | notification_type=notification_type, |
|
475 | 472 | recipients=recipients, |
|
476 | 473 | mention_recipients=mention_recipients, |
|
477 | 474 | email_kwargs=kwargs, |
|
478 | 475 | ) |
|
479 | 476 | |
|
480 | 477 | Session().flush() |
|
481 | 478 | if comment.pull_request: |
|
482 | 479 | action = 'repo.pull_request.comment.create' |
|
483 | 480 | else: |
|
484 | 481 | action = 'repo.commit.comment.create' |
|
485 | 482 | |
|
486 | 483 | if not is_draft: |
|
487 | 484 | comment_data = comment.get_api_data() |
|
488 | 485 | |
|
489 | 486 | self._log_audit_action( |
|
490 | 487 | action, {'data': comment_data}, auth_user, comment) |
|
491 | 488 | |
|
492 | 489 | return comment |
|
493 | 490 | |
|
494 | 491 | def edit(self, comment_id, text, auth_user, version): |
|
495 | 492 | """ |
|
496 | 493 | Change existing comment for commit or pull request. |
|
497 | 494 | |
|
498 | 495 | :param comment_id: |
|
499 | 496 | :param text: |
|
500 | 497 | :param auth_user: current authenticated user calling this method |
|
501 | 498 | :param version: last comment version |
|
502 | 499 | """ |
|
503 | 500 | if not text: |
|
504 | 501 | log.warning('Missing text for comment, skipping...') |
|
505 | 502 | return |
|
506 | 503 | |
|
507 | 504 | comment = ChangesetComment.get(comment_id) |
|
508 | 505 | old_comment_text = comment.text |
|
509 | 506 | comment.text = text |
|
510 | 507 | comment.modified_at = datetime.datetime.now() |
|
511 | 508 | version = safe_int(version) |
|
512 | 509 | |
|
513 | 510 | # NOTE(marcink): this returns initial comment + edits, so v2 from ui |
|
514 | 511 | # would return 3 here |
|
515 | 512 | comment_version = ChangesetCommentHistory.get_version(comment_id) |
|
516 | 513 | |
|
517 | 514 | if isinstance(version, (int, long)) and (comment_version - version) != 1: |
|
518 | 515 | log.warning( |
|
519 | 516 | 'Version mismatch comment_version {} submitted {}, skipping'.format( |
|
520 | 517 | comment_version-1, # -1 since note above |
|
521 | 518 | version |
|
522 | 519 | ) |
|
523 | 520 | ) |
|
524 | 521 | raise CommentVersionMismatch() |
|
525 | 522 | |
|
526 | 523 | comment_history = ChangesetCommentHistory() |
|
527 | 524 | comment_history.comment_id = comment_id |
|
528 | 525 | comment_history.version = comment_version |
|
529 | 526 | comment_history.created_by_user_id = auth_user.user_id |
|
530 | 527 | comment_history.text = old_comment_text |
|
531 | 528 | # TODO add email notification |
|
532 | 529 | Session().add(comment_history) |
|
533 | 530 | Session().add(comment) |
|
534 | 531 | Session().flush() |
|
535 | 532 | |
|
536 | 533 | if comment.pull_request: |
|
537 | 534 | action = 'repo.pull_request.comment.edit' |
|
538 | 535 | else: |
|
539 | 536 | action = 'repo.commit.comment.edit' |
|
540 | 537 | |
|
541 | 538 | comment_data = comment.get_api_data() |
|
542 | 539 | comment_data['old_comment_text'] = old_comment_text |
|
543 | 540 | self._log_audit_action( |
|
544 | 541 | action, {'data': comment_data}, auth_user, comment) |
|
545 | 542 | |
|
546 | 543 | return comment_history |
|
547 | 544 | |
|
548 | 545 | def delete(self, comment, auth_user): |
|
549 | 546 | """ |
|
550 | 547 | Deletes given comment |
|
551 | 548 | """ |
|
552 | 549 | comment = self.__get_commit_comment(comment) |
|
553 | 550 | old_data = comment.get_api_data() |
|
554 | 551 | Session().delete(comment) |
|
555 | 552 | |
|
556 | 553 | if comment.pull_request: |
|
557 | 554 | action = 'repo.pull_request.comment.delete' |
|
558 | 555 | else: |
|
559 | 556 | action = 'repo.commit.comment.delete' |
|
560 | 557 | |
|
561 | 558 | self._log_audit_action( |
|
562 | 559 | action, {'old_data': old_data}, auth_user, comment) |
|
563 | 560 | |
|
564 | 561 | return comment |
|
565 | 562 | |
|
566 | 563 | def get_all_comments(self, repo_id, revision=None, pull_request=None, |
|
567 | 564 | include_drafts=True, count_only=False): |
|
568 | 565 | q = ChangesetComment.query()\ |
|
569 | 566 | .filter(ChangesetComment.repo_id == repo_id) |
|
570 | 567 | if revision: |
|
571 | 568 | q = q.filter(ChangesetComment.revision == revision) |
|
572 | 569 | elif pull_request: |
|
573 | 570 | pull_request = self.__get_pull_request(pull_request) |
|
574 | 571 | q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id) |
|
575 | 572 | else: |
|
576 | 573 | raise Exception('Please specify commit or pull_request') |
|
577 | 574 | if not include_drafts: |
|
578 | 575 | q = q.filter(ChangesetComment.draft == false()) |
|
579 | 576 | q = q.order_by(ChangesetComment.created_on) |
|
580 | 577 | if count_only: |
|
581 | 578 | return q.count() |
|
582 | 579 | |
|
583 | 580 | return q.all() |
|
584 | 581 | |
|
585 | 582 | def get_url(self, comment, request=None, permalink=False, anchor=None): |
|
586 | 583 | if not request: |
|
587 | 584 | request = get_current_request() |
|
588 | 585 | |
|
589 | 586 | comment = self.__get_commit_comment(comment) |
|
590 | 587 | if anchor is None: |
|
591 | 588 | anchor = 'comment-{}'.format(comment.comment_id) |
|
592 | 589 | |
|
593 | 590 | if comment.pull_request: |
|
594 | 591 | pull_request = comment.pull_request |
|
595 | 592 | if permalink: |
|
596 | 593 | return request.route_url( |
|
597 | 594 | 'pull_requests_global', |
|
598 | 595 | pull_request_id=pull_request.pull_request_id, |
|
599 | 596 | _anchor=anchor) |
|
600 | 597 | else: |
|
601 | 598 | return request.route_url( |
|
602 | 599 | 'pullrequest_show', |
|
603 | 600 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
604 | 601 | pull_request_id=pull_request.pull_request_id, |
|
605 | 602 | _anchor=anchor) |
|
606 | 603 | |
|
607 | 604 | else: |
|
608 | 605 | repo = comment.repo |
|
609 | 606 | commit_id = comment.revision |
|
610 | 607 | |
|
611 | 608 | if permalink: |
|
612 | 609 | return request.route_url( |
|
613 | 610 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
614 | 611 | commit_id=commit_id, |
|
615 | 612 | _anchor=anchor) |
|
616 | 613 | |
|
617 | 614 | else: |
|
618 | 615 | return request.route_url( |
|
619 | 616 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
620 | 617 | commit_id=commit_id, |
|
621 | 618 | _anchor=anchor) |
|
622 | 619 | |
|
623 | 620 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
624 | 621 | """ |
|
625 | 622 | Gets main comments based on revision or pull_request_id |
|
626 | 623 | |
|
627 | 624 | :param repo_id: |
|
628 | 625 | :param revision: |
|
629 | 626 | :param pull_request: |
|
630 | 627 | """ |
|
631 | 628 | |
|
632 | 629 | q = ChangesetComment.query()\ |
|
633 | 630 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
634 | 631 | .filter(ChangesetComment.line_no == None)\ |
|
635 | 632 | .filter(ChangesetComment.f_path == None) |
|
636 | 633 | if revision: |
|
637 | 634 | q = q.filter(ChangesetComment.revision == revision) |
|
638 | 635 | elif pull_request: |
|
639 | 636 | pull_request = self.__get_pull_request(pull_request) |
|
640 | 637 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
641 | 638 | else: |
|
642 | 639 | raise Exception('Please specify commit or pull_request') |
|
643 | 640 | q = q.order_by(ChangesetComment.created_on) |
|
644 | 641 | return q.all() |
|
645 | 642 | |
|
646 | 643 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
647 | 644 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
648 | 645 | return self._group_comments_by_path_and_line_number(q) |
|
649 | 646 | |
|
650 | 647 | def get_inline_comments_as_list(self, inline_comments, skip_outdated=True, |
|
651 | 648 | version=None): |
|
652 | 649 | inline_comms = [] |
|
653 | 650 | for fname, per_line_comments in inline_comments.iteritems(): |
|
654 | 651 | for lno, comments in per_line_comments.iteritems(): |
|
655 | 652 | for comm in comments: |
|
656 | 653 | if not comm.outdated_at_version(version) and skip_outdated: |
|
657 | 654 | inline_comms.append(comm) |
|
658 | 655 | |
|
659 | 656 | return inline_comms |
|
660 | 657 | |
|
661 | 658 | def get_outdated_comments(self, repo_id, pull_request): |
|
662 | 659 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
663 | 660 | # of a pull request. |
|
664 | 661 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
665 | 662 | q = q.filter( |
|
666 | 663 | ChangesetComment.display_state == |
|
667 | 664 | ChangesetComment.COMMENT_OUTDATED |
|
668 | 665 | ).order_by(ChangesetComment.comment_id.asc()) |
|
669 | 666 | |
|
670 | 667 | return self._group_comments_by_path_and_line_number(q) |
|
671 | 668 | |
|
672 | 669 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
673 | 670 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
674 | 671 | # commit. |
|
675 | 672 | if revision: |
|
676 | 673 | q = Session().query(ChangesetComment).filter( |
|
677 | 674 | ChangesetComment.repo_id == repo_id, |
|
678 | 675 | ChangesetComment.line_no != null(), |
|
679 | 676 | ChangesetComment.f_path != null(), |
|
680 | 677 | ChangesetComment.revision == revision) |
|
681 | 678 | |
|
682 | 679 | elif pull_request: |
|
683 | 680 | pull_request = self.__get_pull_request(pull_request) |
|
684 | 681 | if not CommentsModel.use_outdated_comments(pull_request): |
|
685 | 682 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
686 | 683 | else: |
|
687 | 684 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
688 | 685 | |
|
689 | 686 | else: |
|
690 | 687 | raise Exception('Please specify commit or pull_request_id') |
|
691 | 688 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
692 | 689 | return q |
|
693 | 690 | |
|
694 | 691 | def _group_comments_by_path_and_line_number(self, q): |
|
695 | 692 | comments = q.all() |
|
696 | 693 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
697 | 694 | for co in comments: |
|
698 | 695 | paths[co.f_path][co.line_no].append(co) |
|
699 | 696 | return paths |
|
700 | 697 | |
|
701 | 698 | @classmethod |
|
702 | 699 | def needed_extra_diff_context(cls): |
|
703 | 700 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
704 | 701 | |
|
705 | 702 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
706 | 703 | if not CommentsModel.use_outdated_comments(pull_request): |
|
707 | 704 | return |
|
708 | 705 | |
|
709 | 706 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
710 | 707 | comments_to_outdate = comments.all() |
|
711 | 708 | |
|
712 | 709 | for comment in comments_to_outdate: |
|
713 | 710 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
714 | 711 | |
|
715 | 712 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
716 | 713 | diff_line = _parse_comment_line_number(comment.line_no) |
|
717 | 714 | |
|
718 | 715 | try: |
|
719 | 716 | old_context = old_diff_proc.get_context_of_line( |
|
720 | 717 | path=comment.f_path, diff_line=diff_line) |
|
721 | 718 | new_context = new_diff_proc.get_context_of_line( |
|
722 | 719 | path=comment.f_path, diff_line=diff_line) |
|
723 | 720 | except (diffs.LineNotInDiffException, |
|
724 | 721 | diffs.FileNotInDiffException): |
|
725 | 722 | if not comment.draft: |
|
726 | 723 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
727 | 724 | return |
|
728 | 725 | |
|
729 | 726 | if old_context == new_context: |
|
730 | 727 | return |
|
731 | 728 | |
|
732 | 729 | if self._should_relocate_diff_line(diff_line): |
|
733 | 730 | new_diff_lines = new_diff_proc.find_context( |
|
734 | 731 | path=comment.f_path, context=old_context, |
|
735 | 732 | offset=self.DIFF_CONTEXT_BEFORE) |
|
736 | 733 | if not new_diff_lines and not comment.draft: |
|
737 | 734 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
738 | 735 | else: |
|
739 | 736 | new_diff_line = self._choose_closest_diff_line( |
|
740 | 737 | diff_line, new_diff_lines) |
|
741 | 738 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
742 | 739 | else: |
|
743 | 740 | if not comment.draft: |
|
744 | 741 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
745 | 742 | |
|
746 | 743 | def _should_relocate_diff_line(self, diff_line): |
|
747 | 744 | """ |
|
748 | 745 | Checks if relocation shall be tried for the given `diff_line`. |
|
749 | 746 | |
|
750 | 747 | If a comment points into the first lines, then we can have a situation |
|
751 | 748 | that after an update another line has been added on top. In this case |
|
752 | 749 | we would find the context still and move the comment around. This |
|
753 | 750 | would be wrong. |
|
754 | 751 | """ |
|
755 | 752 | should_relocate = ( |
|
756 | 753 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
757 | 754 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
758 | 755 | return should_relocate |
|
759 | 756 | |
|
760 | 757 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
761 | 758 | candidate = new_diff_lines[0] |
|
762 | 759 | best_delta = _diff_line_delta(diff_line, candidate) |
|
763 | 760 | for new_diff_line in new_diff_lines[1:]: |
|
764 | 761 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
765 | 762 | if delta < best_delta: |
|
766 | 763 | candidate = new_diff_line |
|
767 | 764 | best_delta = delta |
|
768 | 765 | return candidate |
|
769 | 766 | |
|
770 | 767 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
771 | 768 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
772 | 769 | comments = comments.filter( |
|
773 | 770 | coalesce(ChangesetComment.display_state, '') != |
|
774 | 771 | ChangesetComment.COMMENT_OUTDATED) |
|
775 | 772 | return comments |
|
776 | 773 | |
|
777 | 774 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
778 | 775 | comments = Session().query(ChangesetComment)\ |
|
779 | 776 | .filter(ChangesetComment.line_no != None)\ |
|
780 | 777 | .filter(ChangesetComment.f_path != None)\ |
|
781 | 778 | .filter(ChangesetComment.pull_request == pull_request) |
|
782 | 779 | return comments |
|
783 | 780 | |
|
784 | 781 | def _all_general_comments_of_pull_request(self, pull_request): |
|
785 | 782 | comments = Session().query(ChangesetComment)\ |
|
786 | 783 | .filter(ChangesetComment.line_no == None)\ |
|
787 | 784 | .filter(ChangesetComment.f_path == None)\ |
|
788 | 785 | .filter(ChangesetComment.pull_request == pull_request) |
|
789 | 786 | |
|
790 | 787 | return comments |
|
791 | 788 | |
|
792 | 789 | @staticmethod |
|
793 | 790 | def use_outdated_comments(pull_request): |
|
794 | 791 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
795 | 792 | settings = settings_model.get_general_settings() |
|
796 | 793 | return settings.get('rhodecode_use_outdated_comments', False) |
|
797 | 794 | |
|
798 | 795 | def trigger_commit_comment_hook(self, repo, user, action, data=None): |
|
799 | 796 | repo = self._get_repo(repo) |
|
800 | 797 | target_scm = repo.scm_instance() |
|
801 | 798 | if action == 'create': |
|
802 | 799 | trigger_hook = hooks_utils.trigger_comment_commit_hooks |
|
803 | 800 | elif action == 'edit': |
|
804 | 801 | trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks |
|
805 | 802 | else: |
|
806 | 803 | return |
|
807 | 804 | |
|
808 | 805 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', |
|
809 | 806 | repo, action, trigger_hook) |
|
810 | 807 | trigger_hook( |
|
811 | 808 | username=user.username, |
|
812 | 809 | repo_name=repo.repo_name, |
|
813 | 810 | repo_type=target_scm.alias, |
|
814 | 811 | repo=repo, |
|
815 | 812 | data=data) |
|
816 | 813 | |
|
817 | 814 | |
|
818 | 815 | def _parse_comment_line_number(line_no): |
|
819 | 816 | """ |
|
820 | 817 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
821 | 818 | """ |
|
822 | 819 | old_line = None |
|
823 | 820 | new_line = None |
|
824 | 821 | if line_no.startswith('o'): |
|
825 | 822 | old_line = int(line_no[1:]) |
|
826 | 823 | elif line_no.startswith('n'): |
|
827 | 824 | new_line = int(line_no[1:]) |
|
828 | 825 | else: |
|
829 | 826 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
830 | 827 | return diffs.DiffLineNumber(old_line, new_line) |
|
831 | 828 | |
|
832 | 829 | |
|
833 | 830 | def _diff_to_comment_line_number(diff_line): |
|
834 | 831 | if diff_line.new is not None: |
|
835 | 832 | return u'n{}'.format(diff_line.new) |
|
836 | 833 | elif diff_line.old is not None: |
|
837 | 834 | return u'o{}'.format(diff_line.old) |
|
838 | 835 | return u'' |
|
839 | 836 | |
|
840 | 837 | |
|
841 | 838 | def _diff_line_delta(a, b): |
|
842 | 839 | if None not in (a.new, b.new): |
|
843 | 840 | return abs(a.new - b.new) |
|
844 | 841 | elif None not in (a.old, b.old): |
|
845 | 842 | return abs(a.old - b.old) |
|
846 | 843 | else: |
|
847 | 844 | raise ValueError( |
|
848 | 845 | "Cannot compute delta between {} and {}".format(a, b)) |
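
Note on the hunk above: after this change the comments model no longer pre-renders the e-mail subject and body before creating a notification; it passes empty strings to NotificationModel().create and lets the notification layer derive both from the notification type. A minimal sketch of the new call site, assuming the surrounding variables (user, notification_type, recipients, mention_recipients, kwargs) already set up earlier in CommentsModel.create:

    from rhodecode.model.notification import NotificationModel

    # Subject/body are intentionally left empty; NotificationModel fills them
    # in from the rendered email templates for the given notification_type.
    notification = NotificationModel().create(
        created_by=user,
        notification_subject='',   # filled in based on the notification_type
        notification_body='',      # filled in based on the notification_type
        notification_type=notification_type,
        recipients=recipients,
        mention_recipients=mention_recipients,
        email_kwargs=kwargs,
    )
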
@@ -1,435 +1,450 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Model for notifications |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | |
|
29 | 29 | import premailer |
|
30 | 30 | from pyramid.threadlocal import get_current_request |
|
31 | 31 | from sqlalchemy.sql.expression import false, true |
|
32 | 32 | |
|
33 | 33 | import rhodecode |
|
34 | 34 | from rhodecode.lib import helpers as h |
|
35 | 35 | from rhodecode.model import BaseModel |
|
36 | 36 | from rhodecode.model.db import Notification, User, UserNotification |
|
37 | 37 | from rhodecode.model.meta import Session |
|
38 | 38 | from rhodecode.translation import TranslationString |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class NotificationModel(BaseModel): |
|
44 | 44 | |
|
45 | 45 | cls = Notification |
|
46 | 46 | |
|
47 | 47 | def __get_notification(self, notification): |
|
48 | 48 | if isinstance(notification, Notification): |
|
49 | 49 | return notification |
|
50 | 50 | elif isinstance(notification, (int, long)): |
|
51 | 51 | return Notification.get(notification) |
|
52 | 52 | else: |
|
53 | 53 | if notification: |
|
54 | 54 | raise Exception('notification must be int, long or Instance' |
|
55 | 55 | ' of Notification got %s' % type(notification)) |
|
56 | 56 | |
|
57 | 57 | def create( |
|
58 | self, created_by, notification_subject, notification_body, | |
|
58 | self, created_by, notification_subject='', notification_body='', | |
|
59 | 59 | notification_type=Notification.TYPE_MESSAGE, recipients=None, |
|
60 | 60 | mention_recipients=None, with_email=True, email_kwargs=None): |
|
61 | 61 | """ |
|
62 | 62 | |
|
63 | 63 | Creates notification of given type |
|
64 | 64 | |
|
65 | 65 | :param created_by: int, str or User instance. User who created this |
|
66 | 66 | notification |
|
67 | :param notification_subject: subject of notification itself | |
|
67 | :param notification_subject: subject of notification itself, | |
|
68 | it will be generated automatically from notification_type if not specified | |
|
68 | 69 | :param notification_body: body of notification text |
|
70 | it will be generated automatically from notification_type if not specified | |
|
69 | 71 | :param notification_type: type of notification, based on that we |
|
70 | 72 | pick templates |
|
71 | ||
|
72 | 73 | :param recipients: list of int, str or User objects, when None |
|
73 | 74 | is given send to all admins |
|
74 | 75 | :param mention_recipients: list of int, str or User objects, |
|
75 | 76 | that were mentioned |
|
76 | 77 | :param with_email: send email with this notification |
|
77 | 78 | :param email_kwargs: dict with arguments to generate email |
|
78 | 79 | """ |
|
79 | 80 | |
|
80 | 81 | from rhodecode.lib.celerylib import tasks, run_task |
|
81 | 82 | |
|
82 | 83 | if recipients and not getattr(recipients, '__iter__', False): |
|
83 | 84 | raise Exception('recipients must be an iterable object') |
|
84 | 85 | |
|
86 | if not (notification_subject and notification_body) and not notification_type: | |
|
87 | raise ValueError('notification_subject and notification_body ' | 
|
88 | 'cannot be empty when notification_type is not specified') | |
|
89 | ||
|
85 | 90 | created_by_obj = self._get_user(created_by) |
|
91 | ||
|
92 | if not created_by_obj: | |
|
93 | raise Exception('unknown user %s' % created_by) | |
|
94 | ||
|
86 | 95 | # default MAIN body if not given |
|
87 | 96 | email_kwargs = email_kwargs or {'body': notification_body} |
|
88 | 97 | mention_recipients = mention_recipients or set() |
|
89 | 98 | |
|
90 | if not created_by_obj: | |
|
91 | raise Exception('unknown user %s' % created_by) | |
|
92 | ||
|
93 | 99 | if recipients is None: |
|
94 | 100 | # recipients is None means to all admins |
|
95 | 101 | recipients_objs = User.query().filter(User.admin == true()).all() |
|
96 | 102 | log.debug('sending notifications %s to admins: %s', |
|
97 | 103 | notification_type, recipients_objs) |
|
98 | 104 | else: |
|
99 | 105 | recipients_objs = set() |
|
100 | 106 | for u in recipients: |
|
101 | 107 | obj = self._get_user(u) |
|
102 | 108 | if obj: |
|
103 | 109 | recipients_objs.add(obj) |
|
104 | 110 | else: # we didn't find this user, log the error and carry on |
|
105 | 111 | log.error('cannot notify unknown user %r', u) |
|
106 | 112 | |
|
107 | 113 | if not recipients_objs: |
|
108 | 114 | raise Exception('no valid recipients specified') |
|
109 | 115 | |
|
110 | 116 | log.debug('sending notifications %s to %s', |
|
111 | 117 | notification_type, recipients_objs) |
|
112 | 118 | |
|
113 | 119 | # add mentioned users into recipients |
|
114 | 120 | final_recipients = set(recipients_objs).union(mention_recipients) |
|
115 | 121 | |
|
122 | (subject, email_body, email_body_plaintext) = \ | |
|
123 | EmailNotificationModel().render_email(notification_type, **email_kwargs) | |
|
124 | ||
|
125 | if not notification_subject: | |
|
126 | notification_subject = subject | |
|
127 | ||
|
128 | if not notification_body: | |
|
129 | notification_body = email_body_plaintext | |
|
130 | ||
|
116 | 131 | notification = Notification.create( |
|
117 | 132 | created_by=created_by_obj, subject=notification_subject, |
|
118 | 133 | body=notification_body, recipients=final_recipients, |
|
119 | 134 | type_=notification_type |
|
120 | 135 | ) |
|
121 | 136 | |
|
122 | 137 | if not with_email: # skip sending email, and just create notification |
|
123 | 138 | return notification |
|
124 | 139 | |
|
125 | 140 | # don't send email to person who created this comment |
|
126 | 141 | rec_objs = set(recipients_objs).difference({created_by_obj}) |
|
127 | 142 | |
|
128 | 143 | # now notify all recipients in question |
|
129 | 144 | |
|
130 | 145 | for recipient in rec_objs.union(mention_recipients): |
|
131 | 146 | # inject current recipient |
|
132 | 147 | email_kwargs['recipient'] = recipient |
|
133 | 148 | email_kwargs['mention'] = recipient in mention_recipients |
|
134 | 149 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
135 | 150 | notification_type, **email_kwargs) |
|
136 | 151 | |
|
137 | 152 | extra_headers = None |
|
138 | 153 | if 'thread_ids' in email_kwargs: |
|
139 | 154 | extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')} |
|
140 | 155 | |
|
141 | 156 | log.debug('Creating notification email task for user:`%s`', recipient) |
|
142 | 157 | task = run_task( |
|
143 | 158 | tasks.send_email, recipient.email, subject, |
|
144 | 159 | email_body_plaintext, email_body, extra_headers=extra_headers) |
|
145 | 160 | log.debug('Created email task: %s', task) |
|
146 | 161 | |
|
147 | 162 | return notification |
|
148 | 163 | |
|
149 | 164 | def delete(self, user, notification): |
|
150 | 165 | # we don't want to remove actual notification just the assignment |
|
151 | 166 | try: |
|
152 | 167 | notification = self.__get_notification(notification) |
|
153 | 168 | user = self._get_user(user) |
|
154 | 169 | if notification and user: |
|
155 | 170 | obj = UserNotification.query()\ |
|
156 | 171 | .filter(UserNotification.user == user)\ |
|
157 | 172 | .filter(UserNotification.notification == notification)\ |
|
158 | 173 | .one() |
|
159 | 174 | Session().delete(obj) |
|
160 | 175 | return True |
|
161 | 176 | except Exception: |
|
162 | 177 | log.error(traceback.format_exc()) |
|
163 | 178 | raise |
|
164 | 179 | |
|
165 | 180 | def get_for_user(self, user, filter_=None): |
|
166 | 181 | """ |
|
167 | 182 | Get mentions for given user, filter them if filter dict is given |
|
168 | 183 | """ |
|
169 | 184 | user = self._get_user(user) |
|
170 | 185 | |
|
171 | 186 | q = UserNotification.query()\ |
|
172 | 187 | .filter(UserNotification.user == user)\ |
|
173 | 188 | .join(( |
|
174 | 189 | Notification, UserNotification.notification_id == |
|
175 | 190 | Notification.notification_id)) |
|
176 | 191 | if filter_ == ['all']: |
|
177 | 192 | q = q # no filter |
|
178 | 193 | elif filter_ == ['unread']: |
|
179 | 194 | q = q.filter(UserNotification.read == false()) |
|
180 | 195 | elif filter_: |
|
181 | 196 | q = q.filter(Notification.type_.in_(filter_)) |
|
182 | 197 | |
|
183 | 198 | return q |
|
184 | 199 | |
|
185 | 200 | def mark_read(self, user, notification): |
|
186 | 201 | try: |
|
187 | 202 | notification = self.__get_notification(notification) |
|
188 | 203 | user = self._get_user(user) |
|
189 | 204 | if notification and user: |
|
190 | 205 | obj = UserNotification.query()\ |
|
191 | 206 | .filter(UserNotification.user == user)\ |
|
192 | 207 | .filter(UserNotification.notification == notification)\ |
|
193 | 208 | .one() |
|
194 | 209 | obj.read = True |
|
195 | 210 | Session().add(obj) |
|
196 | 211 | return True |
|
197 | 212 | except Exception: |
|
198 | 213 | log.error(traceback.format_exc()) |
|
199 | 214 | raise |
|
200 | 215 | |
|
201 | 216 | def mark_all_read_for_user(self, user, filter_=None): |
|
202 | 217 | user = self._get_user(user) |
|
203 | 218 | q = UserNotification.query()\ |
|
204 | 219 | .filter(UserNotification.user == user)\ |
|
205 | 220 | .filter(UserNotification.read == false())\ |
|
206 | 221 | .join(( |
|
207 | 222 | Notification, UserNotification.notification_id == |
|
208 | 223 | Notification.notification_id)) |
|
209 | 224 | if filter_ == ['unread']: |
|
210 | 225 | q = q.filter(UserNotification.read == false()) |
|
211 | 226 | elif filter_: |
|
212 | 227 | q = q.filter(Notification.type_.in_(filter_)) |
|
213 | 228 | |
|
214 | 229 | # this is a little inefficient but sqlalchemy doesn't support |
|
215 | 230 | # update on joined tables :( |
|
216 | 231 | for obj in q.all(): |
|
217 | 232 | obj.read = True |
|
218 | 233 | Session().add(obj) |
|
219 | 234 | |
|
220 | 235 | def get_unread_cnt_for_user(self, user): |
|
221 | 236 | user = self._get_user(user) |
|
222 | 237 | return UserNotification.query()\ |
|
223 | 238 | .filter(UserNotification.read == false())\ |
|
224 | 239 | .filter(UserNotification.user == user).count() |
|
225 | 240 | |
|
226 | 241 | def get_unread_for_user(self, user): |
|
227 | 242 | user = self._get_user(user) |
|
228 | 243 | return [x.notification for x in UserNotification.query() |
|
229 | 244 | .filter(UserNotification.read == false()) |
|
230 | 245 | .filter(UserNotification.user == user).all()] |
|
231 | 246 | |
|
232 | 247 | def get_user_notification(self, user, notification): |
|
233 | 248 | user = self._get_user(user) |
|
234 | 249 | notification = self.__get_notification(notification) |
|
235 | 250 | |
|
236 | 251 | return UserNotification.query()\ |
|
237 | 252 | .filter(UserNotification.notification == notification)\ |
|
238 | 253 | .filter(UserNotification.user == user).scalar() |
|
239 | 254 | |
|
240 | 255 | def make_description(self, notification, translate, show_age=True): |
|
241 | 256 | """ |
|
242 | 257 | Creates a human readable description based on properties |
|
243 | 258 | of notification object |
|
244 | 259 | """ |
|
245 | 260 | _ = translate |
|
246 | 261 | _map = { |
|
247 | 262 | notification.TYPE_CHANGESET_COMMENT: [ |
|
248 | 263 | _('%(user)s commented on commit %(date_or_age)s'), |
|
249 | 264 | _('%(user)s commented on commit at %(date_or_age)s'), |
|
250 | 265 | ], |
|
251 | 266 | notification.TYPE_MESSAGE: [ |
|
252 | 267 | _('%(user)s sent message %(date_or_age)s'), |
|
253 | 268 | _('%(user)s sent message at %(date_or_age)s'), |
|
254 | 269 | ], |
|
255 | 270 | notification.TYPE_MENTION: [ |
|
256 | 271 | _('%(user)s mentioned you %(date_or_age)s'), |
|
257 | 272 | _('%(user)s mentioned you at %(date_or_age)s'), |
|
258 | 273 | ], |
|
259 | 274 | notification.TYPE_REGISTRATION: [ |
|
260 | 275 | _('%(user)s registered in RhodeCode %(date_or_age)s'), |
|
261 | 276 | _('%(user)s registered in RhodeCode at %(date_or_age)s'), |
|
262 | 277 | ], |
|
263 | 278 | notification.TYPE_PULL_REQUEST: [ |
|
264 | 279 | _('%(user)s opened new pull request %(date_or_age)s'), |
|
265 | 280 | _('%(user)s opened new pull request at %(date_or_age)s'), |
|
266 | 281 | ], |
|
267 | 282 | notification.TYPE_PULL_REQUEST_UPDATE: [ |
|
268 | 283 | _('%(user)s updated pull request %(date_or_age)s'), |
|
269 | 284 | _('%(user)s updated pull request at %(date_or_age)s'), |
|
270 | 285 | ], |
|
271 | 286 | notification.TYPE_PULL_REQUEST_COMMENT: [ |
|
272 | 287 | _('%(user)s commented on pull request %(date_or_age)s'), |
|
273 | 288 | _('%(user)s commented on pull request at %(date_or_age)s'), |
|
274 | 289 | ], |
|
275 | 290 | } |
|
276 | 291 | |
|
277 | 292 | templates = _map[notification.type_] |
|
278 | 293 | |
|
279 | 294 | if show_age: |
|
280 | 295 | template = templates[0] |
|
281 | 296 | date_or_age = h.age(notification.created_on) |
|
282 | 297 | if translate: |
|
283 | 298 | date_or_age = translate(date_or_age) |
|
284 | 299 | |
|
285 | 300 | if isinstance(date_or_age, TranslationString): |
|
286 | 301 | date_or_age = date_or_age.interpolate() |
|
287 | 302 | |
|
288 | 303 | else: |
|
289 | 304 | template = templates[1] |
|
290 | 305 | date_or_age = h.format_date(notification.created_on) |
|
291 | 306 | |
|
292 | 307 | return template % { |
|
293 | 308 | 'user': notification.created_by_user.username, |
|
294 | 309 | 'date_or_age': date_or_age, |
|
295 | 310 | } |
|
296 | 311 | |
|
297 | 312 | |
|
298 | 313 | # Templates for Titles, that could be overwritten by rcextensions |
|
299 | 314 | # Title of email for pull-request update |
|
300 | 315 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' |
|
301 | 316 | # Title of email for request for pull request review |
|
302 | 317 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' |
|
303 | 318 | |
|
304 | 319 | # Title of email for general comment on pull request |
|
305 | 320 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' |
|
306 | 321 | # Title of email for general comment which includes status change on pull request |
|
307 | 322 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
308 | 323 | # Title of email for inline comment on a file in pull request |
|
309 | 324 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
310 | 325 | |
|
311 | 326 | # Title of email for general comment on commit |
|
312 | 327 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' |
|
313 | 328 | # Title of email for general comment which includes status change on commit |
|
314 | 329 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
315 | 330 | # Title of email for inline comment on a file in commit |
|
316 | 331 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
317 | 332 | |
|
318 | 333 | |
|
319 | 334 | class EmailNotificationModel(BaseModel): |
|
320 | 335 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT |
|
321 | 336 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION |
|
322 | 337 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST |
|
323 | 338 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT |
|
324 | 339 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE |
|
325 | 340 | TYPE_MAIN = Notification.TYPE_MESSAGE |
|
326 | 341 | |
|
327 | 342 | TYPE_PASSWORD_RESET = 'password_reset' |
|
328 | 343 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' |
|
329 | 344 | TYPE_EMAIL_TEST = 'email_test' |
|
330 | 345 | TYPE_EMAIL_EXCEPTION = 'exception' |
|
331 | 346 | TYPE_TEST = 'test' |
|
332 | 347 | |
|
333 | 348 | email_types = { |
|
334 | 349 | TYPE_MAIN: |
|
335 | 350 | 'rhodecode:templates/email_templates/main.mako', |
|
336 | 351 | TYPE_TEST: |
|
337 | 352 | 'rhodecode:templates/email_templates/test.mako', |
|
338 | 353 | TYPE_EMAIL_EXCEPTION: |
|
339 | 354 | 'rhodecode:templates/email_templates/exception_tracker.mako', |
|
340 | 355 | TYPE_EMAIL_TEST: |
|
341 | 356 | 'rhodecode:templates/email_templates/email_test.mako', |
|
342 | 357 | TYPE_REGISTRATION: |
|
343 | 358 | 'rhodecode:templates/email_templates/user_registration.mako', |
|
344 | 359 | TYPE_PASSWORD_RESET: |
|
345 | 360 | 'rhodecode:templates/email_templates/password_reset.mako', |
|
346 | 361 | TYPE_PASSWORD_RESET_CONFIRMATION: |
|
347 | 362 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', |
|
348 | 363 | TYPE_COMMIT_COMMENT: |
|
349 | 364 | 'rhodecode:templates/email_templates/commit_comment.mako', |
|
350 | 365 | TYPE_PULL_REQUEST: |
|
351 | 366 | 'rhodecode:templates/email_templates/pull_request_review.mako', |
|
352 | 367 | TYPE_PULL_REQUEST_COMMENT: |
|
353 | 368 | 'rhodecode:templates/email_templates/pull_request_comment.mako', |
|
354 | 369 | TYPE_PULL_REQUEST_UPDATE: |
|
355 | 370 | 'rhodecode:templates/email_templates/pull_request_update.mako', |
|
356 | 371 | } |
|
357 | 372 | |
|
358 | 373 | premailer_instance = premailer.Premailer( |
|
359 | 374 | cssutils_logging_level=logging.ERROR, |
|
360 | 375 | cssutils_logging_handler=logging.getLogger().handlers[0] |
|
361 | 376 | if logging.getLogger().handlers else None, |
|
362 | 377 | ) |
|
363 | 378 | |
|
364 | 379 | def __init__(self): |
|
365 | 380 | """ |
|
366 | 381 | Example usage:: |
|
367 | 382 | |
|
368 | 383 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
369 | 384 | EmailNotificationModel.TYPE_TEST, **email_kwargs) |
|
370 | 385 | |
|
371 | 386 | """ |
|
372 | 387 | super(EmailNotificationModel, self).__init__() |
|
373 | 388 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') |
|
374 | 389 | |
|
375 | 390 | def _update_kwargs_for_render(self, kwargs): |
|
376 | 391 | """ |
|
377 | 392 | Inject params required for Mako rendering |
|
378 | 393 | |
|
379 | 394 | :param kwargs: |
|
380 | 395 | """ |
|
381 | 396 | |
|
382 | 397 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name |
|
383 | 398 | kwargs['rhodecode_version'] = rhodecode.__version__ |
|
384 | 399 | instance_url = h.route_url('home') |
|
385 | 400 | _kwargs = { |
|
386 | 401 | 'instance_url': instance_url, |
|
387 | 402 | 'whitespace_filter': self.whitespace_filter, |
|
388 | 403 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, |
|
389 | 404 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, |
|
390 | 405 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, |
|
391 | 406 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
392 | 407 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
393 | 408 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, |
|
394 | 409 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
395 | 410 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
396 | 411 | } |
|
397 | 412 | _kwargs.update(kwargs) |
|
398 | 413 | return _kwargs |
|
399 | 414 | |
|
400 | 415 | def whitespace_filter(self, text): |
|
401 | 416 | return text.replace('\n', '').replace('\t', '') |
|
402 | 417 | |
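whitespace_filter() is exposed to the templates via _update_kwargs_for_render(), presumably so a multi-line mako subject block can be collapsed into a single-line email subject (that purpose is an assumption; the templates are not shown here). Its effect on a string is simply:

    # illustration only: newlines and tabs are dropped, nothing else changes
    text = 'Pull request update:\n\t"fix login redirect"\n'
    assert text.replace('\n', '').replace('\t', '') == 'Pull request update:"fix login redirect"'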
|
403 | 418 | def get_renderer(self, type_, request): |
|
404 | 419 | template_name = self.email_types[type_] |
|
405 | 420 | return request.get_partial_renderer(template_name) |
|
406 | 421 | |
|
407 | 422 | def render_email(self, type_, **kwargs): |
|
408 | 423 | """ |
|
409 | 424 | renders template for email, and returns a tuple of |
|
410 | 425 | (subject, email_html_body, email_plaintext_body)
|
411 | 426 | """ |
|
412 | 427 | # translator and helpers inject |
|
413 | 428 | _kwargs = self._update_kwargs_for_render(kwargs) |
|
414 | 429 | request = get_current_request() |
|
415 | 430 | email_template = self.get_renderer(type_, request=request) |
|
416 | 431 | |
|
417 | 432 | subject = email_template.render('subject', **_kwargs) |
|
418 | 433 | |
|
419 | 434 | try: |
|
420 | 435 | body_plaintext = email_template.render('body_plaintext', **_kwargs) |
|
421 | 436 | except AttributeError: |
|
422 | 437 | # it's not defined in the template, so we can skip it
|
423 | 438 | body_plaintext = '' |
|
424 | 439 | |
|
425 | 440 | # render WHOLE template |
|
426 | 441 | body = email_template.render(None, **_kwargs) |
|
427 | 442 | |
|
428 | 443 | try: |
|
429 | 444 | # Inline CSS styles and conversion |
|
430 | 445 | body = self.premailer_instance.transform(body) |
|
431 | 446 | except Exception: |
|
432 | 447 | log.exception('Failed to parse body with premailer') |
|
433 | 448 | pass |
|
434 | 449 | |
|
435 | 450 | return subject, body, body_plaintext |
@@ -1,2237 +1,2230 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | |
|
31 | 31 | import datetime |
|
32 | 32 | import urllib |
|
33 | 33 | import collections |
|
34 | 34 | |
|
35 | 35 | from pyramid import compat |
|
36 | 36 | from pyramid.threadlocal import get_current_request |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib.vcs.nodes import FileNode |
|
39 | 39 | from rhodecode.translation import lazy_ugettext |
|
40 | 40 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
41 | 41 | from rhodecode.lib import audit_logger |
|
42 | 42 | from rhodecode.lib.compat import OrderedDict |
|
43 | 43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
44 | 44 | from rhodecode.lib.markup_renderer import ( |
|
45 | 45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
46 | 46 | from rhodecode.lib.utils2 import ( |
|
47 | 47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, |
|
48 | 48 | get_current_rhodecode_user) |
|
49 | 49 | from rhodecode.lib.vcs.backends.base import ( |
|
50 | 50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, |
|
51 | 51 | TargetRefMissing, SourceRefMissing) |
|
52 | 52 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
53 | 53 | from rhodecode.lib.vcs.exceptions import ( |
|
54 | 54 | CommitDoesNotExistError, EmptyRepositoryError) |
|
55 | 55 | from rhodecode.model import BaseModel |
|
56 | 56 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
57 | 57 | from rhodecode.model.comment import CommentsModel |
|
58 | 58 | from rhodecode.model.db import ( |
|
59 | 59 | or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
60 | 60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) |
|
61 | 61 | from rhodecode.model.meta import Session |
|
62 | 62 | from rhodecode.model.notification import NotificationModel, \ |
|
63 | 63 | EmailNotificationModel |
|
64 | 64 | from rhodecode.model.scm import ScmModel |
|
65 | 65 | from rhodecode.model.settings import VcsSettingsModel |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | log = logging.getLogger(__name__) |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | # Data structure to hold the response data when updating commits during a pull |
|
72 | 72 | # request update. |
|
73 | 73 | class UpdateResponse(object): |
|
74 | 74 | |
|
75 | 75 | def __init__(self, executed, reason, new, old, common_ancestor_id, |
|
76 | 76 | commit_changes, source_changed, target_changed): |
|
77 | 77 | |
|
78 | 78 | self.executed = executed |
|
79 | 79 | self.reason = reason |
|
80 | 80 | self.new = new |
|
81 | 81 | self.old = old |
|
82 | 82 | self.common_ancestor_id = common_ancestor_id |
|
83 | 83 | self.changes = commit_changes |
|
84 | 84 | self.source_changed = source_changed |
|
85 | 85 | self.target_changed = target_changed |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | def get_diff_info( |
|
89 | 89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, |
|
90 | 90 | get_commit_authors=True): |
|
91 | 91 | """ |
|
92 | 92 | Calculates detailed diff information for usage in preview of creation of a pull-request. |
|
93 | 93 | This is also used for default reviewers logic |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | source_scm = source_repo.scm_instance() |
|
97 | 97 | target_scm = target_repo.scm_instance() |
|
98 | 98 | |
|
99 | 99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) |
|
100 | 100 | if not ancestor_id: |
|
101 | 101 | raise ValueError( |
|
102 | 102 | 'cannot calculate diff info without a common ancestor. ' |
|
103 | 103 | 'Make sure both repositories are related, and have a common forking commit.') |
|
104 | 104 | |
|
105 | 105 | # case here is that want a simple diff without incoming commits, |
|
106 | 106 | # previewing what will be merged based only on commits in the source. |
|
107 | 107 | log.debug('Using ancestor %s as source_ref instead of %s', |
|
108 | 108 | ancestor_id, source_ref) |
|
109 | 109 | |
|
110 | 110 | # source of changes now is the common ancestor |
|
111 | 111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) |
|
112 | 112 | # target commit becomes the source ref as it is the last commit |
|
113 | 113 | # for diff generation this logic gives proper diff |
|
114 | 114 | target_commit = source_scm.get_commit(commit_id=source_ref) |
|
115 | 115 | |
|
116 | 116 | vcs_diff = \ |
|
117 | 117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, |
|
118 | 118 | ignore_whitespace=False, context=3) |
|
119 | 119 | |
|
120 | 120 | diff_processor = diffs.DiffProcessor( |
|
121 | 121 | vcs_diff, format='newdiff', diff_limit=None, |
|
122 | 122 | file_limit=None, show_full_diff=True) |
|
123 | 123 | |
|
124 | 124 | _parsed = diff_processor.prepare() |
|
125 | 125 | |
|
126 | 126 | all_files = [] |
|
127 | 127 | all_files_changes = [] |
|
128 | 128 | changed_lines = {} |
|
129 | 129 | stats = [0, 0] |
|
130 | 130 | for f in _parsed: |
|
131 | 131 | all_files.append(f['filename']) |
|
132 | 132 | all_files_changes.append({ |
|
133 | 133 | 'filename': f['filename'], |
|
134 | 134 | 'stats': f['stats'] |
|
135 | 135 | }) |
|
136 | 136 | stats[0] += f['stats']['added'] |
|
137 | 137 | stats[1] += f['stats']['deleted'] |
|
138 | 138 | |
|
139 | 139 | changed_lines[f['filename']] = [] |
|
140 | 140 | if len(f['chunks']) < 2: |
|
141 | 141 | continue |
|
142 | 142 | # first line is "context" information |
|
143 | 143 | for chunks in f['chunks'][1:]: |
|
144 | 144 | for chunk in chunks['lines']: |
|
145 | 145 | if chunk['action'] not in ('del', 'mod'): |
|
146 | 146 | continue |
|
147 | 147 | changed_lines[f['filename']].append(chunk['old_lineno']) |
|
148 | 148 | |
|
149 | 149 | commit_authors = [] |
|
150 | 150 | user_counts = {} |
|
151 | 151 | email_counts = {} |
|
152 | 152 | author_counts = {} |
|
153 | 153 | _commit_cache = {} |
|
154 | 154 | |
|
155 | 155 | commits = [] |
|
156 | 156 | if get_commit_authors: |
|
157 | 157 | log.debug('Obtaining commit authors from set of commits') |
|
158 | 158 | _compare_data = target_scm.compare( |
|
159 | 159 | target_ref, source_ref, source_scm, merge=True, |
|
160 | 160 | pre_load=["author", "date", "message"] |
|
161 | 161 | ) |
|
162 | 162 | |
|
163 | 163 | for commit in _compare_data: |
|
164 | 164 | # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned |
|
165 | 165 | # at this function which is later called via JSON serialization |
|
166 | 166 | serialized_commit = dict( |
|
167 | 167 | author=commit.author, |
|
168 | 168 | date=commit.date, |
|
169 | 169 | message=commit.message, |
|
170 | 170 | commit_id=commit.raw_id, |
|
171 | 171 | raw_id=commit.raw_id |
|
172 | 172 | ) |
|
173 | 173 | commits.append(serialized_commit) |
|
174 | 174 | user = User.get_from_cs_author(serialized_commit['author']) |
|
175 | 175 | if user and user not in commit_authors: |
|
176 | 176 | commit_authors.append(user) |
|
177 | 177 | |
|
178 | 178 | # lines |
|
179 | 179 | if get_authors: |
|
180 | 180 | log.debug('Calculating authors of changed files') |
|
181 | 181 | target_commit = source_repo.get_commit(ancestor_id) |
|
182 | 182 | |
|
183 | 183 | for fname, lines in changed_lines.items(): |
|
184 | 184 | |
|
185 | 185 | try: |
|
186 | 186 | node = target_commit.get_node(fname, pre_load=["is_binary"]) |
|
187 | 187 | except Exception: |
|
188 | 188 | log.exception("Failed to load node with path %s", fname) |
|
189 | 189 | continue |
|
190 | 190 | |
|
191 | 191 | if not isinstance(node, FileNode): |
|
192 | 192 | continue |
|
193 | 193 | |
|
194 | 194 | # NOTE(marcink): for binary node we don't do annotation, just use last author |
|
195 | 195 | if node.is_binary: |
|
196 | 196 | author = node.last_commit.author |
|
197 | 197 | email = node.last_commit.author_email |
|
198 | 198 | |
|
199 | 199 | user = User.get_from_cs_author(author) |
|
200 | 200 | if user: |
|
201 | 201 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
202 | 202 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
203 | 203 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
204 | 204 | |
|
205 | 205 | continue |
|
206 | 206 | |
|
207 | 207 | for annotation in node.annotate: |
|
208 | 208 | line_no, commit_id, get_commit_func, line_text = annotation |
|
209 | 209 | if line_no in lines: |
|
210 | 210 | if commit_id not in _commit_cache: |
|
211 | 211 | _commit_cache[commit_id] = get_commit_func() |
|
212 | 212 | commit = _commit_cache[commit_id] |
|
213 | 213 | author = commit.author |
|
214 | 214 | email = commit.author_email |
|
215 | 215 | user = User.get_from_cs_author(author) |
|
216 | 216 | if user: |
|
217 | 217 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
218 | 218 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
219 | 219 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
220 | 220 | |
|
221 | 221 | log.debug('Default reviewers processing finished') |
|
222 | 222 | |
|
223 | 223 | return { |
|
224 | 224 | 'commits': commits, |
|
225 | 225 | 'files': all_files_changes, |
|
226 | 226 | 'stats': stats, |
|
227 | 227 | 'ancestor': ancestor_id, |
|
228 | 228 | # original authors of modified files |
|
229 | 229 | 'original_authors': { |
|
230 | 230 | 'users': user_counts, |
|
231 | 231 | 'authors': author_counts, |
|
232 | 232 | 'emails': email_counts, |
|
233 | 233 | }, |
|
234 | 234 | 'commit_authors': commit_authors |
|
235 | 235 | } |
|
236 | 236 | |
|
237 | 237 | |
|
238 | 238 | class PullRequestModel(BaseModel): |
|
239 | 239 | |
|
240 | 240 | cls = PullRequest |
|
241 | 241 | |
|
242 | 242 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
243 | 243 | |
|
244 | 244 | UPDATE_STATUS_MESSAGES = { |
|
245 | 245 | UpdateFailureReason.NONE: lazy_ugettext( |
|
246 | 246 | 'Pull request update successful.'), |
|
247 | 247 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
248 | 248 | 'Pull request update failed because of an unknown error.'), |
|
249 | 249 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
250 | 250 | 'No update needed because the source and target have not changed.'), |
|
251 | 251 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
252 | 252 | 'Pull request cannot be updated because the reference type is ' |
|
253 | 253 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
254 | 254 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
255 | 255 | 'This pull request cannot be updated because the target ' |
|
256 | 256 | 'reference is missing.'), |
|
257 | 257 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
258 | 258 | 'This pull request cannot be updated because the source ' |
|
259 | 259 | 'reference is missing.'), |
|
260 | 260 | } |
|
261 | 261 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
262 | 262 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
263 | 263 | |
|
264 | 264 | def __get_pull_request(self, pull_request): |
|
265 | 265 | return self._get_instance(( |
|
266 | 266 | PullRequest, PullRequestVersion), pull_request) |
|
267 | 267 | |
|
268 | 268 | def _check_perms(self, perms, pull_request, user, api=False): |
|
269 | 269 | if not api: |
|
270 | 270 | return h.HasRepoPermissionAny(*perms)( |
|
271 | 271 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
272 | 272 | else: |
|
273 | 273 | return h.HasRepoPermissionAnyApi(*perms)( |
|
274 | 274 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
275 | 275 | |
|
276 | 276 | def check_user_read(self, pull_request, user, api=False): |
|
277 | 277 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
278 | 278 | return self._check_perms(_perms, pull_request, user, api) |
|
279 | 279 | |
|
280 | 280 | def check_user_merge(self, pull_request, user, api=False): |
|
281 | 281 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
282 | 282 | return self._check_perms(_perms, pull_request, user, api) |
|
283 | 283 | |
|
284 | 284 | def check_user_update(self, pull_request, user, api=False): |
|
285 | 285 | owner = user.user_id == pull_request.user_id |
|
286 | 286 | return self.check_user_merge(pull_request, user, api) or owner |
|
287 | 287 | |
|
288 | 288 | def check_user_delete(self, pull_request, user): |
|
289 | 289 | owner = user.user_id == pull_request.user_id |
|
290 | 290 | _perms = ('repository.admin',) |
|
291 | 291 | return self._check_perms(_perms, pull_request, user) or owner |
|
292 | 292 | |
|
293 | 293 | def is_user_reviewer(self, pull_request, user): |
|
294 | 294 | return user.user_id in [ |
|
295 | 295 | x.user_id for x in |
|
296 | 296 | pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER) |
|
297 | 297 | if x.user |
|
298 | 298 | ] |
|
299 | 299 | |
|
300 | 300 | def check_user_change_status(self, pull_request, user, api=False): |
|
301 | 301 | return self.check_user_update(pull_request, user, api) \ |
|
302 | 302 | or self.is_user_reviewer(pull_request, user) |
|
303 | 303 | |
|
304 | 304 | def check_user_comment(self, pull_request, user): |
|
305 | 305 | owner = user.user_id == pull_request.user_id |
|
306 | 306 | return self.check_user_read(pull_request, user) or owner |
|
307 | 307 | |
|
308 | 308 | def get(self, pull_request): |
|
309 | 309 | return self.__get_pull_request(pull_request) |
|
310 | 310 | |
|
311 | 311 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, |
|
312 | 312 | statuses=None, opened_by=None, order_by=None, |
|
313 | 313 | order_dir='desc', only_created=False): |
|
314 | 314 | repo = None |
|
315 | 315 | if repo_name: |
|
316 | 316 | repo = self._get_repo(repo_name) |
|
317 | 317 | |
|
318 | 318 | q = PullRequest.query() |
|
319 | 319 | |
|
320 | 320 | if search_q: |
|
321 | 321 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
322 | 322 | q = q.join(User) |
|
323 | 323 | q = q.filter(or_( |
|
324 | 324 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
325 | 325 | User.username.ilike(like_expression), |
|
326 | 326 | PullRequest.title.ilike(like_expression), |
|
327 | 327 | PullRequest.description.ilike(like_expression), |
|
328 | 328 | )) |
|
329 | 329 | |
|
330 | 330 | # source or target |
|
331 | 331 | if repo and source: |
|
332 | 332 | q = q.filter(PullRequest.source_repo == repo) |
|
333 | 333 | elif repo: |
|
334 | 334 | q = q.filter(PullRequest.target_repo == repo) |
|
335 | 335 | |
|
336 | 336 | # closed,opened |
|
337 | 337 | if statuses: |
|
338 | 338 | q = q.filter(PullRequest.status.in_(statuses)) |
|
339 | 339 | |
|
340 | 340 | # opened by filter |
|
341 | 341 | if opened_by: |
|
342 | 342 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
343 | 343 | |
|
344 | 344 | # only get those that are in "created" state |
|
345 | 345 | if only_created: |
|
346 | 346 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
347 | 347 | |
|
348 | 348 | if order_by: |
|
349 | 349 | order_map = { |
|
350 | 350 | 'name_raw': PullRequest.pull_request_id, |
|
351 | 351 | 'id': PullRequest.pull_request_id, |
|
352 | 352 | 'title': PullRequest.title, |
|
353 | 353 | 'updated_on_raw': PullRequest.updated_on, |
|
354 | 354 | 'target_repo': PullRequest.target_repo_id |
|
355 | 355 | } |
|
356 | 356 | if order_dir == 'asc': |
|
357 | 357 | q = q.order_by(order_map[order_by].asc()) |
|
358 | 358 | else: |
|
359 | 359 | q = q.order_by(order_map[order_by].desc()) |
|
360 | 360 | |
|
361 | 361 | return q |
|
362 | 362 | |
|
363 | 363 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
364 | 364 | opened_by=None): |
|
365 | 365 | """ |
|
366 | 366 | Count the number of pull requests for a specific repository. |
|
367 | 367 | |
|
368 | 368 | :param repo_name: target or source repo |
|
369 | 369 | :param search_q: filter by text |
|
370 | 370 | :param source: boolean flag to specify if repo_name refers to source |
|
371 | 371 | :param statuses: list of pull request statuses |
|
372 | 372 | :param opened_by: author user of the pull request |
|
373 | 373 | :returns: int number of pull requests |
|
374 | 374 | """ |
|
375 | 375 | q = self._prepare_get_all_query( |
|
376 | 376 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
377 | 377 | opened_by=opened_by) |
|
378 | 378 | |
|
379 | 379 | return q.count() |
|
380 | 380 | |
|
381 | 381 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
382 | 382 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): |
|
383 | 383 | """ |
|
384 | 384 | Get all pull requests for a specific repository. |
|
385 | 385 | |
|
386 | 386 | :param repo_name: target or source repo |
|
387 | 387 | :param search_q: filter by text |
|
388 | 388 | :param source: boolean flag to specify if repo_name refers to source |
|
389 | 389 | :param statuses: list of pull request statuses |
|
390 | 390 | :param opened_by: author user of the pull request |
|
391 | 391 | :param offset: pagination offset |
|
392 | 392 | :param length: length of returned list |
|
393 | 393 | :param order_by: order of the returned list |
|
394 | 394 | :param order_dir: 'asc' or 'desc' ordering direction |
|
395 | 395 | :returns: list of pull requests |
|
396 | 396 | """ |
|
397 | 397 | q = self._prepare_get_all_query( |
|
398 | 398 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
399 | 399 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
400 | 400 | |
|
401 | 401 | if length: |
|
402 | 402 | pull_requests = q.limit(length).offset(offset).all() |
|
403 | 403 | else: |
|
404 | 404 | pull_requests = q.all() |
|
405 | 405 | |
|
406 | 406 | return pull_requests |
|
407 | 407 | |
|
408 | 408 | def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
409 | 409 | opened_by=None): |
|
410 | 410 | """ |
|
411 | 411 | Count the number of pull requests for a specific repository that are |
|
412 | 412 | awaiting review. |
|
413 | 413 | |
|
414 | 414 | :param repo_name: target or source repo |
|
415 | 415 | :param search_q: filter by text |
|
416 | 416 | :param source: boolean flag to specify if repo_name refers to source |
|
417 | 417 | :param statuses: list of pull request statuses |
|
418 | 418 | :param opened_by: author user of the pull request |
|
419 | 419 | :returns: int number of pull requests |
|
420 | 420 | """ |
|
421 | 421 | pull_requests = self.get_awaiting_review( |
|
422 | 422 | repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) |
|
423 | 423 | |
|
424 | 424 | return len(pull_requests) |
|
425 | 425 | |
|
426 | 426 | def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
427 | 427 | opened_by=None, offset=0, length=None, |
|
428 | 428 | order_by=None, order_dir='desc'): |
|
429 | 429 | """ |
|
430 | 430 | Get all pull requests for a specific repository that are awaiting |
|
431 | 431 | review. |
|
432 | 432 | |
|
433 | 433 | :param repo_name: target or source repo |
|
434 | 434 | :param search_q: filter by text |
|
435 | 435 | :param source: boolean flag to specify if repo_name refers to source |
|
436 | 436 | :param statuses: list of pull request statuses |
|
437 | 437 | :param opened_by: author user of the pull request |
|
438 | 438 | :param offset: pagination offset |
|
439 | 439 | :param length: length of returned list |
|
440 | 440 | :param order_by: order of the returned list |
|
441 | 441 | :param order_dir: 'asc' or 'desc' ordering direction |
|
442 | 442 | :returns: list of pull requests |
|
443 | 443 | """ |
|
444 | 444 | pull_requests = self.get_all( |
|
445 | 445 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
446 | 446 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
447 | 447 | |
|
448 | 448 | _filtered_pull_requests = [] |
|
449 | 449 | for pr in pull_requests: |
|
450 | 450 | status = pr.calculated_review_status() |
|
451 | 451 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
452 | 452 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
453 | 453 | _filtered_pull_requests.append(pr) |
|
454 | 454 | if length: |
|
455 | 455 | return _filtered_pull_requests[offset:offset+length] |
|
456 | 456 | else: |
|
457 | 457 | return _filtered_pull_requests |
|
458 | 458 | |
|
459 | 459 | def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
460 | 460 | opened_by=None, user_id=None): |
|
461 | 461 | """ |
|
462 | 462 | Count the number of pull requests for a specific repository that are |
|
463 | 463 | awaiting review from a specific user. |
|
464 | 464 | |
|
465 | 465 | :param repo_name: target or source repo |
|
466 | 466 | :param search_q: filter by text |
|
467 | 467 | :param source: boolean flag to specify if repo_name refers to source |
|
468 | 468 | :param statuses: list of pull request statuses |
|
469 | 469 | :param opened_by: author user of the pull request |
|
470 | 470 | :param user_id: reviewer user of the pull request |
|
471 | 471 | :returns: int number of pull requests |
|
472 | 472 | """ |
|
473 | 473 | pull_requests = self.get_awaiting_my_review( |
|
474 | 474 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
475 | 475 | opened_by=opened_by, user_id=user_id) |
|
476 | 476 | |
|
477 | 477 | return len(pull_requests) |
|
478 | 478 | |
|
479 | 479 | def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
480 | 480 | opened_by=None, user_id=None, offset=0, |
|
481 | 481 | length=None, order_by=None, order_dir='desc'): |
|
482 | 482 | """ |
|
483 | 483 | Get all pull requests for a specific repository that are awaiting |
|
484 | 484 | review from a specific user. |
|
485 | 485 | |
|
486 | 486 | :param repo_name: target or source repo |
|
487 | 487 | :param search_q: filter by text |
|
488 | 488 | :param source: boolean flag to specify if repo_name refers to source |
|
489 | 489 | :param statuses: list of pull request statuses |
|
490 | 490 | :param opened_by: author user of the pull request |
|
491 | 491 | :param user_id: reviewer user of the pull request |
|
492 | 492 | :param offset: pagination offset |
|
493 | 493 | :param length: length of returned list |
|
494 | 494 | :param order_by: order of the returned list |
|
495 | 495 | :param order_dir: 'asc' or 'desc' ordering direction |
|
496 | 496 | :returns: list of pull requests |
|
497 | 497 | """ |
|
498 | 498 | pull_requests = self.get_all( |
|
499 | 499 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
500 | 500 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
501 | 501 | |
|
502 | 502 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
503 | 503 | my_participation = [] |
|
504 | 504 | for pr in pull_requests: |
|
505 | 505 | if pr in _my: |
|
506 | 506 | my_participation.append(pr) |
|
507 | 507 | _filtered_pull_requests = my_participation |
|
508 | 508 | if length: |
|
509 | 509 | return _filtered_pull_requests[offset:offset+length] |
|
510 | 510 | else: |
|
511 | 511 | return _filtered_pull_requests |
|
512 | 512 | |
|
513 | 513 | def get_not_reviewed(self, user_id): |
|
514 | 514 | return [ |
|
515 | 515 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
516 | 516 | PullRequestReviewers.user_id == user_id).all() |
|
517 | 517 | ] |
|
518 | 518 | |
|
519 | 519 | def _prepare_participating_query(self, user_id=None, statuses=None, query='', |
|
520 | 520 | order_by=None, order_dir='desc'): |
|
521 | 521 | q = PullRequest.query() |
|
522 | 522 | if user_id: |
|
523 | 523 | reviewers_subquery = Session().query( |
|
524 | 524 | PullRequestReviewers.pull_request_id).filter( |
|
525 | 525 | PullRequestReviewers.user_id == user_id).subquery() |
|
526 | 526 | user_filter = or_( |
|
527 | 527 | PullRequest.user_id == user_id, |
|
528 | 528 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
529 | 529 | ) |
|
530 | 530 | q = PullRequest.query().filter(user_filter) |
|
531 | 531 | |
|
532 | 532 | # closed,opened |
|
533 | 533 | if statuses: |
|
534 | 534 | q = q.filter(PullRequest.status.in_(statuses)) |
|
535 | 535 | |
|
536 | 536 | if query: |
|
537 | 537 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
538 | 538 | q = q.join(User) |
|
539 | 539 | q = q.filter(or_( |
|
540 | 540 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
541 | 541 | User.username.ilike(like_expression), |
|
542 | 542 | PullRequest.title.ilike(like_expression), |
|
543 | 543 | PullRequest.description.ilike(like_expression), |
|
544 | 544 | )) |
|
545 | 545 | if order_by: |
|
546 | 546 | order_map = { |
|
547 | 547 | 'name_raw': PullRequest.pull_request_id, |
|
548 | 548 | 'title': PullRequest.title, |
|
549 | 549 | 'updated_on_raw': PullRequest.updated_on, |
|
550 | 550 | 'target_repo': PullRequest.target_repo_id |
|
551 | 551 | } |
|
552 | 552 | if order_dir == 'asc': |
|
553 | 553 | q = q.order_by(order_map[order_by].asc()) |
|
554 | 554 | else: |
|
555 | 555 | q = q.order_by(order_map[order_by].desc()) |
|
556 | 556 | |
|
557 | 557 | return q |
|
558 | 558 | |
|
559 | 559 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): |
|
560 | 560 | q = self._prepare_participating_query(user_id, statuses=statuses, query=query) |
|
561 | 561 | return q.count() |
|
562 | 562 | |
|
563 | 563 | def get_im_participating_in( |
|
564 | 564 | self, user_id=None, statuses=None, query='', offset=0, |
|
565 | 565 | length=None, order_by=None, order_dir='desc'): |
|
566 | 566 | """ |
|
567 | 567 | Get all Pull requests that i'm participating in, or i have opened |
|
568 | 568 | """ |
|
569 | 569 | |
|
570 | 570 | q = self._prepare_participating_query( |
|
571 | 571 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
572 | 572 | order_dir=order_dir) |
|
573 | 573 | |
|
574 | 574 | if length: |
|
575 | 575 | pull_requests = q.limit(length).offset(offset).all() |
|
576 | 576 | else: |
|
577 | 577 | pull_requests = q.all() |
|
578 | 578 | |
|
579 | 579 | return pull_requests |
|
580 | 580 | |
|
581 | 581 | def get_versions(self, pull_request): |
|
582 | 582 | """ |
|
583 | 583 | returns version of pull request sorted by ID descending |
|
584 | 584 | """ |
|
585 | 585 | return PullRequestVersion.query()\ |
|
586 | 586 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
587 | 587 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
588 | 588 | .all() |
|
589 | 589 | |
|
590 | 590 | def get_pr_version(self, pull_request_id, version=None): |
|
591 | 591 | at_version = None |
|
592 | 592 | |
|
593 | 593 | if version and version == 'latest': |
|
594 | 594 | pull_request_ver = PullRequest.get(pull_request_id) |
|
595 | 595 | pull_request_obj = pull_request_ver |
|
596 | 596 | _org_pull_request_obj = pull_request_obj |
|
597 | 597 | at_version = 'latest' |
|
598 | 598 | elif version: |
|
599 | 599 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
600 | 600 | pull_request_obj = pull_request_ver |
|
601 | 601 | _org_pull_request_obj = pull_request_ver.pull_request |
|
602 | 602 | at_version = pull_request_ver.pull_request_version_id |
|
603 | 603 | else: |
|
604 | 604 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
605 | 605 | pull_request_id) |
|
606 | 606 | |
|
607 | 607 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
608 | 608 | pull_request_obj, _org_pull_request_obj) |
|
609 | 609 | |
|
610 | 610 | return _org_pull_request_obj, pull_request_obj, \ |
|
611 | 611 | pull_request_display_obj, at_version |
|
612 | 612 | |
|
613 | 613 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
614 | 614 | target_ref, revisions, reviewers, observers, title, description=None, |
|
615 | 615 | common_ancestor_id=None, |
|
616 | 616 | description_renderer=None, |
|
617 | 617 | reviewer_data=None, translator=None, auth_user=None): |
|
618 | 618 | translator = translator or get_current_request().translate |
|
619 | 619 | |
|
620 | 620 | created_by_user = self._get_user(created_by) |
|
621 | 621 | auth_user = auth_user or created_by_user.AuthUser() |
|
622 | 622 | source_repo = self._get_repo(source_repo) |
|
623 | 623 | target_repo = self._get_repo(target_repo) |
|
624 | 624 | |
|
625 | 625 | pull_request = PullRequest() |
|
626 | 626 | pull_request.source_repo = source_repo |
|
627 | 627 | pull_request.source_ref = source_ref |
|
628 | 628 | pull_request.target_repo = target_repo |
|
629 | 629 | pull_request.target_ref = target_ref |
|
630 | 630 | pull_request.revisions = revisions |
|
631 | 631 | pull_request.title = title |
|
632 | 632 | pull_request.description = description |
|
633 | 633 | pull_request.description_renderer = description_renderer |
|
634 | 634 | pull_request.author = created_by_user |
|
635 | 635 | pull_request.reviewer_data = reviewer_data |
|
636 | 636 | pull_request.pull_request_state = pull_request.STATE_CREATING |
|
637 | 637 | pull_request.common_ancestor_id = common_ancestor_id |
|
638 | 638 | |
|
639 | 639 | Session().add(pull_request) |
|
640 | 640 | Session().flush() |
|
641 | 641 | |
|
642 | 642 | reviewer_ids = set() |
|
643 | 643 | # members / reviewers |
|
644 | 644 | for reviewer_object in reviewers: |
|
645 | 645 | user_id, reasons, mandatory, role, rules = reviewer_object |
|
646 | 646 | user = self._get_user(user_id) |
|
647 | 647 | |
|
648 | 648 | # skip duplicates |
|
649 | 649 | if user.user_id in reviewer_ids: |
|
650 | 650 | continue |
|
651 | 651 | |
|
652 | 652 | reviewer_ids.add(user.user_id) |
|
653 | 653 | |
|
654 | 654 | reviewer = PullRequestReviewers() |
|
655 | 655 | reviewer.user = user |
|
656 | 656 | reviewer.pull_request = pull_request |
|
657 | 657 | reviewer.reasons = reasons |
|
658 | 658 | reviewer.mandatory = mandatory |
|
659 | 659 | reviewer.role = role |
|
660 | 660 | |
|
661 | 661 | # NOTE(marcink): pick only first rule for now |
|
662 | 662 | rule_id = list(rules)[0] if rules else None |
|
663 | 663 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
664 | 664 | if rule: |
|
665 | 665 | review_group = rule.user_group_vote_rule(user_id) |
|
666 | 666 | # we check if this particular reviewer is member of a voting group |
|
667 | 667 | if review_group: |
|
668 | 668 | # NOTE(marcink): |
|
669 | 669 | # can be that user is member of more but we pick the first same, |
|
670 | 670 | # same as default reviewers algo |
|
671 | 671 | review_group = review_group[0] |
|
672 | 672 | |
|
673 | 673 | rule_data = { |
|
674 | 674 | 'rule_name': |
|
675 | 675 | rule.review_rule_name, |
|
676 | 676 | 'rule_user_group_entry_id': |
|
677 | 677 | review_group.repo_review_rule_users_group_id, |
|
678 | 678 | 'rule_user_group_name': |
|
679 | 679 | review_group.users_group.users_group_name, |
|
680 | 680 | 'rule_user_group_members': |
|
681 | 681 | [x.user.username for x in review_group.users_group.members], |
|
682 | 682 | 'rule_user_group_members_id': |
|
683 | 683 | [x.user.user_id for x in review_group.users_group.members], |
|
684 | 684 | } |
|
685 | 685 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
686 | 686 | rule_data.update(review_group.rule_data()) |
|
687 | 687 | |
|
688 | 688 | reviewer.rule_data = rule_data |
|
689 | 689 | |
|
690 | 690 | Session().add(reviewer) |
|
691 | 691 | Session().flush() |
|
692 | 692 | |
|
693 | 693 | for observer_object in observers: |
|
694 | 694 | user_id, reasons, mandatory, role, rules = observer_object |
|
695 | 695 | user = self._get_user(user_id) |
|
696 | 696 | |
|
697 | 697 | # skip duplicates from reviewers |
|
698 | 698 | if user.user_id in reviewer_ids: |
|
699 | 699 | continue |
|
700 | 700 | |
|
701 | 701 | #reviewer_ids.add(user.user_id) |
|
702 | 702 | |
|
703 | 703 | observer = PullRequestReviewers() |
|
704 | 704 | observer.user = user |
|
705 | 705 | observer.pull_request = pull_request |
|
706 | 706 | observer.reasons = reasons |
|
707 | 707 | observer.mandatory = mandatory |
|
708 | 708 | observer.role = role |
|
709 | 709 | |
|
710 | 710 | # NOTE(marcink): pick only first rule for now |
|
711 | 711 | rule_id = list(rules)[0] if rules else None |
|
712 | 712 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
713 | 713 | if rule: |
|
714 | 714 | # TODO(marcink): do we need this for observers ?? |
|
715 | 715 | pass |
|
716 | 716 | |
|
717 | 717 | Session().add(observer) |
|
718 | 718 | Session().flush() |
|
719 | 719 | |
|
720 | 720 | # Set approval status to "Under Review" for all commits which are |
|
721 | 721 | # part of this pull request. |
|
722 | 722 | ChangesetStatusModel().set_status( |
|
723 | 723 | repo=target_repo, |
|
724 | 724 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
725 | 725 | user=created_by_user, |
|
726 | 726 | pull_request=pull_request |
|
727 | 727 | ) |
|
728 | 728 | # we commit early at this point. This has to do with a fact |
|
729 | 729 | # that before queries do some row-locking. And because of that |
|
730 | 730 | # we need to commit and finish transaction before below validate call |
|
731 | 731 | # that for large repos could be long resulting in long row locks |
|
732 | 732 | Session().commit() |
|
733 | 733 | |
|
734 | 734 | # prepare workspace, and run initial merge simulation. Set state during that |
|
735 | 735 | # operation |
|
736 | 736 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
737 | 737 | |
|
738 | 738 | # set as merging, for merge simulation, and if finished to created so we mark |
|
739 | 739 | # simulation is working fine |
|
740 | 740 | with pull_request.set_state(PullRequest.STATE_MERGING, |
|
741 | 741 | final_state=PullRequest.STATE_CREATED) as state_obj: |
|
742 | 742 | MergeCheck.validate( |
|
743 | 743 | pull_request, auth_user=auth_user, translator=translator) |
|
744 | 744 | |
|
745 | 745 | self.notify_reviewers(pull_request, reviewer_ids, created_by_user) |
|
746 | 746 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') |
|
747 | 747 | |
|
748 | 748 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
749 | 749 | self._log_audit_action( |
|
750 | 750 | 'repo.pull_request.create', {'data': creation_data}, |
|
751 | 751 | auth_user, pull_request) |
|
752 | 752 | |
|
753 | 753 | return pull_request |
|
754 | 754 | |
|
755 | 755 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
756 | 756 | pull_request = self.__get_pull_request(pull_request) |
|
757 | 757 | target_scm = pull_request.target_repo.scm_instance() |
|
758 | 758 | if action == 'create': |
|
759 | 759 | trigger_hook = hooks_utils.trigger_create_pull_request_hook |
|
760 | 760 | elif action == 'merge': |
|
761 | 761 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook |
|
762 | 762 | elif action == 'close': |
|
763 | 763 | trigger_hook = hooks_utils.trigger_close_pull_request_hook |
|
764 | 764 | elif action == 'review_status_change': |
|
765 | 765 | trigger_hook = hooks_utils.trigger_review_pull_request_hook |
|
766 | 766 | elif action == 'update': |
|
767 | 767 | trigger_hook = hooks_utils.trigger_update_pull_request_hook |
|
768 | 768 | elif action == 'comment': |
|
769 | 769 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook |
|
770 | 770 | elif action == 'comment_edit': |
|
771 | 771 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook |
|
772 | 772 | else: |
|
773 | 773 | return |
|
774 | 774 | |
|
775 | 775 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', |
|
776 | 776 | pull_request, action, trigger_hook) |
|
777 | 777 | trigger_hook( |
|
778 | 778 | username=user.username, |
|
779 | 779 | repo_name=pull_request.target_repo.repo_name, |
|
780 | 780 | repo_type=target_scm.alias, |
|
781 | 781 | pull_request=pull_request, |
|
782 | 782 | data=data) |
|
783 | 783 | |
|
784 | 784 | def _get_commit_ids(self, pull_request): |
|
785 | 785 | """ |
|
786 | 786 | Return the commit ids of the merged pull request. |
|
787 | 787 | |
|
788 | 788 | This method is not dealing correctly yet with the lack of autoupdates |
|
789 | 789 | nor with the implicit target updates. |
|
790 | 790 | For example: if a commit in the source repo is already in the target it |
|
791 | 791 | will be reported anyways. |
|
792 | 792 | """ |
|
793 | 793 | merge_rev = pull_request.merge_rev |
|
794 | 794 | if merge_rev is None: |
|
795 | 795 | raise ValueError('This pull request was not merged yet') |
|
796 | 796 | |
|
797 | 797 | commit_ids = list(pull_request.revisions) |
|
798 | 798 | if merge_rev not in commit_ids: |
|
799 | 799 | commit_ids.append(merge_rev) |
|
800 | 800 | |
|
801 | 801 | return commit_ids |
|
802 | 802 | |
|
803 | 803 | def merge_repo(self, pull_request, user, extras): |
|
804 | 804 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
805 | 805 | extras['user_agent'] = 'internal-merge' |
|
806 | 806 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
807 | 807 | if merge_state.executed: |
|
808 | 808 | log.debug("Merge was successful, updating the pull request comments.") |
|
809 | 809 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
810 | 810 | |
|
811 | 811 | self._log_audit_action( |
|
812 | 812 | 'repo.pull_request.merge', |
|
813 | 813 | {'merge_state': merge_state.__dict__}, |
|
814 | 814 | user, pull_request) |
|
815 | 815 | |
|
816 | 816 | else: |
|
817 | 817 | log.warn("Merge failed, not updating the pull request.") |
|
818 | 818 | return merge_state |
|
819 | 819 | |
|
820 | 820 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
821 | 821 | target_vcs = pull_request.target_repo.scm_instance() |
|
822 | 822 | source_vcs = pull_request.source_repo.scm_instance() |
|
823 | 823 | |
|
824 | 824 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
825 | 825 | pr_id=pull_request.pull_request_id, |
|
826 | 826 | pr_title=pull_request.title, |
|
827 | 827 | source_repo=source_vcs.name, |
|
828 | 828 | source_ref_name=pull_request.source_ref_parts.name, |
|
829 | 829 | target_repo=target_vcs.name, |
|
830 | 830 | target_ref_name=pull_request.target_ref_parts.name, |
|
831 | 831 | ) |
|
832 | 832 | |
|
833 | 833 | workspace_id = self._workspace_id(pull_request) |
|
834 | 834 | repo_id = pull_request.target_repo.repo_id |
|
835 | 835 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
836 | 836 | close_branch = self._close_branch_before_merging(pull_request) |
|
837 | 837 | user_name = self._user_name_for_merging(pull_request, user) |
|
838 | 838 | |
|
839 | 839 | target_ref = self._refresh_reference( |
|
840 | 840 | pull_request.target_ref_parts, target_vcs) |
|
841 | 841 | |
|
842 | 842 | callback_daemon, extras = prepare_callback_daemon( |
|
843 | 843 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
844 | 844 | host=vcs_settings.HOOKS_HOST, |
|
845 | 845 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
846 | 846 | |
|
847 | 847 | with callback_daemon: |
|
848 | 848 | # TODO: johbo: Implement a clean way to run a config_override |
|
849 | 849 | # for a single call. |
|
850 | 850 | target_vcs.config.set( |
|
851 | 851 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
852 | 852 | |
|
853 | 853 | merge_state = target_vcs.merge( |
|
854 | 854 | repo_id, workspace_id, target_ref, source_vcs, |
|
855 | 855 | pull_request.source_ref_parts, |
|
856 | 856 | user_name=user_name, user_email=user.email, |
|
857 | 857 | message=message, use_rebase=use_rebase, |
|
858 | 858 | close_branch=close_branch) |
|
859 | 859 | return merge_state |
|
860 | 860 | |
|
861 | 861 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
862 | 862 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
863 | 863 | pull_request.updated_on = datetime.datetime.now() |
|
864 | 864 | close_msg = close_msg or 'Pull request merged and closed' |
|
865 | 865 | |
|
866 | 866 | CommentsModel().create( |
|
867 | 867 | text=safe_unicode(close_msg), |
|
868 | 868 | repo=pull_request.target_repo.repo_id, |
|
869 | 869 | user=user.user_id, |
|
870 | 870 | pull_request=pull_request.pull_request_id, |
|
871 | 871 | f_path=None, |
|
872 | 872 | line_no=None, |
|
873 | 873 | closing_pr=True |
|
874 | 874 | ) |
|
875 | 875 | |
|
876 | 876 | Session().add(pull_request) |
|
877 | 877 | Session().flush() |
|
878 | 878 | # TODO: paris: replace invalidation with less radical solution |
|
879 | 879 | ScmModel().mark_for_invalidation( |
|
880 | 880 | pull_request.target_repo.repo_name) |
|
881 | 881 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
882 | 882 | |
|
883 | 883 | def has_valid_update_type(self, pull_request): |
|
884 | 884 | source_ref_type = pull_request.source_ref_parts.type |
|
885 | 885 | return source_ref_type in self.REF_TYPES |
|
886 | 886 | |
|
887 | 887 | def get_flow_commits(self, pull_request): |
|
888 | 888 | |
|
889 | 889 | # source repo |
|
890 | 890 | source_ref_name = pull_request.source_ref_parts.name |
|
891 | 891 | source_ref_type = pull_request.source_ref_parts.type |
|
892 | 892 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
893 | 893 | source_repo = pull_request.source_repo.scm_instance() |
|
894 | 894 | |
|
895 | 895 | try: |
|
896 | 896 | if source_ref_type in self.REF_TYPES: |
|
897 | 897 | source_commit = source_repo.get_commit(source_ref_name) |
|
898 | 898 | else: |
|
899 | 899 | source_commit = source_repo.get_commit(source_ref_id) |
|
900 | 900 | except CommitDoesNotExistError: |
|
901 | 901 | raise SourceRefMissing() |
|
902 | 902 | |
|
903 | 903 | # target repo |
|
904 | 904 | target_ref_name = pull_request.target_ref_parts.name |
|
905 | 905 | target_ref_type = pull_request.target_ref_parts.type |
|
906 | 906 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
907 | 907 | target_repo = pull_request.target_repo.scm_instance() |
|
908 | 908 | |
|
909 | 909 | try: |
|
910 | 910 | if target_ref_type in self.REF_TYPES: |
|
911 | 911 | target_commit = target_repo.get_commit(target_ref_name) |
|
912 | 912 | else: |
|
913 | 913 | target_commit = target_repo.get_commit(target_ref_id) |
|
914 | 914 | except CommitDoesNotExistError: |
|
915 | 915 | raise TargetRefMissing() |
|
916 | 916 | |
|
917 | 917 | return source_commit, target_commit |
|
918 | 918 | |
|
919 | 919 | def update_commits(self, pull_request, updating_user): |
|
920 | 920 | """ |
|
921 | 921 | Get the updated list of commits for the pull request |
|
922 | 922 | and return the new pull request version and the list |
|
923 | 923 | of commits processed by this update action |
|
924 | 924 | |
|
925 | 925 | updating_user is the user_object who triggered the update |
|
926 | 926 | """ |
|
927 | 927 | pull_request = self.__get_pull_request(pull_request) |
|
928 | 928 | source_ref_type = pull_request.source_ref_parts.type |
|
929 | 929 | source_ref_name = pull_request.source_ref_parts.name |
|
930 | 930 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
931 | 931 | |
|
932 | 932 | target_ref_type = pull_request.target_ref_parts.type |
|
933 | 933 | target_ref_name = pull_request.target_ref_parts.name |
|
934 | 934 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
935 | 935 | |
|
936 | 936 | if not self.has_valid_update_type(pull_request): |
|
937 | 937 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
938 | 938 | pull_request, source_ref_type) |
|
939 | 939 | return UpdateResponse( |
|
940 | 940 | executed=False, |
|
941 | 941 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
942 | 942 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
943 | 943 | source_changed=False, target_changed=False) |
|
944 | 944 | |
|
945 | 945 | try: |
|
946 | 946 | source_commit, target_commit = self.get_flow_commits(pull_request) |
|
947 | 947 | except SourceRefMissing: |
|
948 | 948 | return UpdateResponse( |
|
949 | 949 | executed=False, |
|
950 | 950 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
951 | 951 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
952 | 952 | source_changed=False, target_changed=False) |
|
953 | 953 | except TargetRefMissing: |
|
954 | 954 | return UpdateResponse( |
|
955 | 955 | executed=False, |
|
956 | 956 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
957 | 957 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
958 | 958 | source_changed=False, target_changed=False) |
|
959 | 959 | |
|
960 | 960 | source_changed = source_ref_id != source_commit.raw_id |
|
961 | 961 | target_changed = target_ref_id != target_commit.raw_id |
|
962 | 962 | |
|
963 | 963 | if not (source_changed or target_changed): |
|
964 | 964 | log.debug("Nothing changed in pull request %s", pull_request) |
|
965 | 965 | return UpdateResponse( |
|
966 | 966 | executed=False, |
|
967 | 967 | reason=UpdateFailureReason.NO_CHANGE, |
|
968 | 968 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
969 | 969 | source_changed=target_changed, target_changed=source_changed) |
|
970 | 970 | |
|
971 | 971 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
972 | 972 | log.debug('Updating pull request because of change in %s detected', |
|
973 | 973 | change_in_found) |
|
974 | 974 | |
|
975 | 975 | # Finally there is a need for an update, in case of source change |
|
976 | 976 | # we create a new version, else just an update |
|
977 | 977 | if source_changed: |
|
978 | 978 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
979 | 979 | self._link_comments_to_version(pull_request_version) |
|
980 | 980 | else: |
|
981 | 981 | try: |
|
982 | 982 | ver = pull_request.versions[-1] |
|
983 | 983 | except IndexError: |
|
984 | 984 | ver = None |
|
985 | 985 | |
|
986 | 986 | pull_request.pull_request_version_id = \ |
|
987 | 987 | ver.pull_request_version_id if ver else None |
|
988 | 988 | pull_request_version = pull_request |
|
989 | 989 | |
|
990 | 990 | source_repo = pull_request.source_repo.scm_instance() |
|
991 | 991 | target_repo = pull_request.target_repo.scm_instance() |
|
992 | 992 | |
|
993 | 993 | # re-compute commit ids |
|
994 | 994 | old_commit_ids = pull_request.revisions |
|
995 | 995 | pre_load = ["author", "date", "message", "branch"] |
|
996 | 996 | commit_ranges = target_repo.compare( |
|
997 | 997 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
998 | 998 | pre_load=pre_load) |
|
999 | 999 | |
|
1000 | 1000 | target_ref = target_commit.raw_id |
|
1001 | 1001 | source_ref = source_commit.raw_id |
|
1002 | 1002 | ancestor_commit_id = target_repo.get_common_ancestor( |
|
1003 | 1003 | target_ref, source_ref, source_repo) |
|
1004 | 1004 | |
|
1005 | 1005 | if not ancestor_commit_id: |
|
1006 | 1006 | raise ValueError( |
|
1007 | 1007 | 'cannot calculate diff info without a common ancestor. ' |
|
1008 | 1008 | 'Make sure both repositories are related, and have a common forking commit.') |
|
1009 | 1009 | |
|
1010 | 1010 | pull_request.common_ancestor_id = ancestor_commit_id |
|
1011 | 1011 | |
|
1012 | 1012 | pull_request.source_ref = '%s:%s:%s' % ( |
|
1013 | 1013 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
1014 | 1014 | pull_request.target_ref = '%s:%s:%s' % ( |
|
1015 | 1015 | target_ref_type, target_ref_name, ancestor_commit_id) |
|
1016 | 1016 | |
|
1017 | 1017 | pull_request.revisions = [ |
|
1018 | 1018 | commit.raw_id for commit in reversed(commit_ranges)] |
|
1019 | 1019 | pull_request.updated_on = datetime.datetime.now() |
|
1020 | 1020 | Session().add(pull_request) |
|
1021 | 1021 | new_commit_ids = pull_request.revisions |
|
1022 | 1022 | |
|
1023 | 1023 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
1024 | 1024 | pull_request, pull_request_version) |
|
1025 | 1025 | |
|
1026 | 1026 | # calculate commit and file changes |
|
1027 | 1027 | commit_changes = self._calculate_commit_id_changes( |
|
1028 | 1028 | old_commit_ids, new_commit_ids) |
|
1029 | 1029 | file_changes = self._calculate_file_changes( |
|
1030 | 1030 | old_diff_data, new_diff_data) |
|
1031 | 1031 | |
|
1032 | 1032 | # set comments as outdated if DIFFS changed |
|
1033 | 1033 | CommentsModel().outdate_comments( |
|
1034 | 1034 | pull_request, old_diff_data=old_diff_data, |
|
1035 | 1035 | new_diff_data=new_diff_data) |
|
1036 | 1036 | |
|
1037 | 1037 | valid_commit_changes = (commit_changes.added or commit_changes.removed) |
|
1038 | 1038 | file_node_changes = ( |
|
1039 | 1039 | file_changes.added or file_changes.modified or file_changes.removed) |
|
1040 | 1040 | pr_has_changes = valid_commit_changes or file_node_changes |
|
1041 | 1041 | |
|
1042 | 1042 | # Add an automatic comment to the pull request, in case |
|
1043 | 1043 | # anything has changed |
|
1044 | 1044 | if pr_has_changes: |
|
1045 | 1045 | update_comment = CommentsModel().create( |
|
1046 | 1046 | text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), |
|
1047 | 1047 | repo=pull_request.target_repo, |
|
1048 | 1048 | user=pull_request.author, |
|
1049 | 1049 | pull_request=pull_request, |
|
1050 | 1050 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
1051 | 1051 | |
|
1052 | 1052 | # Update status to "Under Review" for added commits |
|
1053 | 1053 | for commit_id in commit_changes.added: |
|
1054 | 1054 | ChangesetStatusModel().set_status( |
|
1055 | 1055 | repo=pull_request.source_repo, |
|
1056 | 1056 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
1057 | 1057 | comment=update_comment, |
|
1058 | 1058 | user=pull_request.author, |
|
1059 | 1059 | pull_request=pull_request, |
|
1060 | 1060 | revision=commit_id) |
|
1061 | 1061 | |
|
1062 | 1062 | # send update email to users |
|
1063 | 1063 | try: |
|
1064 | 1064 | self.notify_users(pull_request=pull_request, updating_user=updating_user, |
|
1065 | 1065 | ancestor_commit_id=ancestor_commit_id, |
|
1066 | 1066 | commit_changes=commit_changes, |
|
1067 | 1067 | file_changes=file_changes) |
|
1068 | 1068 | except Exception: |
|
1069 | 1069 | log.exception('Failed to send email notification to users') |
|
1070 | 1070 | |
|
1071 | 1071 | log.debug( |
|
1072 | 1072 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
1073 | 1073 | 'removed_ids: %s', pull_request.pull_request_id, |
|
1074 | 1074 | commit_changes.added, commit_changes.common, commit_changes.removed) |
|
1075 | 1075 | log.debug( |
|
1076 | 1076 | 'Updated pull request with the following file changes: %s', |
|
1077 | 1077 | file_changes) |
|
1078 | 1078 | |
|
1079 | 1079 | log.info( |
|
1080 | 1080 | "Updated pull request %s from commit %s to commit %s, " |
|
1081 | 1081 | "stored new version %s of this pull request.", |
|
1082 | 1082 | pull_request.pull_request_id, source_ref_id, |
|
1083 | 1083 | pull_request.source_ref_parts.commit_id, |
|
1084 | 1084 | pull_request_version.pull_request_version_id) |
|
1085 | 1085 | Session().commit() |
|
1086 | 1086 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') |
|
1087 | 1087 | |
|
1088 | 1088 | return UpdateResponse( |
|
1089 | 1089 | executed=True, reason=UpdateFailureReason.NONE, |
|
1090 | 1090 | old=pull_request, new=pull_request_version, |
|
1091 | 1091 | common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, |
|
1092 | 1092 | source_changed=source_changed, target_changed=target_changed) |
|
1093 | 1093 | |
|
1094 | 1094 | def _create_version_from_snapshot(self, pull_request): |
|
1095 | 1095 | version = PullRequestVersion() |
|
1096 | 1096 | version.title = pull_request.title |
|
1097 | 1097 | version.description = pull_request.description |
|
1098 | 1098 | version.status = pull_request.status |
|
1099 | 1099 | version.pull_request_state = pull_request.pull_request_state |
|
1100 | 1100 | version.created_on = datetime.datetime.now() |
|
1101 | 1101 | version.updated_on = pull_request.updated_on |
|
1102 | 1102 | version.user_id = pull_request.user_id |
|
1103 | 1103 | version.source_repo = pull_request.source_repo |
|
1104 | 1104 | version.source_ref = pull_request.source_ref |
|
1105 | 1105 | version.target_repo = pull_request.target_repo |
|
1106 | 1106 | version.target_ref = pull_request.target_ref |
|
1107 | 1107 | |
|
1108 | 1108 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
1109 | 1109 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
1110 | 1110 | version.last_merge_status = pull_request.last_merge_status |
|
1111 | 1111 | version.last_merge_metadata = pull_request.last_merge_metadata |
|
1112 | 1112 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
1113 | 1113 | version.merge_rev = pull_request.merge_rev |
|
1114 | 1114 | version.reviewer_data = pull_request.reviewer_data |
|
1115 | 1115 | |
|
1116 | 1116 | version.revisions = pull_request.revisions |
|
1117 | 1117 | version.common_ancestor_id = pull_request.common_ancestor_id |
|
1118 | 1118 | version.pull_request = pull_request |
|
1119 | 1119 | Session().add(version) |
|
1120 | 1120 | Session().flush() |
|
1121 | 1121 | |
|
1122 | 1122 | return version |
|
1123 | 1123 | |
|
1124 | 1124 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
1125 | 1125 | |
|
1126 | 1126 | diff_context = ( |
|
1127 | 1127 | self.DIFF_CONTEXT + |
|
1128 | 1128 | CommentsModel.needed_extra_diff_context()) |
|
1129 | 1129 | hide_whitespace_changes = False |
|
1130 | 1130 | source_repo = pull_request_version.source_repo |
|
1131 | 1131 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
1132 | 1132 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
1133 | 1133 | old_diff = self._get_diff_from_pr_or_version( |
|
1134 | 1134 | source_repo, source_ref_id, target_ref_id, |
|
1135 | 1135 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1136 | 1136 | |
|
1137 | 1137 | source_repo = pull_request.source_repo |
|
1138 | 1138 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1139 | 1139 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1140 | 1140 | |
|
1141 | 1141 | new_diff = self._get_diff_from_pr_or_version( |
|
1142 | 1142 | source_repo, source_ref_id, target_ref_id, |
|
1143 | 1143 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1144 | 1144 | |
|
1145 | 1145 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
1146 | 1146 | old_diff_data.prepare() |
|
1147 | 1147 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
1148 | 1148 | new_diff_data.prepare() |
|
1149 | 1149 | |
|
1150 | 1150 | return old_diff_data, new_diff_data |
|
1151 | 1151 | |
|
1152 | 1152 | def _link_comments_to_version(self, pull_request_version): |
|
1153 | 1153 | """ |
|
1154 | 1154 | Link all unlinked comments of this pull request to the given version. |
|
1155 | 1155 | |
|
1156 | 1156 | :param pull_request_version: The `PullRequestVersion` to which |
|
1157 | 1157 | the comments shall be linked. |
|
1158 | 1158 | |
|
1159 | 1159 | """ |
|
1160 | 1160 | pull_request = pull_request_version.pull_request |
|
1161 | 1161 | comments = ChangesetComment.query()\ |
|
1162 | 1162 | .filter( |
|
1163 | 1163 | # TODO: johbo: Should we query for the repo at all here? |
|
1164 | 1164 | # Pending decision on how comments of PRs are to be related |
|
1165 | 1165 | # to either the source repo, the target repo or no repo at all. |
|
1166 | 1166 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
1167 | 1167 | ChangesetComment.pull_request == pull_request, |
|
1168 | 1168 | ChangesetComment.pull_request_version == None)\ |
|
1169 | 1169 | .order_by(ChangesetComment.comment_id.asc()) |
|
1170 | 1170 | |
|
1171 | 1171 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
1172 | 1172 | # operation. |
|
1173 | 1173 | for comment in comments: |
|
1174 | 1174 | comment.pull_request_version_id = ( |
|
1175 | 1175 | pull_request_version.pull_request_version_id) |
|
1176 | 1176 | Session().add(comment) |
|
1177 | 1177 | |
|
1178 | 1178 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
1179 | 1179 | added = [x for x in new_ids if x not in old_ids] |
|
1180 | 1180 | common = [x for x in new_ids if x in old_ids] |
|
1181 | 1181 | removed = [x for x in old_ids if x not in new_ids] |
|
1182 | 1182 | total = new_ids |
|
1183 | 1183 | return ChangeTuple(added, common, removed, total) |
|
1184 | 1184 | |
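A quick standalone sketch of the commit-id bookkeeping above (plain Python, hypothetical commit ids, no RhodeCode imports) shows how added/common/removed fall out of the three list comprehensions:

    import collections

    ChangeTuple = collections.namedtuple(
        'ChangeTuple', ['added', 'common', 'removed', 'total'])

    def calculate_commit_id_changes(old_ids, new_ids):
        # same comparisons as _calculate_commit_id_changes above
        added = [x for x in new_ids if x not in old_ids]
        common = [x for x in new_ids if x in old_ids]
        removed = [x for x in old_ids if x not in new_ids]
        return ChangeTuple(added, common, removed, new_ids)

    print(calculate_commit_id_changes(['a', 'b', 'c'], ['b', 'c', 'd']))
    # ChangeTuple(added=['d'], common=['b', 'c'], removed=['a'], total=['b', 'c', 'd'])
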
|
1185 | 1185 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
1186 | 1186 | |
|
1187 | 1187 | old_files = OrderedDict() |
|
1188 | 1188 | for diff_data in old_diff_data.parsed_diff: |
|
1189 | 1189 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
1190 | 1190 | |
|
1191 | 1191 | added_files = [] |
|
1192 | 1192 | modified_files = [] |
|
1193 | 1193 | removed_files = [] |
|
1194 | 1194 | for diff_data in new_diff_data.parsed_diff: |
|
1195 | 1195 | new_filename = diff_data['filename'] |
|
1196 | 1196 | new_hash = md5_safe(diff_data['raw_diff']) |
|
1197 | 1197 | |
|
1198 | 1198 | old_hash = old_files.get(new_filename) |
|
1199 | 1199 | if not old_hash: |
|
1200 | 1200 | # file is not present in old diff, we have to figure out from parsed diff |
|
1201 | 1201 | # operation ADD/REMOVE |
|
1202 | 1202 | operations_dict = diff_data['stats']['ops'] |
|
1203 | 1203 | if diffs.DEL_FILENODE in operations_dict: |
|
1204 | 1204 | removed_files.append(new_filename) |
|
1205 | 1205 | else: |
|
1206 | 1206 | added_files.append(new_filename) |
|
1207 | 1207 | else: |
|
1208 | 1208 | if new_hash != old_hash: |
|
1209 | 1209 | modified_files.append(new_filename) |
|
1210 | 1210 | # now remove a file from old, since we have seen it already |
|
1211 | 1211 | del old_files[new_filename] |
|
1212 | 1212 | |
|
1213 | 1213 | # removed files are those present in the old diff but not in the NEW one;

1214 | 1214 | # since we removed old files that were present in the new diff, any

1215 | 1215 | # left-overs should be the removed files
|
1216 | 1216 | removed_files.extend(old_files.keys()) |
|
1217 | 1217 | |
|
1218 | 1218 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
1219 | 1219 | |
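The per-file classification above keys each file on an md5 of its raw diff text. A simplified standalone sketch (hypothetical {filename: raw_diff} dicts instead of DiffProcessor output, and skipping the DEL_FILENODE special case) captures the added/modified/removed logic:

    import hashlib
    from collections import OrderedDict, namedtuple

    FileChangeTuple = namedtuple('FileChangeTuple', ['added', 'modified', 'removed'])

    def classify(old_diffs, new_diffs):
        def md5(text):
            return hashlib.md5(text.encode('utf8')).hexdigest()

        old_files = OrderedDict((name, md5(raw)) for name, raw in old_diffs.items())

        added, modified = [], []
        for name, raw in new_diffs.items():
            old_hash = old_files.pop(name, None)
            if old_hash is None:
                added.append(name)         # not present in the old diff
            elif md5(raw) != old_hash:
                modified.append(name)      # the per-file diff changed
        # whatever is left over never showed up in the new diff
        return FileChangeTuple(added, modified, list(old_files))

    print(classify({'a.py': '-x', 'b.py': '+y'}, {'b.py': '+y2', 'c.py': '+z'}))
    # FileChangeTuple(added=['c.py'], modified=['b.py'], removed=['a.py'])
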
|
1220 | 1220 | def _render_update_message(self, ancestor_commit_id, changes, file_changes): |
|
1221 | 1221 | """ |
|
1222 | 1222 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
1223 | 1223 | so it always looks the same regardless of which default

1224 | 1224 | renderer the system is using.
|
1225 | 1225 | |
|
1226 | 1226 | :param ancestor_commit_id: ancestor raw_id |
|
1227 | 1227 | :param changes: changes named tuple |
|
1228 | 1228 | :param file_changes: file changes named tuple |
|
1229 | 1229 | |
|
1230 | 1230 | """ |
|
1231 | 1231 | new_status = ChangesetStatus.get_status_lbl( |
|
1232 | 1232 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
1233 | 1233 | |
|
1234 | 1234 | changed_files = ( |
|
1235 | 1235 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1236 | 1236 | |
|
1237 | 1237 | params = { |
|
1238 | 1238 | 'under_review_label': new_status, |
|
1239 | 1239 | 'added_commits': changes.added, |
|
1240 | 1240 | 'removed_commits': changes.removed, |
|
1241 | 1241 | 'changed_files': changed_files, |
|
1242 | 1242 | 'added_files': file_changes.added, |
|
1243 | 1243 | 'modified_files': file_changes.modified, |
|
1244 | 1244 | 'removed_files': file_changes.removed, |
|
1245 | 1245 | 'ancestor_commit_id': ancestor_commit_id |
|
1246 | 1246 | } |
|
1247 | 1247 | renderer = RstTemplateRenderer() |
|
1248 | 1248 | return renderer.render('pull_request_update.mako', **params) |
|
1249 | 1249 | |
|
1250 | 1250 | def edit(self, pull_request, title, description, description_renderer, user): |
|
1251 | 1251 | pull_request = self.__get_pull_request(pull_request) |
|
1252 | 1252 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1253 | 1253 | if pull_request.is_closed(): |
|
1254 | 1254 | raise ValueError('This pull request is closed') |
|
1255 | 1255 | if title: |
|
1256 | 1256 | pull_request.title = title |
|
1257 | 1257 | pull_request.description = description |
|
1258 | 1258 | pull_request.updated_on = datetime.datetime.now() |
|
1259 | 1259 | pull_request.description_renderer = description_renderer |
|
1260 | 1260 | Session().add(pull_request) |
|
1261 | 1261 | self._log_audit_action( |
|
1262 | 1262 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
1263 | 1263 | user, pull_request) |
|
1264 | 1264 | |
|
1265 | 1265 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
1266 | 1266 | """ |
|
1267 | 1267 | Update the reviewers in the pull request |
|
1268 | 1268 | |
|
1269 | 1269 | :param pull_request: the pr to update |
|
1270 | 1270 | :param reviewer_data: list of tuples |
|
1271 | 1271 | [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])] |
|
1272 | 1272 | :param user: current user who triggers this action
|
1273 | 1273 | """ |
|
1274 | 1274 | |
|
1275 | 1275 | pull_request = self.__get_pull_request(pull_request) |
|
1276 | 1276 | if pull_request.is_closed(): |
|
1277 | 1277 | raise ValueError('This pull request is closed') |
|
1278 | 1278 | |
|
1279 | 1279 | reviewers = {} |
|
1280 | 1280 | for user_id, reasons, mandatory, role, rules in reviewer_data: |
|
1281 | 1281 | if isinstance(user_id, (int, compat.string_types)): |
|
1282 | 1282 | user_id = self._get_user(user_id).user_id |
|
1283 | 1283 | reviewers[user_id] = { |
|
1284 | 1284 | 'reasons': reasons, 'mandatory': mandatory, 'role': role} |
|
1285 | 1285 | |
|
1286 | 1286 | reviewers_ids = set(reviewers.keys()) |
|
1287 | 1287 | current_reviewers = PullRequestReviewers.get_pull_request_reviewers( |
|
1288 | 1288 | pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER) |
|
1289 | 1289 | |
|
1290 | 1290 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1291 | 1291 | |
|
1292 | 1292 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1293 | 1293 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1294 | 1294 | |
|
1295 | 1295 | log.debug("Adding %s reviewers", ids_to_add) |
|
1296 | 1296 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1297 | 1297 | changed = False |
|
1298 | 1298 | added_audit_reviewers = [] |
|
1299 | 1299 | removed_audit_reviewers = [] |
|
1300 | 1300 | |
|
1301 | 1301 | for uid in ids_to_add: |
|
1302 | 1302 | changed = True |
|
1303 | 1303 | _usr = self._get_user(uid) |
|
1304 | 1304 | reviewer = PullRequestReviewers() |
|
1305 | 1305 | reviewer.user = _usr |
|
1306 | 1306 | reviewer.pull_request = pull_request |
|
1307 | 1307 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1308 | 1308 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1309 | 1309 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1310 | 1310 | # NOTE(marcink): role should be hardcoded, so we won't edit it. |
|
1311 | 1311 | reviewer.role = PullRequestReviewers.ROLE_REVIEWER |
|
1312 | 1312 | Session().add(reviewer) |
|
1313 | 1313 | added_audit_reviewers.append(reviewer.get_dict()) |
|
1314 | 1314 | |
|
1315 | 1315 | for uid in ids_to_remove: |
|
1316 | 1316 | changed = True |
|
1317 | 1317 | # NOTE(marcink): we fetch "ALL" reviewers objects using .all(). |
|
1318 | 1318 | # This is an edge case that handles previous state of having the same reviewer twice. |
|
1319 | 1319 | # this CAN happen due to the lack of DB checks |
|
1320 | 1320 | reviewers = PullRequestReviewers.query()\ |
|
1321 | 1321 | .filter(PullRequestReviewers.user_id == uid, |
|
1322 | 1322 | PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER, |
|
1323 | 1323 | PullRequestReviewers.pull_request == pull_request)\ |
|
1324 | 1324 | .all() |
|
1325 | 1325 | |
|
1326 | 1326 | for obj in reviewers: |
|
1327 | 1327 | added_audit_reviewers.append(obj.get_dict()) |
|
1328 | 1328 | Session().delete(obj) |
|
1329 | 1329 | |
|
1330 | 1330 | if changed: |
|
1331 | 1331 | Session().expire_all() |
|
1332 | 1332 | pull_request.updated_on = datetime.datetime.now() |
|
1333 | 1333 | Session().add(pull_request) |
|
1334 | 1334 | |
|
1335 | 1335 | # finally store audit logs |
|
1336 | 1336 | for user_data in added_audit_reviewers: |
|
1337 | 1337 | self._log_audit_action( |
|
1338 | 1338 | 'repo.pull_request.reviewer.add', {'data': user_data}, |
|
1339 | 1339 | user, pull_request) |
|
1340 | 1340 | for user_data in removed_audit_reviewers: |
|
1341 | 1341 | self._log_audit_action( |
|
1342 | 1342 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, |
|
1343 | 1343 | user, pull_request) |
|
1344 | 1344 | |
|
1345 | 1345 | self.notify_reviewers(pull_request, ids_to_add, user) |
|
1346 | 1346 | return ids_to_add, ids_to_remove |
|
1347 | 1347 | |
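For reference, each reviewer_data entry has the tuple shape from the docstring, and the actual update reduces to two set differences; a small sketch with hypothetical user ids and a hypothetical 'reviewer' role value:

    # (user, reasons, mandatory_flag, role, rules)
    entry = (101, ['Default reviewer', 'Repo owner'], True, 'reviewer', [])
    user_id, reasons, mandatory, role, rules = entry

    current_reviewer_ids = {101, 102, 103}
    incoming_reviewer_ids = {102, 103, 104}
    ids_to_add = incoming_reviewer_ids.difference(current_reviewer_ids)     # {104}
    ids_to_remove = current_reviewer_ids.difference(incoming_reviewer_ids)  # {101}
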
|
1348 | 1348 | def update_observers(self, pull_request, observer_data, user): |
|
1349 | 1349 | """ |
|
1350 | 1350 | Update the observers in the pull request |
|
1351 | 1351 | |
|
1352 | 1352 | :param pull_request: the pr to update |
|
1353 | 1353 | :param observer_data: list of tuples |
|
1354 | 1354 | [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])] |
|
1355 | 1355 | :param user: current user who triggers this action
|
1356 | 1356 | """ |
|
1357 | 1357 | pull_request = self.__get_pull_request(pull_request) |
|
1358 | 1358 | if pull_request.is_closed(): |
|
1359 | 1359 | raise ValueError('This pull request is closed') |
|
1360 | 1360 | |
|
1361 | 1361 | observers = {} |
|
1362 | 1362 | for user_id, reasons, mandatory, role, rules in observer_data: |
|
1363 | 1363 | if isinstance(user_id, (int, compat.string_types)): |
|
1364 | 1364 | user_id = self._get_user(user_id).user_id |
|
1365 | 1365 | observers[user_id] = { |
|
1366 | 1366 | 'reasons': reasons, 'observers': mandatory, 'role': role} |
|
1367 | 1367 | |
|
1368 | 1368 | observers_ids = set(observers.keys()) |
|
1369 | 1369 | current_observers = PullRequestReviewers.get_pull_request_reviewers( |
|
1370 | 1370 | pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER) |
|
1371 | 1371 | |
|
1372 | 1372 | current_observers_ids = set([x.user.user_id for x in current_observers]) |
|
1373 | 1373 | |
|
1374 | 1374 | ids_to_add = observers_ids.difference(current_observers_ids) |
|
1375 | 1375 | ids_to_remove = current_observers_ids.difference(observers_ids) |
|
1376 | 1376 | |
|
1377 | 1377 | log.debug("Adding %s observers", ids_to_add)

1378 | 1378 | log.debug("Removing %s observers", ids_to_remove)
|
1379 | 1379 | changed = False |
|
1380 | 1380 | added_audit_observers = [] |
|
1381 | 1381 | removed_audit_observers = [] |
|
1382 | 1382 | |
|
1383 | 1383 | for uid in ids_to_add: |
|
1384 | 1384 | changed = True |
|
1385 | 1385 | _usr = self._get_user(uid) |
|
1386 | 1386 | observer = PullRequestReviewers() |
|
1387 | 1387 | observer.user = _usr |
|
1388 | 1388 | observer.pull_request = pull_request |
|
1389 | 1389 | observer.reasons = observers[uid]['reasons'] |
|
1390 | 1390 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1391 | 1391 | # observer.mandatory = observer[uid]['reasons'] |
|
1392 | 1392 | |
|
1393 | 1393 | # NOTE(marcink): role should be hardcoded, so we won't edit it. |
|
1394 | 1394 | observer.role = PullRequestReviewers.ROLE_OBSERVER |
|
1395 | 1395 | Session().add(observer) |
|
1396 | 1396 | added_audit_observers.append(observer.get_dict()) |
|
1397 | 1397 | |
|
1398 | 1398 | for uid in ids_to_remove: |
|
1399 | 1399 | changed = True |
|
1400 | 1400 | # NOTE(marcink): we fetch "ALL" reviewers objects using .all(). |
|
1401 | 1401 | # This is an edge case that handles previous state of having the same reviewer twice. |
|
1402 | 1402 | # this CAN happen due to the lack of DB checks |
|
1403 | 1403 | observers = PullRequestReviewers.query()\ |
|
1404 | 1404 | .filter(PullRequestReviewers.user_id == uid, |
|
1405 | 1405 | PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER, |
|
1406 | 1406 | PullRequestReviewers.pull_request == pull_request)\ |
|
1407 | 1407 | .all() |
|
1408 | 1408 | |
|
1409 | 1409 | for obj in observers: |
|
1410 | 1410 | added_audit_observers.append(obj.get_dict()) |
|
1411 | 1411 | Session().delete(obj) |
|
1412 | 1412 | |
|
1413 | 1413 | if changed: |
|
1414 | 1414 | Session().expire_all() |
|
1415 | 1415 | pull_request.updated_on = datetime.datetime.now() |
|
1416 | 1416 | Session().add(pull_request) |
|
1417 | 1417 | |
|
1418 | 1418 | # finally store audit logs |
|
1419 | 1419 | for user_data in added_audit_observers: |
|
1420 | 1420 | self._log_audit_action( |
|
1421 | 1421 | 'repo.pull_request.observer.add', {'data': user_data}, |
|
1422 | 1422 | user, pull_request) |
|
1423 | 1423 | for user_data in removed_audit_observers: |
|
1424 | 1424 | self._log_audit_action( |
|
1425 | 1425 | 'repo.pull_request.observer.delete', {'old_data': user_data}, |
|
1426 | 1426 | user, pull_request) |
|
1427 | 1427 | |
|
1428 | 1428 | self.notify_observers(pull_request, ids_to_add, user) |
|
1429 | 1429 | return ids_to_add, ids_to_remove |
|
1430 | 1430 | |
|
1431 | 1431 | def get_url(self, pull_request, request=None, permalink=False): |
|
1432 | 1432 | if not request: |
|
1433 | 1433 | request = get_current_request() |
|
1434 | 1434 | |
|
1435 | 1435 | if permalink: |
|
1436 | 1436 | return request.route_url( |
|
1437 | 1437 | 'pull_requests_global', |
|
1438 | 1438 | pull_request_id=pull_request.pull_request_id,) |
|
1439 | 1439 | else: |
|
1440 | 1440 | return request.route_url('pullrequest_show', |
|
1441 | 1441 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1442 | 1442 | pull_request_id=pull_request.pull_request_id,) |
|
1443 | 1443 | |
|
1444 | 1444 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1445 | 1445 | """ |
|
1446 | 1446 | Returns qualified url pointing to the shadow repository. If this pull |
|
1447 | 1447 | request is closed there is no shadow repository and ``None`` will be |
|
1448 | 1448 | returned. |
|
1449 | 1449 | """ |
|
1450 | 1450 | if pull_request.is_closed(): |
|
1451 | 1451 | return None |
|
1452 | 1452 | else: |
|
1453 | 1453 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) |
|
1454 | 1454 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1455 | 1455 | |
|
1456 | 1456 | def _notify_reviewers(self, pull_request, user_ids, role, user): |
|
1457 | 1457 | # notification to reviewers/observers |
|
1458 | 1458 | if not user_ids: |
|
1459 | 1459 | return |
|
1460 | 1460 | |
|
1461 | 1461 | log.debug('Notify following %s users about pull-request %s', role, user_ids) |
|
1462 | 1462 | |
|
1463 | 1463 | pull_request_obj = pull_request |
|
1464 | 1464 | # get the current participants of this pull request |
|
1465 | 1465 | recipients = user_ids |
|
1466 | 1466 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1467 | 1467 | |
|
1468 | 1468 | pr_source_repo = pull_request_obj.source_repo |
|
1469 | 1469 | pr_target_repo = pull_request_obj.target_repo |
|
1470 | 1470 | |
|
1471 | 1471 | pr_url = h.route_url('pullrequest_show', |
|
1472 | 1472 | repo_name=pr_target_repo.repo_name, |
|
1473 | 1473 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1474 | 1474 | |
|
1475 | 1475 | # set some variables for email notification |
|
1476 | 1476 | pr_target_repo_url = h.route_url( |
|
1477 | 1477 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1478 | 1478 | |
|
1479 | 1479 | pr_source_repo_url = h.route_url( |
|
1480 | 1480 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1481 | 1481 | |
|
1482 | 1482 | # pull request specifics |
|
1483 | 1483 | pull_request_commits = [ |
|
1484 | 1484 | (x.raw_id, x.message) |
|
1485 | 1485 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1486 | 1486 | |
|
1487 | 1487 | current_rhodecode_user = user |
|
1488 | 1488 | kwargs = { |
|
1489 | 1489 | 'user': current_rhodecode_user, |
|
1490 | 1490 | 'pull_request_author': pull_request.author, |
|
1491 | 1491 | 'pull_request': pull_request_obj, |
|
1492 | 1492 | 'pull_request_commits': pull_request_commits, |
|
1493 | 1493 | |
|
1494 | 1494 | 'pull_request_target_repo': pr_target_repo, |
|
1495 | 1495 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1496 | 1496 | |
|
1497 | 1497 | 'pull_request_source_repo': pr_source_repo, |
|
1498 | 1498 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1499 | 1499 | |
|
1500 | 1500 | 'pull_request_url': pr_url, |
|
1501 | 1501 | 'thread_ids': [pr_url], |
|
1502 | 1502 | 'user_role': role |
|
1503 | 1503 | } |
|
1504 | 1504 | |
|
1505 | # pre-generate the subject for notification itself | |
|
1506 | (subject, _e, body_plaintext) = EmailNotificationModel().render_email( | |
|
1507 | notification_type, **kwargs) | |
|
1508 | ||
|
1509 | 1505 | # create notification objects, and emails |
|
1510 | 1506 | NotificationModel().create( |
|
1511 | 1507 | created_by=current_rhodecode_user, |
|
1512 | notification_subject=subject, | |

1513 | notification_body=body_plaintext, | |
|
1508 | notification_subject='', # Filled in based on the notification_type | |
|
1509 | notification_body='', # Filled in based on the notification_type | |
|
1514 | 1510 | notification_type=notification_type, |
|
1515 | 1511 | recipients=recipients, |
|
1516 | 1512 | email_kwargs=kwargs, |
|
1517 | 1513 | ) |
|
1518 | 1514 | |
|
1519 | 1515 | def notify_reviewers(self, pull_request, reviewers_ids, user): |
|
1520 | 1516 | return self._notify_reviewers(pull_request, reviewers_ids, |
|
1521 | 1517 | PullRequestReviewers.ROLE_REVIEWER, user) |
|
1522 | 1518 | |
|
1523 | 1519 | def notify_observers(self, pull_request, observers_ids, user): |
|
1524 | 1520 | return self._notify_reviewers(pull_request, observers_ids, |
|
1525 | 1521 | PullRequestReviewers.ROLE_OBSERVER, user) |
|
1526 | 1522 | |
|
1527 | 1523 | def notify_users(self, pull_request, updating_user, ancestor_commit_id, |
|
1528 | 1524 | commit_changes, file_changes): |
|
1529 | 1525 | |
|
1530 | 1526 | updating_user_id = updating_user.user_id |
|
1531 | 1527 | reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()]) |
|
1532 | 1528 | # NOTE(marcink): send notification to all other users except to |
|
1533 | 1529 | # person who updated the PR |
|
1534 | 1530 | recipients = reviewers.difference(set([updating_user_id])) |
|
1535 | 1531 | |
|
1536 | 1532 | log.debug('Notify following recipients about pull-request update %s', recipients) |
|
1537 | 1533 | |
|
1538 | 1534 | pull_request_obj = pull_request |
|
1539 | 1535 | |
|
1540 | 1536 | # send email about the update |
|
1541 | 1537 | changed_files = ( |
|
1542 | 1538 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1543 | 1539 | |
|
1544 | 1540 | pr_source_repo = pull_request_obj.source_repo |
|
1545 | 1541 | pr_target_repo = pull_request_obj.target_repo |
|
1546 | 1542 | |
|
1547 | 1543 | pr_url = h.route_url('pullrequest_show', |
|
1548 | 1544 | repo_name=pr_target_repo.repo_name, |
|
1549 | 1545 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1550 | 1546 | |
|
1551 | 1547 | # set some variables for email notification |
|
1552 | 1548 | pr_target_repo_url = h.route_url( |
|
1553 | 1549 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1554 | 1550 | |
|
1555 | 1551 | pr_source_repo_url = h.route_url( |
|
1556 | 1552 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1557 | 1553 | |
|
1558 | 1554 | email_kwargs = { |
|
1559 | 1555 | 'date': datetime.datetime.now(), |
|
1560 | 1556 | 'updating_user': updating_user, |
|
1561 | 1557 | |
|
1562 | 1558 | 'pull_request': pull_request_obj, |
|
1563 | 1559 | |
|
1564 | 1560 | 'pull_request_target_repo': pr_target_repo, |
|
1565 | 1561 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1566 | 1562 | |
|
1567 | 1563 | 'pull_request_source_repo': pr_source_repo, |
|
1568 | 1564 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1569 | 1565 | |
|
1570 | 1566 | 'pull_request_url': pr_url, |
|
1571 | 1567 | |
|
1572 | 1568 | 'ancestor_commit_id': ancestor_commit_id, |
|
1573 | 1569 | 'added_commits': commit_changes.added, |
|
1574 | 1570 | 'removed_commits': commit_changes.removed, |
|
1575 | 1571 | 'changed_files': changed_files, |
|
1576 | 1572 | 'added_files': file_changes.added, |
|
1577 | 1573 | 'modified_files': file_changes.modified, |
|
1578 | 1574 | 'removed_files': file_changes.removed, |
|
1579 | 1575 | 'thread_ids': [pr_url], |
|
1580 | 1576 | } |
|
1581 | 1577 | |
|
1582 | (subject, _e, body_plaintext) = EmailNotificationModel().render_email( | |
|
1583 | EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs) | |
|
1584 | ||
|
1585 | 1578 | # create notification objects, and emails |
|
1586 | 1579 | NotificationModel().create( |
|
1587 | 1580 | created_by=updating_user, |
|
1588 | notification_subject=subject, | |

1589 | notification_body=body_plaintext, | |
|
1581 | notification_subject='', # Filled in based on the notification_type | |
|
1582 | notification_body='', # Filled in based on the notification_type | |
|
1590 | 1583 | notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, |
|
1591 | 1584 | recipients=recipients, |
|
1592 | 1585 | email_kwargs=email_kwargs, |
|
1593 | 1586 | ) |
|
1594 | 1587 | |
|
1595 | 1588 | def delete(self, pull_request, user=None): |
|
1596 | 1589 | if not user: |
|
1597 | 1590 | user = getattr(get_current_rhodecode_user(), 'username', None) |
|
1598 | 1591 | |
|
1599 | 1592 | pull_request = self.__get_pull_request(pull_request) |
|
1600 | 1593 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1601 | 1594 | self._cleanup_merge_workspace(pull_request) |
|
1602 | 1595 | self._log_audit_action( |
|
1603 | 1596 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1604 | 1597 | user, pull_request) |
|
1605 | 1598 | Session().delete(pull_request) |
|
1606 | 1599 | |
|
1607 | 1600 | def close_pull_request(self, pull_request, user): |
|
1608 | 1601 | pull_request = self.__get_pull_request(pull_request) |
|
1609 | 1602 | self._cleanup_merge_workspace(pull_request) |
|
1610 | 1603 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1611 | 1604 | pull_request.updated_on = datetime.datetime.now() |
|
1612 | 1605 | Session().add(pull_request) |
|
1613 | 1606 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'close') |
|
1614 | 1607 | |
|
1615 | 1608 | pr_data = pull_request.get_api_data(with_merge_state=False) |
|
1616 | 1609 | self._log_audit_action( |
|
1617 | 1610 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) |
|
1618 | 1611 | |
|
1619 | 1612 | def close_pull_request_with_comment( |
|
1620 | 1613 | self, pull_request, user, repo, message=None, auth_user=None): |
|
1621 | 1614 | |
|
1622 | 1615 | pull_request_review_status = pull_request.calculated_review_status() |
|
1623 | 1616 | |
|
1624 | 1617 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1625 | 1618 | # approved only if we have voting consent |
|
1626 | 1619 | status = ChangesetStatus.STATUS_APPROVED |
|
1627 | 1620 | else: |
|
1628 | 1621 | status = ChangesetStatus.STATUS_REJECTED |
|
1629 | 1622 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1630 | 1623 | |
|
1631 | 1624 | default_message = ( |
|
1632 | 1625 | 'Closing with status change {transition_icon} {status}.' |
|
1633 | 1626 | ).format(transition_icon='>', status=status_lbl) |
|
1634 | 1627 | text = message or default_message |
|
1635 | 1628 | |
|
1636 | 1629 | # create a comment, and link it to new status |
|
1637 | 1630 | comment = CommentsModel().create( |
|
1638 | 1631 | text=text, |
|
1639 | 1632 | repo=repo.repo_id, |
|
1640 | 1633 | user=user.user_id, |
|
1641 | 1634 | pull_request=pull_request.pull_request_id, |
|
1642 | 1635 | status_change=status_lbl, |
|
1643 | 1636 | status_change_type=status, |
|
1644 | 1637 | closing_pr=True, |
|
1645 | 1638 | auth_user=auth_user, |
|
1646 | 1639 | ) |
|
1647 | 1640 | |
|
1648 | 1641 | # calculate old status before we change it |
|
1649 | 1642 | old_calculated_status = pull_request.calculated_review_status() |
|
1650 | 1643 | ChangesetStatusModel().set_status( |
|
1651 | 1644 | repo.repo_id, |
|
1652 | 1645 | status, |
|
1653 | 1646 | user.user_id, |
|
1654 | 1647 | comment=comment, |
|
1655 | 1648 | pull_request=pull_request.pull_request_id |
|
1656 | 1649 | ) |
|
1657 | 1650 | |
|
1658 | 1651 | Session().flush() |
|
1659 | 1652 | |
|
1660 | 1653 | self.trigger_pull_request_hook(pull_request, user, 'comment', |
|
1661 | 1654 | data={'comment': comment}) |
|
1662 | 1655 | |
|
1663 | 1656 | # we now calculate the status of pull request again, and based on that |
|
1664 | 1657 | # calculation trigger status change. This might happen in cases |
|
1665 | 1658 | # that non-reviewer admin closes a pr, which means his vote doesn't |
|
1666 | 1659 | # change the status, while if he's a reviewer this might change it. |
|
1667 | 1660 | calculated_status = pull_request.calculated_review_status() |
|
1668 | 1661 | if old_calculated_status != calculated_status: |
|
1669 | 1662 | self.trigger_pull_request_hook(pull_request, user, 'review_status_change', |
|
1670 | 1663 | data={'status': calculated_status}) |
|
1671 | 1664 | |
|
1672 | 1665 | # finally close the PR |
|
1673 | 1666 | PullRequestModel().close_pull_request(pull_request.pull_request_id, user) |
|
1674 | 1667 | |
|
1675 | 1668 | return comment, status |
|
1676 | 1669 | |
|
1677 | 1670 | def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False): |
|
1678 | 1671 | _ = translator or get_current_request().translate |
|
1679 | 1672 | |
|
1680 | 1673 | if not self._is_merge_enabled(pull_request): |
|
1681 | 1674 | return None, False, _('Server-side pull request merging is disabled.') |
|
1682 | 1675 | |
|
1683 | 1676 | if pull_request.is_closed(): |
|
1684 | 1677 | return None, False, _('This pull request is closed.') |
|
1685 | 1678 | |
|
1686 | 1679 | merge_possible, msg = self._check_repo_requirements( |
|
1687 | 1680 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1688 | 1681 | translator=_) |
|
1689 | 1682 | if not merge_possible: |
|
1690 | 1683 | return None, merge_possible, msg |
|
1691 | 1684 | |
|
1692 | 1685 | try: |
|
1693 | 1686 | merge_response = self._try_merge( |
|
1694 | 1687 | pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1695 | 1688 | log.debug("Merge response: %s", merge_response) |
|
1696 | 1689 | return merge_response, merge_response.possible, merge_response.merge_status_message |
|
1697 | 1690 | except NotImplementedError: |
|
1698 | 1691 | return None, False, _('Pull request merging is not supported.') |
|
1699 | 1692 | |
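merge_status() hands back a (merge_response, possible, message) triple; a hedged usage sketch, assuming an existing pull_request object and a configured model:

    merge_response, possible, msg = PullRequestModel().merge_status(pull_request)
    if not possible:
        log.debug('Cannot merge pull request: %s', msg)
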
|
1700 | 1693 | def _check_repo_requirements(self, target, source, translator): |
|
1701 | 1694 | """ |
|
1702 | 1695 | Check if `target` and `source` have compatible requirements. |
|
1703 | 1696 | |
|
1704 | 1697 | Currently this is just checking for largefiles. |
|
1705 | 1698 | """ |
|
1706 | 1699 | _ = translator |
|
1707 | 1700 | target_has_largefiles = self._has_largefiles(target) |
|
1708 | 1701 | source_has_largefiles = self._has_largefiles(source) |
|
1709 | 1702 | merge_possible = True |
|
1710 | 1703 | message = u'' |
|
1711 | 1704 | |
|
1712 | 1705 | if target_has_largefiles != source_has_largefiles: |
|
1713 | 1706 | merge_possible = False |
|
1714 | 1707 | if source_has_largefiles: |
|
1715 | 1708 | message = _( |
|
1716 | 1709 | 'Target repository large files support is disabled.') |
|
1717 | 1710 | else: |
|
1718 | 1711 | message = _( |
|
1719 | 1712 | 'Source repository large files support is disabled.') |
|
1720 | 1713 | |
|
1721 | 1714 | return merge_possible, message |
|
1722 | 1715 | |
|
1723 | 1716 | def _has_largefiles(self, repo): |
|
1724 | 1717 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1725 | 1718 | 'extensions', 'largefiles') |
|
1726 | 1719 | return largefiles_ui and largefiles_ui[0].active |
|
1727 | 1720 | |
|
1728 | 1721 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1729 | 1722 | """ |
|
1730 | 1723 | Try to merge the pull request and return the merge status. |
|
1731 | 1724 | """ |
|
1732 | 1725 | log.debug( |
|
1733 | 1726 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1734 | 1727 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1735 | 1728 | target_vcs = pull_request.target_repo.scm_instance() |
|
1736 | 1729 | # Refresh the target reference. |
|
1737 | 1730 | try: |
|
1738 | 1731 | target_ref = self._refresh_reference( |
|
1739 | 1732 | pull_request.target_ref_parts, target_vcs) |
|
1740 | 1733 | except CommitDoesNotExistError: |
|
1741 | 1734 | merge_state = MergeResponse( |
|
1742 | 1735 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1743 | 1736 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1744 | 1737 | return merge_state |
|
1745 | 1738 | |
|
1746 | 1739 | target_locked = pull_request.target_repo.locked |
|
1747 | 1740 | if target_locked and target_locked[0]: |
|
1748 | 1741 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1749 | 1742 | log.debug("The target repository is locked by %s.", locked_by) |
|
1750 | 1743 | merge_state = MergeResponse( |
|
1751 | 1744 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1752 | 1745 | metadata={'locked_by': locked_by}) |
|
1753 | 1746 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1754 | 1747 | pull_request, target_ref): |
|
1755 | 1748 | log.debug("Refreshing the merge status of the repository.") |
|
1756 | 1749 | merge_state = self._refresh_merge_state( |
|
1757 | 1750 | pull_request, target_vcs, target_ref) |
|
1758 | 1751 | else: |
|
1759 | 1752 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1760 | 1753 | metadata = { |
|
1761 | 1754 | 'unresolved_files': '', |
|
1762 | 1755 | 'target_ref': pull_request.target_ref_parts, |
|
1763 | 1756 | 'source_ref': pull_request.source_ref_parts, |
|
1764 | 1757 | } |
|
1765 | 1758 | if pull_request.last_merge_metadata: |
|
1766 | 1759 | metadata.update(pull_request.last_merge_metadata_parsed) |
|
1767 | 1760 | |
|
1768 | 1761 | if not possible and target_ref.type == 'branch': |
|
1769 | 1762 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1770 | 1763 | heads = target_vcs._heads(target_ref.name) |
|
1771 | 1764 | if len(heads) != 1: |
|
1772 | 1765 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) |
|
1773 | 1766 | metadata.update({ |
|
1774 | 1767 | 'heads': heads |
|
1775 | 1768 | }) |
|
1776 | 1769 | |
|
1777 | 1770 | merge_state = MergeResponse( |
|
1778 | 1771 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1779 | 1772 | |
|
1780 | 1773 | return merge_state |
|
1781 | 1774 | |
|
1782 | 1775 | def _refresh_reference(self, reference, vcs_repository): |
|
1783 | 1776 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1784 | 1777 | name_or_id = reference.name |
|
1785 | 1778 | else: |
|
1786 | 1779 | name_or_id = reference.commit_id |
|
1787 | 1780 | |
|
1788 | 1781 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1789 | 1782 | refreshed_reference = Reference( |
|
1790 | 1783 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1791 | 1784 | return refreshed_reference |
|
1792 | 1785 | |
|
1793 | 1786 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1794 | 1787 | return not( |
|
1795 | 1788 | pull_request.revisions and |
|
1796 | 1789 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1797 | 1790 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1798 | 1791 | |
|
1799 | 1792 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1800 | 1793 | workspace_id = self._workspace_id(pull_request) |
|
1801 | 1794 | source_vcs = pull_request.source_repo.scm_instance() |
|
1802 | 1795 | repo_id = pull_request.target_repo.repo_id |
|
1803 | 1796 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1804 | 1797 | close_branch = self._close_branch_before_merging(pull_request) |
|
1805 | 1798 | merge_state = target_vcs.merge( |
|
1806 | 1799 | repo_id, workspace_id, |
|
1807 | 1800 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1808 | 1801 | dry_run=True, use_rebase=use_rebase, |
|
1809 | 1802 | close_branch=close_branch) |
|
1810 | 1803 | |
|
1811 | 1804 | # Do not store the response if there was an unknown error. |
|
1812 | 1805 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1813 | 1806 | pull_request._last_merge_source_rev = \ |
|
1814 | 1807 | pull_request.source_ref_parts.commit_id |
|
1815 | 1808 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1816 | 1809 | pull_request.last_merge_status = merge_state.failure_reason |
|
1817 | 1810 | pull_request.last_merge_metadata = merge_state.metadata |
|
1818 | 1811 | |
|
1819 | 1812 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1820 | 1813 | Session().add(pull_request) |
|
1821 | 1814 | Session().commit() |
|
1822 | 1815 | |
|
1823 | 1816 | return merge_state |
|
1824 | 1817 | |
|
1825 | 1818 | def _workspace_id(self, pull_request): |
|
1826 | 1819 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1827 | 1820 | return workspace_id |
|
1828 | 1821 | |
|
1829 | 1822 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1830 | 1823 | bookmark=None, translator=None): |
|
1831 | 1824 | from rhodecode.model.repo import RepoModel |
|
1832 | 1825 | |
|
1833 | 1826 | all_refs, selected_ref = \ |
|
1834 | 1827 | self._get_repo_pullrequest_sources( |
|
1835 | 1828 | repo.scm_instance(), commit_id=commit_id, |
|
1836 | 1829 | branch=branch, bookmark=bookmark, translator=translator) |
|
1837 | 1830 | |
|
1838 | 1831 | refs_select2 = [] |
|
1839 | 1832 | for element in all_refs: |
|
1840 | 1833 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1841 | 1834 | refs_select2.append({'text': element[1], 'children': children}) |
|
1842 | 1835 | |
|
1843 | 1836 | return { |
|
1844 | 1837 | 'user': { |
|
1845 | 1838 | 'user_id': repo.user.user_id, |
|
1846 | 1839 | 'username': repo.user.username, |
|
1847 | 1840 | 'firstname': repo.user.first_name, |
|
1848 | 1841 | 'lastname': repo.user.last_name, |
|
1849 | 1842 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1850 | 1843 | }, |
|
1851 | 1844 | 'name': repo.repo_name, |
|
1852 | 1845 | 'link': RepoModel().get_url(repo), |
|
1853 | 1846 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1854 | 1847 | 'refs': { |
|
1855 | 1848 | 'all_refs': all_refs, |
|
1856 | 1849 | 'selected_ref': selected_ref, |
|
1857 | 1850 | 'select2_refs': refs_select2 |
|
1858 | 1851 | } |
|
1859 | 1852 | } |
|
1860 | 1853 | |
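The refs block assembled above feeds a select2 widget; with hypothetical branch data one group looks like this:

    refs_select2 = [{
        'text': 'Branches',
        'children': [{'id': 'branch:default:abc123', 'text': 'default'}],
    }]
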
|
1861 | 1854 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1862 | 1855 | return u'{source}#{at_ref} to {target}'.format( |
|
1863 | 1856 | source=source, |
|
1864 | 1857 | at_ref=source_ref, |
|
1865 | 1858 | target=target, |
|
1866 | 1859 | ) |
|
1867 | 1860 | |
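The generated title is a plain string interpolation; with hypothetical repository and branch names:

    title = u'{source}#{at_ref} to {target}'.format(
        source='my-fork', at_ref='feature-x', target='upstream-repo')
    assert title == u'my-fork#feature-x to upstream-repo'
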
|
1868 | 1861 | def _cleanup_merge_workspace(self, pull_request): |
|
1869 | 1862 | # Merging related cleanup |
|
1870 | 1863 | repo_id = pull_request.target_repo.repo_id |
|
1871 | 1864 | target_scm = pull_request.target_repo.scm_instance() |
|
1872 | 1865 | workspace_id = self._workspace_id(pull_request) |
|
1873 | 1866 | |
|
1874 | 1867 | try: |
|
1875 | 1868 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
1876 | 1869 | except NotImplementedError: |
|
1877 | 1870 | pass |
|
1878 | 1871 | |
|
1879 | 1872 | def _get_repo_pullrequest_sources( |
|
1880 | 1873 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1881 | 1874 | translator=None): |
|
1882 | 1875 | """ |
|
1883 | 1876 | Return a structure with repo's interesting commits, suitable for |
|
1884 | 1877 | the selectors in pullrequest controller |
|
1885 | 1878 | |
|
1886 | 1879 | :param commit_id: a commit that must be in the list somehow |
|
1887 | 1880 | and selected by default |
|
1888 | 1881 | :param branch: a branch that must be in the list and selected |
|
1889 | 1882 | by default - even if closed |
|
1890 | 1883 | :param bookmark: a bookmark that must be in the list and selected |
|
1891 | 1884 | """ |
|
1892 | 1885 | _ = translator or get_current_request().translate |
|
1893 | 1886 | |
|
1894 | 1887 | commit_id = safe_str(commit_id) if commit_id else None |
|
1895 | 1888 | branch = safe_unicode(branch) if branch else None |
|
1896 | 1889 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
1897 | 1890 | |
|
1898 | 1891 | selected = None |
|
1899 | 1892 | |
|
1900 | 1893 | # order matters: first source that has commit_id in it will be selected |
|
1901 | 1894 | sources = [] |
|
1902 | 1895 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1903 | 1896 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1904 | 1897 | |
|
1905 | 1898 | if commit_id: |
|
1906 | 1899 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1907 | 1900 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1908 | 1901 | |
|
1909 | 1902 | sources.append( |
|
1910 | 1903 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1911 | 1904 | ) |
|
1912 | 1905 | |
|
1913 | 1906 | groups = [] |
|
1914 | 1907 | |
|
1915 | 1908 | for group_key, ref_list, group_name, match in sources: |
|
1916 | 1909 | group_refs = [] |
|
1917 | 1910 | for ref_name, ref_id in ref_list: |
|
1918 | 1911 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
1919 | 1912 | group_refs.append((ref_key, ref_name)) |
|
1920 | 1913 | |
|
1921 | 1914 | if not selected: |
|
1922 | 1915 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1923 | 1916 | selected = ref_key |
|
1924 | 1917 | |
|
1925 | 1918 | if group_refs: |
|
1926 | 1919 | groups.append((group_refs, group_name)) |
|
1927 | 1920 | |
|
1928 | 1921 | if not selected: |
|
1929 | 1922 | ref = commit_id or branch or bookmark |
|
1930 | 1923 | if ref: |
|
1931 | 1924 | raise CommitDoesNotExistError( |
|
1932 | 1925 | u'No commit refs could be found matching: {}'.format(ref)) |
|
1933 | 1926 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1934 | 1927 | selected = u'branch:{}:{}'.format( |
|
1935 | 1928 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
1936 | 1929 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
1937 | 1930 | ) |
|
1938 | 1931 | elif repo.commit_ids: |
|
1939 | 1932 | # make the user select in this case |
|
1940 | 1933 | selected = None |
|
1941 | 1934 | else: |
|
1942 | 1935 | raise EmptyRepositoryError() |
|
1943 | 1936 | return groups, selected |
|
1944 | 1937 | |
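The groups/selected pair returned above is built from '<type>:<name>:<commit_id>' keys; a hypothetical single-branch result:

    groups = [([('branch:default:abc123', 'default')], 'Branches')]
    selected = 'branch:default:abc123'
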
|
1945 | 1938 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
1946 | 1939 | hide_whitespace_changes, diff_context): |
|
1947 | 1940 | |
|
1948 | 1941 | return self._get_diff_from_pr_or_version( |
|
1949 | 1942 | source_repo, source_ref_id, target_ref_id, |
|
1950 | 1943 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1951 | 1944 | |
|
1952 | 1945 | def _get_diff_from_pr_or_version( |
|
1953 | 1946 | self, source_repo, source_ref_id, target_ref_id, |
|
1954 | 1947 | hide_whitespace_changes, diff_context): |
|
1955 | 1948 | |
|
1956 | 1949 | target_commit = source_repo.get_commit( |
|
1957 | 1950 | commit_id=safe_str(target_ref_id)) |
|
1958 | 1951 | source_commit = source_repo.get_commit( |
|
1959 | 1952 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
1960 | 1953 | if isinstance(source_repo, Repository): |
|
1961 | 1954 | vcs_repo = source_repo.scm_instance() |
|
1962 | 1955 | else: |
|
1963 | 1956 | vcs_repo = source_repo |
|
1964 | 1957 | |
|
1965 | 1958 | # TODO: johbo: In the context of an update, we cannot reach |
|
1966 | 1959 | # the old commit anymore with our normal mechanisms. It needs |
|
1967 | 1960 | # some sort of special support in the vcs layer to avoid this |
|
1968 | 1961 | # workaround. |
|
1969 | 1962 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1970 | 1963 | vcs_repo.alias == 'git'): |
|
1971 | 1964 | source_commit.raw_id = safe_str(source_ref_id) |
|
1972 | 1965 | |
|
1973 | 1966 | log.debug('calculating diff between ' |
|
1974 | 1967 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1975 | 1968 | target_ref_id, source_ref_id, |
|
1976 | 1969 | safe_unicode(vcs_repo.path)) |
|
1977 | 1970 | |
|
1978 | 1971 | vcs_diff = vcs_repo.get_diff( |
|
1979 | 1972 | commit1=target_commit, commit2=source_commit, |
|
1980 | 1973 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
1981 | 1974 | return vcs_diff |
|
1982 | 1975 | |
|
1983 | 1976 | def _is_merge_enabled(self, pull_request): |
|
1984 | 1977 | return self._get_general_setting( |
|
1985 | 1978 | pull_request, 'rhodecode_pr_merge_enabled') |
|
1986 | 1979 | |
|
1987 | 1980 | def _use_rebase_for_merging(self, pull_request): |
|
1988 | 1981 | repo_type = pull_request.target_repo.repo_type |
|
1989 | 1982 | if repo_type == 'hg': |
|
1990 | 1983 | return self._get_general_setting( |
|
1991 | 1984 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1992 | 1985 | elif repo_type == 'git': |
|
1993 | 1986 | return self._get_general_setting( |
|
1994 | 1987 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1995 | 1988 | |
|
1996 | 1989 | return False |
|
1997 | 1990 | |
|
1998 | 1991 | def _user_name_for_merging(self, pull_request, user): |
|
1999 | 1992 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') |
|
2000 | 1993 | if env_user_name_attr and hasattr(user, env_user_name_attr): |
|
2001 | 1994 | user_name_attr = env_user_name_attr |
|
2002 | 1995 | else: |
|
2003 | 1996 | user_name_attr = 'short_contact' |
|
2004 | 1997 | |
|
2005 | 1998 | user_name = getattr(user, user_name_attr) |
|
2006 | 1999 | return user_name |
|
2007 | 2000 | |
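The merge author name can be redirected through the RC_MERGE_USER_NAME_ATTR environment variable; a hedged sketch of the override:

    import os
    # prefer the user's 'username' attribute over the default 'short_contact'
    os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'
    # _user_name_for_merging() then resolves getattr(user, 'username') when that attribute exists
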
|
2008 | 2001 | def _close_branch_before_merging(self, pull_request): |
|
2009 | 2002 | repo_type = pull_request.target_repo.repo_type |
|
2010 | 2003 | if repo_type == 'hg': |
|
2011 | 2004 | return self._get_general_setting( |
|
2012 | 2005 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
2013 | 2006 | elif repo_type == 'git': |
|
2014 | 2007 | return self._get_general_setting( |
|
2015 | 2008 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
2016 | 2009 | |
|
2017 | 2010 | return False |
|
2018 | 2011 | |
|
2019 | 2012 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
2020 | 2013 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
2021 | 2014 | settings = settings_model.get_general_settings() |
|
2022 | 2015 | return settings.get(settings_key, default) |
|
2023 | 2016 | |
|
2024 | 2017 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
2025 | 2018 | audit_logger.store( |
|
2026 | 2019 | action=action, |
|
2027 | 2020 | action_data=action_data, |
|
2028 | 2021 | user=user, |
|
2029 | 2022 | repo=pull_request.target_repo) |
|
2030 | 2023 | |
|
2031 | 2024 | def get_reviewer_functions(self): |
|
2032 | 2025 | """ |
|
2033 | 2026 | Fetches functions for validation and fetching default reviewers. |
|
2034 | 2027 | If available we use the EE package, else we fallback to CE |
|
2035 | 2028 | package functions |
|
2036 | 2029 | """ |
|
2037 | 2030 | try: |
|
2038 | 2031 | from rc_reviewers.utils import get_default_reviewers_data |
|
2039 | 2032 | from rc_reviewers.utils import validate_default_reviewers |
|
2040 | 2033 | from rc_reviewers.utils import validate_observers |
|
2041 | 2034 | except ImportError: |
|
2042 | 2035 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
2043 | 2036 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
2044 | 2037 | from rhodecode.apps.repository.utils import validate_observers |
|
2045 | 2038 | |
|
2046 | 2039 | return get_default_reviewers_data, validate_default_reviewers, validate_observers |
|
2047 | 2040 | |
|
2048 | 2041 | |
|
2049 | 2042 | class MergeCheck(object): |
|
2050 | 2043 | """ |
|
2051 | 2044 | Perform Merge Checks and returns a check object which stores information |
|
2052 | 2045 | about merge errors, and merge conditions |
|
2053 | 2046 | """ |
|
2054 | 2047 | TODO_CHECK = 'todo' |
|
2055 | 2048 | PERM_CHECK = 'perm' |
|
2056 | 2049 | REVIEW_CHECK = 'review' |
|
2057 | 2050 | MERGE_CHECK = 'merge' |
|
2058 | 2051 | WIP_CHECK = 'wip' |
|
2059 | 2052 | |
|
2060 | 2053 | def __init__(self): |
|
2061 | 2054 | self.review_status = None |
|
2062 | 2055 | self.merge_possible = None |
|
2063 | 2056 | self.merge_msg = '' |
|
2064 | 2057 | self.merge_response = None |
|
2065 | 2058 | self.failed = None |
|
2066 | 2059 | self.errors = [] |
|
2067 | 2060 | self.error_details = OrderedDict() |
|
2068 | 2061 | self.source_commit = AttributeDict() |
|
2069 | 2062 | self.target_commit = AttributeDict() |
|
2070 | 2063 | |
|
2071 | 2064 | def __repr__(self): |
|
2072 | 2065 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( |
|
2073 | 2066 | self.merge_possible, self.failed, self.errors) |
|
2074 | 2067 | |
|
2075 | 2068 | def push_error(self, error_type, message, error_key, details): |
|
2076 | 2069 | self.failed = True |
|
2077 | 2070 | self.errors.append([error_type, message]) |
|
2078 | 2071 | self.error_details[error_key] = dict( |
|
2079 | 2072 | details=details, |
|
2080 | 2073 | error_type=error_type, |
|
2081 | 2074 | message=message |
|
2082 | 2075 | ) |
|
2083 | 2076 | |
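push_error() records each failed check in three places: the failed flag, the errors list and the error_details dict. Assuming the module above is importable, a small illustration:

    check = MergeCheck()
    check.push_error('warning', 'Pull request reviewer approval is pending.',
                     MergeCheck.REVIEW_CHECK, 'under_review')
    assert check.failed is True
    assert check.errors == [['warning', 'Pull request reviewer approval is pending.']]
    assert check.error_details['review']['error_type'] == 'warning'
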
|
2084 | 2077 | @classmethod |
|
2085 | 2078 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
2086 | 2079 | force_shadow_repo_refresh=False): |
|
2087 | 2080 | _ = translator |
|
2088 | 2081 | merge_check = cls() |
|
2089 | 2082 | |
|
2090 | 2083 | # title has WIP: |
|
2091 | 2084 | if pull_request.work_in_progress: |
|
2092 | 2085 | log.debug("MergeCheck: cannot merge, title has wip: marker.") |
|
2093 | 2086 | |
|
2094 | 2087 | msg = _('WIP marker in title prevents from accidental merge.') |
|
2095 | 2088 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) |
|
2096 | 2089 | if fail_early: |
|
2097 | 2090 | return merge_check |
|
2098 | 2091 | |
|
2099 | 2092 | # permissions to merge |
|
2100 | 2093 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) |
|
2101 | 2094 | if not user_allowed_to_merge: |
|
2102 | 2095 | log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
|
2103 | 2096 | |
|
2104 | 2097 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
2105 | 2098 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2106 | 2099 | if fail_early: |
|
2107 | 2100 | return merge_check |
|
2108 | 2101 | |
|
2109 | 2102 | # permission to merge into the target branch |
|
2110 | 2103 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
2111 | 2104 | if pull_request.target_ref_parts.type == 'branch': |
|
2112 | 2105 | branch_name = pull_request.target_ref_parts.name |
|
2113 | 2106 | else: |
|
2114 | 2107 | # for mercurial we can always figure out the branch from the commit |
|
2115 | 2108 | # in case of bookmark |
|
2116 | 2109 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
2117 | 2110 | branch_name = target_commit.branch |
|
2118 | 2111 | |
|
2119 | 2112 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
2120 | 2113 | pull_request.target_repo.repo_name, branch_name) |
|
2121 | 2114 | if branch_perm and branch_perm == 'branch.none': |
|
2122 | 2115 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
2123 | 2116 | branch_name, rule) |
|
2124 | 2117 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2125 | 2118 | if fail_early: |
|
2126 | 2119 | return merge_check |
|
2127 | 2120 | |
|
2128 | 2121 | # review status, must be always present |
|
2129 | 2122 | review_status = pull_request.calculated_review_status() |
|
2130 | 2123 | merge_check.review_status = review_status |
|
2131 | 2124 | |
|
2132 | 2125 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
2133 | 2126 | if not status_approved: |
|
2134 | 2127 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
2135 | 2128 | |
|
2136 | 2129 | msg = _('Pull request reviewer approval is pending.') |
|
2137 | 2130 | |
|
2138 | 2131 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
2139 | 2132 | |
|
2140 | 2133 | if fail_early: |
|
2141 | 2134 | return merge_check |
|
2142 | 2135 | |
|
2143 | 2136 | # left over TODOs |
|
2144 | 2137 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
2145 | 2138 | if todos: |
|
2146 | 2139 | log.debug("MergeCheck: cannot merge, {} " |
|
2147 | 2140 | "unresolved TODOs left.".format(len(todos))) |
|
2148 | 2141 | |
|
2149 | 2142 | if len(todos) == 1: |
|
2150 | 2143 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
2151 | 2144 | len(todos)) |
|
2152 | 2145 | else: |
|
2153 | 2146 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
2154 | 2147 | len(todos)) |
|
2155 | 2148 | |
|
2156 | 2149 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
2157 | 2150 | |
|
2158 | 2151 | if fail_early: |
|
2159 | 2152 | return merge_check |
|
2160 | 2153 | |
|
2161 | 2154 | # merge possible, here is the filesystem simulation + shadow repo |
|
2162 | 2155 | merge_response, merge_status, msg = PullRequestModel().merge_status( |
|
2163 | 2156 | pull_request, translator=translator, |
|
2164 | 2157 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
2165 | 2158 | |
|
2166 | 2159 | merge_check.merge_possible = merge_status |
|
2167 | 2160 | merge_check.merge_msg = msg |
|
2168 | 2161 | merge_check.merge_response = merge_response |
|
2169 | 2162 | |
|
2170 | 2163 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
2171 | 2164 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
2172 | 2165 | |
|
2173 | 2166 | try: |
|
2174 | 2167 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) |
|
2175 | 2168 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id |
|
2176 | 2169 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts |
|
2177 | 2170 | merge_check.source_commit.current_raw_id = source_commit.raw_id |
|
2178 | 2171 | merge_check.source_commit.previous_raw_id = source_ref_id |
|
2179 | 2172 | |
|
2180 | 2173 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id |
|
2181 | 2174 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts |
|
2182 | 2175 | merge_check.target_commit.current_raw_id = target_commit.raw_id |
|
2183 | 2176 | merge_check.target_commit.previous_raw_id = target_ref_id |
|
2184 | 2177 | except (SourceRefMissing, TargetRefMissing): |
|
2185 | 2178 | pass |
|
2186 | 2179 | |
|
2187 | 2180 | if not merge_status: |
|
2188 | 2181 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
2189 | 2182 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
2190 | 2183 | |
|
2191 | 2184 | if fail_early: |
|
2192 | 2185 | return merge_check |
|
2193 | 2186 | |
|
2194 | 2187 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
2195 | 2188 | return merge_check |
|
2196 | 2189 | |
|
2197 | 2190 | @classmethod |
|
2198 | 2191 | def get_merge_conditions(cls, pull_request, translator): |
|
2199 | 2192 | _ = translator |
|
2200 | 2193 | merge_details = {} |
|
2201 | 2194 | |
|
2202 | 2195 | model = PullRequestModel() |
|
2203 | 2196 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
2204 | 2197 | |
|
2205 | 2198 | if use_rebase: |
|
2206 | 2199 | merge_details['merge_strategy'] = dict( |
|
2207 | 2200 | details={}, |
|
2208 | 2201 | message=_('Merge strategy: rebase') |
|
2209 | 2202 | ) |
|
2210 | 2203 | else: |
|
2211 | 2204 | merge_details['merge_strategy'] = dict( |
|
2212 | 2205 | details={}, |
|
2213 | 2206 | message=_('Merge strategy: explicit merge commit') |
|
2214 | 2207 | ) |
|
2215 | 2208 | |
|
2216 | 2209 | close_branch = model._close_branch_before_merging(pull_request) |
|
2217 | 2210 | if close_branch: |
|
2218 | 2211 | repo_type = pull_request.target_repo.repo_type |
|
2219 | 2212 | close_msg = '' |
|
2220 | 2213 | if repo_type == 'hg': |
|
2221 | 2214 | close_msg = _('Source branch will be closed before the merge.') |
|
2222 | 2215 | elif repo_type == 'git': |
|
2223 | 2216 | close_msg = _('Source branch will be deleted after the merge.') |
|
2224 | 2217 | |
|
2225 | 2218 | merge_details['close_branch'] = dict( |
|
2226 | 2219 | details={}, |
|
2227 | 2220 | message=close_msg |
|
2228 | 2221 | ) |
|
2229 | 2222 | |
|
2230 | 2223 | return merge_details |
|
2231 | 2224 | |
|
2232 | 2225 | |
|
2233 | 2226 | ChangeTuple = collections.namedtuple( |
|
2234 | 2227 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
2235 | 2228 | |
|
2236 | 2229 | FileChangeTuple = collections.namedtuple( |
|
2237 | 2230 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
@@ -1,1050 +1,1047 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | users model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import datetime |
|
28 | 28 | import ipaddress |
|
29 | 29 | |
|
30 | 30 | from pyramid.threadlocal import get_current_request |
|
31 | 31 | from sqlalchemy.exc import DatabaseError |
|
32 | 32 | |
|
33 | 33 | from rhodecode import events |
|
34 | 34 | from rhodecode.lib.user_log_filter import user_log_filter |
|
35 | 35 | from rhodecode.lib.utils2 import ( |
|
36 | 36 | safe_unicode, get_current_rhodecode_user, action_logger_generic, |
|
37 | 37 | AttributeDict, str2bool) |
|
38 | 38 | from rhodecode.lib.exceptions import ( |
|
39 | 39 | DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, |
|
40 | 40 | UserOwnsUserGroupsException, NotAllowedToCreateUserError, |
|
41 | 41 | UserOwnsPullRequestsException, UserOwnsArtifactsException) |
|
42 | 42 | from rhodecode.lib.caching_query import FromCache |
|
43 | 43 | from rhodecode.model import BaseModel |
|
44 | 44 | from rhodecode.model.db import ( |
|
45 | 45 | _hash_key, func, true, false, or_, joinedload, User, UserToPerm, |
|
46 | 46 | UserEmailMap, UserIpMap, UserLog) |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.auth_token import AuthTokenModel |
|
49 | 49 | from rhodecode.model.repo_group import RepoGroupModel |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class UserModel(BaseModel): |
|
55 | 55 | cls = User |
|
56 | 56 | |
|
57 | 57 | def get(self, user_id, cache=False): |
|
58 | 58 | user = self.sa.query(User) |
|
59 | 59 | if cache: |
|
60 | 60 | user = user.options( |
|
61 | 61 | FromCache("sql_cache_short", "get_user_%s" % user_id)) |
|
62 | 62 | return user.get(user_id) |
|
63 | 63 | |
|
64 | 64 | def get_user(self, user): |
|
65 | 65 | return self._get_user(user) |
|
66 | 66 | |
|
67 | 67 | def _serialize_user(self, user): |
|
68 | 68 | import rhodecode.lib.helpers as h |
|
69 | 69 | |
|
70 | 70 | return { |
|
71 | 71 | 'id': user.user_id, |
|
72 | 72 | 'first_name': user.first_name, |
|
73 | 73 | 'last_name': user.last_name, |
|
74 | 74 | 'username': user.username, |
|
75 | 75 | 'email': user.email, |
|
76 | 76 | 'icon_link': h.gravatar_url(user.email, 30), |
|
77 | 77 | 'profile_link': h.link_to_user(user), |
|
78 | 78 | 'value_display': h.escape(h.person(user)), |
|
79 | 79 | 'value': user.username, |
|
80 | 80 | 'value_type': 'user', |
|
81 | 81 | 'active': user.active, |
|
82 | 82 | } |
|
83 | 83 | |
|
84 | 84 | def get_users(self, name_contains=None, limit=20, only_active=True): |
|
85 | 85 | |
|
86 | 86 | query = self.sa.query(User) |
|
87 | 87 | if only_active: |
|
88 | 88 | query = query.filter(User.active == true()) |
|
89 | 89 | |
|
90 | 90 | if name_contains: |
|
91 | 91 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
92 | 92 | query = query.filter( |
|
93 | 93 | or_( |
|
94 | 94 | User.name.ilike(ilike_expression), |
|
95 | 95 | User.lastname.ilike(ilike_expression), |
|
96 | 96 | User.username.ilike(ilike_expression) |
|
97 | 97 | ) |
|
98 | 98 | ) |
|
99 | 99 | # sort by len to have top most matches first |
|
100 | 100 | query = query.order_by(func.length(User.username))\ |
|
101 | 101 | .order_by(User.username) |
|
102 | 102 | query = query.limit(limit) |
|
103 | 103 | |
|
104 | 104 | users = query.all() |
|
105 | 105 | |
|
106 | 106 | _users = [ |
|
107 | 107 | self._serialize_user(user) for user in users |
|
108 | 108 | ] |
|
109 | 109 | return _users |
|
110 | 110 | |
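
get_users() is the lookup behind user auto-complete style widgets: it optionally restricts to active accounts, matches name, lastname and username with ILIKE, orders shortest usernames first and returns the dicts produced by _serialize_user(). A usage sketch, assuming the model is importable as rhodecode.model.user.UserModel:

    # Usage sketch (assumed import path): fetch up to 5 active users whose
    # name, lastname or username contains "adm".
    from rhodecode.model.user import UserModel

    matches = UserModel().get_users(name_contains='adm', limit=5, only_active=True)
    for entry in matches:
        # each entry is the dict built by _serialize_user()
        print(entry['username'], entry['email'])
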
|
111 | 111 | def get_by_username(self, username, cache=False, case_insensitive=False): |
|
112 | 112 | |
|
113 | 113 | if case_insensitive: |
|
114 | 114 | user = self.sa.query(User).filter(User.username.ilike(username)) |
|
115 | 115 | else: |
|
116 | 116 | user = self.sa.query(User)\ |
|
117 | 117 | .filter(User.username == username) |
|
118 | 118 | if cache: |
|
119 | 119 | name_key = _hash_key(username) |
|
120 | 120 | user = user.options( |
|
121 | 121 | FromCache("sql_cache_short", "get_user_%s" % name_key)) |
|
122 | 122 | return user.scalar() |
|
123 | 123 | |
|
124 | 124 | def get_by_email(self, email, cache=False, case_insensitive=False): |
|
125 | 125 | return User.get_by_email(email, case_insensitive, cache) |
|
126 | 126 | |
|
127 | 127 | def get_by_auth_token(self, auth_token, cache=False): |
|
128 | 128 | return User.get_by_auth_token(auth_token, cache) |
|
129 | 129 | |
|
130 | 130 | def get_active_user_count(self, cache=False): |
|
131 | 131 | qry = User.query().filter( |
|
132 | 132 | User.active == true()).filter( |
|
133 | 133 | User.username != User.DEFAULT_USER) |
|
134 | 134 | if cache: |
|
135 | 135 | qry = qry.options( |
|
136 | 136 | FromCache("sql_cache_short", "get_active_users")) |
|
137 | 137 | return qry.count() |
|
138 | 138 | |
|
139 | 139 | def create(self, form_data, cur_user=None): |
|
140 | 140 | if not cur_user: |
|
141 | 141 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
142 | 142 | |
|
143 | 143 | user_data = { |
|
144 | 144 | 'username': form_data['username'], |
|
145 | 145 | 'password': form_data['password'], |
|
146 | 146 | 'email': form_data['email'], |
|
147 | 147 | 'firstname': form_data['firstname'], |
|
148 | 148 | 'lastname': form_data['lastname'], |
|
149 | 149 | 'active': form_data['active'], |
|
150 | 150 | 'extern_type': form_data['extern_type'], |
|
151 | 151 | 'extern_name': form_data['extern_name'], |
|
152 | 152 | 'admin': False, |
|
153 | 153 | 'cur_user': cur_user |
|
154 | 154 | } |
|
155 | 155 | |
|
156 | 156 | if 'create_repo_group' in form_data: |
|
157 | 157 | user_data['create_repo_group'] = str2bool( |
|
158 | 158 | form_data.get('create_repo_group')) |
|
159 | 159 | |
|
160 | 160 | try: |
|
161 | 161 | if form_data.get('password_change'): |
|
162 | 162 | user_data['force_password_change'] = True |
|
163 | 163 | return UserModel().create_or_update(**user_data) |
|
164 | 164 | except Exception: |
|
165 | 165 | log.error(traceback.format_exc()) |
|
166 | 166 | raise |
|
167 | 167 | |
|
168 | 168 | def update_user(self, user, skip_attrs=None, **kwargs): |
|
169 | 169 | from rhodecode.lib.auth import get_crypt_password |
|
170 | 170 | |
|
171 | 171 | user = self._get_user(user) |
|
172 | 172 | if user.username == User.DEFAULT_USER: |
|
173 | 173 | raise DefaultUserException( |
|
174 | 174 | "You can't edit this user (`%(username)s`) since it's " |
|
175 | 175 | "crucial for entire application" % { |
|
176 | 176 | 'username': user.username}) |
|
177 | 177 | |
|
178 | 178 | # first store only defaults |
|
179 | 179 | user_attrs = { |
|
180 | 180 | 'updating_user_id': user.user_id, |
|
181 | 181 | 'username': user.username, |
|
182 | 182 | 'password': user.password, |
|
183 | 183 | 'email': user.email, |
|
184 | 184 | 'firstname': user.name, |
|
185 | 185 | 'lastname': user.lastname, |
|
186 | 186 | 'description': user.description, |
|
187 | 187 | 'active': user.active, |
|
188 | 188 | 'admin': user.admin, |
|
189 | 189 | 'extern_name': user.extern_name, |
|
190 | 190 | 'extern_type': user.extern_type, |
|
191 | 191 | 'language': user.user_data.get('language') |
|
192 | 192 | } |
|
193 | 193 | |
|
194 | 194 | # in case there's new_password, that comes from form, use it to |
|
195 | 195 | # store password |
|
196 | 196 | if kwargs.get('new_password'): |
|
197 | 197 | kwargs['password'] = kwargs['new_password'] |
|
198 | 198 | |
|
199 | 199 | # cleanups, my_account password change form |
|
200 | 200 | kwargs.pop('current_password', None) |
|
201 | 201 | kwargs.pop('new_password', None) |
|
202 | 202 | |
|
203 | 203 | # cleanups, user edit password change form |
|
204 | 204 | kwargs.pop('password_confirmation', None) |
|
205 | 205 | kwargs.pop('password_change', None) |
|
206 | 206 | |
|
207 | 207 | # create repo group on user creation |
|
208 | 208 | kwargs.pop('create_repo_group', None) |
|
209 | 209 | |
|
210 | 210 | # legacy forms send name, which is the firstname |
|
211 | 211 | firstname = kwargs.pop('name', None) |
|
212 | 212 | if firstname: |
|
213 | 213 | kwargs['firstname'] = firstname |
|
214 | 214 | |
|
215 | 215 | for k, v in kwargs.items(): |
|
216 | 216 | # skip if we don't want to update this |
|
217 | 217 | if skip_attrs and k in skip_attrs: |
|
218 | 218 | continue |
|
219 | 219 | |
|
220 | 220 | user_attrs[k] = v |
|
221 | 221 | |
|
222 | 222 | try: |
|
223 | 223 | return self.create_or_update(**user_attrs) |
|
224 | 224 | except Exception: |
|
225 | 225 | log.error(traceback.format_exc()) |
|
226 | 226 | raise |
|
227 | 227 | |
|
228 | 228 | def create_or_update( |
|
229 | 229 | self, username, password, email, firstname='', lastname='', |
|
230 | 230 | active=True, admin=False, extern_type=None, extern_name=None, |
|
231 | 231 | cur_user=None, plugin=None, force_password_change=False, |
|
232 | 232 | allow_to_create_user=True, create_repo_group=None, |
|
233 | 233 | updating_user_id=None, language=None, description='', |
|
234 | 234 | strict_creation_check=True): |
|
235 | 235 | """ |
|
236 | 236 | Creates a new instance if not found, or updates current one |
|
237 | 237 | |
|
238 | 238 | :param username: |
|
239 | 239 | :param password: |
|
240 | 240 | :param email: |
|
241 | 241 | :param firstname: |
|
242 | 242 | :param lastname: |
|
243 | 243 | :param active: |
|
244 | 244 | :param admin: |
|
245 | 245 | :param extern_type: |
|
246 | 246 | :param extern_name: |
|
247 | 247 | :param cur_user: |
|
248 | 248 | :param plugin: optional plugin this method was called from |
|
249 | 249 | :param force_password_change: toggles new or existing user flag |
|
250 | 250 | for password change |
|
251 | 251 | :param allow_to_create_user: Defines if the method can actually create |
|
252 | 252 | new users |
|
253 | 253 | :param create_repo_group: Defines if the method should also |
|
254 | 254 | create a repo group named after the user, with the user as owner 
|
255 | 255 | :param updating_user_id: if set, this is the user we want to 

256 | 256 | update; this allows editing the username. 
|
257 | 257 | :param language: language of user from interface. |
|
258 | 258 | :param description: user description |
|
259 | 259 | :param strict_creation_check: checks for allowed creation license wise etc. |
|
260 | 260 | |
|
261 | 261 | :returns: new User object with injected `is_new_user` attribute. |
|
262 | 262 | """ |
|
263 | 263 | |
|
264 | 264 | if not cur_user: |
|
265 | 265 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
266 | 266 | |
|
267 | 267 | from rhodecode.lib.auth import ( |
|
268 | 268 | get_crypt_password, check_password) |
|
269 | 269 | from rhodecode.lib import hooks_base |
|
270 | 270 | |
|
271 | 271 | def _password_change(new_user, password): |
|
272 | 272 | old_password = new_user.password or '' |
|
273 | 273 | # empty password |
|
274 | 274 | if not old_password: |
|
275 | 275 | return False |
|
276 | 276 | |
|
277 | 277 | # password check is only needed for RhodeCode internal auth calls |
|
278 | 278 | # in case it's a plugin we don't care |
|
279 | 279 | if not plugin: |
|
280 | 280 | |
|
281 | 281 | # first check if we were given the crypted password back; if it 

282 | 282 | # matches, it's not a password change 
|
283 | 283 | if new_user.password == password: |
|
284 | 284 | return False |
|
285 | 285 | |
|
286 | 286 | password_match = check_password(password, old_password) |
|
287 | 287 | if not password_match: |
|
288 | 288 | return True |
|
289 | 289 | |
|
290 | 290 | return False |
|
291 | 291 | |
|
292 | 292 | # read settings on default personal repo group creation |
|
293 | 293 | if create_repo_group is None: |
|
294 | 294 | default_create_repo_group = RepoGroupModel()\ |
|
295 | 295 | .get_default_create_personal_repo_group() |
|
296 | 296 | create_repo_group = default_create_repo_group |
|
297 | 297 | |
|
298 | 298 | user_data = { |
|
299 | 299 | 'username': username, |
|
300 | 300 | 'password': password, |
|
301 | 301 | 'email': email, |
|
302 | 302 | 'firstname': firstname, |
|
303 | 303 | 'lastname': lastname, |
|
304 | 304 | 'active': active, |
|
305 | 305 | 'admin': admin |
|
306 | 306 | } |
|
307 | 307 | |
|
308 | 308 | if updating_user_id: |
|
309 | 309 | log.debug('Checking for existing account in RhodeCode ' |
|
310 | 310 | 'database with user_id `%s` ', updating_user_id) |
|
311 | 311 | user = User.get(updating_user_id) |
|
312 | 312 | else: |
|
313 | 313 | log.debug('Checking for existing account in RhodeCode ' |
|
314 | 314 | 'database with username `%s` ', username) |
|
315 | 315 | user = User.get_by_username(username, case_insensitive=True) |
|
316 | 316 | |
|
317 | 317 | if user is None: |
|
318 | 318 | # we check internal flag if this method is actually allowed to |
|
319 | 319 | # create new user |
|
320 | 320 | if not allow_to_create_user: |
|
321 | 321 | msg = ('Method wants to create new user, but it is not ' |
|
322 | 322 | 'allowed to do so') |
|
323 | 323 | log.warning(msg) |
|
324 | 324 | raise NotAllowedToCreateUserError(msg) |
|
325 | 325 | |
|
326 | 326 | log.debug('Creating new user %s', username) |
|
327 | 327 | |
|
328 | 328 | # only if we create user that is active |
|
329 | 329 | new_active_user = active |
|
330 | 330 | if new_active_user and strict_creation_check: |
|
331 | 331 | # raises UserCreationError if it's not allowed for any reason to |
|
332 | 332 | # create new active user, this also executes pre-create hooks |
|
333 | 333 | hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True) |
|
334 | 334 | events.trigger(events.UserPreCreate(user_data)) |
|
335 | 335 | new_user = User() |
|
336 | 336 | edit = False |
|
337 | 337 | else: |
|
338 | 338 | log.debug('updating user `%s`', username) |
|
339 | 339 | events.trigger(events.UserPreUpdate(user, user_data)) |
|
340 | 340 | new_user = user |
|
341 | 341 | edit = True |
|
342 | 342 | |
|
343 | 343 | # we're not allowed to edit default user |
|
344 | 344 | if user.username == User.DEFAULT_USER: |
|
345 | 345 | raise DefaultUserException( |
|
346 | 346 | "You can't edit this user (`%(username)s`) since it's " |
|
347 | 347 | "crucial for entire application" |
|
348 | 348 | % {'username': user.username}) |
|
349 | 349 | |
|
350 | 350 | # inject special attribute that will tell us if User is new or old |
|
351 | 351 | new_user.is_new_user = not edit |
|
352 | 352 | # for users that didn't specify an auth type, we use the RhodeCode built-in one 
|
353 | 353 | from rhodecode.authentication.plugins import auth_rhodecode |
|
354 | 354 | extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
355 | 355 | extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
356 | 356 | |
|
357 | 357 | try: |
|
358 | 358 | new_user.username = username |
|
359 | 359 | new_user.admin = admin |
|
360 | 360 | new_user.email = email |
|
361 | 361 | new_user.active = active |
|
362 | 362 | new_user.extern_name = safe_unicode(extern_name) |
|
363 | 363 | new_user.extern_type = safe_unicode(extern_type) |
|
364 | 364 | new_user.name = firstname |
|
365 | 365 | new_user.lastname = lastname |
|
366 | 366 | new_user.description = description |
|
367 | 367 | |
|
368 | 368 | # set password only if creating a user or the password is changed 
|
369 | 369 | if not edit or _password_change(new_user, password): |
|
370 | 370 | reason = 'new password' if edit else 'new user' |
|
371 | 371 | log.debug('Updating password reason=>%s', reason) |
|
372 | 372 | new_user.password = get_crypt_password(password) if password else None |
|
373 | 373 | |
|
374 | 374 | if force_password_change: |
|
375 | 375 | new_user.update_userdata(force_password_change=True) |
|
376 | 376 | if language: |
|
377 | 377 | new_user.update_userdata(language=language) |
|
378 | 378 | new_user.update_userdata(notification_status=True) |
|
379 | 379 | |
|
380 | 380 | self.sa.add(new_user) |
|
381 | 381 | |
|
382 | 382 | if not edit and create_repo_group: |
|
383 | 383 | RepoGroupModel().create_personal_repo_group( |
|
384 | 384 | new_user, commit_early=False) |
|
385 | 385 | |
|
386 | 386 | if not edit: |
|
387 | 387 | # add the RSS token |
|
388 | 388 | self.add_auth_token( |
|
389 | 389 | user=username, lifetime_minutes=-1, |
|
390 | 390 | role=self.auth_token_role.ROLE_FEED, |
|
391 | 391 | description=u'Generated feed token') |
|
392 | 392 | |
|
393 | 393 | kwargs = new_user.get_dict() |
|
394 | 394 | # backward compat, require api_keys present |
|
395 | 395 | kwargs['api_keys'] = kwargs['auth_tokens'] |
|
396 | 396 | hooks_base.create_user(created_by=cur_user, **kwargs) |
|
397 | 397 | events.trigger(events.UserPostCreate(user_data)) |
|
398 | 398 | return new_user |
|
399 | 399 | except (DatabaseError,): |
|
400 | 400 | log.error(traceback.format_exc()) |
|
401 | 401 | raise |
|
402 | 402 | |
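
create_or_update() either creates an account (running the pre-create checks and hooks) or updates an existing one, and marks the result with an injected is_new_user attribute. A hedged usage sketch; the username and email values are made up for illustration:

    # Sketch only: create the account if missing, otherwise update it in place.
    from rhodecode.model.user import UserModel
    from rhodecode.model.meta import Session

    user = UserModel().create_or_update(
        username='jsmith',                 # example value
        password='initial-secret',         # hashed internally before storing
        email='jsmith@example.com',
        firstname='John', lastname='Smith',
        active=True, admin=False)
    if user.is_new_user:
        # a feed auth token was generated and the create-user hooks ran
        pass
    Session().commit()
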
|
403 | 403 | def create_registration(self, form_data, |
|
404 | 404 | extern_name='rhodecode', extern_type='rhodecode'): |
|
405 | 405 | from rhodecode.model.notification import NotificationModel |
|
406 | 406 | from rhodecode.model.notification import EmailNotificationModel |
|
407 | 407 | |
|
408 | 408 | try: |
|
409 | 409 | form_data['admin'] = False |
|
410 | 410 | form_data['extern_name'] = extern_name |
|
411 | 411 | form_data['extern_type'] = extern_type |
|
412 | 412 | new_user = self.create(form_data) |
|
413 | 413 | |
|
414 | 414 | self.sa.add(new_user) |
|
415 | 415 | self.sa.flush() |
|
416 | 416 | |
|
417 | 417 | user_data = new_user.get_dict() |
|
418 | 418 | user_data.update({ |
|
419 | 419 | 'first_name': user_data.get('firstname'), |
|
420 | 420 | 'last_name': user_data.get('lastname'), |
|
421 | 421 | }) |
|
422 | 422 | kwargs = { |
|
423 | 423 | # use SQLALCHEMY safe dump of user data |
|
424 | 424 | 'user': AttributeDict(user_data), |
|
425 | 425 | 'date': datetime.datetime.now() |
|
426 | 426 | } |
|
427 | 427 | notification_type = EmailNotificationModel.TYPE_REGISTRATION |
|
428 | # pre-generate the subject for notification itself | |
|
429 | (subject, _e, body_plaintext) = EmailNotificationModel().render_email( | |
|
430 | notification_type, **kwargs) | |
|
431 | 428 | |
|
432 | 429 | # create notification objects, and emails |
|
433 | 430 | NotificationModel().create( |
|
434 | 431 | created_by=new_user, |
|
435 | notification_subject=subject, | 

436 | notification_body=body_plaintext, | 

432 | notification_subject='', # Filled in based on the notification_type | |
|
433 | notification_body='', # Filled in based on the notification_type | |
|
437 | 434 | notification_type=notification_type, |
|
438 | 435 | recipients=None, # all admins |
|
439 | 436 | email_kwargs=kwargs, |
|
440 | 437 | ) |
|
441 | 438 | |
|
442 | 439 | return new_user |
|
443 | 440 | except Exception: |
|
444 | 441 | log.error(traceback.format_exc()) |
|
445 | 442 | raise |
|
446 | 443 | |
|
447 | 444 | def _handle_user_repos(self, username, repositories, handle_user, |
|
448 | 445 | handle_mode=None): |
|
449 | 446 | |
|
450 | 447 | left_overs = True |
|
451 | 448 | |
|
452 | 449 | from rhodecode.model.repo import RepoModel |
|
453 | 450 | |
|
454 | 451 | if handle_mode == 'detach': |
|
455 | 452 | for obj in repositories: |
|
456 | 453 | obj.user = handle_user |
|
457 | 454 | # set description so we know why the super admin now owns 

458 | 455 | # additional repositories that were orphaned ! 
|
459 | 456 | obj.description += ' \n::detached repository from deleted user: %s' % (username,) |
|
460 | 457 | self.sa.add(obj) |
|
461 | 458 | left_overs = False |
|
462 | 459 | elif handle_mode == 'delete': |
|
463 | 460 | for obj in repositories: |
|
464 | 461 | RepoModel().delete(obj, forks='detach') |
|
465 | 462 | left_overs = False |
|
466 | 463 | |
|
467 | 464 | # if nothing was done we still have leftovers 
|
468 | 465 | return left_overs |
|
469 | 466 | |
|
470 | 467 | def _handle_user_repo_groups(self, username, repository_groups, handle_user, |
|
471 | 468 | handle_mode=None): |
|
472 | 469 | |
|
473 | 470 | left_overs = True |
|
474 | 471 | |
|
475 | 472 | from rhodecode.model.repo_group import RepoGroupModel |
|
476 | 473 | |
|
477 | 474 | if handle_mode == 'detach': |
|
478 | 475 | for r in repository_groups: |
|
479 | 476 | r.user = handle_user |
|
480 | 477 | # set description so we know why the super admin now owns 

481 | 478 | # additional repository groups that were orphaned ! 
|
482 | 479 | r.group_description += ' \n::detached repository group from deleted user: %s' % (username,) |
|
483 | 480 | r.personal = False |
|
484 | 481 | self.sa.add(r) |
|
485 | 482 | left_overs = False |
|
486 | 483 | elif handle_mode == 'delete': |
|
487 | 484 | for r in repository_groups: |
|
488 | 485 | RepoGroupModel().delete(r) |
|
489 | 486 | left_overs = False |
|
490 | 487 | |
|
491 | 488 | # if nothing was done we still have leftovers 
|
492 | 489 | return left_overs |
|
493 | 490 | |
|
494 | 491 | def _handle_user_user_groups(self, username, user_groups, handle_user, |
|
495 | 492 | handle_mode=None): |
|
496 | 493 | |
|
497 | 494 | left_overs = True |
|
498 | 495 | |
|
499 | 496 | from rhodecode.model.user_group import UserGroupModel |
|
500 | 497 | |
|
501 | 498 | if handle_mode == 'detach': |
|
502 | 499 | for r in user_groups: |
|
503 | 500 | for user_user_group_to_perm in r.user_user_group_to_perm: |
|
504 | 501 | if user_user_group_to_perm.user.username == username: |
|
505 | 502 | user_user_group_to_perm.user = handle_user |
|
506 | 503 | r.user = handle_user |
|
507 | 504 | # set description so we know why the super admin now owns 

508 | 505 | # additional user groups that were orphaned ! 
|
509 | 506 | r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,) |
|
510 | 507 | self.sa.add(r) |
|
511 | 508 | left_overs = False |
|
512 | 509 | elif handle_mode == 'delete': |
|
513 | 510 | for r in user_groups: |
|
514 | 511 | UserGroupModel().delete(r) |
|
515 | 512 | left_overs = False |
|
516 | 513 | |
|
517 | 514 | # if nothing was done we still have leftovers 
|
518 | 515 | return left_overs |
|
519 | 516 | |
|
520 | 517 | def _handle_user_pull_requests(self, username, pull_requests, handle_user, |
|
521 | 518 | handle_mode=None): |
|
522 | 519 | left_overs = True |
|
523 | 520 | |
|
524 | 521 | from rhodecode.model.pull_request import PullRequestModel |
|
525 | 522 | |
|
526 | 523 | if handle_mode == 'detach': |
|
527 | 524 | for pr in pull_requests: |
|
528 | 525 | pr.user_id = handle_user.user_id |
|
529 | 526 | # set description so we know why the super admin now owns 

530 | 527 | # additional pull requests that were orphaned ! 
|
531 | 528 | pr.description += ' \n::detached pull requests from deleted user: %s' % (username,) |
|
532 | 529 | self.sa.add(pr) |
|
533 | 530 | left_overs = False |
|
534 | 531 | elif handle_mode == 'delete': |
|
535 | 532 | for pr in pull_requests: |
|
536 | 533 | PullRequestModel().delete(pr) |
|
537 | 534 | |
|
538 | 535 | left_overs = False |
|
539 | 536 | |
|
540 | 537 | # if nothing was done we still have leftovers 
|
541 | 538 | return left_overs |
|
542 | 539 | |
|
543 | 540 | def _handle_user_artifacts(self, username, artifacts, handle_user, |
|
544 | 541 | handle_mode=None): |
|
545 | 542 | |
|
546 | 543 | left_overs = True |
|
547 | 544 | |
|
548 | 545 | if handle_mode == 'detach': |
|
549 | 546 | for a in artifacts: |
|
550 | 547 | a.upload_user = handle_user |
|
551 | 548 | # set description so we know why the super admin now owns 

552 | 549 | # additional artifacts that were orphaned ! 
|
553 | 550 | a.file_description += ' \n::detached artifact from deleted user: %s' % (username,) |
|
554 | 551 | self.sa.add(a) |
|
555 | 552 | left_overs = False |
|
556 | 553 | elif handle_mode == 'delete': |
|
557 | 554 | from rhodecode.apps.file_store import utils as store_utils |
|
558 | 555 | request = get_current_request() |
|
559 | 556 | storage = store_utils.get_file_storage(request.registry.settings) |
|
560 | 557 | for a in artifacts: |
|
561 | 558 | file_uid = a.file_uid |
|
562 | 559 | storage.delete(file_uid) |
|
563 | 560 | self.sa.delete(a) |
|
564 | 561 | |
|
565 | 562 | left_overs = False |
|
566 | 563 | |
|
567 | 564 | # if nothing was done we still have leftovers 
|
568 | 565 | return left_overs |
|
569 | 566 | |
|
570 | 567 | def delete(self, user, cur_user=None, handle_repos=None, |
|
571 | 568 | handle_repo_groups=None, handle_user_groups=None, |
|
572 | 569 | handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None): |
|
573 | 570 | from rhodecode.lib import hooks_base |
|
574 | 571 | |
|
575 | 572 | if not cur_user: |
|
576 | 573 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
577 | 574 | |
|
578 | 575 | user = self._get_user(user) |
|
579 | 576 | |
|
580 | 577 | try: |
|
581 | 578 | if user.username == User.DEFAULT_USER: |
|
582 | 579 | raise DefaultUserException( |
|
583 | 580 | u"You can't remove this user since it's" |
|
584 | 581 | u" crucial for entire application") |
|
585 | 582 | handle_user = handle_new_owner or self.cls.get_first_super_admin() |
|
586 | 583 | log.debug('New detached objects owner %s', handle_user) |
|
587 | 584 | |
|
588 | 585 | left_overs = self._handle_user_repos( |
|
589 | 586 | user.username, user.repositories, handle_user, handle_repos) |
|
590 | 587 | if left_overs and user.repositories: |
|
591 | 588 | repos = [x.repo_name for x in user.repositories] |
|
592 | 589 | raise UserOwnsReposException( |
|
593 | 590 | u'user "%(username)s" still owns %(len_repos)s repositories and cannot be ' |
|
594 | 591 | u'removed. Switch owners or remove those repositories:%(list_repos)s' |
|
595 | 592 | % {'username': user.username, 'len_repos': len(repos), |
|
596 | 593 | 'list_repos': ', '.join(repos)}) |
|
597 | 594 | |
|
598 | 595 | left_overs = self._handle_user_repo_groups( |
|
599 | 596 | user.username, user.repository_groups, handle_user, handle_repo_groups) |
|
600 | 597 | if left_overs and user.repository_groups: |
|
601 | 598 | repo_groups = [x.group_name for x in user.repository_groups] |
|
602 | 599 | raise UserOwnsRepoGroupsException( |
|
603 | 600 | u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be ' |
|
604 | 601 | u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s' |
|
605 | 602 | % {'username': user.username, 'len_repo_groups': len(repo_groups), |
|
606 | 603 | 'list_repo_groups': ', '.join(repo_groups)}) |
|
607 | 604 | |
|
608 | 605 | left_overs = self._handle_user_user_groups( |
|
609 | 606 | user.username, user.user_groups, handle_user, handle_user_groups) |
|
610 | 607 | if left_overs and user.user_groups: |
|
611 | 608 | user_groups = [x.users_group_name for x in user.user_groups] |
|
612 | 609 | raise UserOwnsUserGroupsException( |
|
613 | 610 | u'user "%s" still owns %s user groups and cannot be ' |
|
614 | 611 | u'removed. Switch owners or remove those user groups:%s' |
|
615 | 612 | % (user.username, len(user_groups), ', '.join(user_groups))) |
|
616 | 613 | |
|
617 | 614 | left_overs = self._handle_user_pull_requests( |
|
618 | 615 | user.username, user.user_pull_requests, handle_user, handle_pull_requests) |
|
619 | 616 | if left_overs and user.user_pull_requests: |
|
620 | 617 | pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests] |
|
621 | 618 | raise UserOwnsPullRequestsException( |
|
622 | 619 | u'user "%s" still owns %s pull requests and cannot be ' |
|
623 | 620 | u'removed. Switch owners or remove those pull requests:%s' |
|
624 | 621 | % (user.username, len(pull_requests), ', '.join(pull_requests))) |
|
625 | 622 | |
|
626 | 623 | left_overs = self._handle_user_artifacts( |
|
627 | 624 | user.username, user.artifacts, handle_user, handle_artifacts) |
|
628 | 625 | if left_overs and user.artifacts: |
|
629 | 626 | artifacts = [x.file_uid for x in user.artifacts] |
|
630 | 627 | raise UserOwnsArtifactsException( |
|
631 | 628 | u'user "%s" still owns %s artifacts and cannot be ' |
|
632 | 629 | u'removed. Switch owners or remove those artifacts:%s' |
|
633 | 630 | % (user.username, len(artifacts), ', '.join(artifacts))) |
|
634 | 631 | |
|
635 | 632 | user_data = user.get_dict() # fetch user data before expire |
|
636 | 633 | |
|
637 | 634 | # we might change the user data with detach/delete, make sure |
|
638 | 635 | # the object is marked as expired before actually deleting ! |
|
639 | 636 | self.sa.expire(user) |
|
640 | 637 | self.sa.delete(user) |
|
641 | 638 | |
|
642 | 639 | hooks_base.delete_user(deleted_by=cur_user, **user_data) |
|
643 | 640 | except Exception: |
|
644 | 641 | log.error(traceback.format_exc()) |
|
645 | 642 | raise |
|
646 | 643 | |
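
delete() refuses to remove an account that still owns repositories, repository groups, user groups, pull requests or artifacts unless a handle mode is given: 'detach' re-assigns the objects to handle_new_owner (or the first super-admin), while 'delete' removes them as well. A hedged sketch of a detaching delete:

    # Sketch only: remove the account and detach everything it still owns.
    from rhodecode.model.user import UserModel
    from rhodecode.model.meta import Session

    UserModel().delete(
        'jsmith',                      # username, id or User instance
        handle_repos='detach',         # or 'delete'
        handle_repo_groups='detach',
        handle_user_groups='detach',
        handle_pull_requests='detach',
        handle_artifacts='detach',
        handle_new_owner=None)         # None -> first super-admin
    Session().commit()
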
|
647 | 644 | def reset_password_link(self, data, pwd_reset_url): |
|
648 | 645 | from rhodecode.lib.celerylib import tasks, run_task |
|
649 | 646 | from rhodecode.model.notification import EmailNotificationModel |
|
650 | 647 | user_email = data['email'] |
|
651 | 648 | try: |
|
652 | 649 | user = User.get_by_email(user_email) |
|
653 | 650 | if user: |
|
654 | 651 | log.debug('password reset user found %s', user) |
|
655 | 652 | |
|
656 | 653 | email_kwargs = { |
|
657 | 654 | 'password_reset_url': pwd_reset_url, |
|
658 | 655 | 'user': user, |
|
659 | 656 | 'email': user_email, |
|
660 | 657 | 'date': datetime.datetime.now(), |
|
661 | 658 | 'first_admin_email': User.get_first_super_admin().email |
|
662 | 659 | } |
|
663 | 660 | |
|
664 | 661 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
665 | 662 | EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs) |
|
666 | 663 | |
|
667 | 664 | recipients = [user_email] |
|
668 | 665 | |
|
669 | 666 | action_logger_generic( |
|
670 | 667 | 'sending password reset email to user: {}'.format( |
|
671 | 668 | user), namespace='security.password_reset') |
|
672 | 669 | |
|
673 | 670 | run_task(tasks.send_email, recipients, subject, |
|
674 | 671 | email_body_plaintext, email_body) |
|
675 | 672 | |
|
676 | 673 | else: |
|
677 | 674 | log.debug("password reset email %s not found", user_email) |
|
678 | 675 | except Exception: |
|
679 | 676 | log.error(traceback.format_exc()) |
|
680 | 677 | return False |
|
681 | 678 | |
|
682 | 679 | return True |
|
683 | 680 | |
|
684 | 681 | def reset_password(self, data): |
|
685 | 682 | from rhodecode.lib.celerylib import tasks, run_task |
|
686 | 683 | from rhodecode.model.notification import EmailNotificationModel |
|
687 | 684 | from rhodecode.lib import auth |
|
688 | 685 | user_email = data['email'] |
|
689 | 686 | pre_db = True |
|
690 | 687 | try: |
|
691 | 688 | user = User.get_by_email(user_email) |
|
692 | 689 | new_passwd = auth.PasswordGenerator().gen_password( |
|
693 | 690 | 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL) |
|
694 | 691 | if user: |
|
695 | 692 | user.password = auth.get_crypt_password(new_passwd) |
|
696 | 693 | # also force this user to reset his password ! |
|
697 | 694 | user.update_userdata(force_password_change=True) |
|
698 | 695 | |
|
699 | 696 | Session().add(user) |
|
700 | 697 | |
|
701 | 698 | # now delete the token in question |
|
702 | 699 | UserApiKeys = AuthTokenModel.cls |
|
703 | 700 | UserApiKeys().query().filter( |
|
704 | 701 | UserApiKeys.api_key == data['token']).delete() |
|
705 | 702 | |
|
706 | 703 | Session().commit() |
|
707 | 704 | log.info('successfully reset password for `%s`', user_email) |
|
708 | 705 | |
|
709 | 706 | if new_passwd is None: |
|
710 | 707 | raise Exception('unable to generate new password') |
|
711 | 708 | |
|
712 | 709 | pre_db = False |
|
713 | 710 | |
|
714 | 711 | email_kwargs = { |
|
715 | 712 | 'new_password': new_passwd, |
|
716 | 713 | 'user': user, |
|
717 | 714 | 'email': user_email, |
|
718 | 715 | 'date': datetime.datetime.now(), |
|
719 | 716 | 'first_admin_email': User.get_first_super_admin().email |
|
720 | 717 | } |
|
721 | 718 | |
|
722 | 719 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
723 | 720 | EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, |
|
724 | 721 | **email_kwargs) |
|
725 | 722 | |
|
726 | 723 | recipients = [user_email] |
|
727 | 724 | |
|
728 | 725 | action_logger_generic( |
|
729 | 726 | 'sent new password to user: {} with email: {}'.format( |
|
730 | 727 | user, user_email), namespace='security.password_reset') |
|
731 | 728 | |
|
732 | 729 | run_task(tasks.send_email, recipients, subject, |
|
733 | 730 | email_body_plaintext, email_body) |
|
734 | 731 | |
|
735 | 732 | except Exception: |
|
736 | 733 | log.error('Failed to update user password') |
|
737 | 734 | log.error(traceback.format_exc()) |
|
738 | 735 | if pre_db: |
|
739 | 736 | # we rollback only if local db stuff fails. If it goes into |
|
740 | 737 | # run_task, we're past the rollback state and this wouldn't work then 
|
741 | 738 | Session().rollback() |
|
742 | 739 | |
|
743 | 740 | return True |
|
744 | 741 | |
|
745 | 742 | def fill_data(self, auth_user, user_id=None, api_key=None, username=None): |
|
746 | 743 | """ |
|
747 | 744 | Fetches auth_user by user_id, or api_key if present. 
|
748 | 745 | Fills auth_user attributes with those taken from database. |
|
749 | 746 | Additionally sets is_authenticated to False if filling the data 

750 | 747 | from the database fails. 
|
751 | 748 | |
|
752 | 749 | :param auth_user: instance of user to set attributes |
|
753 | 750 | :param user_id: user id to fetch by |
|
754 | 751 | :param api_key: api key to fetch by |
|
755 | 752 | :param username: username to fetch by |
|
756 | 753 | """ |
|
757 | 754 | def token_obfuscate(token): |
|
758 | 755 | if token: |
|
759 | 756 | return token[:4] + "****" |
|
760 | 757 | |
|
761 | 758 | if user_id is None and api_key is None and username is None: |
|
762 | 759 | raise Exception('You need to pass user_id, api_key or username') |
|
763 | 760 | |
|
764 | 761 | log.debug( |
|
765 | 762 | 'AuthUser: fill data execution based on: ' |
|
766 | 763 | 'user_id:%s api_key:%s username:%s', user_id, api_key, username) |
|
767 | 764 | try: |
|
768 | 765 | dbuser = None |
|
769 | 766 | if user_id: |
|
770 | 767 | dbuser = self.get(user_id) |
|
771 | 768 | elif api_key: |
|
772 | 769 | dbuser = self.get_by_auth_token(api_key) |
|
773 | 770 | elif username: |
|
774 | 771 | dbuser = self.get_by_username(username) |
|
775 | 772 | |
|
776 | 773 | if not dbuser: |
|
777 | 774 | log.warning( |
|
778 | 775 | 'Unable to lookup user by id:%s api_key:%s username:%s', |
|
779 | 776 | user_id, token_obfuscate(api_key), username) |
|
780 | 777 | return False |
|
781 | 778 | if not dbuser.active: |
|
782 | 779 | log.debug('User `%s:%s` is inactive, skipping fill data', |
|
783 | 780 | username, user_id) |
|
784 | 781 | return False |
|
785 | 782 | |
|
786 | 783 | log.debug('AuthUser: filling found user:%s data', dbuser) |
|
787 | 784 | |
|
788 | 785 | attrs = { |
|
789 | 786 | 'user_id': dbuser.user_id, |
|
790 | 787 | 'username': dbuser.username, |
|
791 | 788 | 'name': dbuser.name, |
|
792 | 789 | 'first_name': dbuser.first_name, |
|
793 | 790 | 'firstname': dbuser.firstname, |
|
794 | 791 | 'last_name': dbuser.last_name, |
|
795 | 792 | 'lastname': dbuser.lastname, |
|
796 | 793 | 'admin': dbuser.admin, |
|
797 | 794 | 'active': dbuser.active, |
|
798 | 795 | |
|
799 | 796 | 'email': dbuser.email, |
|
800 | 797 | 'emails': dbuser.emails_cached(), |
|
801 | 798 | 'short_contact': dbuser.short_contact, |
|
802 | 799 | 'full_contact': dbuser.full_contact, |
|
803 | 800 | 'full_name': dbuser.full_name, |
|
804 | 801 | 'full_name_or_username': dbuser.full_name_or_username, |
|
805 | 802 | |
|
806 | 803 | '_api_key': dbuser._api_key, |
|
807 | 804 | '_user_data': dbuser._user_data, |
|
808 | 805 | |
|
809 | 806 | 'created_on': dbuser.created_on, |
|
810 | 807 | 'extern_name': dbuser.extern_name, |
|
811 | 808 | 'extern_type': dbuser.extern_type, |
|
812 | 809 | |
|
813 | 810 | 'inherit_default_permissions': dbuser.inherit_default_permissions, |
|
814 | 811 | |
|
815 | 812 | 'language': dbuser.language, |
|
816 | 813 | 'last_activity': dbuser.last_activity, |
|
817 | 814 | 'last_login': dbuser.last_login, |
|
818 | 815 | 'password': dbuser.password, |
|
819 | 816 | } |
|
820 | 817 | auth_user.__dict__.update(attrs) |
|
821 | 818 | except Exception: |
|
822 | 819 | log.error(traceback.format_exc()) |
|
823 | 820 | auth_user.is_authenticated = False |
|
824 | 821 | return False |
|
825 | 822 | |
|
826 | 823 | return True |
|
827 | 824 | |
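
fill_data() looks the account up by user_id, api_key or username and copies the database attributes onto the passed object's __dict__, returning False for missing or inactive accounts. A toy sketch with a stand-in object (in RhodeCode the real caller is the AuthUser machinery):

    # Sketch only: _Stub stands in for the AuthUser instance normally passed.
    from rhodecode.model.user import UserModel

    class _Stub(object):
        is_authenticated = False

    auth_user = _Stub()
    if UserModel().fill_data(auth_user, username='jsmith'):
        # attributes such as user_id, email and admin are now set on auth_user
        print(auth_user.user_id, auth_user.email)
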
|
828 | 825 | def has_perm(self, user, perm): |
|
829 | 826 | perm = self._get_perm(perm) |
|
830 | 827 | user = self._get_user(user) |
|
831 | 828 | |
|
832 | 829 | return UserToPerm.query().filter(UserToPerm.user == user)\ |
|
833 | 830 | .filter(UserToPerm.permission == perm).scalar() is not None |
|
834 | 831 | |
|
835 | 832 | def grant_perm(self, user, perm): |
|
836 | 833 | """ |
|
837 | 834 | Grant user global permissions |
|
838 | 835 | |
|
839 | 836 | :param user: |
|
840 | 837 | :param perm: |
|
841 | 838 | """ |
|
842 | 839 | user = self._get_user(user) |
|
843 | 840 | perm = self._get_perm(perm) |
|
844 | 841 | # if this permission is already granted skip it |
|
845 | 842 | _perm = UserToPerm.query()\ |
|
846 | 843 | .filter(UserToPerm.user == user)\ |
|
847 | 844 | .filter(UserToPerm.permission == perm)\ |
|
848 | 845 | .scalar() |
|
849 | 846 | if _perm: |
|
850 | 847 | return |
|
851 | 848 | new = UserToPerm() |
|
852 | 849 | new.user = user |
|
853 | 850 | new.permission = perm |
|
854 | 851 | self.sa.add(new) |
|
855 | 852 | return new |
|
856 | 853 | |
|
857 | 854 | def revoke_perm(self, user, perm): |
|
858 | 855 | """ |
|
859 | 856 | Revoke users global permissions |
|
860 | 857 | |
|
861 | 858 | :param user: |
|
862 | 859 | :param perm: |
|
863 | 860 | """ |
|
864 | 861 | user = self._get_user(user) |
|
865 | 862 | perm = self._get_perm(perm) |
|
866 | 863 | |
|
867 | 864 | obj = UserToPerm.query()\ |
|
868 | 865 | .filter(UserToPerm.user == user)\ |
|
869 | 866 | .filter(UserToPerm.permission == perm)\ |
|
870 | 867 | .scalar() |
|
871 | 868 | if obj: |
|
872 | 869 | self.sa.delete(obj) |
|
873 | 870 | |
|
874 | 871 | def add_extra_email(self, user, email): |
|
875 | 872 | """ |
|
876 | 873 | Adds email address to UserEmailMap |
|
877 | 874 | |
|
878 | 875 | :param user: |
|
879 | 876 | :param email: |
|
880 | 877 | """ |
|
881 | 878 | |
|
882 | 879 | user = self._get_user(user) |
|
883 | 880 | |
|
884 | 881 | obj = UserEmailMap() |
|
885 | 882 | obj.user = user |
|
886 | 883 | obj.email = email |
|
887 | 884 | self.sa.add(obj) |
|
888 | 885 | return obj |
|
889 | 886 | |
|
890 | 887 | def delete_extra_email(self, user, email_id): |
|
891 | 888 | """ |
|
892 | 889 | Removes email address from UserEmailMap |
|
893 | 890 | |
|
894 | 891 | :param user: |
|
895 | 892 | :param email_id: |
|
896 | 893 | """ |
|
897 | 894 | user = self._get_user(user) |
|
898 | 895 | obj = UserEmailMap.query().get(email_id) |
|
899 | 896 | if obj and obj.user_id == user.user_id: |
|
900 | 897 | self.sa.delete(obj) |
|
901 | 898 | |
|
902 | 899 | def parse_ip_range(self, ip_range): |
|
903 | 900 | ip_list = [] |
|
904 | 901 | |
|
905 | 902 | def make_unique(value): |
|
906 | 903 | seen = [] |
|
907 | 904 | return [c for c in value if not (c in seen or seen.append(c))] |
|
908 | 905 | |
|
909 | 906 | # first split by commas 
|
910 | 907 | for ip_range in ip_range.split(','): |
|
911 | 908 | if not ip_range: |
|
912 | 909 | continue |
|
913 | 910 | ip_range = ip_range.strip() |
|
914 | 911 | if '-' in ip_range: |
|
915 | 912 | start_ip, end_ip = ip_range.split('-', 1) |
|
916 | 913 | start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip())) |
|
917 | 914 | end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip())) |
|
918 | 915 | parsed_ip_range = [] |
|
919 | 916 | |
|
920 | 917 | for index in range(int(start_ip), int(end_ip) + 1): |
|
921 | 918 | new_ip = ipaddress.ip_address(index) |
|
922 | 919 | parsed_ip_range.append(str(new_ip)) |
|
923 | 920 | ip_list.extend(parsed_ip_range) |
|
924 | 921 | else: |
|
925 | 922 | ip_list.append(ip_range) |
|
926 | 923 | |
|
927 | 924 | return make_unique(ip_list) |
|
928 | 925 | |
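
parse_ip_range() splits the input on commas, expands inclusive dash ranges into individual addresses and de-duplicates while preserving order. For example (import path assumed as elsewhere):

    # Example of the expansion performed by parse_ip_range()
    from rhodecode.model.user import UserModel

    ips = UserModel().parse_ip_range('127.0.0.1, 192.168.0.1-192.168.0.3')
    # -> ['127.0.0.1', '192.168.0.1', '192.168.0.2', '192.168.0.3']
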
|
929 | 926 | def add_extra_ip(self, user, ip, description=None): |
|
930 | 927 | """ |
|
931 | 928 | Adds ip address to UserIpMap |
|
932 | 929 | |
|
933 | 930 | :param user: |
|
934 | 931 | :param ip: |
|
935 | 932 | """ |
|
936 | 933 | |
|
937 | 934 | user = self._get_user(user) |
|
938 | 935 | obj = UserIpMap() |
|
939 | 936 | obj.user = user |
|
940 | 937 | obj.ip_addr = ip |
|
941 | 938 | obj.description = description |
|
942 | 939 | self.sa.add(obj) |
|
943 | 940 | return obj |
|
944 | 941 | |
|
945 | 942 | auth_token_role = AuthTokenModel.cls |
|
946 | 943 | |
|
947 | 944 | def add_auth_token(self, user, lifetime_minutes, role, description=u'', |
|
948 | 945 | scope_callback=None): |
|
949 | 946 | """ |
|
950 | 947 | Add AuthToken for user. |
|
951 | 948 | |
|
952 | 949 | :param user: username/user_id |
|
953 | 950 | :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit |
|
954 | 951 | :param role: one of AuthTokenModel.cls.ROLE_* |
|
955 | 952 | :param description: optional string description |
|
956 | 953 | """ |
|
957 | 954 | |
|
958 | 955 | token = AuthTokenModel().create( |
|
959 | 956 | user, description, lifetime_minutes, role) |
|
960 | 957 | if scope_callback and callable(scope_callback): |
|
961 | 958 | # call the callback if provided; used to attach scope for the EE edition 
|
962 | 959 | scope_callback(token) |
|
963 | 960 | return token |
|
964 | 961 | |
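
add_auth_token() wraps AuthTokenModel().create() and optionally hands the new token to a scope callback. A sketch mirroring the feed token generated for new accounts:

    # Sketch only: issue a non-expiring feed token for a user.
    from rhodecode.model.user import UserModel

    model = UserModel()
    token = model.add_auth_token(
        user='jsmith', lifetime_minutes=-1,    # -1 means no expiry
        role=model.auth_token_role.ROLE_FEED,
        description=u'Generated feed token')
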
|
965 | 962 | def delete_extra_ip(self, user, ip_id): |
|
966 | 963 | """ |
|
967 | 964 | Removes ip address from UserIpMap |
|
968 | 965 | |
|
969 | 966 | :param user: |
|
970 | 967 | :param ip_id: |
|
971 | 968 | """ |
|
972 | 969 | user = self._get_user(user) |
|
973 | 970 | obj = UserIpMap.query().get(ip_id) |
|
974 | 971 | if obj and obj.user_id == user.user_id: |
|
975 | 972 | self.sa.delete(obj) |
|
976 | 973 | |
|
977 | 974 | def get_accounts_in_creation_order(self, current_user=None): |
|
978 | 975 | """ |
|
979 | 976 | Get accounts in order of creation for deactivation for license limits |
|
980 | 977 | |
|
981 | 978 | pick the currently logged-in user and append it to the list in position 0 

982 | 979 | pick all super-admins in order of creation date and add them to the list 

983 | 980 | pick all other accounts in order of creation and add them to the list. 
|
984 | 981 | |
|
985 | 982 | Based on that list, the last accounts can be disabled as they are |
|
986 | 983 | created at the end and don't include any of the super admins as well |
|
987 | 984 | as the current user. |
|
988 | 985 | |
|
989 | 986 | :param current_user: optionally current user running this operation |
|
990 | 987 | """ |
|
991 | 988 | |
|
992 | 989 | if not current_user: |
|
993 | 990 | current_user = get_current_rhodecode_user() |
|
994 | 991 | active_super_admins = [ |
|
995 | 992 | x.user_id for x in User.query() |
|
996 | 993 | .filter(User.user_id != current_user.user_id) |
|
997 | 994 | .filter(User.active == true()) |
|
998 | 995 | .filter(User.admin == true()) |
|
999 | 996 | .order_by(User.created_on.asc())] |
|
1000 | 997 | |
|
1001 | 998 | active_regular_users = [ |
|
1002 | 999 | x.user_id for x in User.query() |
|
1003 | 1000 | .filter(User.user_id != current_user.user_id) |
|
1004 | 1001 | .filter(User.active == true()) |
|
1005 | 1002 | .filter(User.admin == false()) |
|
1006 | 1003 | .order_by(User.created_on.asc())] |
|
1007 | 1004 | |
|
1008 | 1005 | list_of_accounts = [current_user.user_id] |
|
1009 | 1006 | list_of_accounts += active_super_admins |
|
1010 | 1007 | list_of_accounts += active_regular_users |
|
1011 | 1008 | |
|
1012 | 1009 | return list_of_accounts |
|
1013 | 1010 | |
|
1014 | 1011 | def deactivate_last_users(self, expected_users, current_user=None): |
|
1015 | 1012 | """ |
|
1016 | 1013 | Deactivate accounts that are over the license limits. |
|
1017 | 1014 | Algorithm of which accounts to disable is based on the formula: 
|
1018 | 1015 | |
|
1019 | 1016 | Get current user, then super admins in creation order, then regular |
|
1020 | 1017 | active users in creation order. |
|
1021 | 1018 | |
|
1022 | 1019 | Using that list we mark all accounts from the end of it as inactive. |
|
1023 | 1020 | This way we block only the latest created accounts. 
|
1024 | 1021 | |
|
1025 | 1022 | :param expected_users: number of users allowed; we deactivate 

1026 | 1023 | the accounts past that count, from the end of the ordered list 
|
1027 | 1024 | """ |
|
1028 | 1025 | |
|
1029 | 1026 | list_of_accounts = self.get_accounts_in_creation_order( |
|
1030 | 1027 | current_user=current_user) |
|
1031 | 1028 | |
|
1032 | 1029 | for acc_id in list_of_accounts[expected_users + 1:]: |
|
1033 | 1030 | user = User.get(acc_id) |
|
1034 | 1031 | log.info('Deactivating account %s for license unlock', user) |
|
1035 | 1032 | user.active = False |
|
1036 | 1033 | Session().add(user) |
|
1037 | 1034 | Session().commit() |
|
1038 | 1035 | |
|
1039 | 1036 | return |
|
1040 | 1037 | |
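
The two methods above implement the license-limit lockout: get_accounts_in_creation_order() puts the current user first, then super-admins and regular users by creation date, and deactivate_last_users() switches off everything past the allowed count. A sketch of the ordering and the slice it applies (the seat count is an assumed example value):

    # Sketch only: which accounts deactivate_last_users(expected_users=25)
    # would disable, given an assumed 25-seat license.
    from rhodecode.model.user import UserModel

    accounts = UserModel().get_accounts_in_creation_order()
    licensed_seats = 25
    over_limit = accounts[licensed_seats + 1:]   # same slice the model applies
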
|
1041 | 1038 | def get_user_log(self, user, filter_term): |
|
1042 | 1039 | user_log = UserLog.query()\ |
|
1043 | 1040 | .filter(or_(UserLog.user_id == user.user_id, |
|
1044 | 1041 | UserLog.username == user.username))\ |
|
1045 | 1042 | .options(joinedload(UserLog.user))\ |
|
1046 | 1043 | .options(joinedload(UserLog.repository))\ |
|
1047 | 1044 | .order_by(UserLog.action_date.desc()) |
|
1048 | 1045 | |
|
1049 | 1046 | user_log = user_log_filter(user_log, filter_term) |
|
1050 | 1047 | return user_log |