lint: use null() to compare to == None for linters to be happy
super-admin
r5180:0f2a8907 default
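The fix is mechanical but worth spelling out. SQLAlchemy overloads == and != on column attributes, so Column == None is legal and compiles to IS NULL; linters such as flake8, however, flag every == None comparison (pycodestyle E711), because in plain Python the idiom is "is None". Comparing against sqlalchemy.null() emits the same SQL while keeping linters happy. A minimal standalone sketch, assuming SQLAlchemy 1.4+ (the RepoGroup mapping here is a simplified stand-in for the real model, not part of this diff):

from sqlalchemy import Column, Integer, null, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class RepoGroup(Base):
    # simplified stand-in for rhodecode.model.db.RepoGroup
    __tablename__ = 'groups'
    group_id = Column(Integer, primary_key=True)
    group_parent_id = Column(Integer, nullable=True)

# flagged by flake8 as E711, although SQLAlchemy accepts it and
# compiles it to "group_parent_id IS NULL":
#     select(RepoGroup).where(RepoGroup.group_parent_id == None)

# lint-clean equivalents producing identical SQL:
stmt = select(RepoGroup).where(RepoGroup.group_parent_id == null())
stmt_alt = select(RepoGroup).where(RepoGroup.group_parent_id.is_(None))
print(stmt)  # ... WHERE groups.group_parent_id IS NULL

Either form works; this commit standardizes on null() because it is a pure drop-in replacement for the existing == None / != None comparisons in the queries below.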
@@ -1,412 +1,412 b''
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 RhodeCode task modules, containing all tasks that are supposed to be run
21 21 by the celery daemon
22 22 """
23 23
24 24 import os
25 25 import time
26 26
27 27 from pyramid_mailer.mailer import Mailer
28 28 from pyramid_mailer.message import Message
29 29 from email.utils import formatdate
30 30
31 31 import rhodecode
32 32 from rhodecode.lib import audit_logger
33 33 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
34 34 from rhodecode.lib import hooks_base
35 35 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
36 36 from rhodecode.lib.statsd_client import StatsdClient
37 37 from rhodecode.model.db import (
38 Session, IntegrityError, true, Repository, RepoGroup, User)
38 true, null, Session, IntegrityError, Repository, RepoGroup, User)
39 39 from rhodecode.model.permission import PermissionModel
40 40
41 41
42 42 @async_task(ignore_result=True, base=RequestContextTask)
43 43 def send_email(recipients, subject, body='', html_body='', email_config=None,
44 44 extra_headers=None):
45 45 """
46 46 Sends an email with defined parameters from the .ini files.
47 47
48 48 :param recipients: list of recipients; if this is empty, the defined email
49 49 address from field 'email_to' is used instead
50 50 :param subject: subject of the mail
51 51 :param body: body of the mail
52 52 :param html_body: html version of body
53 53 :param email_config: specify custom configuration for mailer
54 54 :param extra_headers: specify custom headers
55 55 """
56 56 log = get_logger(send_email)
57 57
58 58 email_config = email_config or rhodecode.CONFIG
59 59
60 60 mail_server = email_config.get('smtp_server') or None
61 61 if mail_server is None:
62 62 log.error("SMTP server information missing. Sending email failed. "
63 63 "Make sure that `smtp_server` variable is configured "
64 64 "inside the .ini file")
65 65 return False
66 66
67 67 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
68 68
69 69 if recipients:
70 70 if isinstance(recipients, str):
71 71 recipients = recipients.split(',')
72 72 else:
73 73 # if recipients are not defined we send to email_config + all admins
74 74 admins = []
75 75 for u in User.query().filter(User.admin == true()).all():
76 76 if u.email:
77 77 admins.append(u.email)
78 78 recipients = []
79 79 config_email = email_config.get('email_to')
80 80 if config_email:
81 81 recipients += [config_email]
82 82 recipients += admins
83 83
84 84 # translate our LEGACY config into the one that pyramid_mailer supports
85 85 email_conf = dict(
86 86 host=mail_server,
87 87 port=email_config.get('smtp_port', 25),
88 88 username=email_config.get('smtp_username'),
89 89 password=email_config.get('smtp_password'),
90 90
91 91 tls=str2bool(email_config.get('smtp_use_tls')),
92 92 ssl=str2bool(email_config.get('smtp_use_ssl')),
93 93
94 94 # SSL key file
95 95 # keyfile='',
96 96
97 97 # SSL certificate file
98 98 # certfile='',
99 99
100 100 # Location of maildir
101 101 # queue_path='',
102 102
103 103 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
104 104
105 105 debug=str2bool(email_config.get('smtp_debug')),
106 106 # /usr/sbin/sendmail Sendmail executable
107 107 # sendmail_app='',
108 108
109 109 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
110 110 # sendmail_template='',
111 111 )
112 112
113 113 if extra_headers is None:
114 114 extra_headers = {}
115 115
116 116 extra_headers.setdefault('Date', formatdate(time.time()))
117 117
118 118 if 'thread_ids' in extra_headers:
119 119 thread_ids = extra_headers.pop('thread_ids')
120 120 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
121 121
122 122 try:
123 123 mailer = Mailer(**email_conf)
124 124
125 125 message = Message(subject=subject,
126 126 sender=email_conf['default_sender'],
127 127 recipients=recipients,
128 128 body=body, html=html_body,
129 129 extra_headers=extra_headers)
130 130 mailer.send_immediately(message)
131 131 statsd = StatsdClient.statsd
132 132 if statsd:
133 133 statsd.incr('rhodecode_email_sent_total')
134 134
135 135 except Exception:
136 136 log.exception('Mail sending failed')
137 137 return False
138 138 return True
139 139
140 140
141 141 @async_task(ignore_result=True, base=RequestContextTask)
142 142 def create_repo(form_data, cur_user):
143 143 from rhodecode.model.repo import RepoModel
144 144 from rhodecode.model.user import UserModel
145 145 from rhodecode.model.scm import ScmModel
146 146 from rhodecode.model.settings import SettingsModel
147 147
148 148 log = get_logger(create_repo)
149 149
150 150 cur_user = UserModel()._get_user(cur_user)
151 151 owner = cur_user
152 152
153 153 repo_name = form_data['repo_name']
154 154 repo_name_full = form_data['repo_name_full']
155 155 repo_type = form_data['repo_type']
156 156 description = form_data['repo_description']
157 157 private = form_data['repo_private']
158 158 clone_uri = form_data.get('clone_uri')
159 159 repo_group = safe_int(form_data['repo_group'])
160 160 copy_fork_permissions = form_data.get('copy_permissions')
161 161 copy_group_permissions = form_data.get('repo_copy_permissions')
162 162 fork_of = form_data.get('fork_parent_id')
163 163 state = form_data.get('repo_state', Repository.STATE_PENDING)
164 164
165 165 # repo creation defaults, private and repo_type are filled in form
166 166 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
167 167 enable_statistics = form_data.get(
168 168 'enable_statistics', defs.get('repo_enable_statistics'))
169 169 enable_locking = form_data.get(
170 170 'enable_locking', defs.get('repo_enable_locking'))
171 171 enable_downloads = form_data.get(
172 172 'enable_downloads', defs.get('repo_enable_downloads'))
173 173
174 174 # set landing rev based on default branches for SCM
175 175 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
176 176
177 177 try:
178 178 RepoModel()._create_repo(
179 179 repo_name=repo_name_full,
180 180 repo_type=repo_type,
181 181 description=description,
182 182 owner=owner,
183 183 private=private,
184 184 clone_uri=clone_uri,
185 185 repo_group=repo_group,
186 186 landing_rev=landing_ref,
187 187 fork_of=fork_of,
188 188 copy_fork_permissions=copy_fork_permissions,
189 189 copy_group_permissions=copy_group_permissions,
190 190 enable_statistics=enable_statistics,
191 191 enable_locking=enable_locking,
192 192 enable_downloads=enable_downloads,
193 193 state=state
194 194 )
195 195 Session().commit()
196 196
197 197 # now create this repo on Filesystem
198 198 RepoModel()._create_filesystem_repo(
199 199 repo_name=repo_name,
200 200 repo_type=repo_type,
201 201 repo_group=RepoModel()._get_repo_group(repo_group),
202 202 clone_uri=clone_uri,
203 203 )
204 204 repo = Repository.get_by_repo_name(repo_name_full)
205 205 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
206 206
207 207 # update repo commit caches initially
208 208 repo.update_commit_cache()
209 209
210 210 # set new created state
211 211 repo.set_state(Repository.STATE_CREATED)
212 212 repo_id = repo.repo_id
213 213 repo_data = repo.get_api_data()
214 214
215 215 audit_logger.store(
216 216 'repo.create', action_data={'data': repo_data},
217 217 user=cur_user,
218 218 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
219 219
220 220 Session().commit()
221 221
222 222 PermissionModel().trigger_permission_flush()
223 223
224 224 except Exception as e:
225 225 log.warning('Exception occurred when creating repository, '
226 226 'doing cleanup...', exc_info=True)
227 227 if isinstance(e, IntegrityError):
228 228 Session().rollback()
229 229
230 230 # rollback things manually !
231 231 repo = Repository.get_by_repo_name(repo_name_full)
232 232 if repo:
233 233 Repository.delete(repo.repo_id)
234 234 Session().commit()
235 235 RepoModel()._delete_filesystem_repo(repo)
236 236 log.info('Cleanup of repo %s finished', repo_name_full)
237 237 raise
238 238
239 239 return True
240 240
241 241
242 242 @async_task(ignore_result=True, base=RequestContextTask)
243 243 def create_repo_fork(form_data, cur_user):
244 244 """
245 245 Creates a fork of repository using internal VCS methods
246 246 """
247 247 from rhodecode.model.repo import RepoModel
248 248 from rhodecode.model.user import UserModel
249 249
250 250 log = get_logger(create_repo_fork)
251 251
252 252 cur_user = UserModel()._get_user(cur_user)
253 253 owner = cur_user
254 254
255 255 repo_name = form_data['repo_name'] # fork in this case
256 256 repo_name_full = form_data['repo_name_full']
257 257 repo_type = form_data['repo_type']
258 258 description = form_data['description']
259 259 private = form_data['private']
260 260 clone_uri = form_data.get('clone_uri')
261 261 repo_group = safe_int(form_data['repo_group'])
262 262 landing_ref = form_data['landing_rev']
263 263 copy_fork_permissions = form_data.get('copy_permissions')
264 264 fork_id = safe_int(form_data.get('fork_parent_id'))
265 265
266 266 try:
267 267 fork_of = RepoModel()._get_repo(fork_id)
268 268 RepoModel()._create_repo(
269 269 repo_name=repo_name_full,
270 270 repo_type=repo_type,
271 271 description=description,
272 272 owner=owner,
273 273 private=private,
274 274 clone_uri=clone_uri,
275 275 repo_group=repo_group,
276 276 landing_rev=landing_ref,
277 277 fork_of=fork_of,
278 278 copy_fork_permissions=copy_fork_permissions
279 279 )
280 280
281 281 Session().commit()
282 282
283 283 base_path = Repository.base_path()
284 284 source_repo_path = os.path.join(base_path, fork_of.repo_name)
285 285
286 286 # now create this repo on Filesystem
287 287 RepoModel()._create_filesystem_repo(
288 288 repo_name=repo_name,
289 289 repo_type=repo_type,
290 290 repo_group=RepoModel()._get_repo_group(repo_group),
291 291 clone_uri=source_repo_path,
292 292 )
293 293 repo = Repository.get_by_repo_name(repo_name_full)
294 294 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
295 295
296 296 # update repo commit caches initially
297 297 config = repo._config
298 298 config.set('extensions', 'largefiles', '')
299 299 repo.update_commit_cache(config=config)
300 300
301 301 # set new created state
302 302 repo.set_state(Repository.STATE_CREATED)
303 303
304 304 repo_id = repo.repo_id
305 305 repo_data = repo.get_api_data()
306 306 audit_logger.store(
307 307 'repo.fork', action_data={'data': repo_data},
308 308 user=cur_user,
309 309 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
310 310
311 311 Session().commit()
312 312 except Exception as e:
313 313 log.warning('Exception occurred when forking repository, '
314 314 'doing cleanup...', exc_info=True)
315 315 if isinstance(e, IntegrityError):
316 316 Session().rollback()
317 317
318 318 # rollback things manually !
319 319 repo = Repository.get_by_repo_name(repo_name_full)
320 320 if repo:
321 321 Repository.delete(repo.repo_id)
322 322 Session().commit()
323 323 RepoModel()._delete_filesystem_repo(repo)
324 324 log.info('Cleanup of repo %s finished', repo_name_full)
325 325 raise
326 326
327 327 return True
328 328
329 329
330 330 @async_task(ignore_result=True, base=RequestContextTask)
331 331 def repo_maintenance(repoid):
332 332 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
333 333 log = get_logger(repo_maintenance)
334 334 repo = Repository.get_by_id_or_repo_name(repoid)
335 335 if repo:
336 336 maintenance = repo_maintenance_lib.RepoMaintenance()
337 337 tasks = maintenance.get_tasks_for_repo(repo)
338 338 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
339 339 executed_types = maintenance.execute(repo)
340 340 log.debug('Got execution results %s', executed_types)
341 341 else:
342 342 log.debug('Repo `%s` not found or without a clone_url', repoid)
343 343
344 344
345 345 @async_task(ignore_result=True, base=RequestContextTask)
346 346 def check_for_update(send_email_notification=True, email_recipients=None):
347 347 from rhodecode.model.update import UpdateModel
348 348 from rhodecode.model.notification import EmailNotificationModel
349 349
350 350 log = get_logger(check_for_update)
351 351 update_url = UpdateModel().get_update_url()
352 352 cur_ver = rhodecode.__version__
353 353
354 354 try:
355 355 data = UpdateModel().get_update_data(update_url)
356 356
357 357 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
358 358 latest_ver = data['versions'][0]['version']
359 359 UpdateModel().store_version(latest_ver)
360 360
361 361 if send_email_notification:
362 362 log.debug('Send email notification is enabled. '
363 363 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
364 364 if UpdateModel().is_outdated(current_ver, latest_ver):
365 365
366 366 email_kwargs = {
367 367 'current_ver': current_ver,
368 368 'latest_ver': latest_ver,
369 369 }
370 370
371 371 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
372 372 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
373 373
374 374 email_recipients = aslist(email_recipients, sep=',') or \
375 375 [user.email for user in User.get_all_super_admins()]
376 376 run_task(send_email, email_recipients, subject,
377 377 email_body_plaintext, email_body)
378 378
379 379 except Exception:
380 380 log.exception('Failed to check for update')
381 381 raise
382 382
383 383
384 384 def sync_last_update_for_objects(*args, **kwargs):
385 385 skip_repos = kwargs.get('skip_repos')
386 386 if not skip_repos:
387 387 repos = Repository.query() \
388 388 .order_by(Repository.group_id.asc())
389 389
390 390 for repo in repos:
391 391 repo.update_commit_cache()
392 392
393 393 skip_groups = kwargs.get('skip_groups')
394 394 if not skip_groups:
395 395 repo_groups = RepoGroup.query() \
396 .filter(RepoGroup.group_parent_id == None)
396 .filter(RepoGroup.group_parent_id == null())
397 397
398 398 for root_gr in repo_groups:
399 399 for repo_gr in reversed(root_gr.recursive_groups()):
400 400 repo_gr.update_commit_cache()
401 401
402 402
403 403 @async_task(ignore_result=True, base=RequestContextTask)
404 404 def sync_last_update(*args, **kwargs):
405 405 sync_last_update_for_objects(*args, **kwargs)
406 406
407 407
408 408 @async_task(ignore_result=False)
409 409 def beat_check(*args, **kwargs):
410 410 log = get_logger(beat_check)
411 411 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
412 412 return time.time()
@@ -1,402 +1,402 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import itertools
21 21 import logging
22 22 import collections
23 23
24 24 from rhodecode.model import BaseModel
25 25 from rhodecode.model.db import (
26 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
26 null, ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
27 27 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
28 28 from rhodecode.lib.markup_renderer import (
29 29 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 class ChangesetStatusModel(BaseModel):
35 35
36 36 cls = ChangesetStatus
37 37
38 38 def __get_changeset_status(self, changeset_status):
39 39 return self._get_instance(ChangesetStatus, changeset_status)
40 40
41 41 def __get_pull_request(self, pull_request):
42 42 return self._get_instance(PullRequest, pull_request)
43 43
44 44 def _get_status_query(self, repo, revision, pull_request,
45 45 with_revisions=False):
46 46 repo = self._get_repo(repo)
47 47
48 48 q = ChangesetStatus.query()\
49 49 .filter(ChangesetStatus.repo == repo)
50 50 if not with_revisions:
51 51 q = q.filter(ChangesetStatus.version == 0)
52 52
53 53 if revision:
54 54 q = q.filter(ChangesetStatus.revision == revision)
55 55 elif pull_request:
56 56 pull_request = self.__get_pull_request(pull_request)
57 57 # TODO: johbo: Think about the impact of this join, there must
58 58 # be a reason why ChangesetStatus and ChangesetComment are linked
59 59 # to the pull request. Might be that we want to do the same for
60 60 # the pull_request_version_id.
61 61 q = q.join(ChangesetComment).filter(
62 62 ChangesetStatus.pull_request == pull_request,
63 ChangesetComment.pull_request_version_id == None)
63 ChangesetComment.pull_request_version_id == null())
64 64 else:
65 65 raise Exception('Please specify revision or pull_request')
66 66 q = q.order_by(ChangesetStatus.version.asc())
67 67 return q
68 68
69 69 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
70 70 trim_votes=True):
71 71 """
72 72 Calculate status based on the given group members and voting rule
73 73
74 74
75 75 group1 - 4 members, 3 required for approval
76 76 user1 - approved
77 77 user2 - reject
78 78 user3 - approved
79 79 user4 - rejected
80 80
81 81 final_state: rejected, reason: did not get at least 3 approvals
82 82
83 83
84 84 group1 - 4 members, 2 required for approval
85 85 user1 - approved
86 86 user2 - reject
87 87 user3 - approved
88 88 user4 - rejected
89 89
90 90 final_state: approved, reason: got at least 2 approvals
91 91
92 92 group1 - 4 members, ALL required for approval
93 93 user1 - approved
94 94 user2 - reject
95 95 user3 - approved
96 96 user4 - rejected
97 97
98 98 final_state: rejected, reason: not all members approved
99 99
100 100
101 101 group1 - 4 members, ALL required for approval
102 102 user1 - approved
103 103 user2 - approved
104 104 user3 - approved
105 105 user4 - approved
106 106
107 107 final_state: approved, reason: all approvals received
108 108
109 109 group1 - 4 members, 5 required for approval
110 110 (required approvals should be trimmed to the number of actual members)
111 111
112 112 user1 - approved
113 113 user2 - approved
114 114 user3 - approved
115 115 user4 - approved
116 116
117 117 final_state: approved, reason: all approvals received
118 118
119 119 """
120 120 group_vote_data = {}
121 121 got_rule = False
122 122 members = collections.OrderedDict()
123 123 for review_obj, user, reasons, mandatory, statuses \
124 124 in group_statuses_by_reviewers:
125 125
126 126 if not got_rule:
127 127 group_vote_data = review_obj.rule_user_group_data()
128 128 got_rule = bool(group_vote_data)
129 129
130 130 members[user.user_id] = statuses
131 131
132 132 if not group_vote_data:
133 133 return []
134 134
135 135 required_votes = group_vote_data['vote_rule']
136 136 if required_votes == -1:
137 137 # -1 means all required, so we replace it with how many people
138 138 # are in the members
139 139 required_votes = len(members)
140 140
141 141 if trim_votes and required_votes > len(members):
142 142 # we require more votes than we have members in the group
143 143 # in this case we trim the required votes to the number of members
144 144 required_votes = len(members)
145 145
146 146 approvals = sum([
147 147 1 for statuses in members.values()
148 148 if statuses and
149 149 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
150 150
151 151 calculated_votes = []
152 152 # we have all votes from users, now check if we have enough votes
153 153 # to fill in the remaining ones
154 154 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
155 155 if approvals >= required_votes:
156 156 fill_in = ChangesetStatus.STATUS_APPROVED
157 157
158 158 for member, statuses in members.items():
159 159 if statuses:
160 160 ver, latest = statuses[0]
161 161 if fill_in == ChangesetStatus.STATUS_APPROVED:
162 162 calculated_votes.append(fill_in)
163 163 else:
164 164 calculated_votes.append(latest.status)
165 165 else:
166 166 calculated_votes.append(fill_in)
167 167
168 168 return calculated_votes
169 169
170 170 def calculate_status(self, statuses_by_reviewers):
171 171 """
172 172 Given the approval statuses from reviewers, calculates final approval
173 173 status. There can only be 3 results: all approved, all rejected, or
174 174 under review when there is no consensus.
175 175
176 176 :param statuses_by_reviewers:
177 177 """
178 178
179 179 def group_rule(element):
180 180 _review_obj = element[0]
181 181 rule_data = _review_obj.rule_user_group_data()
182 182 if rule_data and rule_data['id']:
183 183 return rule_data['id']
184 184 # don't return None, as we can't compare it
185 185 return 0
186 186
187 187 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
188 188
189 189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
190 190
191 191 reviewers_number = len(statuses_by_reviewers)
192 192 votes = collections.defaultdict(int)
193 193 for group, group_statuses_by_reviewers in voting_by_groups:
194 194 if group:
195 195 # calculate how the "group" voted
196 196 for vote_status in self.calculate_group_vote(
197 197 group, group_statuses_by_reviewers):
198 198 votes[vote_status] += 1
199 199 else:
200 200
201 201 for review_obj, user, reasons, mandatory, statuses \
202 202 in group_statuses_by_reviewers:
203 203 # individual vote
204 204 if statuses:
205 205 ver, latest = statuses[0]
206 206 votes[latest.status] += 1
207 207
208 208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
209 209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
210 210
211 211 # TODO(marcink): with group voting, how does rejected work,
212 212 # do we ever get rejected state ?
213 213
214 214 if approved_votes_count and (approved_votes_count == reviewers_number):
215 215 return ChangesetStatus.STATUS_APPROVED
216 216
217 217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
218 218 return ChangesetStatus.STATUS_REJECTED
219 219
220 220 return ChangesetStatus.STATUS_UNDER_REVIEW
221 221
222 222 def get_statuses(self, repo, revision=None, pull_request=None,
223 223 with_revisions=False):
224 224 q = self._get_status_query(repo, revision, pull_request,
225 225 with_revisions)
226 226 return q.all()
227 227
228 228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
229 229 """
230 230 Returns latest status of changeset for given revision or for given
231 231 pull request. Statuses are versioned inside the table itself, and
232 232 version == 0 is always the current one
233 233
234 234 :param repo:
235 235 :param revision: 40char hash or None
236 236 :param pull_request: pull_request reference
237 237 :param as_str: return status as string not object
238 238 """
239 239 q = self._get_status_query(repo, revision, pull_request)
240 240
241 241 # need to use first here since there can be multiple statuses
242 242 # returned from pull_request
243 243 status = q.first()
244 244 if as_str:
245 245 status = status.status if status else status
246 246 st = status or ChangesetStatus.DEFAULT
247 247 return str(st)
248 248 return status
249 249
250 250 def _render_auto_status_message(
251 251 self, status, commit_id=None, pull_request=None):
252 252 """
253 253 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
254 254 so it always looks the same regardless of which default renderer
255 255 the system is using.
256 256
257 257 :param status: status text to change into
258 258 :param commit_id: the commit_id we change the status for
259 259 :param pull_request: the pull request we change the status for
260 260 """
261 261
262 262 new_status = ChangesetStatus.get_status_lbl(status)
263 263
264 264 params = {
265 265 'new_status_label': new_status,
266 266 'pull_request': pull_request,
267 267 'commit_id': commit_id,
268 268 }
269 269 renderer = RstTemplateRenderer()
270 270 return renderer.render('auto_status_change.mako', **params)
271 271
272 272 def set_status(self, repo, status, user, comment=None, revision=None,
273 273 pull_request=None, dont_allow_on_closed_pull_request=False):
274 274 """
275 275 Creates new status for changeset or updates the old ones bumping their
276 276 version, leaving the current status at version == 0
277 277
278 278 :param repo:
279 279 :param revision:
280 280 :param status:
281 281 :param user:
282 282 :param comment:
283 283 :param dont_allow_on_closed_pull_request: don't allow a status change
284 284 if the last status was for a pull request and it's closed. We
285 285 shouldn't mess around with this manually
286 286 """
287 287 repo = self._get_repo(repo)
288 288
289 289 q = ChangesetStatus.query()
290 290
291 291 if revision:
292 292 q = q.filter(ChangesetStatus.repo == repo)
293 293 q = q.filter(ChangesetStatus.revision == revision)
294 294 elif pull_request:
295 295 pull_request = self.__get_pull_request(pull_request)
296 296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
297 297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
298 298 cur_statuses = q.all()
299 299
300 300 # if statuses exists and last is associated with a closed pull request
301 301 # we need to check if we can allow this status change
302 302 if (dont_allow_on_closed_pull_request and cur_statuses
303 303 and getattr(cur_statuses[0].pull_request, 'status', '')
304 304 == PullRequest.STATUS_CLOSED):
305 305 raise StatusChangeOnClosedPullRequestError(
306 306 'Changing status on closed pull request is not allowed'
307 307 )
308 308
309 309 # update all current statuses with older version
310 310 if cur_statuses:
311 311 for st in cur_statuses:
312 312 st.version += 1
313 313 Session().add(st)
314 314 Session().flush()
315 315
316 316 def _create_status(user, repo, status, comment, revision, pull_request):
317 317 new_status = ChangesetStatus()
318 318 new_status.author = self._get_user(user)
319 319 new_status.repo = self._get_repo(repo)
320 320 new_status.status = status
321 321 new_status.comment = comment
322 322 new_status.revision = revision
323 323 new_status.pull_request = pull_request
324 324 return new_status
325 325
326 326 if not comment:
327 327 from rhodecode.model.comment import CommentsModel
328 328 comment = CommentsModel().create(
329 329 text=self._render_auto_status_message(
330 330 status, commit_id=revision, pull_request=pull_request),
331 331 repo=repo,
332 332 user=user,
333 333 pull_request=pull_request,
334 334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
335 335 )
336 336
337 337 if revision:
338 338 new_status = _create_status(
339 339 user=user, repo=repo, status=status, comment=comment,
340 340 revision=revision, pull_request=pull_request)
341 341 Session().add(new_status)
342 342 return new_status
343 343 elif pull_request:
344 344 # pull request can have more than one revision associated to it
345 345 # we need to create new version for each one
346 346 new_statuses = []
347 347 repo = pull_request.source_repo
348 348 for rev in pull_request.revisions:
349 349 new_status = _create_status(
350 350 user=user, repo=repo, status=status, comment=comment,
351 351 revision=rev, pull_request=pull_request)
352 352 new_statuses.append(new_status)
353 353 Session().add(new_status)
354 354 return new_statuses
355 355
356 356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
357 357
358 358 commit_statuses_map = collections.defaultdict(list)
359 359 for st in commit_statuses:
360 360 commit_statuses_map[st.author.username] += [st]
361 361
362 362 reviewers = []
363 363
364 364 def version(commit_status):
365 365 return commit_status.version
366 366
367 367 for obj in reviewers_data:
368 368 if not obj.user:
369 369 continue
370 370 if user and obj.user.username != user.username:
371 371 # single user filter
372 372 continue
373 373
374 374 statuses = commit_statuses_map.get(obj.user.username, None)
375 375 if statuses:
376 376 status_groups = itertools.groupby(
377 377 sorted(statuses, key=version), version)
378 378 statuses = [(x, list(y)[0]) for x, y in status_groups]
379 379
380 380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
381 381
382 382 if user:
383 383 return reviewers[0] if reviewers else reviewers
384 384 else:
385 385 return reviewers
386 386
387 387 def reviewers_statuses(self, pull_request, user=None):
388 388 _commit_statuses = self.get_statuses(
389 389 pull_request.source_repo,
390 390 pull_request=pull_request,
391 391 with_revisions=True)
392 392 reviewers = pull_request.get_pull_request_reviewers(
393 393 role=PullRequestReviewers.ROLE_REVIEWER)
394 394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
395 395
396 396 def calculated_review_status(self, pull_request):
397 397 """
398 398 Calculate the pull request status based on reviewers; it should be a
399 399 list of two-element lists.
400 400 """
401 401 reviewers = self.reviewers_statuses(pull_request)
402 402 return self.calculate_status(reviewers)
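As a reading aid for the voting rules documented in calculate_group_vote above, here is the same arithmetic as a standalone sketch. It is a hypothetical helper operating on plain status strings rather than ChangesetStatus rows, not part of this diff: a group counts as approved once it collects the required number of approvals, -1 means every member is required, and a requirement larger than the group is trimmed down to the group size.

STATUS_APPROVED = 'approved'
STATUS_UNDER_REVIEW = 'under_review'

def group_vote(member_statuses, required_votes, trim_votes=True):
    # member_statuses: user_id -> latest status string, or None if no vote yet
    if required_votes == -1:
        # -1 means all members must approve
        required_votes = len(member_statuses)
    if trim_votes and required_votes > len(member_statuses):
        # more votes required than the group has members: trim to group size
        required_votes = len(member_statuses)

    approvals = sum(1 for status in member_statuses.values()
                    if status == STATUS_APPROVED)

    # threshold met: the whole group counts as approved; otherwise voters
    # keep their own status and non-voters fall back to "under review"
    fill_in = (STATUS_APPROVED if approvals >= required_votes
               else STATUS_UNDER_REVIEW)
    return [fill_in if fill_in == STATUS_APPROVED or status is None else status
            for status in member_statuses.values()]

# docstring example: 4 members, 2 required, 2 approvals -> group approves
votes = group_vote({1: 'approved', 2: 'rejected', 3: 'approved', 4: 'rejected'},
                   required_votes=2)
assert votes == [STATUS_APPROVED] * 4

calculate_status then reduces these per-group votes together with the individual votes: approved only if every reviewer's vote is approved, rejected only if every vote is rejected, and under review otherwise.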
@@ -1,852 +1,852 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 comments model for RhodeCode
21 21 """
22 22 import datetime
23 23
24 24 import logging
25 25 import traceback
26 26 import collections
27 27
28 28 from pyramid.threadlocal import get_current_registry, get_current_request
29 29 from sqlalchemy.sql.expression import null
30 30 from sqlalchemy.sql.functions import coalesce
31 31
32 32 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
33 33 from rhodecode.lib import audit_logger
34 34 from rhodecode.lib.exceptions import CommentVersionMismatch
35 35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
36 36 from rhodecode.model import BaseModel
37 37 from rhodecode.model.db import (
38 38 false, true,
39 39 ChangesetComment,
40 40 User,
41 41 Notification,
42 42 PullRequest,
43 43 AttributeDict,
44 44 ChangesetCommentHistory,
45 45 )
46 46 from rhodecode.model.notification import NotificationModel
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.settings import VcsSettingsModel
49 49 from rhodecode.model.notification import EmailNotificationModel
50 50 from rhodecode.model.validation_schema.schemas import comment_schema
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class CommentsModel(BaseModel):
57 57
58 58 cls = ChangesetComment
59 59
60 60 DIFF_CONTEXT_BEFORE = 3
61 61 DIFF_CONTEXT_AFTER = 3
62 62
63 63 def __get_commit_comment(self, changeset_comment):
64 64 return self._get_instance(ChangesetComment, changeset_comment)
65 65
66 66 def __get_pull_request(self, pull_request):
67 67 return self._get_instance(PullRequest, pull_request)
68 68
69 69 def _extract_mentions(self, s):
70 70 user_objects = []
71 71 for username in extract_mentioned_users(s):
72 72 user_obj = User.get_by_username(username, case_insensitive=True)
73 73 if user_obj:
74 74 user_objects.append(user_obj)
75 75 return user_objects
76 76
77 77 def _get_renderer(self, global_renderer='rst', request=None):
78 78 request = request or get_current_request()
79 79
80 80 try:
81 81 global_renderer = request.call_context.visual.default_renderer
82 82 except AttributeError:
83 83 log.debug("Renderer not set, falling back "
84 84 "to default renderer '%s'", global_renderer)
85 85 except Exception:
86 86 log.error(traceback.format_exc())
87 87 return global_renderer
88 88
89 89 def aggregate_comments(self, comments, versions, show_version, inline=False):
90 90 # group comments by version; track 'at', cumulative 'until', and 'display' objects
91 91
92 92 comment_groups = collections.defaultdict(list)
93 93 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
94 94
95 95 def yield_comments(pos):
96 96 yield from comment_groups[pos]
97 97
98 98 comment_versions = collections.defaultdict(
99 99 lambda: collections.defaultdict(list))
100 100 prev_prvid = -1
101 101 # fake last entry with None, to aggregate on "latest" version which
102 102 # doesn't have a pull_request_version_id
103 103 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
104 104 prvid = ver.pull_request_version_id
105 105 if prev_prvid == -1:
106 106 prev_prvid = prvid
107 107
108 108 for co in yield_comments(prvid):
109 109 comment_versions[prvid]['at'].append(co)
110 110
111 111 # save until
112 112 current = comment_versions[prvid]['at']
113 113 prev_until = comment_versions[prev_prvid]['until']
114 114 cur_until = prev_until + current
115 115 comment_versions[prvid]['until'].extend(cur_until)
116 116
117 117 # save outdated
118 118 if inline:
119 119 outdated = [x for x in cur_until
120 120 if x.outdated_at_version(show_version)]
121 121 else:
122 122 outdated = [x for x in cur_until
123 123 if x.older_than_version(show_version)]
124 124 display = [x for x in cur_until if x not in outdated]
125 125
126 126 comment_versions[prvid]['outdated'] = outdated
127 127 comment_versions[prvid]['display'] = display
128 128
129 129 prev_prvid = prvid
130 130
131 131 return comment_versions
132 132
133 133 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
134 134 qry = Session().query(ChangesetComment) \
135 135 .filter(ChangesetComment.repo == repo)
136 136
137 137 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
138 138 qry = qry.filter(ChangesetComment.comment_type == comment_type)
139 139
140 140 if user:
141 141 user = self._get_user(user)
142 142 if user:
143 143 qry = qry.filter(ChangesetComment.user_id == user.user_id)
144 144
145 145 if commit_id:
146 146 qry = qry.filter(ChangesetComment.revision == commit_id)
147 147
148 148 qry = qry.order_by(ChangesetComment.created_on)
149 149 return qry.all()
150 150
151 151 def get_repository_unresolved_todos(self, repo):
152 152 todos = Session().query(ChangesetComment) \
153 153 .filter(ChangesetComment.repo == repo) \
154 .filter(ChangesetComment.resolved_by == None) \
154 .filter(ChangesetComment.resolved_by == null()) \
155 155 .filter(ChangesetComment.comment_type
156 156 == ChangesetComment.COMMENT_TYPE_TODO)
157 157 todos = todos.all()
158 158
159 159 return todos
160 160
161 161 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
162 162
163 163 todos = Session().query(ChangesetComment) \
164 164 .filter(ChangesetComment.pull_request == pull_request) \
165 .filter(ChangesetComment.resolved_by == None) \
165 .filter(ChangesetComment.resolved_by == null()) \
166 166 .filter(ChangesetComment.comment_type
167 167 == ChangesetComment.COMMENT_TYPE_TODO)
168 168
169 169 if not include_drafts:
170 170 todos = todos.filter(ChangesetComment.draft == false())
171 171
172 172 if not show_outdated:
173 173 todos = todos.filter(
174 174 coalesce(ChangesetComment.display_state, '') !=
175 175 ChangesetComment.COMMENT_OUTDATED)
176 176
177 177 todos = todos.all()
178 178
179 179 return todos
180 180
181 181 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
182 182
183 183 todos = Session().query(ChangesetComment) \
184 184 .filter(ChangesetComment.pull_request == pull_request) \
185 185 .filter(ChangesetComment.resolved_by != None) \
186 186 .filter(ChangesetComment.comment_type
187 187 == ChangesetComment.COMMENT_TYPE_TODO)
188 188
189 189 if not include_drafts:
190 190 todos = todos.filter(ChangesetComment.draft == false())
191 191
192 192 if not show_outdated:
193 193 todos = todos.filter(
194 194 coalesce(ChangesetComment.display_state, '') !=
195 195 ChangesetComment.COMMENT_OUTDATED)
196 196
197 197 todos = todos.all()
198 198
199 199 return todos
200 200
201 201 def get_pull_request_drafts(self, user_id, pull_request):
202 202 drafts = Session().query(ChangesetComment) \
203 203 .filter(ChangesetComment.pull_request == pull_request) \
204 204 .filter(ChangesetComment.user_id == user_id) \
205 205 .filter(ChangesetComment.draft == true())
206 206 return drafts.all()
207 207
208 208 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
209 209
210 210 todos = Session().query(ChangesetComment) \
211 211 .filter(ChangesetComment.revision == commit_id) \
212 .filter(ChangesetComment.resolved_by == None) \
212 .filter(ChangesetComment.resolved_by == null()) \
213 213 .filter(ChangesetComment.comment_type
214 214 == ChangesetComment.COMMENT_TYPE_TODO)
215 215
216 216 if not include_drafts:
217 217 todos = todos.filter(ChangesetComment.draft == false())
218 218
219 219 if not show_outdated:
220 220 todos = todos.filter(
221 221 coalesce(ChangesetComment.display_state, '') !=
222 222 ChangesetComment.COMMENT_OUTDATED)
223 223
224 224 todos = todos.all()
225 225
226 226 return todos
227 227
228 228 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
229 229
230 230 todos = Session().query(ChangesetComment) \
231 231 .filter(ChangesetComment.revision == commit_id) \
232 232 .filter(ChangesetComment.resolved_by != None) \
233 233 .filter(ChangesetComment.comment_type
234 234 == ChangesetComment.COMMENT_TYPE_TODO)
235 235
236 236 if not include_drafts:
237 237 todos = todos.filter(ChangesetComment.draft == false())
238 238
239 239 if not show_outdated:
240 240 todos = todos.filter(
241 241 coalesce(ChangesetComment.display_state, '') !=
242 242 ChangesetComment.COMMENT_OUTDATED)
243 243
244 244 todos = todos.all()
245 245
246 246 return todos
247 247
248 248 def get_commit_inline_comments(self, commit_id, include_drafts=True):
249 249 inline_comments = Session().query(ChangesetComment) \
250 250 .filter(ChangesetComment.line_no != None) \
251 251 .filter(ChangesetComment.f_path != None) \
252 252 .filter(ChangesetComment.revision == commit_id)
253 253
254 254 if not include_drafts:
255 255 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
256 256
257 257 inline_comments = inline_comments.all()
258 258 return inline_comments
259 259
260 260 def _log_audit_action(self, action, action_data, auth_user, comment):
261 261 audit_logger.store(
262 262 action=action,
263 263 action_data=action_data,
264 264 user=auth_user,
265 265 repo=comment.repo)
266 266
267 267 def create(self, text, repo, user, commit_id=None, pull_request=None,
268 268 f_path=None, line_no=None, status_change=None,
269 269 status_change_type=None, comment_type=None, is_draft=False,
270 270 resolves_comment_id=None, closing_pr=False, send_email=True,
271 271 renderer=None, auth_user=None, extra_recipients=None):
272 272 """
273 273 Creates new comment for commit or pull request.
274 274 If status_change is not None, this comment is associated with a
275 275 status change of a commit or of a commit associated with a pull request
276 276
277 277 :param text:
278 278 :param repo:
279 279 :param user:
280 280 :param commit_id:
281 281 :param pull_request:
282 282 :param f_path:
283 283 :param line_no:
284 284 :param status_change: Label for status change
285 285 :param comment_type: Type of comment
286 286 :param is_draft: is comment a draft only
287 287 :param resolves_comment_id: id of comment which this one will resolve
288 288 :param status_change_type: type of status change
289 289 :param closing_pr:
290 290 :param send_email:
291 291 :param renderer: pick renderer for this comment
292 292 :param auth_user: current authenticated user calling this method
293 293 :param extra_recipients: list of extra users to be added to recipients
294 294 """
295 295
296 296 request = get_current_request()
297 297 _ = request.translate
298 298
299 299 if not renderer:
300 300 renderer = self._get_renderer(request=request)
301 301
302 302 repo = self._get_repo(repo)
303 303 user = self._get_user(user)
304 304 auth_user = auth_user or user
305 305
306 306 schema = comment_schema.CommentSchema()
307 307 validated_kwargs = schema.deserialize(dict(
308 308 comment_body=text,
309 309 comment_type=comment_type,
310 310 is_draft=is_draft,
311 311 comment_file=f_path,
312 312 comment_line=line_no,
313 313 renderer_type=renderer,
314 314 status_change=status_change_type,
315 315 resolves_comment_id=resolves_comment_id,
316 316 repo=repo.repo_id,
317 317 user=user.user_id,
318 318 ))
319 319
320 320 is_draft = validated_kwargs['is_draft']
321 321
322 322 comment = ChangesetComment()
323 323 comment.renderer = validated_kwargs['renderer_type']
324 324 comment.text = validated_kwargs['comment_body']
325 325 comment.f_path = validated_kwargs['comment_file']
326 326 comment.line_no = validated_kwargs['comment_line']
327 327 comment.comment_type = validated_kwargs['comment_type']
328 328 comment.draft = is_draft
329 329
330 330 comment.repo = repo
331 331 comment.author = user
332 332 resolved_comment = self.__get_commit_comment(
333 333 validated_kwargs['resolves_comment_id'])
334 334
335 335 # check if the comment actually belongs to this PR
336 336 if resolved_comment and resolved_comment.pull_request and \
337 337 resolved_comment.pull_request != pull_request:
338 338 log.warning('Comment tried to resolve an unrelated todo comment: %s',
339 339 resolved_comment)
340 340 # comment not bound to this pull request, forbid
341 341 resolved_comment = None
342 342
343 343 elif resolved_comment and resolved_comment.repo and \
344 344 resolved_comment.repo != repo:
345 345 log.warning('Comment tried to resolve an unrelated todo comment: %s',
346 346 resolved_comment)
347 347 # comment not bound to this repo, forbid
348 348 resolved_comment = None
349 349
350 350 if resolved_comment and resolved_comment.resolved_by:
351 351 # if this comment is already resolved, don't mark it again!
352 352 resolved_comment = None
353 353
354 354 comment.resolved_comment = resolved_comment
355 355
356 356 pull_request_id = pull_request
357 357
358 358 commit_obj = None
359 359 pull_request_obj = None
360 360
361 361 if commit_id:
362 362 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
363 363 # do a lookup, so we don't pass something bad here
364 364 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
365 365 comment.revision = commit_obj.raw_id
366 366
367 367 elif pull_request_id:
368 368 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
369 369 pull_request_obj = self.__get_pull_request(pull_request_id)
370 370 comment.pull_request = pull_request_obj
371 371 else:
372 372 raise Exception('Please specify commit or pull_request_id')
373 373
374 374 Session().add(comment)
375 375 Session().flush()
376 376 kwargs = {
377 377 'user': user,
378 378 'renderer_type': renderer,
379 379 'repo_name': repo.repo_name,
380 380 'status_change': status_change,
381 381 'status_change_type': status_change_type,
382 382 'comment_body': text,
383 383 'comment_file': f_path,
384 384 'comment_line': line_no,
385 385 'comment_type': comment_type or 'note',
386 386 'comment_id': comment.comment_id
387 387 }
388 388
389 389 if commit_obj:
390 390 recipients = ChangesetComment.get_users(
391 391 revision=commit_obj.raw_id)
392 392 # add commit author if it's in RhodeCode system
393 393 cs_author = User.get_from_cs_author(commit_obj.author)
394 394 if not cs_author:
395 395 # use repo owner if we cannot extract the author correctly
396 396 cs_author = repo.user
397 397 recipients += [cs_author]
398 398
399 399 commit_comment_url = self.get_url(comment, request=request)
400 400 commit_comment_reply_url = self.get_url(
401 401 comment, request=request,
402 402 anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
403 403
404 404 target_repo_url = h.link_to(
405 405 repo.repo_name,
406 406 h.route_url('repo_summary', repo_name=repo.repo_name))
407 407
408 408 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
409 409 commit_id=commit_id)
410 410
411 411 # commit specifics
412 412 kwargs.update({
413 413 'commit': commit_obj,
414 414 'commit_message': commit_obj.message,
415 415 'commit_target_repo_url': target_repo_url,
416 416 'commit_comment_url': commit_comment_url,
417 417 'commit_comment_reply_url': commit_comment_reply_url,
418 418 'commit_url': commit_url,
419 419 'thread_ids': [commit_url, commit_comment_url],
420 420 })
421 421
422 422 elif pull_request_obj:
423 423 # get the current participants of this pull request
424 424 recipients = ChangesetComment.get_users(
425 425 pull_request_id=pull_request_obj.pull_request_id)
426 426 # add pull request author
427 427 recipients += [pull_request_obj.author]
428 428
429 429 # add the reviewers to notification
430 430 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
431 431
432 432 pr_target_repo = pull_request_obj.target_repo
433 433 pr_source_repo = pull_request_obj.source_repo
434 434
435 435 pr_comment_url = self.get_url(comment, request=request)
436 436 pr_comment_reply_url = self.get_url(
437 437 comment, request=request,
438 438 anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
439 439
440 440 pr_url = h.route_url(
441 441 'pullrequest_show',
442 442 repo_name=pr_target_repo.repo_name,
443 443 pull_request_id=pull_request_obj.pull_request_id, )
444 444
445 445 # set some variables for email notification
446 446 pr_target_repo_url = h.route_url(
447 447 'repo_summary', repo_name=pr_target_repo.repo_name)
448 448
449 449 pr_source_repo_url = h.route_url(
450 450 'repo_summary', repo_name=pr_source_repo.repo_name)
451 451
452 452 # pull request specifics
453 453 kwargs.update({
454 454 'pull_request': pull_request_obj,
455 455 'pr_id': pull_request_obj.pull_request_id,
456 456 'pull_request_url': pr_url,
457 457 'pull_request_target_repo': pr_target_repo,
458 458 'pull_request_target_repo_url': pr_target_repo_url,
459 459 'pull_request_source_repo': pr_source_repo,
460 460 'pull_request_source_repo_url': pr_source_repo_url,
461 461 'pr_comment_url': pr_comment_url,
462 462 'pr_comment_reply_url': pr_comment_reply_url,
463 463 'pr_closing': closing_pr,
464 464 'thread_ids': [pr_url, pr_comment_url],
465 465 })
466 466
467 467 if send_email:
468 468 recipients += [self._get_user(u) for u in (extra_recipients or [])]
469 469
470 470 mention_recipients = set(
471 471 self._extract_mentions(text)).difference(recipients)
472 472
473 473 # create notification objects, and emails
474 474 NotificationModel().create(
475 475 created_by=user,
476 476 notification_subject='', # Filled in based on the notification_type
477 477 notification_body='', # Filled in based on the notification_type
478 478 notification_type=notification_type,
479 479 recipients=recipients,
480 480 mention_recipients=mention_recipients,
481 481 email_kwargs=kwargs,
482 482 )
483 483
484 484 Session().flush()
485 485 if comment.pull_request:
486 486 action = 'repo.pull_request.comment.create'
487 487 else:
488 488 action = 'repo.commit.comment.create'
489 489
490 490 if not is_draft:
491 491 comment_data = comment.get_api_data()
492 492
493 493 self._log_audit_action(
494 494 action, {'data': comment_data}, auth_user, comment)
495 495
496 496 return comment
497 497
498 498 def edit(self, comment_id, text, auth_user, version):
499 499 """
500 500 Change existing comment for commit or pull request.
501 501
502 502 :param comment_id:
503 503 :param text:
504 504 :param auth_user: current authenticated user calling this method
505 505 :param version: last comment version
506 506 """
507 507 if not text:
508 508 log.warning('Missing text for comment, skipping...')
509 509 return
510 510
511 511 comment = ChangesetComment.get(comment_id)
512 512 old_comment_text = comment.text
513 513 comment.text = text
514 514 comment.modified_at = datetime.datetime.now()
515 515 version = safe_int(version)
516 516
517 517 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
518 518 # would return 3 here
519 519 comment_version = ChangesetCommentHistory.get_version(comment_id)
520 520
521 521 if isinstance(version, int) and (comment_version - version) != 1:
522 522 log.warning(
523 523 'Version mismatch comment_version {} submitted {}, skipping'.format(
524 524 comment_version-1, # -1 since note above
525 525 version
526 526 )
527 527 )
528 528 raise CommentVersionMismatch()
529 529
530 530 comment_history = ChangesetCommentHistory()
531 531 comment_history.comment_id = comment_id
532 532 comment_history.version = comment_version
533 533 comment_history.created_by_user_id = auth_user.user_id
534 534 comment_history.text = old_comment_text
535 535 # TODO add email notification
536 536 Session().add(comment_history)
537 537 Session().add(comment)
538 538 Session().flush()
539 539
540 540 if comment.pull_request:
541 541 action = 'repo.pull_request.comment.edit'
542 542 else:
543 543 action = 'repo.commit.comment.edit'
544 544
545 545 comment_data = comment.get_api_data()
546 546 comment_data['old_comment_text'] = old_comment_text
547 547 self._log_audit_action(
548 548 action, {'data': comment_data}, auth_user, comment)
549 549
550 550 return comment_history
551 551
552 552 def delete(self, comment, auth_user):
553 553 """
554 554 Deletes given comment
555 555 """
556 556 comment = self.__get_commit_comment(comment)
557 557 old_data = comment.get_api_data()
558 558 Session().delete(comment)
559 559
560 560 if comment.pull_request:
561 561 action = 'repo.pull_request.comment.delete'
562 562 else:
563 563 action = 'repo.commit.comment.delete'
564 564
565 565 self._log_audit_action(
566 566 action, {'old_data': old_data}, auth_user, comment)
567 567
568 568 return comment
569 569
570 570 def get_all_comments(self, repo_id, revision=None, pull_request=None,
571 571 include_drafts=True, count_only=False):
572 572 q = ChangesetComment.query()\
573 573 .filter(ChangesetComment.repo_id == repo_id)
574 574 if revision:
575 575 q = q.filter(ChangesetComment.revision == revision)
576 576 elif pull_request:
577 577 pull_request = self.__get_pull_request(pull_request)
578 578 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
579 579 else:
580 580 raise Exception('Please specify commit or pull_request')
581 581 if not include_drafts:
582 582 q = q.filter(ChangesetComment.draft == false())
583 583 q = q.order_by(ChangesetComment.created_on)
584 584 if count_only:
585 585 return q.count()
586 586
587 587 return q.all()
588 588
589 589 def get_url(self, comment, request=None, permalink=False, anchor=None):
590 590 if not request:
591 591 request = get_current_request()
592 592
593 593 comment = self.__get_commit_comment(comment)
594 594 if anchor is None:
595 595 anchor = f'comment-{comment.comment_id}'
596 596
597 597 if comment.pull_request:
598 598 pull_request = comment.pull_request
599 599 if permalink:
600 600 return request.route_url(
601 601 'pull_requests_global',
602 602 pull_request_id=pull_request.pull_request_id,
603 603 _anchor=anchor)
604 604 else:
605 605 return request.route_url(
606 606 'pullrequest_show',
607 607 repo_name=safe_str(pull_request.target_repo.repo_name),
608 608 pull_request_id=pull_request.pull_request_id,
609 609 _anchor=anchor)
610 610
611 611 else:
612 612 repo = comment.repo
613 613 commit_id = comment.revision
614 614
615 615 if permalink:
616 616 return request.route_url(
617 617 'repo_commit', repo_name=safe_str(repo.repo_id),
618 618 commit_id=commit_id,
619 619 _anchor=anchor)
620 620
621 621 else:
622 622 return request.route_url(
623 623 'repo_commit', repo_name=safe_str(repo.repo_name),
624 624 commit_id=commit_id,
625 625 _anchor=anchor)
626 626
627 627 def get_comments(self, repo_id, revision=None, pull_request=None):
628 628 """
629 629 Gets main comments based on revision or pull_request_id
630 630
631 631 :param repo_id:
632 632 :param revision:
633 633 :param pull_request:
634 634 """
635 635
636 636 q = ChangesetComment.query()\
637 637 .filter(ChangesetComment.repo_id == repo_id)\
638 .filter(ChangesetComment.line_no == None)\
639 .filter(ChangesetComment.f_path == None)
638 .filter(ChangesetComment.line_no == null())\
639 .filter(ChangesetComment.f_path == null())
640 640 if revision:
641 641 q = q.filter(ChangesetComment.revision == revision)
642 642 elif pull_request:
643 643 pull_request = self.__get_pull_request(pull_request)
644 644 q = q.filter(ChangesetComment.pull_request == pull_request)
645 645 else:
646 646 raise Exception('Please specify commit or pull_request')
647 647 q = q.order_by(ChangesetComment.created_on)
648 648 return q.all()
649 649
650 650 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
651 651 q = self._get_inline_comments_query(repo_id, revision, pull_request)
652 652 return self._group_comments_by_path_and_line_number(q)
653 653
654 654 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
655 655 version=None):
656 656 inline_comms = []
657 657 for fname, per_line_comments in inline_comments.items():
658 658 for lno, comments in per_line_comments.items():
659 659 for comm in comments:
660 660 if not comm.outdated_at_version(version) and skip_outdated:
661 661 inline_comms.append(comm)
662 662
663 663 return inline_comms
664 664
665 665 def get_outdated_comments(self, repo_id, pull_request):
666 666 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
667 667 # of a pull request.
668 668 q = self._all_inline_comments_of_pull_request(pull_request)
669 669 q = q.filter(
670 670 ChangesetComment.display_state ==
671 671 ChangesetComment.COMMENT_OUTDATED
672 672 ).order_by(ChangesetComment.comment_id.asc())
673 673
674 674 return self._group_comments_by_path_and_line_number(q)
675 675
676 676 def _get_inline_comments_query(self, repo_id, revision, pull_request):
677 677 # TODO: johbo: Split this into two methods: One for PR and one for
678 678 # commit.
679 679 if revision:
680 680 q = Session().query(ChangesetComment).filter(
681 681 ChangesetComment.repo_id == repo_id,
682 682 ChangesetComment.line_no != null(),
683 683 ChangesetComment.f_path != null(),
684 684 ChangesetComment.revision == revision)
685 685
686 686 elif pull_request:
687 687 pull_request = self.__get_pull_request(pull_request)
688 688 if not CommentsModel.use_outdated_comments(pull_request):
689 689 q = self._visible_inline_comments_of_pull_request(pull_request)
690 690 else:
691 691 q = self._all_inline_comments_of_pull_request(pull_request)
692 692
693 693 else:
694 694 raise Exception('Please specify commit or pull_request_id')
695 695 q = q.order_by(ChangesetComment.comment_id.asc())
696 696 return q
697 697
698 698 def _group_comments_by_path_and_line_number(self, q):
699 699 comments = q.all()
700 700 paths = collections.defaultdict(lambda: collections.defaultdict(list))
701 701 for co in comments:
702 702 paths[co.f_path][co.line_no].append(co)
703 703 return paths
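
The helper above returns nested defaultdicts keyed first by file path, then by line marker. A hedged illustration with hypothetical stand-in comment tuples:

    import collections

    Comment = collections.namedtuple('Comment', 'f_path line_no text')

    comments = [
        Comment('setup.py', 'n10', 'typo here'),
        Comment('setup.py', 'n10', 'agreed'),
        Comment('README.rst', 'o3', 'outdated section?'),
    ]

    paths = collections.defaultdict(lambda: collections.defaultdict(list))
    for co in comments:
        paths[co.f_path][co.line_no].append(co)

    assert len(paths['setup.py']['n10']) == 2
    assert [c.text for c in paths['README.rst']['o3']] == ['outdated section?']
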
704 704
705 705 @classmethod
706 706 def needed_extra_diff_context(cls):
707 707 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
708 708
709 709 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
710 710 if not CommentsModel.use_outdated_comments(pull_request):
711 711 return
712 712
713 713 comments = self._visible_inline_comments_of_pull_request(pull_request)
714 714 comments_to_outdate = comments.all()
715 715
716 716 for comment in comments_to_outdate:
717 717 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
718 718
719 719 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
720 720 diff_line = _parse_comment_line_number(comment.line_no)
721 721
722 722 try:
723 723 old_context = old_diff_proc.get_context_of_line(
724 724 path=comment.f_path, diff_line=diff_line)
725 725 new_context = new_diff_proc.get_context_of_line(
726 726 path=comment.f_path, diff_line=diff_line)
727 727 except (diffs.LineNotInDiffException,
728 728 diffs.FileNotInDiffException):
729 729 if not comment.draft:
730 730 comment.display_state = ChangesetComment.COMMENT_OUTDATED
731 731 return
732 732
733 733 if old_context == new_context:
734 734 return
735 735
736 736 if self._should_relocate_diff_line(diff_line):
737 737 new_diff_lines = new_diff_proc.find_context(
738 738 path=comment.f_path, context=old_context,
739 739 offset=self.DIFF_CONTEXT_BEFORE)
740 740 if not new_diff_lines and not comment.draft:
741 741 comment.display_state = ChangesetComment.COMMENT_OUTDATED
742 742 else:
743 743 new_diff_line = self._choose_closest_diff_line(
744 744 diff_line, new_diff_lines)
745 745 comment.line_no = _diff_to_comment_line_number(new_diff_line)
746 746 else:
747 747 if not comment.draft:
748 748 comment.display_state = ChangesetComment.COMMENT_OUTDATED
749 749
750 750 def _should_relocate_diff_line(self, diff_line):
751 751 """
752 752 Checks if relocation shall be tried for the given `diff_line`.
753 753
754 754 If a comment points into the first lines, then we can have a situation
755 755 where, after an update, another line has been added on top. In this
756 756 case we would still find the context and move the comment around,
757 757 which would be wrong.
758 758 """
759 759 should_relocate = (
760 760 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
761 761 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
762 762 return should_relocate
763 763
764 764 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
765 765 candidate = new_diff_lines[0]
766 766 best_delta = _diff_line_delta(diff_line, candidate)
767 767 for new_diff_line in new_diff_lines[1:]:
768 768 delta = _diff_line_delta(diff_line, new_diff_line)
769 769 if delta < best_delta:
770 770 candidate = new_diff_line
771 771 best_delta = delta
772 772 return candidate
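
The loop above is an explicit min-by-key search; given the non-empty new_diff_lines the caller guarantees, it is equivalent to this hedged one-liner:

    candidate = min(new_diff_lines, key=lambda nl: _diff_line_delta(diff_line, nl))
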
773 773
774 774 def _visible_inline_comments_of_pull_request(self, pull_request):
775 775 comments = self._all_inline_comments_of_pull_request(pull_request)
776 776 comments = comments.filter(
777 777 coalesce(ChangesetComment.display_state, '') !=
778 778 ChangesetComment.COMMENT_OUTDATED)
779 779 return comments
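
The coalesce() here works around SQL's three-valued logic: "display_state != 'x'" evaluates to NULL (treated as false by WHERE) whenever display_state itself is NULL, so comments whose state was never set would be silently dropped. Coalescing NULL to '' first keeps them. A hedged sketch with a hypothetical model and an illustrative state value:

    from sqlalchemy import Column, Integer, String, func, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class C(Base):
        __tablename__ = 'c'
        id = Column(Integer, primary_key=True)
        display_state = Column(String(128), nullable=True)

    # silently drops rows where display_state IS NULL:
    naive = select(C).where(C.display_state != 'comment_outdated')
    # keeps them, as the method above does:
    safe = select(C).where(
        func.coalesce(C.display_state, '') != 'comment_outdated')
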
780 780
781 781 def _all_inline_comments_of_pull_request(self, pull_request):
782 782 comments = Session().query(ChangesetComment)\
783 .filter(ChangesetComment.line_no != None)\
784 .filter(ChangesetComment.f_path != None)\
783 .filter(ChangesetComment.line_no != null())\
784 .filter(ChangesetComment.f_path != null())\
785 785 .filter(ChangesetComment.pull_request == pull_request)
786 786 return comments
787 787
788 788 def _all_general_comments_of_pull_request(self, pull_request):
789 789 comments = Session().query(ChangesetComment)\
790 .filter(ChangesetComment.line_no == None)\
791 .filter(ChangesetComment.f_path == None)\
790 .filter(ChangesetComment.line_no == null())\
791 .filter(ChangesetComment.f_path == null())\
792 792 .filter(ChangesetComment.pull_request == pull_request)
793 793
794 794 return comments
795 795
796 796 @staticmethod
797 797 def use_outdated_comments(pull_request):
798 798 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
799 799 settings = settings_model.get_general_settings()
800 800 return settings.get('rhodecode_use_outdated_comments', False)
801 801
802 802 def trigger_commit_comment_hook(self, repo, user, action, data=None):
803 803 repo = self._get_repo(repo)
804 804 target_scm = repo.scm_instance()
805 805 if action == 'create':
806 806 trigger_hook = hooks_utils.trigger_comment_commit_hooks
807 807 elif action == 'edit':
808 808 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
809 809 else:
810 810 return
811 811
812 812 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
813 813 repo, action, trigger_hook)
814 814 trigger_hook(
815 815 username=user.username,
816 816 repo_name=repo.repo_name,
817 817 repo_type=target_scm.alias,
818 818 repo=repo,
819 819 data=data)
820 820
821 821
822 822 def _parse_comment_line_number(line_no):
823 823 r"""
824 824 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
825 825 """
826 826 old_line = None
827 827 new_line = None
828 828 if line_no.startswith('o'):
829 829 old_line = int(line_no[1:])
830 830 elif line_no.startswith('n'):
831 831 new_line = int(line_no[1:])
832 832 else:
833 833 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
834 834 return diffs.DiffLineNumber(old_line, new_line)
835 835
836 836
837 837 def _diff_to_comment_line_number(diff_line):
838 838 if diff_line.new is not None:
839 839 return f'n{diff_line.new}'
840 840 elif diff_line.old is not None:
841 841 return f'o{diff_line.old}'
842 842 return ''
843 843
844 844
845 845 def _diff_line_delta(a, b):
846 846 if None not in (a.new, b.new):
847 847 return abs(a.new - b.new)
848 848 elif None not in (a.old, b.old):
849 849 return abs(a.old - b.old)
850 850 else:
851 851 raise ValueError(
852 852 f"Cannot compute delta between {a} and {b}")
@@ -1,237 +1,237 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 Model for integrations
22 22 """
23 23
24 24
25 25 import logging
26 26
27 27 from sqlalchemy import or_, and_
28 28
29 29 from rhodecode import events
30 30 from rhodecode.integrations.types.base import EEIntegration
31 31 from rhodecode.lib.caching_query import FromCache
32 32 from rhodecode.model import BaseModel
33 33 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
34 34 from rhodecode.integrations import integration_type_registry
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class IntegrationModel(BaseModel):
40 40
41 41 cls = Integration
42 42
43 43 def __get_integration(self, integration):
44 44 if isinstance(integration, Integration):
45 45 return integration
46 46 elif isinstance(integration, int):
47 47 return self.sa.query(Integration).get(integration)
48 48 else:
49 49 if integration:
50 50 raise Exception('integration must be an int or an instance'
51 51 ' of Integration, got %s' % type(integration))
52 52
53 53 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
54 54 """ Create an IntegrationType integration """
55 55 integration = Integration()
56 56 integration.integration_type = IntegrationType.key
57 57 self.sa.add(integration)
58 58 self.update_integration(integration, name, enabled, repo, repo_group,
59 59 child_repos_only, settings)
60 60 self.sa.commit()
61 61 return integration
62 62
63 63 def update_integration(self, integration, name, enabled, repo, repo_group,
64 64 child_repos_only, settings):
65 65 integration = self.__get_integration(integration)
66 66
67 67 integration.repo = repo
68 68 integration.repo_group = repo_group
69 69 integration.child_repos_only = child_repos_only
70 70 integration.name = name
71 71 integration.enabled = enabled
72 72 integration.settings = settings
73 73
74 74 return integration
75 75
76 76 def delete(self, integration):
77 77 integration = self.__get_integration(integration)
78 78 if integration:
79 79 self.sa.delete(integration)
80 80 return True
81 81 return False
82 82
83 83 def get_integration_handler(self, integration):
84 84 TypeClass = integration_type_registry.get(integration.integration_type)
85 85 if not TypeClass:
86 86 log.error('No class could be found for integration type: {}'.format(
87 87 integration.integration_type))
88 88 return None
89 89 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
90 90 log.error('EE integration cannot be '
91 91 'executed for integration type: {}'.format(
92 92 integration.integration_type))
93 93 return None
94 94
95 95 return TypeClass(integration.settings)
96 96
97 97 def send_event(self, integration, event):
98 98 """ Send an event to an integration """
99 99 handler = self.get_integration_handler(integration)
100 100 if handler:
101 101 log.debug(
102 102 'events: sending event %s on integration %s using handler %s',
103 103 event, integration, handler)
104 104 handler.send_event(event)
105 105
106 106 def get_integrations(self, scope, IntegrationType=None):
107 107 """
108 108 Return integrations for a scope, which must be one of:
109 109
110 110 'all' - every integration, global/repogroup/repo
111 111 'global' - global integrations only
112 112 <Repository> instance - integrations for this repo only
113 113 <RepoGroup> instance - integrations for this repogroup only
114 114 """
115 115
116 116 if isinstance(scope, Repository):
117 117 query = self.sa.query(Integration).filter(
118 118 Integration.repo == scope)
119 119 elif isinstance(scope, RepoGroup):
120 120 query = self.sa.query(Integration).filter(
121 121 Integration.repo_group == scope)
122 122 elif scope == 'global':
123 123 # global integrations
124 124 query = self.sa.query(Integration).filter(
125 and_(Integration.repo_id == None, Integration.repo_group_id == None)
125 and_(Integration.repo_id == null(), Integration.repo_group_id == null())
126 126 )
127 127 elif scope == 'root-repos':
128 128 query = self.sa.query(Integration).filter(
129 and_(Integration.repo_id == None,
130 Integration.repo_group_id == None,
129 and_(Integration.repo_id == null(),
130 Integration.repo_group_id == null(),
131 131 Integration.child_repos_only == true())
132 132 )
133 133 elif scope == 'all':
134 134 query = self.sa.query(Integration)
135 135 else:
136 136 raise Exception(
137 137 "invalid `scope`, must be one of: "
138 138 "['global', 'all', <Repository>, <RepoGroup>]")
139 139
140 140 if IntegrationType is not None:
141 141 query = query.filter(
142 142 Integration.integration_type==IntegrationType.key)
143 143
144 144 result = []
145 145 for integration in query.all():
146 146 IntType = integration_type_registry.get(integration.integration_type)
147 147 result.append((IntType, integration))
148 148 return result
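
A hedged usage sketch for the scope dispatch above; repo and repo_group stand for already-loaded Repository/RepoGroup instances, and a configured DB session is assumed:

    model = IntegrationModel()

    everything = model.get_integrations('all')
    global_only = model.get_integrations('global')
    for_repo = model.get_integrations(repo)          # <Repository> instance
    for_group = model.get_integrations(repo_group)   # <RepoGroup> instance

    # each entry is an (integration type class, Integration row) pair
    for IntType, integration in for_repo:
        print(integration.name, integration.enabled)
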
149 149
150 150 def get_for_event(self, event, cache=False):
151 151 """
152 152 Get integrations that match an event
153 153 """
154 154 # base query
155 155 query = self.sa.query(
156 156 Integration
157 157 ).filter(
158 158 Integration.enabled == true()
159 159 )
160 160
161 161 global_integrations_filter = and_(
162 162 Integration.repo_id == null(),
163 163 Integration.repo_group_id == null(),
164 164 Integration.child_repos_only == false(),
165 165 )
166 166
167 167 if isinstance(event, events.RepoEvent):
168 168 root_repos_integrations_filter = and_(
169 169 Integration.repo_id == null(),
170 170 Integration.repo_group_id == null(),
171 171 Integration.child_repos_only == true(),
172 172 )
173 173
174 174 clauses = [
175 175 global_integrations_filter,
176 176 ]
177 177 cases = [
178 178 (global_integrations_filter, 1),
179 179 (root_repos_integrations_filter, 2),
180 180 ]
181 181
182 182 # repo group integrations
183 183 if event.repo.group:
184 184 # repo group with only root level repos
185 185 group_child_repos_filter = and_(
186 186 Integration.repo_group_id == event.repo.group.group_id,
187 187 Integration.child_repos_only == true()
188 188 )
189 189
190 190 clauses.append(group_child_repos_filter)
191 191 cases.append(
192 192 (group_child_repos_filter, 3),
193 193 )
194 194
195 195 # repo group cascade to kids
196 196 group_recursive_repos_filter = and_(
197 197 Integration.repo_group_id.in_(
198 198 [group.group_id for group in event.repo.groups_with_parents]
199 199 ),
200 200 Integration.child_repos_only == false()
201 201 )
202 202 clauses.append(group_recursive_repos_filter)
203 203 cases.append(
204 204 (group_recursive_repos_filter, 4),
205 205 )
206 206
207 207 if not event.repo.group: # root repo
208 208 clauses.append(root_repos_integrations_filter)
209 209
210 210 # repo integrations
211 211 if event.repo.repo_id: # pre-create events don't have a repo_id yet
212 212 specific_repo_filter = Integration.repo_id == event.repo.repo_id
213 213 clauses.append(specific_repo_filter)
214 214 cases.append(
215 215 (specific_repo_filter, 5),
216 216 )
217 217
218 218 order_by_criterion = case(cases)
219 219
220 220 query = query.filter(or_(*clauses))
221 221 query = query.order_by(order_by_criterion)
222 222
223 223 if cache:
224 224 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
225 225 query = query.options(
226 226 FromCache("sql_cache_short", cache_key))
227 227 else: # only global integrations
228 228 order_by_criterion = Integration.integration_id
229 229
230 230 query = query.filter(global_integrations_filter)
231 231 query = query.order_by(order_by_criterion)
232 232 if cache:
233 233 query = query.options(
234 234 FromCache("sql_cache_short", "get_enabled_global_integrations"))
235 235
236 236 result = query.all()
237 237 return result
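
For repo events the query above ORs together up to five filters and orders the matches with a CASE expression, so broader integrations sort before more specific ones. Reading the cases list:

    1 - global integrations, cascading to all repos
    2 - global integrations limited to root-level repos
    3 - integrations on the repo's own group, root-level repos of that group only
    4 - integrations on any parent group, cascading to child repos
    5 - integrations bound to the specific repository
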
@@ -1,2392 +1,2392 b''
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 """
21 21 pull request model for RhodeCode
22 22 """
23 23
24 24 import logging
25 25 import os
26 26
27 27 import datetime
28 28 import urllib.request
29 29 import urllib.parse
30 30 import urllib.error
31 31 import collections
32 32
33 33 import dataclasses as dataclasses
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from collections import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.ext_json import sjson as json
43 43 from rhodecode.lib.markup_renderer import (
44 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 45 from rhodecode.lib.hash_utils import md5_safe
46 46 from rhodecode.lib.str_utils import safe_str
47 47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 48 from rhodecode.lib.vcs.backends.base import (
49 49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 50 TargetRefMissing, SourceRefMissing)
51 51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 52 from rhodecode.lib.vcs.exceptions import (
53 53 CommitDoesNotExistError, EmptyRepositoryError)
54 54 from rhodecode.model import BaseModel
55 55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 56 from rhodecode.model.comment import CommentsModel
57 57 from rhodecode.model.db import (
58 58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 60 from rhodecode.model.meta import Session
61 61 from rhodecode.model.notification import NotificationModel, \
62 62 EmailNotificationModel
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.settings import VcsSettingsModel
65 65
66 66
67 67 log = logging.getLogger(__name__)
68 68
69 69
70 70 # Data structure to hold the response data when updating commits during a pull
71 71 # request update.
72 72 class UpdateResponse(object):
73 73
74 74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 75 commit_changes, source_changed, target_changed):
76 76
77 77 self.executed = executed
78 78 self.reason = reason
79 79 self.new = new
80 80 self.old = old
81 81 self.common_ancestor_id = common_ancestor_id
82 82 self.changes = commit_changes
83 83 self.source_changed = source_changed
84 84 self.target_changed = target_changed
85 85
86 86
87 87 def get_diff_info(
88 88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 89 get_commit_authors=True):
90 90 """
91 91 Calculates detailed diff information for use in the preview when creating a pull-request.
92 92 This is also used for the default reviewers logic.
93 93 """
94 94
95 95 source_scm = source_repo.scm_instance()
96 96 target_scm = target_repo.scm_instance()
97 97
98 98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 99 if not ancestor_id:
100 100 raise ValueError(
101 101 'cannot calculate diff info without a common ancestor. '
102 102 'Make sure both repositories are related, and have a common forking commit.')
103 103
104 104 # the case here is that we want a simple diff without incoming commits,
105 105 # previewing what will be merged based only on commits in the source.
106 106 log.debug('Using ancestor %s as source_ref instead of %s',
107 107 ancestor_id, source_ref)
108 108
109 109 # source of changes now is the common ancestor
110 110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 111 # target commit becomes the source ref as it is the last commit
112 112 # for diff generation this logic gives proper diff
113 113 target_commit = source_scm.get_commit(commit_id=source_ref)
114 114
115 115 vcs_diff = \
116 116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 117 ignore_whitespace=False, context=3)
118 118
119 119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 120 diff_limit=0, file_limit=0, show_full_diff=True)
121 121
122 122 _parsed = diff_processor.prepare()
123 123
124 124 all_files = []
125 125 all_files_changes = []
126 126 changed_lines = {}
127 127 stats = [0, 0]
128 128 for f in _parsed:
129 129 all_files.append(f['filename'])
130 130 all_files_changes.append({
131 131 'filename': f['filename'],
132 132 'stats': f['stats']
133 133 })
134 134 stats[0] += f['stats']['added']
135 135 stats[1] += f['stats']['deleted']
136 136
137 137 changed_lines[f['filename']] = []
138 138 if len(f['chunks']) < 2:
139 139 continue
140 140 # first line is "context" information
141 141 for chunks in f['chunks'][1:]:
142 142 for chunk in chunks['lines']:
143 143 if chunk['action'] not in ('del', 'mod'):
144 144 continue
145 145 changed_lines[f['filename']].append(chunk['old_lineno'])
146 146
147 147 commit_authors = []
148 148 user_counts = {}
149 149 email_counts = {}
150 150 author_counts = {}
151 151 _commit_cache = {}
152 152
153 153 commits = []
154 154 if get_commit_authors:
155 155 log.debug('Obtaining commit authors from set of commits')
156 156 _compare_data = target_scm.compare(
157 157 target_ref, source_ref, source_scm, merge=True,
158 158 pre_load=["author", "date", "message"]
159 159 )
160 160
161 161 for commit in _compare_data:
162 162 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
163 163 # by this function, which is later serialized via JSON
164 164 serialized_commit = dict(
165 165 author=commit.author,
166 166 date=commit.date,
167 167 message=commit.message,
168 168 commit_id=commit.raw_id,
169 169 raw_id=commit.raw_id
170 170 )
171 171 commits.append(serialized_commit)
172 172 user = User.get_from_cs_author(serialized_commit['author'])
173 173 if user and user not in commit_authors:
174 174 commit_authors.append(user)
175 175
176 176 # lines
177 177 if get_authors:
178 178 log.debug('Calculating authors of changed files')
179 179 target_commit = source_repo.get_commit(ancestor_id)
180 180
181 181 for fname, lines in changed_lines.items():
182 182
183 183 try:
184 184 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 185 except Exception:
186 186 log.exception("Failed to load node with path %s", fname)
187 187 continue
188 188
189 189 if not isinstance(node, FileNode):
190 190 continue
191 191
192 192 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 193 if node.is_binary:
194 194 author = node.last_commit.author
195 195 email = node.last_commit.author_email
196 196
197 197 user = User.get_from_cs_author(author)
198 198 if user:
199 199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 200 author_counts[author] = author_counts.get(author, 0) + 1
201 201 email_counts[email] = email_counts.get(email, 0) + 1
202 202
203 203 continue
204 204
205 205 for annotation in node.annotate:
206 206 line_no, commit_id, get_commit_func, line_text = annotation
207 207 if line_no in lines:
208 208 if commit_id not in _commit_cache:
209 209 _commit_cache[commit_id] = get_commit_func()
210 210 commit = _commit_cache[commit_id]
211 211 author = commit.author
212 212 email = commit.author_email
213 213 user = User.get_from_cs_author(author)
214 214 if user:
215 215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 216 author_counts[author] = author_counts.get(author, 0) + 1
217 217 email_counts[email] = email_counts.get(email, 0) + 1
218 218
219 219 log.debug('Default reviewers processing finished')
220 220
221 221 return {
222 222 'commits': commits,
223 223 'files': all_files_changes,
224 224 'stats': stats,
225 225 'ancestor': ancestor_id,
226 226 # original authors of modified files
227 227 'original_authors': {
228 228 'users': user_counts,
229 229 'authors': author_counts,
230 230 'emails': email_counts,
231 231 },
232 232 'commit_authors': commit_authors
233 233 }
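
A hedged usage sketch of get_diff_info(); the repos are assumed to be loaded Repository instances and the refs valid commit ids:

    info = get_diff_info(source_repo, 'abc123', target_repo, 'def456',
                         get_authors=True)

    added, deleted = info['stats']
    print(info['ancestor'])                # common ancestor commit id
    for f in info['files']:
        print(f['filename'], f['stats'])   # per-file added/deleted counts
    # per-line author counts feed the default reviewers logic
    for user_id, lines_touched in info['original_authors']['users'].items():
        print(user_id, lines_touched)
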
234 234
235 235
236 236 class PullRequestModel(BaseModel):
237 237
238 238 cls = PullRequest
239 239
240 240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241 241
242 242 UPDATE_STATUS_MESSAGES = {
243 243 UpdateFailureReason.NONE: lazy_ugettext(
244 244 'Pull request update successful.'),
245 245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 246 'Pull request update failed because of an unknown error.'),
247 247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 248 'No update needed because the source and target have not changed.'),
249 249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 250 'Pull request cannot be updated because the reference type is '
251 251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 253 'This pull request cannot be updated because the target '
254 254 'reference is missing.'),
255 255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 256 'This pull request cannot be updated because the source '
257 257 'reference is missing.'),
258 258 }
259 259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261 261
262 262 def __get_pull_request(self, pull_request):
263 263 return self._get_instance((
264 264 PullRequest, PullRequestVersion), pull_request)
265 265
266 266 def _check_perms(self, perms, pull_request, user, api=False):
267 267 if not api:
268 268 return h.HasRepoPermissionAny(*perms)(
269 269 user=user, repo_name=pull_request.target_repo.repo_name)
270 270 else:
271 271 return h.HasRepoPermissionAnyApi(*perms)(
272 272 user=user, repo_name=pull_request.target_repo.repo_name)
273 273
274 274 def check_user_read(self, pull_request, user, api=False):
275 275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 276 return self._check_perms(_perms, pull_request, user, api)
277 277
278 278 def check_user_merge(self, pull_request, user, api=False):
279 279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 280 return self._check_perms(_perms, pull_request, user, api)
281 281
282 282 def check_user_update(self, pull_request, user, api=False):
283 283 owner = user.user_id == pull_request.user_id
284 284 return self.check_user_merge(pull_request, user, api) or owner
285 285
286 286 def check_user_delete(self, pull_request, user):
287 287 owner = user.user_id == pull_request.user_id
288 288 _perms = ('repository.admin',)
289 289 return self._check_perms(_perms, pull_request, user) or owner
290 290
291 291 def is_user_reviewer(self, pull_request, user):
292 292 return user.user_id in [
293 293 x.user_id for x in
294 294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 295 if x.user
296 296 ]
297 297
298 298 def check_user_change_status(self, pull_request, user, api=False):
299 299 return self.check_user_update(pull_request, user, api) \
300 300 or self.is_user_reviewer(pull_request, user)
301 301
302 302 def check_user_comment(self, pull_request, user):
303 303 owner = user.user_id == pull_request.user_id
304 304 return self.check_user_read(pull_request, user) or owner
305 305
306 306 def get(self, pull_request):
307 307 return self.__get_pull_request(pull_request)
308 308
309 309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 310 statuses=None, opened_by=None, order_by=None,
311 311 order_dir='desc', only_created=False):
312 312 repo = None
313 313 if repo_name:
314 314 repo = self._get_repo(repo_name)
315 315
316 316 q = PullRequest.query()
317 317
318 318 if search_q:
319 319 like_expression = u'%{}%'.format(safe_str(search_q))
320 320 q = q.join(User, User.user_id == PullRequest.user_id)
321 321 q = q.filter(or_(
322 322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 323 User.username.ilike(like_expression),
324 324 PullRequest.title.ilike(like_expression),
325 325 PullRequest.description.ilike(like_expression),
326 326 ))
327 327
328 328 # source or target
329 329 if repo and source:
330 330 q = q.filter(PullRequest.source_repo == repo)
331 331 elif repo:
332 332 q = q.filter(PullRequest.target_repo == repo)
333 333
334 334 # closed,opened
335 335 if statuses:
336 336 q = q.filter(PullRequest.status.in_(statuses))
337 337
338 338 # opened by filter
339 339 if opened_by:
340 340 q = q.filter(PullRequest.user_id.in_(opened_by))
341 341
342 342 # only get those that are in "created" state
343 343 if only_created:
344 344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345 345
346 346 order_map = {
347 347 'name_raw': PullRequest.pull_request_id,
348 348 'id': PullRequest.pull_request_id,
349 349 'title': PullRequest.title,
350 350 'updated_on_raw': PullRequest.updated_on,
351 351 'target_repo': PullRequest.target_repo_id
352 352 }
353 353 if order_by and order_by in order_map:
354 354 if order_dir == 'asc':
355 355 q = q.order_by(order_map[order_by].asc())
356 356 else:
357 357 q = q.order_by(order_map[order_by].desc())
358 358
359 359 return q
360 360
361 361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 362 opened_by=None):
363 363 """
364 364 Count the number of pull requests for a specific repository.
365 365
366 366 :param repo_name: target or source repo
367 367 :param search_q: filter by text
368 368 :param source: boolean flag to specify if repo_name refers to source
369 369 :param statuses: list of pull request statuses
370 370 :param opened_by: author user of the pull request
371 371 :returns: int number of pull requests
372 372 """
373 373 q = self._prepare_get_all_query(
374 374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 375 opened_by=opened_by)
376 376
377 377 return q.count()
378 378
379 379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 381 """
382 382 Get all pull requests for a specific repository.
383 383
384 384 :param repo_name: target or source repo
385 385 :param search_q: filter by text
386 386 :param source: boolean flag to specify if repo_name refers to source
387 387 :param statuses: list of pull request statuses
388 388 :param opened_by: author user of the pull request
389 389 :param offset: pagination offset
390 390 :param length: length of returned list
391 391 :param order_by: order of the returned list
392 392 :param order_dir: 'asc' or 'desc' ordering direction
393 393 :returns: list of pull requests
394 394 """
395 395 q = self._prepare_get_all_query(
396 396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398 398
399 399 if length:
400 400 pull_requests = q.limit(length).offset(offset).all()
401 401 else:
402 402 pull_requests = q.all()
403 403
404 404 return pull_requests
405 405
406 406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 407 """
408 408 Count the number of pull requests for a specific repository that are
409 409 awaiting review.
410 410
411 411 :param repo_name: target or source repo
412 412 :param search_q: filter by text
413 413 :param statuses: list of pull request statuses
414 414 :returns: int number of pull requests
415 415 """
416 416 pull_requests = self.get_awaiting_review(
417 417 repo_name, search_q=search_q, statuses=statuses)
418 418
419 419 return len(pull_requests)
420 420
421 421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 422 offset=0, length=None, order_by=None, order_dir='desc'):
423 423 """
424 424 Get all pull requests for a specific repository that are awaiting
425 425 review.
426 426
427 427 :param repo_name: target or source repo
428 428 :param search_q: filter by text
429 429 :param statuses: list of pull request statuses
430 430 :param offset: pagination offset
431 431 :param length: length of returned list
432 432 :param order_by: order of the returned list
433 433 :param order_dir: 'asc' or 'desc' ordering direction
434 434 :returns: list of pull requests
435 435 """
436 436 pull_requests = self.get_all(
437 437 repo_name, search_q=search_q, statuses=statuses,
438 438 order_by=order_by, order_dir=order_dir)
439 439
440 440 _filtered_pull_requests = []
441 441 for pr in pull_requests:
442 442 status = pr.calculated_review_status()
443 443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 444 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 445 _filtered_pull_requests.append(pr)
446 446 if length:
447 447 return _filtered_pull_requests[offset:offset+length]
448 448 else:
449 449 return _filtered_pull_requests
450 450
451 451 def _prepare_awaiting_my_review_review_query(
452 452 self, repo_name, user_id, search_q=None, statuses=None,
453 453 order_by=None, order_dir='desc'):
454 454
455 455 for_review_statuses = [
456 456 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
457 457 ]
458 458
459 459 pull_request_alias = aliased(PullRequest)
460 460 status_alias = aliased(ChangesetStatus)
461 461 reviewers_alias = aliased(PullRequestReviewers)
462 462 repo_alias = aliased(Repository)
463 463
464 464 last_ver_subq = Session()\
465 465 .query(func.min(ChangesetStatus.version)) \
466 466 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
467 467 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
468 468 .subquery()
469 469
470 470 q = Session().query(pull_request_alias) \
471 471 .options(lazyload(pull_request_alias.author)) \
472 472 .join(reviewers_alias,
473 473 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
474 474 .join(repo_alias,
475 475 repo_alias.repo_id == pull_request_alias.target_repo_id) \
476 476 .outerjoin(status_alias,
477 477 and_(status_alias.user_id == reviewers_alias.user_id,
478 478 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
479 479 .filter(or_(status_alias.version == null(),
480 480 status_alias.version == last_ver_subq)) \
481 481 .filter(reviewers_alias.user_id == user_id) \
482 482 .filter(repo_alias.repo_name == repo_name) \
483 483 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
484 484 .group_by(pull_request_alias)
485 485
486 486 # closed,opened
487 487 if statuses:
488 488 q = q.filter(pull_request_alias.status.in_(statuses))
489 489
490 490 if search_q:
491 491 like_expression = u'%{}%'.format(safe_str(search_q))
492 492 q = q.join(User, User.user_id == pull_request_alias.user_id)
493 493 q = q.filter(or_(
494 494 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
495 495 User.username.ilike(like_expression),
496 496 pull_request_alias.title.ilike(like_expression),
497 497 pull_request_alias.description.ilike(like_expression),
498 498 ))
499 499
500 500 order_map = {
501 501 'name_raw': pull_request_alias.pull_request_id,
502 502 'title': pull_request_alias.title,
503 503 'updated_on_raw': pull_request_alias.updated_on,
504 504 'target_repo': pull_request_alias.target_repo_id
505 505 }
506 506 if order_by and order_by in order_map:
507 507 if order_dir == 'asc':
508 508 q = q.order_by(order_map[order_by].asc())
509 509 else:
510 510 q = q.order_by(order_map[order_by].desc())
511 511
512 512 return q
513 513
514 514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 515 """
516 516 Count the number of pull requests for a specific repository that are
517 517 awaiting review from a specific user.
518 518
519 519 :param repo_name: target or source repo
520 520 :param user_id: reviewer user of the pull request
521 521 :param search_q: filter by text
522 522 :param statuses: list of pull request statuses
523 523 :returns: int number of pull requests
524 524 """
525 525 q = self._prepare_awaiting_my_review_review_query(
526 526 repo_name, user_id, search_q=search_q, statuses=statuses)
527 527 return q.count()
528 528
529 529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 530 offset=0, length=None, order_by=None, order_dir='desc'):
531 531 """
532 532 Get all pull requests for a specific repository that are awaiting
533 533 review from a specific user.
534 534
535 535 :param repo_name: target or source repo
536 536 :param user_id: reviewer user of the pull request
537 537 :param search_q: filter by text
538 538 :param statuses: list of pull request statuses
539 539 :param offset: pagination offset
540 540 :param length: length of returned list
541 541 :param order_by: order of the returned list
542 542 :param order_dir: 'asc' or 'desc' ordering direction
543 543 :returns: list of pull requests
544 544 """
545 545
546 546 q = self._prepare_awaiting_my_review_review_query(
547 547 repo_name, user_id, search_q=search_q, statuses=statuses,
548 548 order_by=order_by, order_dir=order_dir)
549 549
550 550 if length:
551 551 pull_requests = q.limit(length).offset(offset).all()
552 552 else:
553 553 pull_requests = q.all()
554 554
555 555 return pull_requests
556 556
557 557 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
558 558 order_by=None, order_dir='desc'):
559 559 """
560 560 return a query of pull-requests where the user is the creator, or is added as a reviewer
561 561 """
562 562 q = PullRequest.query()
563 563 if user_id:
564 564
565 565 base_query = select(PullRequestReviewers)\
566 566 .where(PullRequestReviewers.user_id == user_id)\
567 567 .with_only_columns(PullRequestReviewers.pull_request_id)
568 568
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(base_query)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_str(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 588
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 595 if order_by and order_by in order_map:
596 596 if order_dir == 'asc':
597 597 q = q.order_by(order_map[order_by].asc())
598 598 else:
599 599 q = q.order_by(order_map[order_by].desc())
600 600
601 601 return q
602 602
603 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 605 return q.count()
606 606
607 607 def get_im_participating_in(
608 608 self, user_id=None, statuses=None, query='', offset=0,
609 609 length=None, order_by=None, order_dir='desc'):
610 610 """
611 611 Get all pull requests that I'm participating in as a reviewer, or that I have opened
612 612 """
613 613
614 614 q = self._prepare_im_participating_query(
615 615 user_id, statuses=statuses, query=query, order_by=order_by,
616 616 order_dir=order_dir)
617 617
618 618 if length:
619 619 pull_requests = q.limit(length).offset(offset).all()
620 620 else:
621 621 pull_requests = q.all()
622 622
623 623 return pull_requests
624 624
625 625 def _prepare_participating_in_for_review_query(
626 626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
627 627
628 628 for_review_statuses = [
629 629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
630 630 ]
631 631
632 632 pull_request_alias = aliased(PullRequest)
633 633 status_alias = aliased(ChangesetStatus)
634 634 reviewers_alias = aliased(PullRequestReviewers)
635 635
636 636 last_ver_subq = Session()\
637 637 .query(func.min(ChangesetStatus.version)) \
638 638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
639 639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
640 640 .subquery()
641 641
642 642 q = Session().query(pull_request_alias) \
643 643 .options(lazyload(pull_request_alias.author)) \
644 644 .join(reviewers_alias,
645 645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
646 646 .outerjoin(status_alias,
647 647 and_(status_alias.user_id == reviewers_alias.user_id,
648 648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
649 649 .filter(or_(status_alias.version == null(),
650 650 status_alias.version == last_ver_subq)) \
651 651 .filter(reviewers_alias.user_id == user_id) \
652 652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
653 653 .group_by(pull_request_alias)
654 654
655 655 # closed,opened
656 656 if statuses:
657 657 q = q.filter(pull_request_alias.status.in_(statuses))
658 658
659 659 if query:
660 660 like_expression = u'%{}%'.format(safe_str(query))
661 661 q = q.join(User, User.user_id == pull_request_alias.user_id)
662 662 q = q.filter(or_(
663 663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
664 664 User.username.ilike(like_expression),
665 665 pull_request_alias.title.ilike(like_expression),
666 666 pull_request_alias.description.ilike(like_expression),
667 667 ))
668 668
669 669 order_map = {
670 670 'name_raw': pull_request_alias.pull_request_id,
671 671 'title': pull_request_alias.title,
672 672 'updated_on_raw': pull_request_alias.updated_on,
673 673 'target_repo': pull_request_alias.target_repo_id
674 674 }
675 675 if order_by and order_by in order_map:
676 676 if order_dir == 'asc':
677 677 q = q.order_by(order_map[order_by].asc())
678 678 else:
679 679 q = q.order_by(order_map[order_by].desc())
680 680
681 681 return q
682 682
683 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 685 return q.count()
686 686
687 687 def get_im_participating_in_for_review(
688 688 self, user_id, statuses=None, query='', offset=0,
689 689 length=None, order_by=None, order_dir='desc'):
690 690 """
691 691 Get all pull requests that need user approval or rejection
692 692 """
693 693
694 694 q = self._prepare_participating_in_for_review_query(
695 695 user_id, statuses=statuses, query=query, order_by=order_by,
696 696 order_dir=order_dir)
697 697
698 698 if length:
699 699 pull_requests = q.limit(length).offset(offset).all()
700 700 else:
701 701 pull_requests = q.all()
702 702
703 703 return pull_requests
704 704
705 705 def get_versions(self, pull_request):
706 706 """
707 707 returns versions of a pull request sorted by version ID ascending
708 708 """
709 709 return PullRequestVersion.query()\
710 710 .filter(PullRequestVersion.pull_request == pull_request)\
711 711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
712 712 .all()
713 713
714 714 def get_pr_version(self, pull_request_id, version=None):
715 715 at_version = None
716 716
717 717 if version and version == 'latest':
718 718 pull_request_ver = PullRequest.get(pull_request_id)
719 719 pull_request_obj = pull_request_ver
720 720 _org_pull_request_obj = pull_request_obj
721 721 at_version = 'latest'
722 722 elif version:
723 723 pull_request_ver = PullRequestVersion.get_or_404(version)
724 724 pull_request_obj = pull_request_ver
725 725 _org_pull_request_obj = pull_request_ver.pull_request
726 726 at_version = pull_request_ver.pull_request_version_id
727 727 else:
728 728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
729 729 pull_request_id)
730 730
731 731 pull_request_display_obj = PullRequest.get_pr_display_object(
732 732 pull_request_obj, _org_pull_request_obj)
733 733
734 734 return _org_pull_request_obj, pull_request_obj, \
735 735 pull_request_display_obj, at_version
736 736
737 737 def pr_commits_versions(self, versions):
738 738 """
739 739 Maps the pull-request commits onto all known PR versions. This way we can obtain,
740 740 for each commit, the PR versions it appears in.
741 741 """
742 742 commit_versions = collections.defaultdict(list)
743 743 num_versions = [x.pull_request_version_id for x in versions]
744 744 for ver in versions:
745 745 for commit_id in ver.revisions:
746 746 ver_idx = ChangesetComment.get_index_from_version(
747 747 ver.pull_request_version_id, num_versions=num_versions)
748 748 commit_versions[commit_id].append(ver_idx)
749 749 return commit_versions
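
A hedged illustration of the mapping shape this produces, assuming two PR versions whose ids resolve to indices 1 and 2:

    # version 1 covered commits c1, c2; version 2 covers c1, c2, c3
    expected = {
        'c1': [1, 2],   # present in both versions
        'c2': [1, 2],
        'c3': [2],      # introduced in the second version
    }
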
750 750
751 751 def create(self, created_by, source_repo, source_ref, target_repo,
752 752 target_ref, revisions, reviewers, observers, title, description=None,
753 753 common_ancestor_id=None,
754 754 description_renderer=None,
755 755 reviewer_data=None, translator=None, auth_user=None):
756 756 translator = translator or get_current_request().translate
757 757
758 758 created_by_user = self._get_user(created_by)
759 759 auth_user = auth_user or created_by_user.AuthUser()
760 760 source_repo = self._get_repo(source_repo)
761 761 target_repo = self._get_repo(target_repo)
762 762
763 763 pull_request = PullRequest()
764 764 pull_request.source_repo = source_repo
765 765 pull_request.source_ref = source_ref
766 766 pull_request.target_repo = target_repo
767 767 pull_request.target_ref = target_ref
768 768 pull_request.revisions = revisions
769 769 pull_request.title = title
770 770 pull_request.description = description
771 771 pull_request.description_renderer = description_renderer
772 772 pull_request.author = created_by_user
773 773 pull_request.reviewer_data = reviewer_data
774 774 pull_request.pull_request_state = pull_request.STATE_CREATING
775 775 pull_request.common_ancestor_id = common_ancestor_id
776 776
777 777 Session().add(pull_request)
778 778 Session().flush()
779 779
780 780 reviewer_ids = set()
781 781 # members / reviewers
782 782 for reviewer_object in reviewers:
783 783 user_id, reasons, mandatory, role, rules = reviewer_object
784 784 user = self._get_user(user_id)
785 785
786 786 # skip duplicates
787 787 if user.user_id in reviewer_ids:
788 788 continue
789 789
790 790 reviewer_ids.add(user.user_id)
791 791
792 792 reviewer = PullRequestReviewers()
793 793 reviewer.user = user
794 794 reviewer.pull_request = pull_request
795 795 reviewer.reasons = reasons
796 796 reviewer.mandatory = mandatory
797 797 reviewer.role = role
798 798
799 799 # NOTE(marcink): pick only first rule for now
800 800 rule_id = list(rules)[0] if rules else None
801 801 rule = RepoReviewRule.get(rule_id) if rule_id else None
802 802 if rule:
803 803 review_group = rule.user_group_vote_rule(user_id)
804 804 # we check if this particular reviewer is member of a voting group
805 805 if review_group:
806 806 # NOTE(marcink):
807 807 # it can be that the user is a member of more groups, but we pick
808 808 # the first one, same as the default reviewers algo
809 809 review_group = review_group[0]
810 810
811 811 rule_data = {
812 812 'rule_name':
813 813 rule.review_rule_name,
814 814 'rule_user_group_entry_id':
815 815 review_group.repo_review_rule_users_group_id,
816 816 'rule_user_group_name':
817 817 review_group.users_group.users_group_name,
818 818 'rule_user_group_members':
819 819 [x.user.username for x in review_group.users_group.members],
820 820 'rule_user_group_members_id':
821 821 [x.user.user_id for x in review_group.users_group.members],
822 822 }
823 823 # e.g {'vote_rule': -1, 'mandatory': True}
824 824 rule_data.update(review_group.rule_data())
825 825
826 826 reviewer.rule_data = rule_data
827 827
828 828 Session().add(reviewer)
829 829 Session().flush()
830 830
831 831 for observer_object in observers:
832 832 user_id, reasons, mandatory, role, rules = observer_object
833 833 user = self._get_user(user_id)
834 834
835 835 # skip duplicates from reviewers
836 836 if user.user_id in reviewer_ids:
837 837 continue
838 838
839 839 #reviewer_ids.add(user.user_id)
840 840
841 841 observer = PullRequestReviewers()
842 842 observer.user = user
843 843 observer.pull_request = pull_request
844 844 observer.reasons = reasons
845 845 observer.mandatory = mandatory
846 846 observer.role = role
847 847
848 848 # NOTE(marcink): pick only first rule for now
849 849 rule_id = list(rules)[0] if rules else None
850 850 rule = RepoReviewRule.get(rule_id) if rule_id else None
851 851 if rule:
852 852 # TODO(marcink): do we need this for observers ??
853 853 pass
854 854
855 855 Session().add(observer)
856 856 Session().flush()
857 857
858 858 # Set approval status to "Under Review" for all commits which are
859 859 # part of this pull request.
860 860 ChangesetStatusModel().set_status(
861 861 repo=target_repo,
862 862 status=ChangesetStatus.STATUS_UNDER_REVIEW,
863 863 user=created_by_user,
864 864 pull_request=pull_request
865 865 )
866 866 # we commit early at this point. This has to do with the fact
867 867 # that earlier queries do some row-locking. Because of that
868 868 # we need to commit and finish the transaction before the validate call below,
869 869 # which for large repos could take long, resulting in long row locks
870 870 Session().commit()
871 871
872 872 # prepare workspace, and run initial merge simulation. Set state during that
873 873 # operation
874 874 pull_request = PullRequest.get(pull_request.pull_request_id)
875 875
876 876 # set as merging for the merge simulation, and once finished set to created,
877 877 # so we mark that the simulation is working fine
878 878 with pull_request.set_state(PullRequest.STATE_MERGING,
879 879 final_state=PullRequest.STATE_CREATED) as state_obj:
880 880 MergeCheck.validate(
881 881 pull_request, auth_user=auth_user, translator=translator)
882 882
883 883 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
884 884 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
885 885
886 886 creation_data = pull_request.get_api_data(with_merge_state=False)
887 887 self._log_audit_action(
888 888 'repo.pull_request.create', {'data': creation_data},
889 889 auth_user, pull_request)
890 890
891 891 return pull_request
892 892
893 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 894 pull_request = self.__get_pull_request(pull_request)
895 895 target_scm = pull_request.target_repo.scm_instance()
896 896 if action == 'create':
897 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 898 elif action == 'merge':
899 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 900 elif action == 'close':
901 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 902 elif action == 'review_status_change':
903 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 904 elif action == 'update':
905 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 906 elif action == 'comment':
907 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 908 elif action == 'comment_edit':
909 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 910 else:
911 911 return
912 912
913 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 914 pull_request, action, trigger_hook)
915 915 trigger_hook(
916 916 username=user.username,
917 917 repo_name=pull_request.target_repo.repo_name,
918 918 repo_type=target_scm.alias,
919 919 pull_request=pull_request,
920 920 data=data)
921 921
922 922 def _get_commit_ids(self, pull_request):
923 923 """
924 924 Return the commit ids of the merged pull request.
925 925
926 926 This method does not yet deal correctly with the lack of autoupdates,
927 927 nor with implicit target updates.
928 928 For example: if a commit in the source repo is already in the target, it
929 929 will be reported anyway.
930 930 """
931 931 merge_rev = pull_request.merge_rev
932 932 if merge_rev is None:
933 933 raise ValueError('This pull request was not merged yet')
934 934
935 935 commit_ids = list(pull_request.revisions)
936 936 if merge_rev not in commit_ids:
937 937 commit_ids.append(merge_rev)
938 938
939 939 return commit_ids
940 940
941 941 def merge_repo(self, pull_request, user, extras):
942 942 repo_type = pull_request.source_repo.repo_type
943 943 log.debug("Merging pull request %s", pull_request)
944 944
945 945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
946 946 merge_state = self._merge_pull_request(pull_request, user, extras)
947 947 if merge_state.executed:
948 948 log.debug("Merge was successful, updating the pull request comments.")
949 949 self._comment_and_close_pr(pull_request, user, merge_state)
950 950
951 951 self._log_audit_action(
952 952 'repo.pull_request.merge',
953 953 {'merge_state': merge_state.__dict__},
954 954 user, pull_request)
955 955
956 956 else:
957 957 log.warning("Merge failed, not updating the pull request.")
958 958 return merge_state
959 959
960 960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
961 961 target_vcs = pull_request.target_repo.scm_instance()
962 962 source_vcs = pull_request.source_repo.scm_instance()
963 963
964 964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
965 965 pr_id=pull_request.pull_request_id,
966 966 pr_title=pull_request.title,
967 967 pr_desc=pull_request.description,
968 968 source_repo=source_vcs.name,
969 969 source_ref_name=pull_request.source_ref_parts.name,
970 970 target_repo=target_vcs.name,
971 971 target_ref_name=pull_request.target_ref_parts.name,
972 972 )
973 973
974 974 workspace_id = self._workspace_id(pull_request)
975 975 repo_id = pull_request.target_repo.repo_id
976 976 use_rebase = self._use_rebase_for_merging(pull_request)
977 977 close_branch = self._close_branch_before_merging(pull_request)
978 978 user_name = self._user_name_for_merging(pull_request, user)
979 979
980 980 target_ref = self._refresh_reference(
981 981 pull_request.target_ref_parts, target_vcs)
982 982
983 983 callback_daemon, extras = prepare_callback_daemon(
984 984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
985 985 host=vcs_settings.HOOKS_HOST,
986 986 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
987 987
988 988 with callback_daemon:
989 989 # TODO: johbo: Implement a clean way to run a config_override
990 990 # for a single call.
991 991 target_vcs.config.set(
992 992 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
993 993
994 994 merge_state = target_vcs.merge(
995 995 repo_id, workspace_id, target_ref, source_vcs,
996 996 pull_request.source_ref_parts,
997 997 user_name=user_name, user_email=user.email,
998 998 message=message, use_rebase=use_rebase,
999 999 close_branch=close_branch)
1000 1000
1001 1001 return merge_state
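
The merge message is produced with plain str.format() over a template; a hedged sketch with an illustrative template (the real default lives in vcs_settings.MERGE_MESSAGE_TMPL):

    template = ('Merge pull request !{pr_id} "{pr_title}" '
                'from {source_repo}#{source_ref_name}')
    message = template.format(
        pr_id=7,
        pr_title='Fix typo',
        pr_desc='',
        source_repo='acme/widget-fork',
        source_ref_name='feature-x',
        target_repo='acme/widget',
        target_ref_name='master',
    )
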
1002 1002
1003 1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1004 1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1005 1005 pull_request.updated_on = datetime.datetime.now()
1006 1006 close_msg = close_msg or 'Pull request merged and closed'
1007 1007
1008 1008 CommentsModel().create(
1009 1009 text=safe_str(close_msg),
1010 1010 repo=pull_request.target_repo.repo_id,
1011 1011 user=user.user_id,
1012 1012 pull_request=pull_request.pull_request_id,
1013 1013 f_path=None,
1014 1014 line_no=None,
1015 1015 closing_pr=True
1016 1016 )
1017 1017
1018 1018 Session().add(pull_request)
1019 1019 Session().flush()
1020 1020 # TODO: paris: replace invalidation with less radical solution
1021 1021 ScmModel().mark_for_invalidation(
1022 1022 pull_request.target_repo.repo_name)
1023 1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1024 1024
1025 1025 def has_valid_update_type(self, pull_request):
1026 1026 source_ref_type = pull_request.source_ref_parts.type
1027 1027 return source_ref_type in self.REF_TYPES
1028 1028
1029 1029 def get_flow_commits(self, pull_request):
1030 1030
1031 1031 # source repo
1032 1032 source_ref_name = pull_request.source_ref_parts.name
1033 1033 source_ref_type = pull_request.source_ref_parts.type
1034 1034 source_ref_id = pull_request.source_ref_parts.commit_id
1035 1035 source_repo = pull_request.source_repo.scm_instance()
1036 1036
1037 1037 try:
1038 1038 if source_ref_type in self.REF_TYPES:
1039 1039 source_commit = source_repo.get_commit(
1040 1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1041 1041 else:
1042 1042 source_commit = source_repo.get_commit(source_ref_id)
1043 1043 except CommitDoesNotExistError:
1044 1044 raise SourceRefMissing()
1045 1045
1046 1046 # target repo
1047 1047 target_ref_name = pull_request.target_ref_parts.name
1048 1048 target_ref_type = pull_request.target_ref_parts.type
1049 1049 target_ref_id = pull_request.target_ref_parts.commit_id
1050 1050 target_repo = pull_request.target_repo.scm_instance()
1051 1051
1052 1052 try:
1053 1053 if target_ref_type in self.REF_TYPES:
1054 1054 target_commit = target_repo.get_commit(
1055 1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1056 1056 else:
1057 1057 target_commit = target_repo.get_commit(target_ref_id)
1058 1058 except CommitDoesNotExistError:
1059 1059 raise TargetRefMissing()
1060 1060
1061 1061 return source_commit, target_commit
1062 1062
1063 1063 def update_commits(self, pull_request, updating_user):
1064 1064 """
1065 1065 Get the updated list of commits for the pull request
1066 1066 and return the new pull request version and the list
1067 1067 of commits processed by this update action
1068 1068
1069 1069 updating_user is the user_object who triggered the update
1070 1070 """
1071 1071 pull_request = self.__get_pull_request(pull_request)
1072 1072 source_ref_type = pull_request.source_ref_parts.type
1073 1073 source_ref_name = pull_request.source_ref_parts.name
1074 1074 source_ref_id = pull_request.source_ref_parts.commit_id
1075 1075
1076 1076 target_ref_type = pull_request.target_ref_parts.type
1077 1077 target_ref_name = pull_request.target_ref_parts.name
1078 1078 target_ref_id = pull_request.target_ref_parts.commit_id
1079 1079
1080 1080 if not self.has_valid_update_type(pull_request):
1081 1081 log.debug("Skipping update of pull request %s due to ref type: %s",
1082 1082 pull_request, source_ref_type)
1083 1083 return UpdateResponse(
1084 1084 executed=False,
1085 1085 reason=UpdateFailureReason.WRONG_REF_TYPE,
1086 1086 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1087 1087 source_changed=False, target_changed=False)
1088 1088
1089 1089 try:
1090 1090 source_commit, target_commit = self.get_flow_commits(pull_request)
1091 1091 except SourceRefMissing:
1092 1092 return UpdateResponse(
1093 1093 executed=False,
1094 1094 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1095 1095 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1096 1096 source_changed=False, target_changed=False)
1097 1097 except TargetRefMissing:
1098 1098 return UpdateResponse(
1099 1099 executed=False,
1100 1100 reason=UpdateFailureReason.MISSING_TARGET_REF,
1101 1101 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1102 1102 source_changed=False, target_changed=False)
1103 1103
1104 1104 source_changed = source_ref_id != source_commit.raw_id
1105 1105 target_changed = target_ref_id != target_commit.raw_id
1106 1106
1107 1107 if not (source_changed or target_changed):
1108 1108 log.debug("Nothing changed in pull request %s", pull_request)
1109 1109 return UpdateResponse(
1110 1110 executed=False,
1111 1111 reason=UpdateFailureReason.NO_CHANGE,
1112 1112 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1113 1113 source_changed=source_changed, target_changed=target_changed)
1114 1114
1115 1115 change_in_found = 'target repo' if target_changed else 'source repo'
1116 1116 log.debug('Updating pull request because of change in %s detected',
1117 1117 change_in_found)
1118 1118
1119 1119 # Finally, an update is needed; in case of a source change we create
1120 1120 # a new version snapshot, otherwise we just update in place
1121 1121 if source_changed:
1122 1122 pull_request_version = self._create_version_from_snapshot(pull_request)
1123 1123 self._link_comments_to_version(pull_request_version)
1124 1124 else:
1125 1125 try:
1126 1126 ver = pull_request.versions[-1]
1127 1127 except IndexError:
1128 1128 ver = None
1129 1129
1130 1130 pull_request.pull_request_version_id = \
1131 1131 ver.pull_request_version_id if ver else None
1132 1132 pull_request_version = pull_request
1133 1133
1134 1134 source_repo = pull_request.source_repo.scm_instance()
1135 1135 target_repo = pull_request.target_repo.scm_instance()
1136 1136
1137 1137 # re-compute commit ids
1138 1138 old_commit_ids = pull_request.revisions
1139 1139 pre_load = ["author", "date", "message", "branch"]
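# compare() with merge=True yields the commit range as a merge would
# see it: commits reachable from the source but not from the target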
1140 1140 commit_ranges = target_repo.compare(
1141 1141 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1142 1142 pre_load=pre_load)
1143 1143
1144 1144 target_ref = target_commit.raw_id
1145 1145 source_ref = source_commit.raw_id
1146 1146 ancestor_commit_id = target_repo.get_common_ancestor(
1147 1147 target_ref, source_ref, source_repo)
1148 1148
1149 1149 if not ancestor_commit_id:
1150 1150 raise ValueError(
1151 1151 'cannot calculate diff info without a common ancestor. '
1152 1152 'Make sure both repositories are related, and have a common forking commit.')
1153 1153
1154 1154 pull_request.common_ancestor_id = ancestor_commit_id
1155 1155
1156 1156 pull_request.source_ref = '%s:%s:%s' % (
1157 1157 source_ref_type, source_ref_name, source_commit.raw_id)
1158 1158 pull_request.target_ref = '%s:%s:%s' % (
1159 1159 target_ref_type, target_ref_name, ancestor_commit_id)
1160 1160
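# revisions are stored newest-first, hence the reversed() over the
# (oldest-first) compare range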
1161 1161 pull_request.revisions = [
1162 1162 commit.raw_id for commit in reversed(commit_ranges)]
1163 1163 pull_request.updated_on = datetime.datetime.now()
1164 1164 Session().add(pull_request)
1165 1165 new_commit_ids = pull_request.revisions
1166 1166
1167 1167 old_diff_data, new_diff_data = self._generate_update_diffs(
1168 1168 pull_request, pull_request_version)
1169 1169
1170 1170 # calculate commit and file changes
1171 1171 commit_changes = self._calculate_commit_id_changes(
1172 1172 old_commit_ids, new_commit_ids)
1173 1173 file_changes = self._calculate_file_changes(
1174 1174 old_diff_data, new_diff_data)
1175 1175
1176 1176 # set comments as outdated if DIFFS changed
1177 1177 CommentsModel().outdate_comments(
1178 1178 pull_request, old_diff_data=old_diff_data,
1179 1179 new_diff_data=new_diff_data)
1180 1180
1181 1181 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1182 1182 file_node_changes = (
1183 1183 file_changes.added or file_changes.modified or file_changes.removed)
1184 1184 pr_has_changes = valid_commit_changes or file_node_changes
1185 1185
1186 1186 # Add an automatic comment to the pull request, in case
1187 1187 # anything has changed
1188 1188 if pr_has_changes:
1189 1189 update_comment = CommentsModel().create(
1190 1190 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1191 1191 repo=pull_request.target_repo,
1192 1192 user=pull_request.author,
1193 1193 pull_request=pull_request,
1194 1194 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1195 1195
1196 1196 # Update status to "Under Review" for added commits
1197 1197 for commit_id in commit_changes.added:
1198 1198 ChangesetStatusModel().set_status(
1199 1199 repo=pull_request.source_repo,
1200 1200 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1201 1201 comment=update_comment,
1202 1202 user=pull_request.author,
1203 1203 pull_request=pull_request,
1204 1204 revision=commit_id)
1205 1205
1206 1206 # first DB commit, persisting the update before notifications are sent
1207 1207 Session().commit()
1208 1208
1209 1209 if pr_has_changes:
1210 1210 # send update email to users
1211 1211 try:
1212 1212 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1213 1213 ancestor_commit_id=ancestor_commit_id,
1214 1214 commit_changes=commit_changes,
1215 1215 file_changes=file_changes)
1216 1216 Session().commit()
1217 1217 except Exception:
1218 1218 log.exception('Failed to send email notification to users')
1219 1219 Session().rollback()
1220 1220
1221 1221 log.debug(
1222 1222 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1223 1223 'removed_ids: %s', pull_request.pull_request_id,
1224 1224 commit_changes.added, commit_changes.common, commit_changes.removed)
1225 1225 log.debug(
1226 1226 'Updated pull request with the following file changes: %s',
1227 1227 file_changes)
1228 1228
1229 1229 log.info(
1230 1230 "Updated pull request %s from commit %s to commit %s, "
1231 1231 "stored new version %s of this pull request.",
1232 1232 pull_request.pull_request_id, source_ref_id,
1233 1233 pull_request.source_ref_parts.commit_id,
1234 1234 pull_request_version.pull_request_version_id)
1235 1235
1236 1236 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1237 1237
1238 1238 return UpdateResponse(
1239 1239 executed=True, reason=UpdateFailureReason.NONE,
1240 1240 old=pull_request, new=pull_request_version,
1241 1241 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1242 1242 source_changed=source_changed, target_changed=target_changed)
1243 1243
1244 1244 def _create_version_from_snapshot(self, pull_request):
1245 1245 version = PullRequestVersion()
1246 1246 version.title = pull_request.title
1247 1247 version.description = pull_request.description
1248 1248 version.status = pull_request.status
1249 1249 version.pull_request_state = pull_request.pull_request_state
1250 1250 version.created_on = datetime.datetime.now()
1251 1251 version.updated_on = pull_request.updated_on
1252 1252 version.user_id = pull_request.user_id
1253 1253 version.source_repo = pull_request.source_repo
1254 1254 version.source_ref = pull_request.source_ref
1255 1255 version.target_repo = pull_request.target_repo
1256 1256 version.target_ref = pull_request.target_ref
1257 1257
1258 1258 version._last_merge_source_rev = pull_request._last_merge_source_rev
1259 1259 version._last_merge_target_rev = pull_request._last_merge_target_rev
1260 1260 version.last_merge_status = pull_request.last_merge_status
1261 1261 version.last_merge_metadata = pull_request.last_merge_metadata
1262 1262 version.shadow_merge_ref = pull_request.shadow_merge_ref
1263 1263 version.merge_rev = pull_request.merge_rev
1264 1264 version.reviewer_data = pull_request.reviewer_data
1265 1265
1266 1266 version.revisions = pull_request.revisions
1267 1267 version.common_ancestor_id = pull_request.common_ancestor_id
1268 1268 version.pull_request = pull_request
1269 1269 Session().add(version)
1270 1270 Session().flush()
1271 1271
1272 1272 return version
1273 1273
1274 1274 def _generate_update_diffs(self, pull_request, pull_request_version):
1275 1275
1276 1276 diff_context = (
1277 1277 self.DIFF_CONTEXT +
1278 1278 CommentsModel.needed_extra_diff_context())
1279 1279 hide_whitespace_changes = False
1280 1280 source_repo = pull_request_version.source_repo
1281 1281 source_ref_id = pull_request_version.source_ref_parts.commit_id
1282 1282 target_ref_id = pull_request_version.target_ref_parts.commit_id
1283 1283 old_diff = self._get_diff_from_pr_or_version(
1284 1284 source_repo, source_ref_id, target_ref_id,
1285 1285 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1286 1286
1287 1287 source_repo = pull_request.source_repo
1288 1288 source_ref_id = pull_request.source_ref_parts.commit_id
1289 1289 target_ref_id = pull_request.target_ref_parts.commit_id
1290 1290
1291 1291 new_diff = self._get_diff_from_pr_or_version(
1292 1292 source_repo, source_ref_id, target_ref_id,
1293 1293 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1294 1294
1295 1295 # NOTE: this was using diff_format='gitdiff'
1296 1296 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1297 1297 old_diff_data.prepare()
1298 1298 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1299 1299 new_diff_data.prepare()
1300 1300
1301 1301 return old_diff_data, new_diff_data
1302 1302
1303 1303 def _link_comments_to_version(self, pull_request_version):
1304 1304 """
1305 1305 Link all unlinked comments of this pull request to the given version.
1306 1306
1307 1307 :param pull_request_version: The `PullRequestVersion` to which
1308 1308 the comments shall be linked.
1309 1309
1310 1310 """
1311 1311 pull_request = pull_request_version.pull_request
1312 1312 comments = ChangesetComment.query()\
1313 1313 .filter(
1314 1314 # TODO: johbo: Should we query for the repo at all here?
1315 1315 # Pending decision on how comments of PRs are to be related
1316 1316 # to either the source repo, the target repo or no repo at all.
1317 1317 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1318 1318 ChangesetComment.pull_request == pull_request,
1319 ChangesetComment.pull_request_version == None)\
1319 ChangesetComment.pull_request_version == null())\
1320 1320 .order_by(ChangesetComment.comment_id.asc())
1321 1321
1322 1322 # TODO: johbo: Find out why this breaks if it is done in a bulk
1323 1323 # operation.
1324 1324 for comment in comments:
1325 1325 comment.pull_request_version_id = (
1326 1326 pull_request_version.pull_request_version_id)
1327 1327 Session().add(comment)
1328 1328
1329 1329 def _calculate_commit_id_changes(self, old_ids, new_ids):
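# order-preserving set arithmetic over commit id lists: added/common
# follow new_ids order, removed follows old_ids order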
1330 1330 added = [x for x in new_ids if x not in old_ids]
1331 1331 common = [x for x in new_ids if x in old_ids]
1332 1332 removed = [x for x in old_ids if x not in new_ids]
1333 1333 total = new_ids
1334 1334 return ChangeTuple(added, common, removed, total)
1335 1335
1336 1336 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1337 1337
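# strategy: hash each file's raw diff with md5; a file absent from the
# old diff is classified as added or removed via the parsed diff ops,
# a changed hash means modified, and whatever is left in old_files at
# the end was removed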
1338 1338 old_files = OrderedDict()
1339 1339 for diff_data in old_diff_data.parsed_diff:
1340 1340 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1341 1341
1342 1342 added_files = []
1343 1343 modified_files = []
1344 1344 removed_files = []
1345 1345 for diff_data in new_diff_data.parsed_diff:
1346 1346 new_filename = diff_data['filename']
1347 1347 new_hash = md5_safe(diff_data['raw_diff'])
1348 1348
1349 1349 old_hash = old_files.get(new_filename)
1350 1350 if not old_hash:
1351 1351 # file is not present in the old diff; figure out the ADD/REMOVE
1352 1352 # operation from the parsed diff stats
1353 1353 operations_dict = diff_data['stats']['ops']
1354 1354 if diffs.DEL_FILENODE in operations_dict:
1355 1355 removed_files.append(new_filename)
1356 1356 else:
1357 1357 added_files.append(new_filename)
1358 1358 else:
1359 1359 if new_hash != old_hash:
1360 1360 modified_files.append(new_filename)
1361 1361 # now remove the file from old_files, since we have seen it already
1362 1362 del old_files[new_filename]
1363 1363
1364 1364 # removed files are those present in the old diff but not in the NEW one;
1365 1365 # since we delete old files that appear in the new diff, any left-overs
1366 1366 # in old_files are the removed files
1367 1367 removed_files.extend(old_files.keys())
1368 1368
1369 1369 return FileChangeTuple(added_files, modified_files, removed_files)
1370 1370
1371 1371 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1372 1372 """
1373 1373 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1374 1374 so it always looks the same regardless of which default
1375 1375 renderer the system is using.
1376 1376
1377 1377 :param ancestor_commit_id: ancestor raw_id
1378 1378 :param changes: changes named tuple
1379 1379 :param file_changes: file changes named tuple
1380 1380
1381 1381 """
1382 1382 new_status = ChangesetStatus.get_status_lbl(
1383 1383 ChangesetStatus.STATUS_UNDER_REVIEW)
1384 1384
1385 1385 changed_files = (
1386 1386 file_changes.added + file_changes.modified + file_changes.removed)
1387 1387
1388 1388 params = {
1389 1389 'under_review_label': new_status,
1390 1390 'added_commits': changes.added,
1391 1391 'removed_commits': changes.removed,
1392 1392 'changed_files': changed_files,
1393 1393 'added_files': file_changes.added,
1394 1394 'modified_files': file_changes.modified,
1395 1395 'removed_files': file_changes.removed,
1396 1396 'ancestor_commit_id': ancestor_commit_id
1397 1397 }
1398 1398 renderer = RstTemplateRenderer()
1399 1399 return renderer.render('pull_request_update.mako', **params)
1400 1400
1401 1401 def edit(self, pull_request, title, description, description_renderer, user):
1402 1402 pull_request = self.__get_pull_request(pull_request)
1403 1403 old_data = pull_request.get_api_data(with_merge_state=False)
1404 1404 if pull_request.is_closed():
1405 1405 raise ValueError('This pull request is closed')
1406 1406 if title:
1407 1407 pull_request.title = title
1408 1408 pull_request.description = description
1409 1409 pull_request.updated_on = datetime.datetime.now()
1410 1410 pull_request.description_renderer = description_renderer
1411 1411 Session().add(pull_request)
1412 1412 self._log_audit_action(
1413 1413 'repo.pull_request.edit', {'old_data': old_data},
1414 1414 user, pull_request)
1415 1415
1416 1416 def update_reviewers(self, pull_request, reviewer_data, user):
1417 1417 """
1418 1418 Update the reviewers in the pull request
1419 1419
1420 1420 :param pull_request: the pr to update
1421 1421 :param reviewer_data: list of tuples
1422 1422 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1423 1423 :param user: current user who triggers this action
1424 1424 """
1425 1425
1426 1426 pull_request = self.__get_pull_request(pull_request)
1427 1427 if pull_request.is_closed():
1428 1428 raise ValueError('This pull request is closed')
1429 1429
1430 1430 reviewers = {}
1431 1431 for user_id, reasons, mandatory, role, rules in reviewer_data:
1432 1432 if isinstance(user_id, (int, str)):
1433 1433 user_id = self._get_user(user_id).user_id
1434 1434 reviewers[user_id] = {
1435 1435 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1436 1436
1437 1437 reviewers_ids = set(reviewers.keys())
1438 1438 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1439 1439 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1440 1440
1441 1441 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1442 1442
1443 1443 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1444 1444 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1445 1445
1446 1446 log.debug("Adding %s reviewers", ids_to_add)
1447 1447 log.debug("Removing %s reviewers", ids_to_remove)
1448 1448 changed = False
1449 1449 added_audit_reviewers = []
1450 1450 removed_audit_reviewers = []
1451 1451
1452 1452 for uid in ids_to_add:
1453 1453 changed = True
1454 1454 _usr = self._get_user(uid)
1455 1455 reviewer = PullRequestReviewers()
1456 1456 reviewer.user = _usr
1457 1457 reviewer.pull_request = pull_request
1458 1458 reviewer.reasons = reviewers[uid]['reasons']
1459 1459 # NOTE(marcink): mandatory shouldn't be changed now
1460 1460 # reviewer.mandatory = reviewers[uid]['mandatory']
1461 1461 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1462 1462 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1463 1463 Session().add(reviewer)
1464 1464 added_audit_reviewers.append(reviewer.get_dict())
1465 1465
1466 1466 for uid in ids_to_remove:
1467 1467 changed = True
1468 1468 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1469 1469 # This is an edge case that handles a previous state of having the same reviewer twice.
1470 1470 # This CAN happen due to the lack of DB checks.
1471 1471 reviewers = PullRequestReviewers.query()\
1472 1472 .filter(PullRequestReviewers.user_id == uid,
1473 1473 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1474 1474 PullRequestReviewers.pull_request == pull_request)\
1475 1475 .all()
1476 1476
1477 1477 for obj in reviewers:
1478 1478 removed_audit_reviewers.append(obj.get_dict())
1479 1479 Session().delete(obj)
1480 1480
1481 1481 if changed:
1482 1482 Session().expire_all()
1483 1483 pull_request.updated_on = datetime.datetime.now()
1484 1484 Session().add(pull_request)
1485 1485
1486 1486 # finally store audit logs
1487 1487 for user_data in added_audit_reviewers:
1488 1488 self._log_audit_action(
1489 1489 'repo.pull_request.reviewer.add', {'data': user_data},
1490 1490 user, pull_request)
1491 1491 for user_data in removed_audit_reviewers:
1492 1492 self._log_audit_action(
1493 1493 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1494 1494 user, pull_request)
1495 1495
1496 1496 self.notify_reviewers(pull_request, ids_to_add, user)
1497 1497 return ids_to_add, ids_to_remove
1498 1498
1499 1499 def update_observers(self, pull_request, observer_data, user):
1500 1500 """
1501 1501 Update the observers in the pull request
1502 1502
1503 1503 :param pull_request: the pr to update
1504 1504 :param observer_data: list of tuples
1505 1505 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1506 1506 :param user: current user who triggers this action
1507 1507 """
1508 1508 pull_request = self.__get_pull_request(pull_request)
1509 1509 if pull_request.is_closed():
1510 1510 raise ValueError('This pull request is closed')
1511 1511
1512 1512 observers = {}
1513 1513 for user_id, reasons, mandatory, role, rules in observer_data:
1514 1514 if isinstance(user_id, (int, str)):
1515 1515 user_id = self._get_user(user_id).user_id
1516 1516 observers[user_id] = {
1517 1517 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1518 1518
1519 1519 observers_ids = set(observers.keys())
1520 1520 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1521 1521 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1522 1522
1523 1523 current_observers_ids = set([x.user.user_id for x in current_observers])
1524 1524
1525 1525 ids_to_add = observers_ids.difference(current_observers_ids)
1526 1526 ids_to_remove = current_observers_ids.difference(observers_ids)
1527 1527
1528 1528 log.debug("Adding %s observers", ids_to_add)
1529 1529 log.debug("Removing %s observers", ids_to_remove)
1530 1530 changed = False
1531 1531 added_audit_observers = []
1532 1532 removed_audit_observers = []
1533 1533
1534 1534 for uid in ids_to_add:
1535 1535 changed = True
1536 1536 _usr = self._get_user(uid)
1537 1537 observer = PullRequestReviewers()
1538 1538 observer.user = _usr
1539 1539 observer.pull_request = pull_request
1540 1540 observer.reasons = observers[uid]['reasons']
1541 1541 # NOTE(marcink): mandatory shouldn't be changed now
1542 1542 # observer.mandatory = observers[uid]['mandatory']
1543 1543
1544 1544 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1545 1545 observer.role = PullRequestReviewers.ROLE_OBSERVER
1546 1546 Session().add(observer)
1547 1547 added_audit_observers.append(observer.get_dict())
1548 1548
1549 1549 for uid in ids_to_remove:
1550 1550 changed = True
1551 1551 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1552 1552 # This is an edge case that handles a previous state of having the same observer twice.
1553 1553 # This CAN happen due to the lack of DB checks.
1554 1554 observers = PullRequestReviewers.query()\
1555 1555 .filter(PullRequestReviewers.user_id == uid,
1556 1556 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1557 1557 PullRequestReviewers.pull_request == pull_request)\
1558 1558 .all()
1559 1559
1560 1560 for obj in observers:
1561 1561 removed_audit_observers.append(obj.get_dict())
1562 1562 Session().delete(obj)
1563 1563
1564 1564 if changed:
1565 1565 Session().expire_all()
1566 1566 pull_request.updated_on = datetime.datetime.now()
1567 1567 Session().add(pull_request)
1568 1568
1569 1569 # finally store audit logs
1570 1570 for user_data in added_audit_observers:
1571 1571 self._log_audit_action(
1572 1572 'repo.pull_request.observer.add', {'data': user_data},
1573 1573 user, pull_request)
1574 1574 for user_data in removed_audit_observers:
1575 1575 self._log_audit_action(
1576 1576 'repo.pull_request.observer.delete', {'old_data': user_data},
1577 1577 user, pull_request)
1578 1578
1579 1579 self.notify_observers(pull_request, ids_to_add, user)
1580 1580 return ids_to_add, ids_to_remove
1581 1581
1582 1582 def get_url(self, pull_request, request=None, permalink=False):
1583 1583 if not request:
1584 1584 request = get_current_request()
1585 1585
1586 1586 if permalink:
1587 1587 return request.route_url(
1588 1588 'pull_requests_global',
1589 1589 pull_request_id=pull_request.pull_request_id,)
1590 1590 else:
1591 1591 return request.route_url('pullrequest_show',
1592 1592 repo_name=safe_str(pull_request.target_repo.repo_name),
1593 1593 pull_request_id=pull_request.pull_request_id,)
1594 1594
1595 1595 def get_shadow_clone_url(self, pull_request, request=None):
1596 1596 """
1597 1597 Returns a qualified URL pointing to the shadow repository. If this pull
1598 1598 request is closed there is no shadow repository and ``None`` will be
1599 1599 returned.
1600 1600 """
1601 1601 if pull_request.is_closed():
1602 1602 return None
1603 1603 else:
1604 1604 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1605 1605 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1606 1606
1607 1607 def _notify_reviewers(self, pull_request, user_ids, role, user):
1608 1608 # notification to reviewers/observers
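# shared by reviewer and observer notifications; `role` is passed into
# the email context as `user_role`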
1609 1609 if not user_ids:
1610 1610 return
1611 1611
1612 1612 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1613 1613
1614 1614 pull_request_obj = pull_request
1615 1615 # get the current participants of this pull request
1616 1616 recipients = user_ids
1617 1617 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1618 1618
1619 1619 pr_source_repo = pull_request_obj.source_repo
1620 1620 pr_target_repo = pull_request_obj.target_repo
1621 1621
1622 1622 pr_url = h.route_url('pullrequest_show',
1623 1623 repo_name=pr_target_repo.repo_name,
1624 1624 pull_request_id=pull_request_obj.pull_request_id,)
1625 1625
1626 1626 # set some variables for email notification
1627 1627 pr_target_repo_url = h.route_url(
1628 1628 'repo_summary', repo_name=pr_target_repo.repo_name)
1629 1629
1630 1630 pr_source_repo_url = h.route_url(
1631 1631 'repo_summary', repo_name=pr_source_repo.repo_name)
1632 1632
1633 1633 # pull request specifics
1634 1634 pull_request_commits = [
1635 1635 (x.raw_id, x.message)
1636 1636 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1637 1637
1638 1638 current_rhodecode_user = user
1639 1639 kwargs = {
1640 1640 'user': current_rhodecode_user,
1641 1641 'pull_request_author': pull_request.author,
1642 1642 'pull_request': pull_request_obj,
1643 1643 'pull_request_commits': pull_request_commits,
1644 1644
1645 1645 'pull_request_target_repo': pr_target_repo,
1646 1646 'pull_request_target_repo_url': pr_target_repo_url,
1647 1647
1648 1648 'pull_request_source_repo': pr_source_repo,
1649 1649 'pull_request_source_repo_url': pr_source_repo_url,
1650 1650
1651 1651 'pull_request_url': pr_url,
1652 1652 'thread_ids': [pr_url],
1653 1653 'user_role': role
1654 1654 }
1655 1655
1656 1656 # create notification objects, and emails
1657 1657 NotificationModel().create(
1658 1658 created_by=current_rhodecode_user,
1659 1659 notification_subject='', # Filled in based on the notification_type
1660 1660 notification_body='', # Filled in based on the notification_type
1661 1661 notification_type=notification_type,
1662 1662 recipients=recipients,
1663 1663 email_kwargs=kwargs,
1664 1664 )
1665 1665
1666 1666 def notify_reviewers(self, pull_request, reviewers_ids, user):
1667 1667 return self._notify_reviewers(pull_request, reviewers_ids,
1668 1668 PullRequestReviewers.ROLE_REVIEWER, user)
1669 1669
1670 1670 def notify_observers(self, pull_request, observers_ids, user):
1671 1671 return self._notify_reviewers(pull_request, observers_ids,
1672 1672 PullRequestReviewers.ROLE_OBSERVER, user)
1673 1673
1674 1674 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1675 1675 commit_changes, file_changes):
1676 1676
1677 1677 updating_user_id = updating_user.user_id
1678 1678 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1679 1679 # NOTE(marcink): send notification to all other users except the
1680 1680 # person who updated the PR
1681 1681 recipients = reviewers.difference(set([updating_user_id]))
1682 1682
1683 1683 log.debug('Notify following recipients about pull-request update %s', recipients)
1684 1684
1685 1685 pull_request_obj = pull_request
1686 1686
1687 1687 # send email about the update
1688 1688 changed_files = (
1689 1689 file_changes.added + file_changes.modified + file_changes.removed)
1690 1690
1691 1691 pr_source_repo = pull_request_obj.source_repo
1692 1692 pr_target_repo = pull_request_obj.target_repo
1693 1693
1694 1694 pr_url = h.route_url('pullrequest_show',
1695 1695 repo_name=pr_target_repo.repo_name,
1696 1696 pull_request_id=pull_request_obj.pull_request_id,)
1697 1697
1698 1698 # set some variables for email notification
1699 1699 pr_target_repo_url = h.route_url(
1700 1700 'repo_summary', repo_name=pr_target_repo.repo_name)
1701 1701
1702 1702 pr_source_repo_url = h.route_url(
1703 1703 'repo_summary', repo_name=pr_source_repo.repo_name)
1704 1704
1705 1705 email_kwargs = {
1706 1706 'date': datetime.datetime.now(),
1707 1707 'updating_user': updating_user,
1708 1708
1709 1709 'pull_request': pull_request_obj,
1710 1710
1711 1711 'pull_request_target_repo': pr_target_repo,
1712 1712 'pull_request_target_repo_url': pr_target_repo_url,
1713 1713
1714 1714 'pull_request_source_repo': pr_source_repo,
1715 1715 'pull_request_source_repo_url': pr_source_repo_url,
1716 1716
1717 1717 'pull_request_url': pr_url,
1718 1718
1719 1719 'ancestor_commit_id': ancestor_commit_id,
1720 1720 'added_commits': commit_changes.added,
1721 1721 'removed_commits': commit_changes.removed,
1722 1722 'changed_files': changed_files,
1723 1723 'added_files': file_changes.added,
1724 1724 'modified_files': file_changes.modified,
1725 1725 'removed_files': file_changes.removed,
1726 1726 'thread_ids': [pr_url],
1727 1727 }
1728 1728
1729 1729 # create notification objects, and emails
1730 1730 NotificationModel().create(
1731 1731 created_by=updating_user,
1732 1732 notification_subject='', # Filled in based on the notification_type
1733 1733 notification_body='', # Filled in based on the notification_type
1734 1734 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1735 1735 recipients=recipients,
1736 1736 email_kwargs=email_kwargs,
1737 1737 )
1738 1738
1739 1739 def delete(self, pull_request, user=None):
1740 1740 if not user:
1741 1741 user = getattr(get_current_rhodecode_user(), 'username', None)
1742 1742
1743 1743 pull_request = self.__get_pull_request(pull_request)
1744 1744 old_data = pull_request.get_api_data(with_merge_state=False)
1745 1745 self._cleanup_merge_workspace(pull_request)
1746 1746 self._log_audit_action(
1747 1747 'repo.pull_request.delete', {'old_data': old_data},
1748 1748 user, pull_request)
1749 1749 Session().delete(pull_request)
1750 1750
1751 1751 def close_pull_request(self, pull_request, user):
1752 1752 pull_request = self.__get_pull_request(pull_request)
1753 1753 self._cleanup_merge_workspace(pull_request)
1754 1754 pull_request.status = PullRequest.STATUS_CLOSED
1755 1755 pull_request.updated_on = datetime.datetime.now()
1756 1756 Session().add(pull_request)
1757 1757 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1758 1758
1759 1759 pr_data = pull_request.get_api_data(with_merge_state=False)
1760 1760 self._log_audit_action(
1761 1761 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1762 1762
1763 1763 def close_pull_request_with_comment(
1764 1764 self, pull_request, user, repo, message=None, auth_user=None):
1765 1765
1766 1766 pull_request_review_status = pull_request.calculated_review_status()
1767 1767
1768 1768 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1769 1769 # approved only if we have voting consent
1770 1770 status = ChangesetStatus.STATUS_APPROVED
1771 1771 else:
1772 1772 status = ChangesetStatus.STATUS_REJECTED
1773 1773 status_lbl = ChangesetStatus.get_status_lbl(status)
1774 1774
1775 1775 default_message = (
1776 1776 'Closing with status change {transition_icon} {status}.'
1777 1777 ).format(transition_icon='>', status=status_lbl)
1778 1778 text = message or default_message
1779 1779
1780 1780 # create a comment, and link it to new status
1781 1781 comment = CommentsModel().create(
1782 1782 text=text,
1783 1783 repo=repo.repo_id,
1784 1784 user=user.user_id,
1785 1785 pull_request=pull_request.pull_request_id,
1786 1786 status_change=status_lbl,
1787 1787 status_change_type=status,
1788 1788 closing_pr=True,
1789 1789 auth_user=auth_user,
1790 1790 )
1791 1791
1792 1792 # calculate old status before we change it
1793 1793 old_calculated_status = pull_request.calculated_review_status()
1794 1794 ChangesetStatusModel().set_status(
1795 1795 repo.repo_id,
1796 1796 status,
1797 1797 user.user_id,
1798 1798 comment=comment,
1799 1799 pull_request=pull_request.pull_request_id
1800 1800 )
1801 1801
1802 1802 Session().flush()
1803 1803
1804 1804 self.trigger_pull_request_hook(pull_request, user, 'comment',
1805 1805 data={'comment': comment})
1806 1806
1807 1807 # we now calculate the status of the pull request again and, based on
1808 1808 # that calculation, trigger a status change. This matters when a
1809 1809 # non-reviewer admin closes a PR: their vote doesn't change the status,
1810 1810 # while a reviewer's vote might.
1811 1811 calculated_status = pull_request.calculated_review_status()
1812 1812 if old_calculated_status != calculated_status:
1813 1813 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1814 1814 data={'status': calculated_status})
1815 1815
1816 1816 # finally close the PR
1817 1817 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1818 1818
1819 1819 return comment, status
1820 1820
1821 1821 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1822 1822 _ = translator or get_current_request().translate
1823 1823
1824 1824 if not self._is_merge_enabled(pull_request):
1825 1825 return None, False, _('Server-side pull request merging is disabled.')
1826 1826
1827 1827 if pull_request.is_closed():
1828 1828 return None, False, _('This pull request is closed.')
1829 1829
1830 1830 merge_possible, msg = self._check_repo_requirements(
1831 1831 target=pull_request.target_repo, source=pull_request.source_repo,
1832 1832 translator=_)
1833 1833 if not merge_possible:
1834 1834 return None, merge_possible, msg
1835 1835
1836 1836 try:
1837 1837 merge_response = self._try_merge(
1838 1838 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1839 1839 log.debug("Merge response: %s", merge_response)
1840 1840 return merge_response, merge_response.possible, merge_response.merge_status_message
1841 1841 except NotImplementedError:
1842 1842 return None, False, _('Pull request merging is not supported.')
1843 1843
1844 1844 def _check_repo_requirements(self, target, source, translator):
1845 1845 """
1846 1846 Check if `target` and `source` have compatible requirements.
1847 1847
1848 1848 Currently this is just checking for largefiles.
1849 1849 """
1850 1850 _ = translator
1851 1851 target_has_largefiles = self._has_largefiles(target)
1852 1852 source_has_largefiles = self._has_largefiles(source)
1853 1853 merge_possible = True
1854 1854 message = u''
1855 1855
1856 1856 if target_has_largefiles != source_has_largefiles:
1857 1857 merge_possible = False
1858 1858 if source_has_largefiles:
1859 1859 message = _(
1860 1860 'Target repository large files support is disabled.')
1861 1861 else:
1862 1862 message = _(
1863 1863 'Source repository large files support is disabled.')
1864 1864
1865 1865 return merge_possible, message
1866 1866
1867 1867 def _has_largefiles(self, repo):
1868 1868 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1869 1869 'extensions', 'largefiles')
1870 1870 return largefiles_ui and largefiles_ui[0].active
1871 1871
1872 1872 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1873 1873 """
1874 1874 Try to merge the pull request and return the merge status.
1875 1875 """
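# reuse the cached merge state when source/target tips are unchanged,
# otherwise re-run a dry-run merge in the shadow repository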
1876 1876 log.debug(
1877 1877 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1878 1878 pull_request.pull_request_id, force_shadow_repo_refresh)
1879 1879 target_vcs = pull_request.target_repo.scm_instance()
1880 1880 # Refresh the target reference.
1881 1881 try:
1882 1882 target_ref = self._refresh_reference(
1883 1883 pull_request.target_ref_parts, target_vcs)
1884 1884 except CommitDoesNotExistError:
1885 1885 merge_state = MergeResponse(
1886 1886 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1887 1887 metadata={'target_ref': pull_request.target_ref_parts})
1888 1888 return merge_state
1889 1889
1890 1890 target_locked = pull_request.target_repo.locked
1891 1891 if target_locked and target_locked[0]:
1892 1892 locked_by = 'user:{}'.format(target_locked[0])
1893 1893 log.debug("The target repository is locked by %s.", locked_by)
1894 1894 merge_state = MergeResponse(
1895 1895 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1896 1896 metadata={'locked_by': locked_by})
1897 1897 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1898 1898 pull_request, target_ref):
1899 1899 log.debug("Refreshing the merge status of the repository.")
1900 1900 merge_state = self._refresh_merge_state(
1901 1901 pull_request, target_vcs, target_ref)
1902 1902 else:
1903 1903 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1904 1904 metadata = {
1905 1905 'unresolved_files': '',
1906 1906 'target_ref': pull_request.target_ref_parts,
1907 1907 'source_ref': pull_request.source_ref_parts,
1908 1908 }
1909 1909 if pull_request.last_merge_metadata:
1910 1910 metadata.update(pull_request.last_merge_metadata_parsed)
1911 1911
1912 1912 if not possible and target_ref.type == 'branch':
1913 1913 # NOTE(marcink): case for mercurial multiple heads on branch
1914 1914 heads = target_vcs._heads(target_ref.name)
1915 1915 if len(heads) != 1:
1916 1916 heads = ',\n'.join(heads)
1917 1917 metadata.update({
1918 1918 'heads': heads
1919 1919 })
1920 1920
1921 1921 merge_state = MergeResponse(
1922 1922 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1923 1923
1924 1924 return merge_state
1925 1925
1926 1926 def _refresh_reference(self, reference, vcs_repository):
1927 1927 if reference.type in self.UPDATABLE_REF_TYPES:
1928 1928 name_or_id = reference.name
1929 1929 else:
1930 1930 name_or_id = reference.commit_id
1931 1931
1932 1932 refreshed_commit = vcs_repository.get_commit(name_or_id)
1933 1933 refreshed_reference = Reference(
1934 1934 reference.type, reference.name, refreshed_commit.raw_id)
1935 1935 return refreshed_reference
1936 1936
1937 1937 def _needs_merge_state_refresh(self, pull_request, target_reference):
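# the cached state is fresh only if the newest source revision and the
# target tip both match what was last merge-tested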
1938 1938 return not (
1939 1939 pull_request.revisions and
1940 1940 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1941 1941 target_reference.commit_id == pull_request._last_merge_target_rev)
1942 1942
1943 1943 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1944 1944 workspace_id = self._workspace_id(pull_request)
1945 1945 source_vcs = pull_request.source_repo.scm_instance()
1946 1946 repo_id = pull_request.target_repo.repo_id
1947 1947 use_rebase = self._use_rebase_for_merging(pull_request)
1948 1948 close_branch = self._close_branch_before_merging(pull_request)
1949 1949 merge_state = target_vcs.merge(
1950 1950 repo_id, workspace_id,
1951 1951 target_reference, source_vcs, pull_request.source_ref_parts,
1952 1952 dry_run=True, use_rebase=use_rebase,
1953 1953 close_branch=close_branch)
1954 1954
1955 1955 # Do not store the response if there was an unknown error.
1956 1956 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1957 1957 pull_request._last_merge_source_rev = \
1958 1958 pull_request.source_ref_parts.commit_id
1959 1959 pull_request._last_merge_target_rev = target_reference.commit_id
1960 1960 pull_request.last_merge_status = merge_state.failure_reason
1961 1961 pull_request.last_merge_metadata = merge_state.metadata
1962 1962
1963 1963 pull_request.shadow_merge_ref = merge_state.merge_ref
1964 1964 Session().add(pull_request)
1965 1965 Session().commit()
1966 1966
1967 1967 return merge_state
1968 1968
1969 1969 def _workspace_id(self, pull_request):
1970 1970 workspace_id = 'pr-%s' % pull_request.pull_request_id
1971 1971 return workspace_id
1972 1972
1973 1973 def generate_repo_data(self, repo, commit_id=None, branch=None,
1974 1974 bookmark=None, translator=None):
1975 1975 from rhodecode.model.repo import RepoModel
1976 1976
1977 1977 all_refs, selected_ref = \
1978 1978 self._get_repo_pullrequest_sources(
1979 1979 repo.scm_instance(), commit_id=commit_id,
1980 1980 branch=branch, bookmark=bookmark, translator=translator)
1981 1981
1982 1982 refs_select2 = []
1983 1983 for element in all_refs:
1984 1984 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1985 1985 refs_select2.append({'text': element[1], 'children': children})
1986 1986
1987 1987 return {
1988 1988 'user': {
1989 1989 'user_id': repo.user.user_id,
1990 1990 'username': repo.user.username,
1991 1991 'firstname': repo.user.first_name,
1992 1992 'lastname': repo.user.last_name,
1993 1993 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1994 1994 },
1995 1995 'name': repo.repo_name,
1996 1996 'link': RepoModel().get_url(repo),
1997 1997 'description': h.chop_at_smart(repo.description_safe, '\n'),
1998 1998 'refs': {
1999 1999 'all_refs': all_refs,
2000 2000 'selected_ref': selected_ref,
2001 2001 'select2_refs': refs_select2
2002 2002 }
2003 2003 }
2004 2004
2005 2005 def generate_pullrequest_title(self, source, source_ref, target):
2006 2006 return u'{source}#{at_ref} to {target}'.format(
2007 2007 source=source,
2008 2008 at_ref=source_ref,
2009 2009 target=target,
2010 2010 )
2011 2011
2012 2012 def _cleanup_merge_workspace(self, pull_request):
2013 2013 # Merging related cleanup
2014 2014 repo_id = pull_request.target_repo.repo_id
2015 2015 target_scm = pull_request.target_repo.scm_instance()
2016 2016 workspace_id = self._workspace_id(pull_request)
2017 2017
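# not all backends implement shadow merge workspaces; ignore the
# cleanup for those that don't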
2018 2018 try:
2019 2019 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2020 2020 except NotImplementedError:
2021 2021 pass
2022 2022
2023 2023 def _get_repo_pullrequest_sources(
2024 2024 self, repo, commit_id=None, branch=None, bookmark=None,
2025 2025 translator=None):
2026 2026 """
2027 2027 Return a structure with repo's interesting commits, suitable for
2028 2028 the selectors in pullrequest controller
2029 2029
2030 2030 :param commit_id: a commit that must be in the list somehow
2031 2031 and selected by default
2032 2032 :param branch: a branch that must be in the list and selected
2033 2033 by default - even if closed
2034 2034 :param bookmark: a bookmark that must be in the list and selected by default
2035 2035 """
2036 2036 _ = translator or get_current_request().translate
2037 2037
2038 2038 commit_id = safe_str(commit_id) if commit_id else None
2039 2039 branch = safe_str(branch) if branch else None
2040 2040 bookmark = safe_str(bookmark) if bookmark else None
2041 2041
2042 2042 selected = None
2043 2043
2044 2044 # order matters: first source that has commit_id in it will be selected
2045 2045 sources = []
2046 2046 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2047 2047 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2048 2048
2049 2049 if commit_id:
2050 2050 ref_commit = (h.short_id(commit_id), commit_id)
2051 2051 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2052 2052
2053 2053 sources.append(
2054 2054 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2055 2055 )
2056 2056
2057 2057 groups = []
2058 2058
2059 2059 for group_key, ref_list, group_name, match in sources:
2060 2060 group_refs = []
2061 2061 for ref_name, ref_id in ref_list:
2062 2062 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2063 2063 group_refs.append((ref_key, ref_name))
2064 2064
2065 2065 if not selected:
2066 2066 if set([commit_id, match]) & set([ref_id, ref_name]):
2067 2067 selected = ref_key
2068 2068
2069 2069 if group_refs:
2070 2070 groups.append((group_refs, group_name))
2071 2071
2072 2072 if not selected:
2073 2073 ref = commit_id or branch or bookmark
2074 2074 if ref:
2075 2075 raise CommitDoesNotExistError(
2076 2076 u'No commit refs could be found matching: {}'.format(ref))
2077 2077 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2078 2078 selected = u'branch:{}:{}'.format(
2079 2079 safe_str(repo.DEFAULT_BRANCH_NAME),
2080 2080 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2081 2081 )
2082 2082 elif repo.commit_ids:
2083 2083 # make the user select in this case
2084 2084 selected = None
2085 2085 else:
2086 2086 raise EmptyRepositoryError()
2087 2087 return groups, selected
2088 2088
2089 2089 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2090 2090 hide_whitespace_changes, diff_context):
2091 2091
2092 2092 return self._get_diff_from_pr_or_version(
2093 2093 source_repo, source_ref_id, target_ref_id,
2094 2094 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2095 2095
2096 2096 def _get_diff_from_pr_or_version(
2097 2097 self, source_repo, source_ref_id, target_ref_id,
2098 2098 hide_whitespace_changes, diff_context):
2099 2099
2100 2100 target_commit = source_repo.get_commit(
2101 2101 commit_id=safe_str(target_ref_id))
2102 2102 source_commit = source_repo.get_commit(
2103 2103 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2104 2104 if isinstance(source_repo, Repository):
2105 2105 vcs_repo = source_repo.scm_instance()
2106 2106 else:
2107 2107 vcs_repo = source_repo
2108 2108
2109 2109 # TODO: johbo: In the context of an update, we cannot reach
2110 2110 # the old commit anymore with our normal mechanisms. It needs
2111 2111 # some sort of special support in the vcs layer to avoid this
2112 2112 # workaround.
2113 2113 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2114 2114 vcs_repo.alias == 'git'):
2115 2115 source_commit.raw_id = safe_str(source_ref_id)
2116 2116
2117 2117 log.debug('calculating diff between '
2118 2118 'source_ref:%s and target_ref:%s for repo `%s`',
2119 2119 source_ref_id, target_ref_id,
2120 2120 safe_str(vcs_repo.path))
2121 2121
2122 2122 vcs_diff = vcs_repo.get_diff(
2123 2123 commit1=target_commit, commit2=source_commit,
2124 2124 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2125 2125 return vcs_diff
2126 2126
2127 2127 def _is_merge_enabled(self, pull_request):
2128 2128 return self._get_general_setting(
2129 2129 pull_request, 'rhodecode_pr_merge_enabled')
2130 2130
2131 2131 def _use_rebase_for_merging(self, pull_request):
2132 2132 repo_type = pull_request.target_repo.repo_type
2133 2133 if repo_type == 'hg':
2134 2134 return self._get_general_setting(
2135 2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2136 2136 elif repo_type == 'git':
2137 2137 return self._get_general_setting(
2138 2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2139 2139
2140 2140 return False
2141 2141
2142 2142 def _user_name_for_merging(self, pull_request, user):
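# the RC_MERGE_USER_NAME_ATTR environment variable lets a deployment
# pick which User attribute (e.g. 'username') is used as the merge
# author name; otherwise 'short_contact' is used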
2143 2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2144 2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2145 2145 user_name_attr = env_user_name_attr
2146 2146 else:
2147 2147 user_name_attr = 'short_contact'
2148 2148
2149 2149 user_name = getattr(user, user_name_attr)
2150 2150 return user_name
2151 2151
2152 2152 def _close_branch_before_merging(self, pull_request):
2153 2153 repo_type = pull_request.target_repo.repo_type
2154 2154 if repo_type == 'hg':
2155 2155 return self._get_general_setting(
2156 2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2157 2157 elif repo_type == 'git':
2158 2158 return self._get_general_setting(
2159 2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2160 2160
2161 2161 return False
2162 2162
2163 2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2164 2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2165 2165 settings = settings_model.get_general_settings()
2166 2166 return settings.get(settings_key, default)
2167 2167
2168 2168 def _log_audit_action(self, action, action_data, user, pull_request):
2169 2169 audit_logger.store(
2170 2170 action=action,
2171 2171 action_data=action_data,
2172 2172 user=user,
2173 2173 repo=pull_request.target_repo)
2174 2174
2175 2175 def get_reviewer_functions(self):
2176 2176 """
2177 2177 Fetches functions for validating and fetching default reviewers.
2178 2178 If available we use the EE package, else we fall back to the CE
2179 2179 package functions
2180 2180 """
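# usage sketch (names on the left are illustrative):
#   get_reviewers_data, validate_reviewers, validate_observers = \
#       PullRequestModel().get_reviewer_functions()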
2181 2181 try:
2182 2182 from rc_reviewers.utils import get_default_reviewers_data
2183 2183 from rc_reviewers.utils import validate_default_reviewers
2184 2184 from rc_reviewers.utils import validate_observers
2185 2185 except ImportError:
2186 2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2187 2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2188 2188 from rhodecode.apps.repository.utils import validate_observers
2189 2189
2190 2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2191 2191
2192 2192
2193 2193 class MergeCheck(object):
2194 2194 """
2195 2195 Performs merge checks and returns a check object which stores information
2196 2196 about merge errors and merge conditions
2197 2197 """
2198 2198 TODO_CHECK = 'todo'
2199 2199 PERM_CHECK = 'perm'
2200 2200 REVIEW_CHECK = 'review'
2201 2201 MERGE_CHECK = 'merge'
2202 2202 WIP_CHECK = 'wip'
2203 2203
2204 2204 def __init__(self):
2205 2205 self.review_status = None
2206 2206 self.merge_possible = None
2207 2207 self.merge_msg = ''
2208 2208 self.merge_response = None
2209 2209 self.failed = None
2210 2210 self.errors = []
2211 2211 self.error_details = OrderedDict()
2212 2212 self.source_commit = AttributeDict()
2213 2213 self.target_commit = AttributeDict()
2214 2214 self.reviewers_count = 0
2215 2215 self.observers_count = 0
2216 2216
2217 2217 def __repr__(self):
2218 2218 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2219 2219 self.merge_possible, self.failed, self.errors)
2220 2220
2221 2221 def push_error(self, error_type, message, error_key, details):
2222 2222 self.failed = True
2223 2223 self.errors.append([error_type, message])
2224 2224 self.error_details[error_key] = dict(
2225 2225 details=details,
2226 2226 error_type=error_type,
2227 2227 message=message
2228 2228 )
2229 2229
2230 2230 @classmethod
2231 2231 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2232 2232 force_shadow_repo_refresh=False):
2233 2233 _ = translator
2234 2234 merge_check = cls()
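# checks run in order: WIP marker, merge permission, target branch
# permission, review status, unresolved TODOs, then the actual merge
# simulation; with fail_early each failing check short-circuits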
2235 2235
2236 2236 # title has WIP:
2237 2237 if pull_request.work_in_progress:
2238 2238 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2239 2239
2240 2240 msg = _('WIP marker in title prevents an accidental merge.')
2241 2241 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2242 2242 if fail_early:
2243 2243 return merge_check
2244 2244
2245 2245 # permissions to merge
2246 2246 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2247 2247 if not user_allowed_to_merge:
2248 2248 log.debug("MergeCheck: cannot merge, user not allowed to perform merge.")
2249 2249
2250 2250 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2251 2251 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2252 2252 if fail_early:
2253 2253 return merge_check
2254 2254
2255 2255 # permission to merge into the target branch
2256 2256 target_commit_id = pull_request.target_ref_parts.commit_id
2257 2257 if pull_request.target_ref_parts.type == 'branch':
2258 2258 branch_name = pull_request.target_ref_parts.name
2259 2259 else:
2260 2260 # for mercurial we can always figure out the branch from the commit
2261 2261 # in case of a bookmark
2262 2262 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2263 2263 branch_name = target_commit.branch
2264 2264
2265 2265 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2266 2266 pull_request.target_repo.repo_name, branch_name)
2267 2267 if branch_perm and branch_perm == 'branch.none':
2268 2268 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2269 2269 branch_name, rule)
2270 2270 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2271 2271 if fail_early:
2272 2272 return merge_check
2273 2273
2274 2274 # review status must always be present
2275 2275 review_status = pull_request.calculated_review_status()
2276 2276 merge_check.review_status = review_status
2277 2277 merge_check.reviewers_count = pull_request.reviewers_count
2278 2278 merge_check.observers_count = pull_request.observers_count
2279 2279
2280 2280 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2281 2281 if not status_approved and merge_check.reviewers_count:
2282 2282 log.debug("MergeCheck: cannot merge, approval is pending.")
2283 2283 msg = _('Pull request reviewer approval is pending.')
2284 2284
2285 2285 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2286 2286
2287 2287 if fail_early:
2288 2288 return merge_check
2289 2289
2290 2290 # left over TODOs
2291 2291 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2292 2292 if todos:
2293 2293 log.debug("MergeCheck: cannot merge, %s "
2294 2294 "unresolved TODOs left.", len(todos))
2295 2295
2296 2296 if len(todos) == 1:
2297 2297 msg = _('Cannot merge, {} TODO still not resolved.').format(
2298 2298 len(todos))
2299 2299 else:
2300 2300 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2301 2301 len(todos))
2302 2302
2303 2303 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2304 2304
2305 2305 if fail_early:
2306 2306 return merge_check
2307 2307
2308 2308 # merge possible, here is the filesystem simulation + shadow repo
2309 2309 merge_response, merge_status, msg = PullRequestModel().merge_status(
2310 2310 pull_request, translator=translator,
2311 2311 force_shadow_repo_refresh=force_shadow_repo_refresh)
2312 2312
2313 2313 merge_check.merge_possible = merge_status
2314 2314 merge_check.merge_msg = msg
2315 2315 merge_check.merge_response = merge_response
2316 2316
2317 2317 source_ref_id = pull_request.source_ref_parts.commit_id
2318 2318 target_ref_id = pull_request.target_ref_parts.commit_id
2319 2319
2320 2320 try:
2321 2321 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2322 2322 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2323 2323 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2324 2324 merge_check.source_commit.current_raw_id = source_commit.raw_id
2325 2325 merge_check.source_commit.previous_raw_id = source_ref_id
2326 2326
2327 2327 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2328 2328 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2329 2329 merge_check.target_commit.current_raw_id = target_commit.raw_id
2330 2330 merge_check.target_commit.previous_raw_id = target_ref_id
2331 2331 except (SourceRefMissing, TargetRefMissing):
2332 2332 pass
2333 2333
2334 2334 if not merge_status:
2335 2335 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2336 2336 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2337 2337
2338 2338 if fail_early:
2339 2339 return merge_check
2340 2340
2341 2341 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2342 2342 return merge_check
2343 2343
2344 2344 @classmethod
2345 2345 def get_merge_conditions(cls, pull_request, translator):
2346 2346 _ = translator
2347 2347 merge_details = {}
2348 2348
2349 2349 model = PullRequestModel()
2350 2350 use_rebase = model._use_rebase_for_merging(pull_request)
2351 2351
2352 2352 if use_rebase:
2353 2353 merge_details['merge_strategy'] = dict(
2354 2354 details={},
2355 2355 message=_('Merge strategy: rebase')
2356 2356 )
2357 2357 else:
2358 2358 merge_details['merge_strategy'] = dict(
2359 2359 details={},
2360 2360 message=_('Merge strategy: explicit merge commit')
2361 2361 )
2362 2362
2363 2363 close_branch = model._close_branch_before_merging(pull_request)
2364 2364 if close_branch:
2365 2365 repo_type = pull_request.target_repo.repo_type
2366 2366 close_msg = ''
2367 2367 if repo_type == 'hg':
2368 2368 close_msg = _('Source branch will be closed before the merge.')
2369 2369 elif repo_type == 'git':
2370 2370 close_msg = _('Source branch will be deleted after the merge.')
2371 2371
2372 2372 merge_details['close_branch'] = dict(
2373 2373 details={},
2374 2374 message=close_msg
2375 2375 )
2376 2376
2377 2377 return merge_details
2378 2378
2379 2379
2380 2380 @dataclasses.dataclass
2381 2381 class ChangeTuple:
2382 2382 added: list
2383 2383 common: list
2384 2384 removed: list
2385 2385 total: list
2386 2386
2387 2387
2388 2388 @dataclasses.dataclass
2389 2389 class FileChangeTuple:
2390 2390 added: list
2391 2391 modified: list
2392 2392 removed: list
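# A minimal, assumed usage sketch for the two dataclasses above; the commit id
# and file name are hypothetical:
#
#     changes = ChangeTuple(added=['deadbeef'], common=[], removed=[], total=['deadbeef'])
#     file_changes = FileChangeTuple(added=['README.rst'], modified=[], removed=[])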
@@ -1,1044 +1,1044 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Scm model for RhodeCode
21 21 """
22 22
23 23 import os.path
24 24 import traceback
25 25 import logging
26 26 import io
27 27
28 28 from sqlalchemy import func
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 import rhodecode
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.lib.vcs import get_backend
34 34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 35 from rhodecode.lib.vcs.nodes import FileNode
36 36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 37 from rhodecode.lib import helpers as h, rc_cache
38 38 from rhodecode.lib.auth import (
39 39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 40 HasUserGroupPermissionAny)
41 41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 42 from rhodecode.lib import hooks_utils
43 43 from rhodecode.lib.utils import (
44 44 get_filesystem_repos, make_db_config)
45 45 from rhodecode.lib.str_utils import safe_str
46 46 from rhodecode.lib.system_info import get_system_info
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (
49 or_, false,
49 or_, false, null,
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest, FileStore)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class SimpleCachedRepoList(object):
75 75 """
76 76 Lighter version of repository iteration, without the scm initialisation
77 77 and with cache usage
78 78 """
79 79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 80 self.db_repo_list = db_repo_list
81 81 self.repos_path = repos_path
82 82 self.order_by = order_by
83 83 self.reversed = (order_by or '').startswith('-')
84 84 if not perm_set:
85 85 perm_set = ['repository.read', 'repository.write',
86 86 'repository.admin']
87 87 self.perm_set = perm_set
88 88
89 89 def __len__(self):
90 90 return len(self.db_repo_list)
91 91
92 92 def __repr__(self):
93 93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94 94
95 95 def __iter__(self):
96 96 for dbr in self.db_repo_list:
97 97 # check permission at this level
98 98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 99 dbr.repo_name, 'SimpleCachedRepoList check')
100 100 if not has_perm:
101 101 continue
102 102
103 103 tmp_d = {
104 104 'name': dbr.repo_name,
105 105 'dbrepo': dbr.get_dict(),
106 106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 107 }
108 108 yield tmp_d
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates an iterator from the given list of objects, additionally
118 118 checking permissions for them against the perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker(*self.perm_set)
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 for db_obj in self.obj_list:
139 139 # check permission at this level
140 140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 141 name = db_obj.__dict__.get(self.obj_attr, None)
142 142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
148 148 class RepoList(_PermCheckIterator):
149 149
150 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 151 if not perm_set:
152 152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 153
154 154 super().__init__(
155 155 obj_list=db_repo_list,
156 156 obj_attr='_repo_name', perm_set=perm_set,
157 157 perm_checker=HasRepoPermissionAny,
158 158 extra_kwargs=extra_kwargs)
159 159
160 160
161 161 class RepoGroupList(_PermCheckIterator):
162 162
163 163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 164 if not perm_set:
165 165 perm_set = ['group.read', 'group.write', 'group.admin']
166 166
167 167 super().__init__(
168 168 obj_list=db_repo_group_list,
169 169 obj_attr='_group_name', perm_set=perm_set,
170 170 perm_checker=HasRepoGroupPermissionAny,
171 171 extra_kwargs=extra_kwargs)
172 172
173 173
174 174 class UserGroupList(_PermCheckIterator):
175 175
176 176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 177 if not perm_set:
178 178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 179
180 180 super().__init__(
181 181 obj_list=db_user_group_list,
182 182 obj_attr='users_group_name', perm_set=perm_set,
183 183 perm_checker=HasUserGroupPermissionAny,
184 184 extra_kwargs=extra_kwargs)
185 185
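# A hedged usage sketch for the permission-checked iterators above; the query
# is illustrative, not taken from the original source:
#
#     visible_repos = RepoList(Repository.query().all())
#     for repo in visible_repos:  # yields only repos the request user may read
#         print(repo.repo_name)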
186 186
187 187 class ScmModel(BaseModel):
188 188 """
189 189 Generic Scm Model
190 190 """
191 191
192 192 @LazyProperty
193 193 def repos_path(self):
194 194 """
195 195 Gets the repositories root path from database
196 196 """
197 197
198 198 settings_model = VcsSettingsModel(sa=self.sa)
199 199 return settings_model.get_repos_location()
200 200
201 201 def repo_scan(self, repos_path=None):
202 202 """
203 203 Listing of repositories in the given path. This path should not be a
204 204 repository itself. Returns a dictionary of repository objects
205 205
206 206 :param repos_path: path to directory containing repositories
207 207 """
208 208
209 209 if repos_path is None:
210 210 repos_path = self.repos_path
211 211
212 212 log.info('scanning for repositories in %s', repos_path)
213 213
214 214 config = make_db_config()
215 215 config.set('extensions', 'largefiles', '')
216 216 repos = {}
217 217
218 218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 219 # name needs to be decomposed and put back together using the /
220 220 # since this is the internal storage separator for rhodecode
221 221 name = Repository.normalize_repo_name(name)
222 222
223 223 try:
224 224 if name in repos:
225 225 raise RepositoryError('Duplicate repository name %s '
226 226 'found in %s' % (name, path))
227 227 elif path[0] in rhodecode.BACKENDS:
228 228 backend = get_backend(path[0])
229 229 repos[name] = backend(path[1], config=config,
230 230 with_wire={"cache": False})
231 231 except OSError:
232 232 continue
233 233 except RepositoryError:
234 234 log.exception('Failed to create a repo')
235 235 continue
236 236
237 237 log.debug('found %s paths with repositories', len(repos))
238 238 return repos
239 239
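# A hedged usage sketch for repo_scan; `/srv/repos` is a hypothetical path:
#
#     scm = ScmModel()
#     repos = scm.repo_scan('/srv/repos')
#     # -> {'group/repo-name': <vcs backend instance>, ...}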
240 240 def get_repos(self, all_repos=None, sort_key=None):
241 241 """
242 242 Get all repositories from db and for each repo create its
243 243 backend instance and fill that backend with information from database
244 244
245 245 :param all_repos: list of repository names as strings
246 246 give specific repositories list, good for filtering
247 247
248 248 :param sort_key: initial sorting of repositories
249 249 """
250 250 if all_repos is None:
251 251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == null())\
253 253 .order_by(func.lower(Repository.repo_name)).all()
254 254 repo_iter = SimpleCachedRepoList(
255 255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 256 return repo_iter
257 257
258 258 def get_repo_groups(self, all_groups=None):
259 259 if all_groups is None:
260 260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == null()).all()
262 262 return [x for x in RepoGroupList(all_groups)]
263 263
264 264 def mark_for_invalidation(self, repo_name, delete=False):
265 265 """
266 266 Mark caches of this repo invalid in the database. `delete` flag
267 267 removes the cache entries
268 268
269 269 :param repo_name: the repo_name for which caches should be marked
270 270 invalid, or deleted
271 271 :param delete: delete the entry keys instead of setting bool
272 272 flag on them, and also purge caches used by the dogpile
273 273 """
274 274 repo = Repository.get_by_repo_name(repo_name)
275 275
276 276 if repo:
277 277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 278 repo_id=repo.repo_id)
279 279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280 280
281 281 repo_id = repo.repo_id
282 282 config = repo._config
283 283 config.set('extensions', 'largefiles', '')
284 284 repo.update_commit_cache(config=config, cs_cache=None)
285 285 if delete:
286 286 cache_namespace_uid = f'cache_repo.{repo_id}'
287 287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288 288
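# A hedged usage sketch; the repo name is hypothetical. With delete=True the
# dogpile cache entries are purged instead of only being flagged invalid:
#
#     ScmModel().mark_for_invalidation('group/my-repo', delete=True)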
289 289 def toggle_following_repo(self, follow_repo_id, user_id):
290 290
291 291 f = self.sa.query(UserFollowing)\
292 292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 293 .filter(UserFollowing.user_id == user_id).scalar()
294 294
295 295 if f is not None:
296 296 try:
297 297 self.sa.delete(f)
298 298 return
299 299 except Exception:
300 300 log.error(traceback.format_exc())
301 301 raise
302 302
303 303 try:
304 304 f = UserFollowing()
305 305 f.user_id = user_id
306 306 f.follows_repo_id = follow_repo_id
307 307 self.sa.add(f)
308 308 except Exception:
309 309 log.error(traceback.format_exc())
310 310 raise
311 311
312 312 def toggle_following_user(self, follow_user_id, user_id):
313 313 f = self.sa.query(UserFollowing)\
314 314 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 315 .filter(UserFollowing.user_id == user_id).scalar()
316 316
317 317 if f is not None:
318 318 try:
319 319 self.sa.delete(f)
320 320 return
321 321 except Exception:
322 322 log.error(traceback.format_exc())
323 323 raise
324 324
325 325 try:
326 326 f = UserFollowing()
327 327 f.user_id = user_id
328 328 f.follows_user_id = follow_user_id
329 329 self.sa.add(f)
330 330 except Exception:
331 331 log.error(traceback.format_exc())
332 332 raise
333 333
334 334 def is_following_repo(self, repo_name, user_id, cache=False):
335 335 r = self.sa.query(Repository)\
336 336 .filter(Repository.repo_name == repo_name).scalar()
337 337
338 338 f = self.sa.query(UserFollowing)\
339 339 .filter(UserFollowing.follows_repository == r)\
340 340 .filter(UserFollowing.user_id == user_id).scalar()
341 341
342 342 return f is not None
343 343
344 344 def is_following_user(self, username, user_id, cache=False):
345 345 u = User.get_by_username(username)
346 346
347 347 f = self.sa.query(UserFollowing)\
348 348 .filter(UserFollowing.follows_user == u)\
349 349 .filter(UserFollowing.user_id == user_id).scalar()
350 350
351 351 return f is not None
352 352
353 353 def get_followers(self, repo):
354 354 repo = self._get_repo(repo)
355 355
356 356 return self.sa.query(UserFollowing)\
357 357 .filter(UserFollowing.follows_repository == repo).count()
358 358
359 359 def get_forks(self, repo):
360 360 repo = self._get_repo(repo)
361 361 return self.sa.query(Repository)\
362 362 .filter(Repository.fork == repo).count()
363 363
364 364 def get_pull_requests(self, repo):
365 365 repo = self._get_repo(repo)
366 366 return self.sa.query(PullRequest)\
367 367 .filter(PullRequest.target_repo == repo)\
368 368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369 369
370 370 def get_artifacts(self, repo):
371 371 repo = self._get_repo(repo)
372 372 return self.sa.query(FileStore)\
373 373 .filter(FileStore.repo == repo)\
374 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375 375
376 376 def mark_as_fork(self, repo, fork, user):
377 377 repo = self._get_repo(repo)
378 378 fork = self._get_repo(fork)
379 379 if fork and repo.repo_id == fork.repo_id:
380 380 raise Exception("Cannot set repository as fork of itself")
381 381
382 382 if fork and repo.repo_type != fork.repo_type:
383 383 raise RepositoryError(
384 384 "Cannot set repository as fork of repository with other type")
385 385
386 386 repo.fork = fork
387 387 self.sa.add(repo)
388 388 return repo
389 389
390 390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
391 391 dbrepo = self._get_repo(repo)
392 392 remote_uri = remote_uri or dbrepo.clone_uri
393 393 if not remote_uri:
394 394 raise Exception("This repository doesn't have a clone uri")
395 395
396 396 repo = dbrepo.scm_instance(cache=False)
397 397 repo.config.clear_section('hooks')
398 398
399 399 try:
400 400 # NOTE(marcink): add extra validation so we skip invalid urls
401 401 # this is because these tasks can be executed via the scheduler without
402 402 # proper validation of remote_uri
403 403 if validate_uri:
404 404 config = make_db_config(clear_session=False)
405 405 url_validator(remote_uri, dbrepo.repo_type, config)
406 406 except InvalidCloneUrl:
407 407 raise
408 408
409 409 repo_name = dbrepo.repo_name
410 410 try:
411 411 # TODO: we need to make sure those operations call proper hooks!
412 412 repo.fetch(remote_uri)
413 413
414 414 self.mark_for_invalidation(repo_name)
415 415 except Exception:
416 416 log.error(traceback.format_exc())
417 417 raise
418 418
419 419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 420 dbrepo = self._get_repo(repo)
421 421 remote_uri = remote_uri or dbrepo.push_uri
422 422 if not remote_uri:
423 423 raise Exception("This repository doesn't have a push uri")
424 424
425 425 repo = dbrepo.scm_instance(cache=False)
426 426 repo.config.clear_section('hooks')
427 427
428 428 try:
429 429 # NOTE(marcink): add extra validation so we skip invalid urls
430 430 # this is due this tasks can be executed via scheduler without
431 431 # proper validation of remote_uri
432 432 if validate_uri:
433 433 config = make_db_config(clear_session=False)
434 434 url_validator(remote_uri, dbrepo.repo_type, config)
435 435 except InvalidCloneUrl:
436 436 raise
437 437
438 438 try:
439 439 repo.push(remote_uri)
440 440 except Exception:
441 441 log.error(traceback.format_exc())
442 442 raise
443 443
444 444 def commit_change(self, repo, repo_name, commit, user, author, message,
445 445 content: bytes, f_path: bytes):
446 446 """
447 447 Commits changes
448 448 """
449 449 user = self._get_user(user)
450 450
451 451 # message and author needs to be unicode
452 452 # proper backend should then translate that into required type
453 453 message = safe_str(message)
454 454 author = safe_str(author)
455 455 imc = repo.in_memory_commit
456 456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
457 457 try:
458 458 # TODO: handle pre-push action !
459 459 tip = imc.commit(
460 460 message=message, author=author, parents=[commit],
461 461 branch=commit.branch)
462 462 except Exception as e:
463 463 log.error(traceback.format_exc())
464 464 raise IMCCommitError(str(e))
465 465 finally:
466 466 # always clear caches, if commit fails we want fresh object also
467 467 self.mark_for_invalidation(repo_name)
468 468
469 469 # We trigger the post-push action
470 470 hooks_utils.trigger_post_push_hook(
471 471 username=user.username, action='push_local', hook_type='post_push',
472 472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
473 473 return tip
474 474
475 475 def _sanitize_path(self, f_path: bytes):
476 476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 477 raise NonRelativePathError(b'%b is not a relative path' % f_path)
478 478 if f_path:
479 479 f_path = os.path.normpath(f_path)
480 480 return f_path
481 481
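# A hedged sketch of the contract enforced by _sanitize_path above; the paths
# are hypothetical:
#
#     self._sanitize_path(b'docs/readme.rst')  # -> b'docs/readme.rst' (normalized)
#     self._sanitize_path(b'../etc/passwd')    # raises NonRelativePathError
#     self._sanitize_path(b'/absolute/path')   # raises NonRelativePathError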
482 482 def get_dirnode_metadata(self, request, commit, dir_node):
483 483 if not dir_node.is_dir():
484 484 return []
485 485
486 486 data = []
487 487 for node in dir_node:
488 488 if not node.is_file():
489 489 # we only process file nodes; skip everything else
490 490 continue
491 491
492 492 last_commit = node.last_commit
493 493 last_commit_date = last_commit.date
494 494 data.append({
495 495 'name': node.name,
496 496 'size': h.format_byte_size_binary(node.size),
497 497 'modified_at': h.format_date(last_commit_date),
498 498 'modified_ts': last_commit_date.isoformat(),
499 499 'revision': last_commit.revision,
500 500 'short_id': last_commit.short_id,
501 501 'message': h.escape(last_commit.message),
502 502 'author': h.escape(last_commit.author),
503 503 'user_profile': h.gravatar_with_user(
504 504 request, last_commit.author),
505 505 })
506 506
507 507 return data
508 508
509 509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 510 extended_info=False, content=False, max_file_bytes=None):
511 511 """
512 512 recursive walk in the root dir; returns a set of all paths in that dir,
513 513 based on the repository walk function
514 514
515 515 :param repo_name: name of repository
516 516 :param commit_id: commit id for which to list nodes
517 517 :param root_path: root path to list
518 518 :param flat: return as a list, if False returns a dict with description
519 519 :param extended_info: show additional info such as md5, binary, size etc
520 520 :param content: add nodes content to the return data
521 521 :param max_file_bytes: will not return file contents over this limit
522 522
523 523 """
524 524 _files = list()
525 525 _dirs = list()
526 526
527 527 try:
528 528 _repo = self._get_repo(repo_name)
529 529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 530 root_path = root_path.lstrip('/')
531 531
532 532 # get RootNode, inject pre-load options before walking
533 533 top_node = commit.get_node(root_path)
534 534 extended_info_pre_load = []
535 535 if extended_info:
536 536 extended_info_pre_load += ['md5']
537 537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
538 538
539 539 for __, dirs, files in commit.walk(top_node):
540 540
541 541 for f in files:
542 542 _content = None
543 543 _data = f_name = f.str_path
544 544
545 545 if not flat:
546 546 _data = {
547 547 "name": h.escape(f_name),
548 548 "type": "file",
549 549 }
550 550 if extended_info:
551 551 _data.update({
552 552 "md5": f.md5,
553 553 "binary": f.is_binary,
554 554 "size": f.size,
555 555 "extension": f.extension,
556 556 "mimetype": f.mimetype,
557 557 "lines": f.lines()[0]
558 558 })
559 559
560 560 if content:
561 561 over_size_limit = (max_file_bytes is not None
562 562 and f.size > max_file_bytes)
563 563 full_content = None
564 564 if not f.is_binary and not over_size_limit:
565 565 full_content = f.str_content
566 566
567 567 _data.update({
568 568 "content": full_content,
569 569 })
570 570 _files.append(_data)
571 571
572 572 for d in dirs:
573 573 _data = d_name = d.str_path
574 574 if not flat:
575 575 _data = {
576 576 "name": h.escape(d_name),
577 577 "type": "dir",
578 578 }
579 579 if extended_info:
580 580 _data.update({
581 581 "md5": "",
582 582 "binary": False,
583 583 "size": 0,
584 584 "extension": "",
585 585 })
586 586 if content:
587 587 _data.update({
588 588 "content": None
589 589 })
590 590 _dirs.append(_data)
591 591 except RepositoryError:
592 592 log.exception("Exception in get_nodes")
593 593 raise
594 594
595 595 return _dirs, _files
596 596
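# A hedged usage sketch for get_nodes; repo name and commit id are
# hypothetical. flat=True yields plain path strings, flat=False yields the
# dicts built above:
#
#     dirs, files = ScmModel().get_nodes(
#         'group/my-repo', 'deadbeef', root_path='/',
#         flat=False, extended_info=True, max_file_bytes=1024 * 1024)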
597 597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 598 """
599 599 Generate files for quick filter in files view
600 600 """
601 601
602 602 _files = list()
603 603 _dirs = list()
604 604 try:
605 605 _repo = self._get_repo(repo_name)
606 606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 607 root_path = root_path.lstrip('/')
608 608
609 609 top_node = commit.get_node(root_path)
610 610 top_node.default_pre_load = []
611 611
612 612 for __, dirs, files in commit.walk(top_node):
613 613 for f in files:
614 614
615 615 _data = {
616 616 "name": h.escape(f.str_path),
617 617 "type": "file",
618 618 }
619 619
620 620 _files.append(_data)
621 621
622 622 for d in dirs:
623 623
624 624 _data = {
625 625 "name": h.escape(d.str_path),
626 626 "type": "dir",
627 627 }
628 628
629 629 _dirs.append(_data)
630 630 except RepositoryError:
631 631 log.exception("Exception in get_quick_filter_nodes")
632 632 raise
633 633
634 634 return _dirs, _files
635 635
636 636 def get_node(self, repo_name, commit_id, file_path,
637 637 extended_info=False, content=False, max_file_bytes=None, cache=True):
638 638 """
639 639 retrieve single node from commit
640 640 """
641 641
642 642 try:
643 643
644 644 _repo = self._get_repo(repo_name)
645 645 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
646 646
647 647 file_node = commit.get_node(file_path)
648 648 if file_node.is_dir():
649 649 raise RepositoryError('The given path is a directory')
650 650
651 651 _content = None
652 652 f_name = file_node.str_path
653 653
654 654 file_data = {
655 655 "name": h.escape(f_name),
656 656 "type": "file",
657 657 }
658 658
659 659 if extended_info:
660 660 file_data.update({
661 661 "extension": file_node.extension,
662 662 "mimetype": file_node.mimetype,
663 663 })
664 664
665 665 if cache:
666 666 md5 = file_node.md5
667 667 is_binary = file_node.is_binary
668 668 size = file_node.size
669 669 else:
670 670 is_binary, md5, size, _content = file_node.metadata_uncached()
671 671
672 672 file_data.update({
673 673 "md5": md5,
674 674 "binary": is_binary,
675 675 "size": size,
676 676 })
677 677
678 678 if content and cache:
679 679 # get content + cache
680 680 size = file_node.size
681 681 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
682 682 full_content = None
683 683 all_lines = 0
684 684 if not file_node.is_binary and not over_size_limit:
685 685 full_content = safe_str(file_node.content)
686 686 all_lines, empty_lines = file_node.count_lines(full_content)
687 687
688 688 file_data.update({
689 689 "content": full_content,
690 690 "lines": all_lines
691 691 })
692 692 elif content:
693 693 # get content *without* cache
694 694 if _content is None:
695 695 is_binary, md5, size, _content = file_node.metadata_uncached()
696 696
697 697 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
698 698 full_content = None
699 699 all_lines = 0
700 700 if not is_binary and not over_size_limit:
701 701 full_content = safe_str(_content)
702 702 all_lines, empty_lines = file_node.count_lines(full_content)
703 703
704 704 file_data.update({
705 705 "content": full_content,
706 706 "lines": all_lines
707 707 })
708 708
709 709 except RepositoryError:
710 710 log.exception("Exception in get_node")
711 711 raise
712 712
713 713 return file_data
714 714
715 715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 716 """
717 717 Fetch node tree for usage in full text search
718 718 """
719 719
720 720 tree_info = list()
721 721
722 722 try:
723 723 _repo = self._get_repo(repo_name)
724 724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 725 root_path = root_path.lstrip('/')
726 726 top_node = commit.get_node(root_path)
727 727 top_node.default_pre_load = []
728 728
729 729 for __, dirs, files in commit.walk(top_node):
730 730
731 731 for f in files:
732 732 is_binary, md5, size, _content = f.metadata_uncached()
733 733 _data = {
734 734 "name": f.str_path,
735 735 "md5": md5,
736 736 "extension": f.extension,
737 737 "binary": is_binary,
738 738 "size": size
739 739 }
740 740
741 741 tree_info.append(_data)
742 742
743 743 except RepositoryError:
744 744 log.exception("Exception in get_nodes")
745 745 raise
746 746
747 747 return tree_info
748 748
749 749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
750 750 author=None, trigger_push_hook=True):
751 751 """
752 752 Commits given multiple nodes into repo
753 753
754 754 :param user: RhodeCode User object or user_id, the committer
755 755 :param repo: RhodeCode Repository object
756 756 :param message: commit message
757 757 :param nodes: mapping {filename:{'content':content},...}
758 758 :param parent_commit: parent commit; can be empty, then it's the
759 759 initial commit
760 760 :param author: author of the commit; can be different than the committer,
761 761 but only for git
762 762 :param trigger_push_hook: trigger push hooks
763 763
764 764 :returns: new committed commit
765 765 """
766 766
767 767 user = self._get_user(user)
768 768 scm_instance = repo.scm_instance(cache=False)
769 769
770 770 message = safe_str(message)
771 771 committer = user.full_contact
772 772 author = safe_str(author) if author else committer
773 773
774 774 imc = scm_instance.in_memory_commit
775 775
776 776 if not parent_commit:
777 777 parent_commit = EmptyCommit(alias=scm_instance.alias)
778 778
779 779 if isinstance(parent_commit, EmptyCommit):
780 780 # EmptyCommit means we're editing an empty repository
781 781 parents = None
782 782 else:
783 783 parents = [parent_commit]
784 784
785 785 upload_file_types = (io.BytesIO, io.BufferedRandom)
786 786 processed_nodes = []
787 787 for filename, content_dict in nodes.items():
788 788 if not isinstance(filename, bytes):
789 789 raise ValueError('filename key in nodes needs to be bytes')
790 790 content = content_dict['content']
791 791 if not isinstance(content, upload_file_types + (bytes,)):
792 792 raise ValueError(f'content key value in nodes needs to be bytes or one of {upload_file_types}')
793 793
794 794 for f_path in nodes:
795 795 f_path = self._sanitize_path(f_path)
796 796 content = nodes[f_path]['content']
797 797
798 798 # decoding here ensures that we have properly encoded values;
799 799 # in any other case this will throw exceptions and deny the commit
800 800
801 801 if isinstance(content, bytes):
802 802 pass
803 803 elif isinstance(content, upload_file_types):
804 804 content = content.read()
805 805 else:
806 806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
807 807 processed_nodes.append((f_path, content))
808 808
809 809 # add multiple nodes
810 810 for path, content in processed_nodes:
811 811 imc.add(FileNode(path, content=content))
812 812
813 813 # TODO: handle pre push scenario
814 814 tip = imc.commit(message=message,
815 815 author=author,
816 816 parents=parents,
817 817 branch=parent_commit.branch)
818 818
819 819 self.mark_for_invalidation(repo.repo_name)
820 820 if trigger_push_hook:
821 821 hooks_utils.trigger_post_push_hook(
822 822 username=user.username, action='push_local',
823 823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
824 824 hook_type='post_push',
825 825 commit_ids=[tip.raw_id])
826 826 return tip
827 827
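# A hedged sketch of the nodes mapping create_nodes expects: filename keys
# must be bytes, content must be bytes or a file-like object. The names are
# hypothetical:
#
#     nodes = {
#         b'docs/index.rst': {'content': b'Hello'},
#         b'setup.py': {'content': io.BytesIO(b'print("hi")')},
#     }
#     tip = ScmModel().create_nodes(user, repo, 'initial files', nodes)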
828 828 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
829 829 author=None, trigger_push_hook=True):
830 830 user = self._get_user(user)
831 831 scm_instance = repo.scm_instance(cache=False)
832 832
833 833 message = safe_str(message)
834 834 committer = user.full_contact
835 835 author = safe_str(author) if author else committer
836 836
837 837 imc = scm_instance.in_memory_commit
838 838
839 839 if not parent_commit:
840 840 parent_commit = EmptyCommit(alias=scm_instance.alias)
841 841
842 842 if isinstance(parent_commit, EmptyCommit):
843 843 # EmptyCommit means we're editing an empty repository
844 844 parents = None
845 845 else:
846 846 parents = [parent_commit]
847 847
848 848 # add multiple nodes
849 849 for _filename, data in nodes.items():
850 850 # new filename, which can be renamed from the old one; also sanitize
851 851 # the path against any hack with relative paths like ../../ etc.
852 852 filename = self._sanitize_path(data['filename'])
853 853 old_filename = self._sanitize_path(_filename)
854 854 content = data['content']
855 855 file_mode = data.get('mode')
856 856 filenode = FileNode(old_filename, content=content, mode=file_mode)
857 857 op = data['op']
858 858 if op == 'add':
859 859 imc.add(filenode)
860 860 elif op == 'del':
861 861 imc.remove(filenode)
862 862 elif op == 'mod':
863 863 if filename != old_filename:
864 864 # TODO: handle renames more efficiently, needs vcs lib changes
865 865 imc.remove(filenode)
866 866 imc.add(FileNode(filename, content=content, mode=file_mode))
867 867 else:
868 868 imc.change(filenode)
869 869
870 870 try:
871 871 # TODO: handle pre push scenario commit changes
872 872 tip = imc.commit(message=message,
873 873 author=author,
874 874 parents=parents,
875 875 branch=parent_commit.branch)
876 876 except NodeNotChangedError:
877 877 raise
878 878 except Exception as e:
879 879 log.exception("Unexpected exception during call to imc.commit")
880 880 raise IMCCommitError(str(e))
881 881 finally:
882 882 # always clear caches, if commit fails we want fresh object also
883 883 self.mark_for_invalidation(repo.repo_name)
884 884
885 885 if trigger_push_hook:
886 886 hooks_utils.trigger_post_push_hook(
887 887 username=user.username, action='push_local', hook_type='post_push',
888 888 repo_name=repo.repo_name, repo_type=scm_instance.alias,
889 889 commit_ids=[tip.raw_id])
890 890
891 891 return tip
892 892
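# A hedged sketch of the nodes mapping update_nodes consumes; each entry
# carries an 'op' of 'add', 'del' or 'mod', and a 'mod' with a changed
# filename is handled as remove + add (a rename). Values are hypothetical:
#
#     nodes = {
#         b'old_name.txt': {'filename': b'new_name.txt', 'content': b'...',
#                           'op': 'mod', 'mode': 0o100644},
#     }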
893 893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
894 894 author=None, trigger_push_hook=True):
895 895 """
896 896 Deletes the given nodes from `repo`
897 897
898 898 :param user: RhodeCode User object or user_id, the committer
899 899 :param repo: RhodeCode Repository object
900 900 :param message: commit message
901 901 :param nodes: mapping {filename:{'content':content},...}
902 902 :param parent_commit: parent commit; can be empty, then it's the initial
903 903 commit
904 904 :param author: author of the commit; can be different than the committer,
905 905 but only for git
906 906 :param trigger_push_hook: trigger push hooks
907 907
908 908 :returns: new commit after deletion
909 909 """
910 910
911 911 user = self._get_user(user)
912 912 scm_instance = repo.scm_instance(cache=False)
913 913
914 914 processed_nodes = []
915 915 for f_path in nodes:
916 916 f_path = self._sanitize_path(f_path)
917 917 # content can be empty, but for compatibility this allows the same dict
918 918 # structure as create_nodes
919 919 content = nodes[f_path].get('content')
920 920 processed_nodes.append((safe_bytes(f_path), content))
921 921
922 922 message = safe_str(message)
923 923 committer = user.full_contact
924 924 author = safe_str(author) if author else committer
925 925
926 926 imc = scm_instance.in_memory_commit
927 927
928 928 if not parent_commit:
929 929 parent_commit = EmptyCommit(alias=scm_instance.alias)
930 930
931 931 if isinstance(parent_commit, EmptyCommit):
932 932 # EmptyCommit means we're editing an empty repository
933 933 parents = None
934 934 else:
935 935 parents = [parent_commit]
936 936 # add multiple nodes
937 937 for path, content in processed_nodes:
938 938 imc.remove(FileNode(path, content=content))
939 939
940 940 # TODO: handle pre push scenario
941 941 tip = imc.commit(message=message,
942 942 author=author,
943 943 parents=parents,
944 944 branch=parent_commit.branch)
945 945
946 946 self.mark_for_invalidation(repo.repo_name)
947 947 if trigger_push_hook:
948 948 hooks_utils.trigger_post_push_hook(
949 949 username=user.username, action='push_local', hook_type='post_push',
950 950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
951 951 commit_ids=[tip.raw_id])
952 952 return tip
953 953
954 954 def strip(self, repo, commit_id, branch):
955 955 scm_instance = repo.scm_instance(cache=False)
956 956 scm_instance.config.clear_section('hooks')
957 957 scm_instance.strip(commit_id, branch)
958 958 self.mark_for_invalidation(repo.repo_name)
959 959
960 960 def get_unread_journal(self):
961 961 return self.sa.query(UserLog).count()
962 962
963 963 @classmethod
964 964 def backend_landing_ref(cls, repo_type):
965 965 """
966 966 Return a default landing ref based on a repository type.
967 967 """
968 968
969 969 landing_ref = {
970 970 'hg': ('branch:default', 'default'),
971 971 'git': ('branch:master', 'master'),
972 972 'svn': ('rev:tip', 'latest tip'),
973 973 'default': ('rev:tip', 'latest tip'),
974 974 }
975 975
976 976 return landing_ref.get(repo_type) or landing_ref['default']
977 977
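# A hedged usage sketch for backend_landing_ref above:
#
#     ScmModel.backend_landing_ref('git')      # -> ('branch:master', 'master')
#     ScmModel.backend_landing_ref('unknown')  # -> ('rev:tip', 'latest tip')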
978 978 def get_repo_landing_revs(self, translator, repo=None):
979 979 """
980 980 Generates select options with tags, branches and bookmarks (for hg only),
981 981 grouped by type
982 982
983 983 :param repo:
984 984 """
985 985 from rhodecode.lib.vcs.backends.git import GitRepository
986 986
987 987 _ = translator
988 988 repo = self._get_repo(repo)
989 989
990 990 if repo:
991 991 repo_type = repo.repo_type
992 992 else:
993 993 repo_type = 'default'
994 994
995 995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
996 996
997 997 default_ref_options = [
998 998 [default_landing_ref, landing_ref_lbl]
999 999 ]
1000 1000 default_choices = [
1001 1001 default_landing_ref
1002 1002 ]
1003 1003
1004 1004 if not repo:
1005 1005 # presented at NEW repo creation
1006 1006 return default_choices, default_ref_options
1007 1007
1008 1008 repo = repo.scm_instance()
1009 1009
1010 1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1011 1011 choices = [default_landing_ref]
1012 1012
1013 1013 # branches
1014 1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1015 1015 if not branch_group:
1016 1016 # new repo, or maybe a repo without any branches?
1017 1017 branch_group = default_ref_options
1018 1018
1019 1019 branches_group = (branch_group, _("Branches"))
1020 1020 ref_options.append(branches_group)
1021 1021 choices.extend([x[0] for x in branches_group[0]])
1022 1022
1023 1023 # bookmarks for HG
1024 1024 if repo.alias == 'hg':
1025 1025 bookmarks_group = (
1026 1026 [(f'book:{safe_str(b)}', safe_str(b))
1027 1027 for b in repo.bookmarks],
1028 1028 _("Bookmarks"))
1029 1029 ref_options.append(bookmarks_group)
1030 1030 choices.extend([x[0] for x in bookmarks_group[0]])
1031 1031
1032 1032 # tags
1033 1033 tags_group = (
1034 1034 [(f'tag:{safe_str(t)}', safe_str(t))
1035 1035 for t in repo.tags],
1036 1036 _("Tags"))
1037 1037 ref_options.append(tags_group)
1038 1038 choices.extend([x[0] for x in tags_group[0]])
1039 1039
1040 1040 return choices, ref_options
1041 1041
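# A hedged sketch of the (choices, ref_options) pair built above, for a
# hypothetical hg repo with one bookmark and one tag. Note the default ref
# appears twice in choices, since it is prepended before the branch group
# is extended:
#
#     choices     -> ['branch:default', 'branch:default', 'book:main', 'tag:v1.0']
#     ref_options -> [('branch:default', 'default'),
#                     ([('branch:default', 'default')], 'Branches'),
#                     ([('book:main', 'main')], 'Bookmarks'),
#                     ([('tag:v1.0', 'v1.0')], 'Tags')]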
1042 1042 def get_server_info(self, environ=None):
1043 1043 server_info = get_system_info(environ)
1044 1044 return server_info