##// END OF EJS Templates
lint: use null() instead of == None comparisons to keep linters happy
super-admin -
r5180:0f2a8907 default
parent child Browse files
Show More
@@ -1,412 +1,412 b''
1 # Copyright (C) 2012-2023 RhodeCode GmbH
1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 RhodeCode task modules, containing all task that suppose to be run
20 RhodeCode task modules, containing all task that suppose to be run
21 by celery daemon
21 by celery daemon
22 """
22 """
23
23
24 import os
24 import os
25 import time
25 import time
26
26
27 from pyramid_mailer.mailer import Mailer
27 from pyramid_mailer.mailer import Mailer
28 from pyramid_mailer.message import Message
28 from pyramid_mailer.message import Message
29 from email.utils import formatdate
29 from email.utils import formatdate
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib import audit_logger
32 from rhodecode.lib import audit_logger
33 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
33 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
34 from rhodecode.lib import hooks_base
34 from rhodecode.lib import hooks_base
35 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
35 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
36 from rhodecode.lib.statsd_client import StatsdClient
36 from rhodecode.lib.statsd_client import StatsdClient
37 from rhodecode.model.db import (
37 from rhodecode.model.db import (
38 Session, IntegrityError, true, Repository, RepoGroup, User)
38 true, null, Session, IntegrityError, Repository, RepoGroup, User)
39 from rhodecode.model.permission import PermissionModel
39 from rhodecode.model.permission import PermissionModel
40
40
41
41
@async_task(ignore_result=True, base=RequestContextTask)
def send_email(recipients, subject, body='', html_body='', email_config=None,
               extra_headers=None):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients; if this is empty the configured
        'email_to' address plus all super-admin emails are used instead
    :param subject: subject of the mail
    :param body: plaintext body of the mail
    :param html_body: html version of body
    :param email_config: specify custom configuration for mailer, defaults
        to the application config (rhodecode.CONFIG)
    :param extra_headers: dict of custom headers; the special key
        'thread_ids' is converted into a 'References' header
    :return: True on success, False when no SMTP server is configured or
        sending failed
    """
    log = get_logger(send_email)

    email_config = email_config or rhodecode.CONFIG

    mail_server = email_config.get('smtp_server') or None
    if mail_server is None:
        log.error("SMTP server information missing. Sending email failed. "
                  "Make sure that `smtp_server` variable is configured "
                  "inside the .ini file")
        return False

    subject = f"{email_config.get('email_prefix', '')} {subject}"

    if recipients:
        if isinstance(recipients, str):
            recipients = recipients.split(',')
    else:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in
                  User.query().filter(User.admin == true()).all() if u.email]
        recipients = []
        config_email = email_config.get('email_to')
        if config_email:
            recipients += [config_email]
        recipients += admins

    # translate our LEGACY config into the one that pyramid_mailer supports
    email_conf = dict(
        host=mail_server,
        port=email_config.get('smtp_port', 25),
        username=email_config.get('smtp_username'),
        password=email_config.get('smtp_password'),

        tls=str2bool(email_config.get('smtp_use_tls')),
        ssl=str2bool(email_config.get('smtp_use_ssl')),

        # SSL key file
        # keyfile='',

        # SSL certificate file
        # certfile='',

        # Location of maildir
        # queue_path='',

        default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),

        debug=str2bool(email_config.get('smtp_debug')),
        # /usr/sbin/sendmail Sendmail executable
        # sendmail_app='',

        # {sendmail_app} -t -i -f {sender} Template for sendmail execution
        # sendmail_template='',
    )

    if extra_headers is None:
        extra_headers = {}

    # make sure each outgoing mail carries a Date header
    extra_headers.setdefault('Date', formatdate(time.time()))

    if 'thread_ids' in extra_headers:
        thread_ids = extra_headers.pop('thread_ids')
        extra_headers['References'] = ' '.join(f'<{t}>' for t in thread_ids)

    try:
        mailer = Mailer(**email_conf)

        message = Message(subject=subject,
                          sender=email_conf['default_sender'],
                          recipients=recipients,
                          body=body, html=html_body,
                          extra_headers=extra_headers)
        mailer.send_immediately(message)
        statsd = StatsdClient.statsd
        if statsd:
            statsd.incr('rhodecode_email_sent_total')

    except Exception:
        log.exception('Mail sending failed')
        return False
    return True
139
139
140
140
@async_task(ignore_result=True, base=RequestContextTask)
def create_repo(form_data, cur_user):
    """
    Create a repository database record and its filesystem backend from
    validated form data, auditing the action and cleaning up on failure.
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel
    from rhodecode.model.scm import ScmModel
    from rhodecode.model.settings import SettingsModel

    log = get_logger(create_repo)

    cur_user = UserModel()._get_user(cur_user)
    owner = cur_user

    # unpack the validated form values
    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults; private and repo_type come from the form itself
    defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = form_data.get(
        'enable_statistics', defaults.get('repo_enable_statistics'))
    enable_locking = form_data.get(
        'enable_locking', defaults.get('repo_enable_locking'))
    enable_downloads = form_data.get(
        'enable_downloads', defaults.get('repo_enable_downloads'))

    # landing rev is derived from the default branch of the chosen SCM
    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)

    try:
        RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_ref,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_locking=enable_locking,
            enable_downloads=enable_downloads,
            state=state
        )
        Session().commit()

        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel()._get_repo_group(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        hooks_base.create_repository(created_by=owner.username, **repo.get_dict())

        # seed the commit caches for the new repository
        repo.update_commit_cache()

        # flip the repo out of its pending state
        repo.set_state(Repository.STATE_CREATED)
        repo_id = repo.repo_id
        repo_data = repo.get_api_data()

        audit_logger.store(
            'repo.create', action_data={'data': repo_data},
            user=cur_user,
            repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

        Session().commit()

        PermissionModel().trigger_permission_flush()

    except Exception as exc:
        log.warning('Exception occurred when creating repository, '
                    'doing cleanup...', exc_info=True)
        if isinstance(exc, IntegrityError):
            Session().rollback()

        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            Session().commit()
            RepoModel()._delete_filesystem_repo(repo)
        log.info('Cleanup of repo %s finished', repo_name_full)
        raise

    return True
240
240
241
241
@async_task(ignore_result=True, base=RequestContextTask)
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)

    cur_user = UserModel()._get_user(cur_user)
    owner = cur_user

    # unpack the validated form values
    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    landing_ref = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_id = safe_int(form_data.get('fork_parent_id'))

    try:
        fork_of = RepoModel()._get_repo(fork_id)
        RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_ref,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions
        )

        Session().commit()

        # the fork is cloned locally from the parent's path on disk
        base_path = Repository.base_path()
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel()._get_repo_group(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        hooks_base.create_repository(created_by=owner.username, **repo.get_dict())

        # seed the commit caches, with largefiles extension enabled so the
        # cache update does not choke on largefile revisions
        config = repo._config
        config.set('extensions', 'largefiles', '')
        repo.update_commit_cache(config=config)

        # flip the repo out of its pending state
        repo.set_state(Repository.STATE_CREATED)

        repo_id = repo.repo_id
        repo_data = repo.get_api_data()
        audit_logger.store(
            'repo.fork', action_data={'data': repo_data},
            user=cur_user,
            repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

        Session().commit()
    except Exception as exc:
        log.warning('Exception occurred when forking repository, '
                    'doing cleanup...', exc_info=True)
        if isinstance(exc, IntegrityError):
            Session().rollback()

        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            Session().commit()
            RepoModel()._delete_filesystem_repo(repo)
        log.info('Cleanup of repo %s finished', repo_name_full)
        raise

    return True
328
328
329
329
@async_task(ignore_result=True, base=RequestContextTask)
def repo_maintenance(repoid):
    """Run all registered maintenance tasks for the repo given by id/name."""
    from rhodecode.lib import repo_maintenance as repo_maintenance_lib
    log = get_logger(repo_maintenance)
    repo = Repository.get_by_id_or_repo_name(repoid)
    if not repo:
        log.debug('Repo `%s` not found or without a clone_url', repoid)
        return
    maintenance = repo_maintenance_lib.RepoMaintenance()
    tasks = maintenance.get_tasks_for_repo(repo)
    log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
    executed_types = maintenance.execute(repo)
    log.debug('Got execution results %s', executed_types)
343
343
344
344
@async_task(ignore_result=True, base=RequestContextTask)
def check_for_update(send_email_notification=True, email_recipients=None):
    """
    Poll the update server, persist the latest known version and, when
    enabled and the running version is outdated, email the recipients
    (defaulting to all super-admins).
    """
    from rhodecode.model.update import UpdateModel
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(check_for_update)
    update_url = UpdateModel().get_update_url()
    cur_ver = rhodecode.__version__

    try:
        data = UpdateModel().get_update_data(update_url)

        current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
        latest_ver = data['versions'][0]['version']
        UpdateModel().store_version(latest_ver)

        if send_email_notification:
            log.debug('Send email notification is enabled. '
                      'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
            if UpdateModel().is_outdated(current_ver, latest_ver):
                email_kwargs = {
                    'current_ver': current_ver,
                    'latest_ver': latest_ver,
                }

                (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
                    EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)

                # explicit recipients win; otherwise notify every super-admin
                email_recipients = aslist(email_recipients, sep=',') or \
                    [user.email for user in User.get_all_super_admins()]
                run_task(send_email, email_recipients, subject,
                         email_body_plaintext, email_body)

    except Exception:
        log.exception('Failed to check for update')
        raise
382
382
383
383
def sync_last_update_for_objects(*args, **kwargs):
    """
    Refresh the cached commit/last-update data for repositories and
    repository groups.

    :keyword skip_repos: when truthy, repositories are not refreshed
    :keyword skip_groups: when truthy, repository groups are not refreshed
    """
    if not kwargs.get('skip_repos'):
        repos = Repository.query() \
            .order_by(Repository.group_id.asc())

        for repo in repos:
            repo.update_commit_cache()

    if not kwargs.get('skip_groups'):
        # use null() rather than `== None` so SQLAlchemy emits IS NULL and
        # linters do not flag the comparison
        repo_groups = RepoGroup.query() \
            .filter(RepoGroup.group_parent_id == null())

        for root_gr in repo_groups:
            # refresh children first so parents aggregate fresh data
            for repo_gr in reversed(root_gr.recursive_groups()):
                repo_gr.update_commit_cache()
401
401
402
402
@async_task(ignore_result=True, base=RequestContextTask)
def sync_last_update(*args, **kwargs):
    """Celery entry point that delegates to sync_last_update_for_objects."""
    sync_last_update_for_objects(*args, **kwargs)
406
406
407
407
@async_task(ignore_result=False)
def beat_check(*args, **kwargs):
    """Heartbeat task: log the invocation and return the current epoch time."""
    log = get_logger(beat_check)
    log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
    return time.time()
@@ -1,402 +1,402 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import itertools
20 import itertools
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 from rhodecode.model import BaseModel
24 from rhodecode.model import BaseModel
25 from rhodecode.model.db import (
25 from rhodecode.model.db import (
26 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
26 null, ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
27 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
27 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
28 from rhodecode.lib.markup_renderer import (
28 from rhodecode.lib.markup_renderer import (
29 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
29 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class ChangesetStatusModel(BaseModel):
34 class ChangesetStatusModel(BaseModel):
35
35
36 cls = ChangesetStatus
36 cls = ChangesetStatus
37
37
38 def __get_changeset_status(self, changeset_status):
38 def __get_changeset_status(self, changeset_status):
39 return self._get_instance(ChangesetStatus, changeset_status)
39 return self._get_instance(ChangesetStatus, changeset_status)
40
40
41 def __get_pull_request(self, pull_request):
41 def __get_pull_request(self, pull_request):
42 return self._get_instance(PullRequest, pull_request)
42 return self._get_instance(PullRequest, pull_request)
43
43
44 def _get_status_query(self, repo, revision, pull_request,
44 def _get_status_query(self, repo, revision, pull_request,
45 with_revisions=False):
45 with_revisions=False):
46 repo = self._get_repo(repo)
46 repo = self._get_repo(repo)
47
47
48 q = ChangesetStatus.query()\
48 q = ChangesetStatus.query()\
49 .filter(ChangesetStatus.repo == repo)
49 .filter(ChangesetStatus.repo == repo)
50 if not with_revisions:
50 if not with_revisions:
51 q = q.filter(ChangesetStatus.version == 0)
51 q = q.filter(ChangesetStatus.version == 0)
52
52
53 if revision:
53 if revision:
54 q = q.filter(ChangesetStatus.revision == revision)
54 q = q.filter(ChangesetStatus.revision == revision)
55 elif pull_request:
55 elif pull_request:
56 pull_request = self.__get_pull_request(pull_request)
56 pull_request = self.__get_pull_request(pull_request)
57 # TODO: johbo: Think about the impact of this join, there must
57 # TODO: johbo: Think about the impact of this join, there must
58 # be a reason why ChangesetStatus and ChanagesetComment is linked
58 # be a reason why ChangesetStatus and ChanagesetComment is linked
59 # to the pull request. Might be that we want to do the same for
59 # to the pull request. Might be that we want to do the same for
60 # the pull_request_version_id.
60 # the pull_request_version_id.
61 q = q.join(ChangesetComment).filter(
61 q = q.join(ChangesetComment).filter(
62 ChangesetStatus.pull_request == pull_request,
62 ChangesetStatus.pull_request == pull_request,
63 ChangesetComment.pull_request_version_id == None)
63 ChangesetComment.pull_request_version_id == null())
64 else:
64 else:
65 raise Exception('Please specify revision or pull_request')
65 raise Exception('Please specify revision or pull_request')
66 q = q.order_by(ChangesetStatus.version.asc())
66 q = q.order_by(ChangesetStatus.version.asc())
67 return q
67 return q
68
68
69 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
69 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
70 trim_votes=True):
70 trim_votes=True):
71 """
71 """
72 Calculate status based on given group members, and voting rule
72 Calculate status based on given group members, and voting rule
73
73
74
74
75 group1 - 4 members, 3 required for approval
75 group1 - 4 members, 3 required for approval
76 user1 - approved
76 user1 - approved
77 user2 - reject
77 user2 - reject
78 user3 - approved
78 user3 - approved
79 user4 - rejected
79 user4 - rejected
80
80
81 final_state: rejected, reasons not at least 3 votes
81 final_state: rejected, reasons not at least 3 votes
82
82
83
83
84 group1 - 4 members, 2 required for approval
84 group1 - 4 members, 2 required for approval
85 user1 - approved
85 user1 - approved
86 user2 - reject
86 user2 - reject
87 user3 - approved
87 user3 - approved
88 user4 - rejected
88 user4 - rejected
89
89
90 final_state: approved, reasons got at least 2 approvals
90 final_state: approved, reasons got at least 2 approvals
91
91
92 group1 - 4 members, ALL required for approval
92 group1 - 4 members, ALL required for approval
93 user1 - approved
93 user1 - approved
94 user2 - reject
94 user2 - reject
95 user3 - approved
95 user3 - approved
96 user4 - rejected
96 user4 - rejected
97
97
98 final_state: rejected, reasons not all approvals
98 final_state: rejected, reasons not all approvals
99
99
100
100
101 group1 - 4 members, ALL required for approval
101 group1 - 4 members, ALL required for approval
102 user1 - approved
102 user1 - approved
103 user2 - approved
103 user2 - approved
104 user3 - approved
104 user3 - approved
105 user4 - approved
105 user4 - approved
106
106
107 final_state: approved, reason all approvals received
107 final_state: approved, reason all approvals received
108
108
109 group1 - 4 members, 5 required for approval
109 group1 - 4 members, 5 required for approval
110 (approval should be shorted to number of actual members)
110 (approval should be shorted to number of actual members)
111
111
112 user1 - approved
112 user1 - approved
113 user2 - approved
113 user2 - approved
114 user3 - approved
114 user3 - approved
115 user4 - approved
115 user4 - approved
116
116
117 final_state: approved, reason all approvals received
117 final_state: approved, reason all approvals received
118
118
119 """
119 """
120 group_vote_data = {}
120 group_vote_data = {}
121 got_rule = False
121 got_rule = False
122 members = collections.OrderedDict()
122 members = collections.OrderedDict()
123 for review_obj, user, reasons, mandatory, statuses \
123 for review_obj, user, reasons, mandatory, statuses \
124 in group_statuses_by_reviewers:
124 in group_statuses_by_reviewers:
125
125
126 if not got_rule:
126 if not got_rule:
127 group_vote_data = review_obj.rule_user_group_data()
127 group_vote_data = review_obj.rule_user_group_data()
128 got_rule = bool(group_vote_data)
128 got_rule = bool(group_vote_data)
129
129
130 members[user.user_id] = statuses
130 members[user.user_id] = statuses
131
131
132 if not group_vote_data:
132 if not group_vote_data:
133 return []
133 return []
134
134
135 required_votes = group_vote_data['vote_rule']
135 required_votes = group_vote_data['vote_rule']
136 if required_votes == -1:
136 if required_votes == -1:
137 # -1 means all required, so we replace it with how many people
137 # -1 means all required, so we replace it with how many people
138 # are in the members
138 # are in the members
139 required_votes = len(members)
139 required_votes = len(members)
140
140
141 if trim_votes and required_votes > len(members):
141 if trim_votes and required_votes > len(members):
142 # we require more votes than we have members in the group
142 # we require more votes than we have members in the group
143 # in this case we trim the required votes to the number of members
143 # in this case we trim the required votes to the number of members
144 required_votes = len(members)
144 required_votes = len(members)
145
145
146 approvals = sum([
146 approvals = sum([
147 1 for statuses in members.values()
147 1 for statuses in members.values()
148 if statuses and
148 if statuses and
149 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
149 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
150
150
151 calculated_votes = []
151 calculated_votes = []
152 # we have all votes from users, now check if we have enough votes
152 # we have all votes from users, now check if we have enough votes
153 # to fill other
153 # to fill other
154 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
154 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
155 if approvals >= required_votes:
155 if approvals >= required_votes:
156 fill_in = ChangesetStatus.STATUS_APPROVED
156 fill_in = ChangesetStatus.STATUS_APPROVED
157
157
158 for member, statuses in members.items():
158 for member, statuses in members.items():
159 if statuses:
159 if statuses:
160 ver, latest = statuses[0]
160 ver, latest = statuses[0]
161 if fill_in == ChangesetStatus.STATUS_APPROVED:
161 if fill_in == ChangesetStatus.STATUS_APPROVED:
162 calculated_votes.append(fill_in)
162 calculated_votes.append(fill_in)
163 else:
163 else:
164 calculated_votes.append(latest.status)
164 calculated_votes.append(latest.status)
165 else:
165 else:
166 calculated_votes.append(fill_in)
166 calculated_votes.append(fill_in)
167
167
168 return calculated_votes
168 return calculated_votes
169
169
170 def calculate_status(self, statuses_by_reviewers):
170 def calculate_status(self, statuses_by_reviewers):
171 """
171 """
172 Given the approval statuses from reviewers, calculates final approval
172 Given the approval statuses from reviewers, calculates final approval
173 status. There can only be 3 results, all approved, all rejected. If
173 status. There can only be 3 results, all approved, all rejected. If
174 there is no consensus the PR is under review.
174 there is no consensus the PR is under review.
175
175
176 :param statuses_by_reviewers:
176 :param statuses_by_reviewers:
177 """
177 """
178
178
179 def group_rule(element):
179 def group_rule(element):
180 _review_obj = element[0]
180 _review_obj = element[0]
181 rule_data = _review_obj.rule_user_group_data()
181 rule_data = _review_obj.rule_user_group_data()
182 if rule_data and rule_data['id']:
182 if rule_data and rule_data['id']:
183 return rule_data['id']
183 return rule_data['id']
184 # don't return None, as we cant compare this
184 # don't return None, as we cant compare this
185 return 0
185 return 0
186
186
187 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
187 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
188
188
189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
190
190
191 reviewers_number = len(statuses_by_reviewers)
191 reviewers_number = len(statuses_by_reviewers)
192 votes = collections.defaultdict(int)
192 votes = collections.defaultdict(int)
193 for group, group_statuses_by_reviewers in voting_by_groups:
193 for group, group_statuses_by_reviewers in voting_by_groups:
194 if group:
194 if group:
195 # calculate how the "group" voted
195 # calculate how the "group" voted
196 for vote_status in self.calculate_group_vote(
196 for vote_status in self.calculate_group_vote(
197 group, group_statuses_by_reviewers):
197 group, group_statuses_by_reviewers):
198 votes[vote_status] += 1
198 votes[vote_status] += 1
199 else:
199 else:
200
200
201 for review_obj, user, reasons, mandatory, statuses \
201 for review_obj, user, reasons, mandatory, statuses \
202 in group_statuses_by_reviewers:
202 in group_statuses_by_reviewers:
203 # individual vote
203 # individual vote
204 if statuses:
204 if statuses:
205 ver, latest = statuses[0]
205 ver, latest = statuses[0]
206 votes[latest.status] += 1
206 votes[latest.status] += 1
207
207
208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
210
210
211 # TODO(marcink): with group voting, how does rejected work,
211 # TODO(marcink): with group voting, how does rejected work,
212 # do we ever get rejected state ?
212 # do we ever get rejected state ?
213
213
214 if approved_votes_count and (approved_votes_count == reviewers_number):
214 if approved_votes_count and (approved_votes_count == reviewers_number):
215 return ChangesetStatus.STATUS_APPROVED
215 return ChangesetStatus.STATUS_APPROVED
216
216
217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
218 return ChangesetStatus.STATUS_REJECTED
218 return ChangesetStatus.STATUS_REJECTED
219
219
220 return ChangesetStatus.STATUS_UNDER_REVIEW
220 return ChangesetStatus.STATUS_UNDER_REVIEW
221
221
222 def get_statuses(self, repo, revision=None, pull_request=None,
222 def get_statuses(self, repo, revision=None, pull_request=None,
223 with_revisions=False):
223 with_revisions=False):
224 q = self._get_status_query(repo, revision, pull_request,
224 q = self._get_status_query(repo, revision, pull_request,
225 with_revisions)
225 with_revisions)
226 return q.all()
226 return q.all()
227
227
228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
229 """
229 """
230 Returns latest status of changeset for given revision or for given
230 Returns latest status of changeset for given revision or for given
231 pull request. Statuses are versioned inside a table itself and
231 pull request. Statuses are versioned inside a table itself and
232 version == 0 is always the current one
232 version == 0 is always the current one
233
233
234 :param repo:
234 :param repo:
235 :param revision: 40char hash or None
235 :param revision: 40char hash or None
236 :param pull_request: pull_request reference
236 :param pull_request: pull_request reference
237 :param as_str: return status as string not object
237 :param as_str: return status as string not object
238 """
238 """
239 q = self._get_status_query(repo, revision, pull_request)
239 q = self._get_status_query(repo, revision, pull_request)
240
240
241 # need to use first here since there can be multiple statuses
241 # need to use first here since there can be multiple statuses
242 # returned from pull_request
242 # returned from pull_request
243 status = q.first()
243 status = q.first()
244 if as_str:
244 if as_str:
245 status = status.status if status else status
245 status = status.status if status else status
246 st = status or ChangesetStatus.DEFAULT
246 st = status or ChangesetStatus.DEFAULT
247 return str(st)
247 return str(st)
248 return status
248 return status
249
249
250 def _render_auto_status_message(
250 def _render_auto_status_message(
251 self, status, commit_id=None, pull_request=None):
251 self, status, commit_id=None, pull_request=None):
252 """
252 """
253 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
253 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
254 so it's always looking the same disregarding on which default
254 so it's always looking the same disregarding on which default
255 renderer system is using.
255 renderer system is using.
256
256
257 :param status: status text to change into
257 :param status: status text to change into
258 :param commit_id: the commit_id we change the status for
258 :param commit_id: the commit_id we change the status for
259 :param pull_request: the pull request we change the status for
259 :param pull_request: the pull request we change the status for
260 """
260 """
261
261
262 new_status = ChangesetStatus.get_status_lbl(status)
262 new_status = ChangesetStatus.get_status_lbl(status)
263
263
264 params = {
264 params = {
265 'new_status_label': new_status,
265 'new_status_label': new_status,
266 'pull_request': pull_request,
266 'pull_request': pull_request,
267 'commit_id': commit_id,
267 'commit_id': commit_id,
268 }
268 }
269 renderer = RstTemplateRenderer()
269 renderer = RstTemplateRenderer()
270 return renderer.render('auto_status_change.mako', **params)
270 return renderer.render('auto_status_change.mako', **params)
271
271
272 def set_status(self, repo, status, user, comment=None, revision=None,
272 def set_status(self, repo, status, user, comment=None, revision=None,
273 pull_request=None, dont_allow_on_closed_pull_request=False):
273 pull_request=None, dont_allow_on_closed_pull_request=False):
274 """
274 """
275 Creates new status for changeset or updates the old ones bumping their
275 Creates new status for changeset or updates the old ones bumping their
276 version, leaving the current status at
276 version, leaving the current status at
277
277
278 :param repo:
278 :param repo:
279 :param revision:
279 :param revision:
280 :param status:
280 :param status:
281 :param user:
281 :param user:
282 :param comment:
282 :param comment:
283 :param dont_allow_on_closed_pull_request: don't allow a status change
283 :param dont_allow_on_closed_pull_request: don't allow a status change
284 if last status was for pull request and it's closed. We shouldn't
284 if last status was for pull request and it's closed. We shouldn't
285 mess around this manually
285 mess around this manually
286 """
286 """
287 repo = self._get_repo(repo)
287 repo = self._get_repo(repo)
288
288
289 q = ChangesetStatus.query()
289 q = ChangesetStatus.query()
290
290
291 if revision:
291 if revision:
292 q = q.filter(ChangesetStatus.repo == repo)
292 q = q.filter(ChangesetStatus.repo == repo)
293 q = q.filter(ChangesetStatus.revision == revision)
293 q = q.filter(ChangesetStatus.revision == revision)
294 elif pull_request:
294 elif pull_request:
295 pull_request = self.__get_pull_request(pull_request)
295 pull_request = self.__get_pull_request(pull_request)
296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
298 cur_statuses = q.all()
298 cur_statuses = q.all()
299
299
300 # if statuses exists and last is associated with a closed pull request
300 # if statuses exists and last is associated with a closed pull request
301 # we need to check if we can allow this status change
301 # we need to check if we can allow this status change
302 if (dont_allow_on_closed_pull_request and cur_statuses
302 if (dont_allow_on_closed_pull_request and cur_statuses
303 and getattr(cur_statuses[0].pull_request, 'status', '')
303 and getattr(cur_statuses[0].pull_request, 'status', '')
304 == PullRequest.STATUS_CLOSED):
304 == PullRequest.STATUS_CLOSED):
305 raise StatusChangeOnClosedPullRequestError(
305 raise StatusChangeOnClosedPullRequestError(
306 'Changing status on closed pull request is not allowed'
306 'Changing status on closed pull request is not allowed'
307 )
307 )
308
308
309 # update all current statuses with older version
309 # update all current statuses with older version
310 if cur_statuses:
310 if cur_statuses:
311 for st in cur_statuses:
311 for st in cur_statuses:
312 st.version += 1
312 st.version += 1
313 Session().add(st)
313 Session().add(st)
314 Session().flush()
314 Session().flush()
315
315
316 def _create_status(user, repo, status, comment, revision, pull_request):
316 def _create_status(user, repo, status, comment, revision, pull_request):
317 new_status = ChangesetStatus()
317 new_status = ChangesetStatus()
318 new_status.author = self._get_user(user)
318 new_status.author = self._get_user(user)
319 new_status.repo = self._get_repo(repo)
319 new_status.repo = self._get_repo(repo)
320 new_status.status = status
320 new_status.status = status
321 new_status.comment = comment
321 new_status.comment = comment
322 new_status.revision = revision
322 new_status.revision = revision
323 new_status.pull_request = pull_request
323 new_status.pull_request = pull_request
324 return new_status
324 return new_status
325
325
326 if not comment:
326 if not comment:
327 from rhodecode.model.comment import CommentsModel
327 from rhodecode.model.comment import CommentsModel
328 comment = CommentsModel().create(
328 comment = CommentsModel().create(
329 text=self._render_auto_status_message(
329 text=self._render_auto_status_message(
330 status, commit_id=revision, pull_request=pull_request),
330 status, commit_id=revision, pull_request=pull_request),
331 repo=repo,
331 repo=repo,
332 user=user,
332 user=user,
333 pull_request=pull_request,
333 pull_request=pull_request,
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
335 )
335 )
336
336
337 if revision:
337 if revision:
338 new_status = _create_status(
338 new_status = _create_status(
339 user=user, repo=repo, status=status, comment=comment,
339 user=user, repo=repo, status=status, comment=comment,
340 revision=revision, pull_request=pull_request)
340 revision=revision, pull_request=pull_request)
341 Session().add(new_status)
341 Session().add(new_status)
342 return new_status
342 return new_status
343 elif pull_request:
343 elif pull_request:
344 # pull request can have more than one revision associated to it
344 # pull request can have more than one revision associated to it
345 # we need to create new version for each one
345 # we need to create new version for each one
346 new_statuses = []
346 new_statuses = []
347 repo = pull_request.source_repo
347 repo = pull_request.source_repo
348 for rev in pull_request.revisions:
348 for rev in pull_request.revisions:
349 new_status = _create_status(
349 new_status = _create_status(
350 user=user, repo=repo, status=status, comment=comment,
350 user=user, repo=repo, status=status, comment=comment,
351 revision=rev, pull_request=pull_request)
351 revision=rev, pull_request=pull_request)
352 new_statuses.append(new_status)
352 new_statuses.append(new_status)
353 Session().add(new_status)
353 Session().add(new_status)
354 return new_statuses
354 return new_statuses
355
355
356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
357
357
358 commit_statuses_map = collections.defaultdict(list)
358 commit_statuses_map = collections.defaultdict(list)
359 for st in commit_statuses:
359 for st in commit_statuses:
360 commit_statuses_map[st.author.username] += [st]
360 commit_statuses_map[st.author.username] += [st]
361
361
362 reviewers = []
362 reviewers = []
363
363
364 def version(commit_status):
364 def version(commit_status):
365 return commit_status.version
365 return commit_status.version
366
366
367 for obj in reviewers_data:
367 for obj in reviewers_data:
368 if not obj.user:
368 if not obj.user:
369 continue
369 continue
370 if user and obj.user.username != user.username:
370 if user and obj.user.username != user.username:
371 # single user filter
371 # single user filter
372 continue
372 continue
373
373
374 statuses = commit_statuses_map.get(obj.user.username, None)
374 statuses = commit_statuses_map.get(obj.user.username, None)
375 if statuses:
375 if statuses:
376 status_groups = itertools.groupby(
376 status_groups = itertools.groupby(
377 sorted(statuses, key=version), version)
377 sorted(statuses, key=version), version)
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
379
379
380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
381
381
382 if user:
382 if user:
383 return reviewers[0] if reviewers else reviewers
383 return reviewers[0] if reviewers else reviewers
384 else:
384 else:
385 return reviewers
385 return reviewers
386
386
387 def reviewers_statuses(self, pull_request, user=None):
387 def reviewers_statuses(self, pull_request, user=None):
388 _commit_statuses = self.get_statuses(
388 _commit_statuses = self.get_statuses(
389 pull_request.source_repo,
389 pull_request.source_repo,
390 pull_request=pull_request,
390 pull_request=pull_request,
391 with_revisions=True)
391 with_revisions=True)
392 reviewers = pull_request.get_pull_request_reviewers(
392 reviewers = pull_request.get_pull_request_reviewers(
393 role=PullRequestReviewers.ROLE_REVIEWER)
393 role=PullRequestReviewers.ROLE_REVIEWER)
394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
395
395
396 def calculated_review_status(self, pull_request):
396 def calculated_review_status(self, pull_request):
397 """
397 """
398 calculate pull request status based on reviewers, it should be a list
398 calculate pull request status based on reviewers, it should be a list
399 of two element lists.
399 of two element lists.
400 """
400 """
401 reviewers = self.reviewers_statuses(pull_request)
401 reviewers = self.reviewers_statuses(pull_request)
402 return self.calculate_status(reviewers)
402 return self.calculate_status(reviewers)
@@ -1,852 +1,852 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 comments model for RhodeCode
20 comments model for RhodeCode
21 """
21 """
22 import datetime
22 import datetime
23
23
24 import logging
24 import logging
25 import traceback
25 import traceback
26 import collections
26 import collections
27
27
28 from pyramid.threadlocal import get_current_registry, get_current_request
28 from pyramid.threadlocal import get_current_registry, get_current_request
29 from sqlalchemy.sql.expression import null
29 from sqlalchemy.sql.expression import null
30 from sqlalchemy.sql.functions import coalesce
30 from sqlalchemy.sql.functions import coalesce
31
31
32 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
32 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
33 from rhodecode.lib import audit_logger
33 from rhodecode.lib import audit_logger
34 from rhodecode.lib.exceptions import CommentVersionMismatch
34 from rhodecode.lib.exceptions import CommentVersionMismatch
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
36 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
37 from rhodecode.model.db import (
37 from rhodecode.model.db import (
38 false, true,
38 false, true,
39 ChangesetComment,
39 ChangesetComment,
40 User,
40 User,
41 Notification,
41 Notification,
42 PullRequest,
42 PullRequest,
43 AttributeDict,
43 AttributeDict,
44 ChangesetCommentHistory,
44 ChangesetCommentHistory,
45 )
45 )
46 from rhodecode.model.notification import NotificationModel
46 from rhodecode.model.notification import NotificationModel
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.settings import VcsSettingsModel
48 from rhodecode.model.settings import VcsSettingsModel
49 from rhodecode.model.notification import EmailNotificationModel
49 from rhodecode.model.notification import EmailNotificationModel
50 from rhodecode.model.validation_schema.schemas import comment_schema
50 from rhodecode.model.validation_schema.schemas import comment_schema
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 class CommentsModel(BaseModel):
56 class CommentsModel(BaseModel):
57
57
58 cls = ChangesetComment
58 cls = ChangesetComment
59
59
60 DIFF_CONTEXT_BEFORE = 3
60 DIFF_CONTEXT_BEFORE = 3
61 DIFF_CONTEXT_AFTER = 3
61 DIFF_CONTEXT_AFTER = 3
62
62
63 def __get_commit_comment(self, changeset_comment):
63 def __get_commit_comment(self, changeset_comment):
64 return self._get_instance(ChangesetComment, changeset_comment)
64 return self._get_instance(ChangesetComment, changeset_comment)
65
65
66 def __get_pull_request(self, pull_request):
66 def __get_pull_request(self, pull_request):
67 return self._get_instance(PullRequest, pull_request)
67 return self._get_instance(PullRequest, pull_request)
68
68
69 def _extract_mentions(self, s):
69 def _extract_mentions(self, s):
70 user_objects = []
70 user_objects = []
71 for username in extract_mentioned_users(s):
71 for username in extract_mentioned_users(s):
72 user_obj = User.get_by_username(username, case_insensitive=True)
72 user_obj = User.get_by_username(username, case_insensitive=True)
73 if user_obj:
73 if user_obj:
74 user_objects.append(user_obj)
74 user_objects.append(user_obj)
75 return user_objects
75 return user_objects
76
76
77 def _get_renderer(self, global_renderer='rst', request=None):
77 def _get_renderer(self, global_renderer='rst', request=None):
78 request = request or get_current_request()
78 request = request or get_current_request()
79
79
80 try:
80 try:
81 global_renderer = request.call_context.visual.default_renderer
81 global_renderer = request.call_context.visual.default_renderer
82 except AttributeError:
82 except AttributeError:
83 log.debug("Renderer not set, falling back "
83 log.debug("Renderer not set, falling back "
84 "to default renderer '%s'", global_renderer)
84 "to default renderer '%s'", global_renderer)
85 except Exception:
85 except Exception:
86 log.error(traceback.format_exc())
86 log.error(traceback.format_exc())
87 return global_renderer
87 return global_renderer
88
88
    def aggregate_comments(self, comments, versions, show_version, inline=False):
        """
        Aggregate ``comments`` per pull-request version.

        Returns a mapping ``{pull_request_version_id: {'at': [...],
        'until': [...], 'outdated': [...], 'display': [...]}}`` where

        * ``at`` -- comments created at exactly that version
        * ``until`` -- all comments accumulated up to and including it
        * ``outdated`` / ``display`` -- the ``until`` set partitioned
          relative to ``show_version`` (inline comments use a different
          notion of "outdated" than general ones)

        :param comments: objects carrying a ``pull_request_version_id``
        :param versions: ordered pull-request version objects
        :param show_version: the version currently being displayed
        :param inline: treat comments as inline (diff) comments
        """
        # group by versions, and count until, and display objects

        comment_groups = collections.defaultdict(list)
        [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]

        def yield_comments(pos):
            yield from comment_groups[pos]

        # version id -> {'at': [...], 'until': [...], ...}
        comment_versions = collections.defaultdict(
            lambda: collections.defaultdict(list))
        # -1 is a sentinel meaning "no previous version processed yet"
        prev_prvid = -1
        # fake last entry with None, to aggregate on "latest" version which
        # doesn't have an pull_request_version_id
        for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
            prvid = ver.pull_request_version_id
            if prev_prvid == -1:
                prev_prvid = prvid

            for co in yield_comments(prvid):
                comment_versions[prvid]['at'].append(co)

            # save until: previous version's accumulation plus this version's
            current = comment_versions[prvid]['at']
            prev_until = comment_versions[prev_prvid]['until']
            cur_until = prev_until + current
            comment_versions[prvid]['until'].extend(cur_until)

            # save outdated
            if inline:
                outdated = [x for x in cur_until
                            if x.outdated_at_version(show_version)]
            else:
                outdated = [x for x in cur_until
                            if x.older_than_version(show_version)]
            display = [x for x in cur_until if x not in outdated]

            comment_versions[prvid]['outdated'] = outdated
            comment_versions[prvid]['display'] = display

            prev_prvid = prvid

        return comment_versions
132
132
133 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
133 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
134 qry = Session().query(ChangesetComment) \
134 qry = Session().query(ChangesetComment) \
135 .filter(ChangesetComment.repo == repo)
135 .filter(ChangesetComment.repo == repo)
136
136
137 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
137 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
138 qry = qry.filter(ChangesetComment.comment_type == comment_type)
138 qry = qry.filter(ChangesetComment.comment_type == comment_type)
139
139
140 if user:
140 if user:
141 user = self._get_user(user)
141 user = self._get_user(user)
142 if user:
142 if user:
143 qry = qry.filter(ChangesetComment.user_id == user.user_id)
143 qry = qry.filter(ChangesetComment.user_id == user.user_id)
144
144
145 if commit_id:
145 if commit_id:
146 qry = qry.filter(ChangesetComment.revision == commit_id)
146 qry = qry.filter(ChangesetComment.revision == commit_id)
147
147
148 qry = qry.order_by(ChangesetComment.created_on)
148 qry = qry.order_by(ChangesetComment.created_on)
149 return qry.all()
149 return qry.all()
150
150
151 def get_repository_unresolved_todos(self, repo):
151 def get_repository_unresolved_todos(self, repo):
152 todos = Session().query(ChangesetComment) \
152 todos = Session().query(ChangesetComment) \
153 .filter(ChangesetComment.repo == repo) \
153 .filter(ChangesetComment.repo == repo) \
154 .filter(ChangesetComment.resolved_by == None) \
154 .filter(ChangesetComment.resolved_by == null()) \
155 .filter(ChangesetComment.comment_type
155 .filter(ChangesetComment.comment_type
156 == ChangesetComment.COMMENT_TYPE_TODO)
156 == ChangesetComment.COMMENT_TYPE_TODO)
157 todos = todos.all()
157 todos = todos.all()
158
158
159 return todos
159 return todos
160
160
161 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
161 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
162
162
163 todos = Session().query(ChangesetComment) \
163 todos = Session().query(ChangesetComment) \
164 .filter(ChangesetComment.pull_request == pull_request) \
164 .filter(ChangesetComment.pull_request == pull_request) \
165 .filter(ChangesetComment.resolved_by == None) \
165 .filter(ChangesetComment.resolved_by == null()) \
166 .filter(ChangesetComment.comment_type
166 .filter(ChangesetComment.comment_type
167 == ChangesetComment.COMMENT_TYPE_TODO)
167 == ChangesetComment.COMMENT_TYPE_TODO)
168
168
169 if not include_drafts:
169 if not include_drafts:
170 todos = todos.filter(ChangesetComment.draft == false())
170 todos = todos.filter(ChangesetComment.draft == false())
171
171
172 if not show_outdated:
172 if not show_outdated:
173 todos = todos.filter(
173 todos = todos.filter(
174 coalesce(ChangesetComment.display_state, '') !=
174 coalesce(ChangesetComment.display_state, '') !=
175 ChangesetComment.COMMENT_OUTDATED)
175 ChangesetComment.COMMENT_OUTDATED)
176
176
177 todos = todos.all()
177 todos = todos.all()
178
178
179 return todos
179 return todos
180
180
181 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
181 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
182
182
183 todos = Session().query(ChangesetComment) \
183 todos = Session().query(ChangesetComment) \
184 .filter(ChangesetComment.pull_request == pull_request) \
184 .filter(ChangesetComment.pull_request == pull_request) \
185 .filter(ChangesetComment.resolved_by != None) \
185 .filter(ChangesetComment.resolved_by != None) \
186 .filter(ChangesetComment.comment_type
186 .filter(ChangesetComment.comment_type
187 == ChangesetComment.COMMENT_TYPE_TODO)
187 == ChangesetComment.COMMENT_TYPE_TODO)
188
188
189 if not include_drafts:
189 if not include_drafts:
190 todos = todos.filter(ChangesetComment.draft == false())
190 todos = todos.filter(ChangesetComment.draft == false())
191
191
192 if not show_outdated:
192 if not show_outdated:
193 todos = todos.filter(
193 todos = todos.filter(
194 coalesce(ChangesetComment.display_state, '') !=
194 coalesce(ChangesetComment.display_state, '') !=
195 ChangesetComment.COMMENT_OUTDATED)
195 ChangesetComment.COMMENT_OUTDATED)
196
196
197 todos = todos.all()
197 todos = todos.all()
198
198
199 return todos
199 return todos
200
200
201 def get_pull_request_drafts(self, user_id, pull_request):
201 def get_pull_request_drafts(self, user_id, pull_request):
202 drafts = Session().query(ChangesetComment) \
202 drafts = Session().query(ChangesetComment) \
203 .filter(ChangesetComment.pull_request == pull_request) \
203 .filter(ChangesetComment.pull_request == pull_request) \
204 .filter(ChangesetComment.user_id == user_id) \
204 .filter(ChangesetComment.user_id == user_id) \
205 .filter(ChangesetComment.draft == true())
205 .filter(ChangesetComment.draft == true())
206 return drafts.all()
206 return drafts.all()
207
207
208 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
208 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
209
209
210 todos = Session().query(ChangesetComment) \
210 todos = Session().query(ChangesetComment) \
211 .filter(ChangesetComment.revision == commit_id) \
211 .filter(ChangesetComment.revision == commit_id) \
212 .filter(ChangesetComment.resolved_by == None) \
212 .filter(ChangesetComment.resolved_by == null()) \
213 .filter(ChangesetComment.comment_type
213 .filter(ChangesetComment.comment_type
214 == ChangesetComment.COMMENT_TYPE_TODO)
214 == ChangesetComment.COMMENT_TYPE_TODO)
215
215
216 if not include_drafts:
216 if not include_drafts:
217 todos = todos.filter(ChangesetComment.draft == false())
217 todos = todos.filter(ChangesetComment.draft == false())
218
218
219 if not show_outdated:
219 if not show_outdated:
220 todos = todos.filter(
220 todos = todos.filter(
221 coalesce(ChangesetComment.display_state, '') !=
221 coalesce(ChangesetComment.display_state, '') !=
222 ChangesetComment.COMMENT_OUTDATED)
222 ChangesetComment.COMMENT_OUTDATED)
223
223
224 todos = todos.all()
224 todos = todos.all()
225
225
226 return todos
226 return todos
227
227
228 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
228 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
229
229
230 todos = Session().query(ChangesetComment) \
230 todos = Session().query(ChangesetComment) \
231 .filter(ChangesetComment.revision == commit_id) \
231 .filter(ChangesetComment.revision == commit_id) \
232 .filter(ChangesetComment.resolved_by != None) \
232 .filter(ChangesetComment.resolved_by != None) \
233 .filter(ChangesetComment.comment_type
233 .filter(ChangesetComment.comment_type
234 == ChangesetComment.COMMENT_TYPE_TODO)
234 == ChangesetComment.COMMENT_TYPE_TODO)
235
235
236 if not include_drafts:
236 if not include_drafts:
237 todos = todos.filter(ChangesetComment.draft == false())
237 todos = todos.filter(ChangesetComment.draft == false())
238
238
239 if not show_outdated:
239 if not show_outdated:
240 todos = todos.filter(
240 todos = todos.filter(
241 coalesce(ChangesetComment.display_state, '') !=
241 coalesce(ChangesetComment.display_state, '') !=
242 ChangesetComment.COMMENT_OUTDATED)
242 ChangesetComment.COMMENT_OUTDATED)
243
243
244 todos = todos.all()
244 todos = todos.all()
245
245
246 return todos
246 return todos
247
247
248 def get_commit_inline_comments(self, commit_id, include_drafts=True):
248 def get_commit_inline_comments(self, commit_id, include_drafts=True):
249 inline_comments = Session().query(ChangesetComment) \
249 inline_comments = Session().query(ChangesetComment) \
250 .filter(ChangesetComment.line_no != None) \
250 .filter(ChangesetComment.line_no != None) \
251 .filter(ChangesetComment.f_path != None) \
251 .filter(ChangesetComment.f_path != None) \
252 .filter(ChangesetComment.revision == commit_id)
252 .filter(ChangesetComment.revision == commit_id)
253
253
254 if not include_drafts:
254 if not include_drafts:
255 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
255 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
256
256
257 inline_comments = inline_comments.all()
257 inline_comments = inline_comments.all()
258 return inline_comments
258 return inline_comments
259
259
260 def _log_audit_action(self, action, action_data, auth_user, comment):
260 def _log_audit_action(self, action, action_data, auth_user, comment):
261 audit_logger.store(
261 audit_logger.store(
262 action=action,
262 action=action,
263 action_data=action_data,
263 action_data=action_data,
264 user=auth_user,
264 user=auth_user,
265 repo=comment.repo)
265 repo=comment.repo)
266
266
    def create(self, text, repo, user, commit_id=None, pull_request=None,
               f_path=None, line_no=None, status_change=None,
               status_change_type=None, comment_type=None, is_draft=False,
               resolves_comment_id=None, closing_pr=False, send_email=True,
               renderer=None, auth_user=None, extra_recipients=None):
        """
        Creates new comment for commit or pull request.
        IF status_change is not none this comment is associated with a
        status change of commit or commit associated with pull request

        :param text: comment body, rendered with ``renderer``
        :param repo: repository (id, name or instance) the comment lives in
        :param user: comment author (id, username or instance)
        :param commit_id: bind the comment to this commit; exactly one of
            ``commit_id`` / ``pull_request`` must be given
        :param pull_request: bind the comment to this pull request
        :param f_path: file path for an inline comment
        :param line_no: line number for an inline comment
        :param status_change: Label for status change
        :param comment_type: Type of comment
        :param is_draft: is comment a draft only
        :param resolves_comment_id: id of comment which this one will resolve
        :param status_change_type: type of status change
        :param closing_pr: this comment is made while closing the PR
        :param send_email: create notifications/emails for the recipients
        :param renderer: pick renderer for this comment
        :param auth_user: current authenticated user calling this method
        :param extra_recipients: list of extra users to be added to recipients
        """

        request = get_current_request()
        _ = request.translate

        if not renderer:
            renderer = self._get_renderer(request=request)

        # resolve incoming ids/names to model instances
        repo = self._get_repo(repo)
        user = self._get_user(user)
        auth_user = auth_user or user

        # validate/normalize all inputs through the comment schema before
        # touching the database
        schema = comment_schema.CommentSchema()
        validated_kwargs = schema.deserialize(dict(
            comment_body=text,
            comment_type=comment_type,
            is_draft=is_draft,
            comment_file=f_path,
            comment_line=line_no,
            renderer_type=renderer,
            status_change=status_change_type,
            resolves_comment_id=resolves_comment_id,
            repo=repo.repo_id,
            user=user.user_id,
        ))

        is_draft = validated_kwargs['is_draft']

        comment = ChangesetComment()
        comment.renderer = validated_kwargs['renderer_type']
        comment.text = validated_kwargs['comment_body']
        comment.f_path = validated_kwargs['comment_file']
        comment.line_no = validated_kwargs['comment_line']
        comment.comment_type = validated_kwargs['comment_type']
        comment.draft = is_draft

        comment.repo = repo
        comment.author = user
        resolved_comment = self.__get_commit_comment(
            validated_kwargs['resolves_comment_id'])

        # check if the comment actually belongs to this PR
        if resolved_comment and resolved_comment.pull_request and \
                resolved_comment.pull_request != pull_request:
            log.warning('Comment tried to resolved unrelated todo comment: %s',
                        resolved_comment)
            # comment not bound to this pull request, forbid
            resolved_comment = None

        elif resolved_comment and resolved_comment.repo and \
                resolved_comment.repo != repo:
            log.warning('Comment tried to resolved unrelated todo comment: %s',
                        resolved_comment)
            # comment not bound to this repo, forbid
            resolved_comment = None

        if resolved_comment and resolved_comment.resolved_by:
            # if this comment is already resolved, don't mark it again!
            resolved_comment = None

        comment.resolved_comment = resolved_comment

        pull_request_id = pull_request

        commit_obj = None
        pull_request_obj = None

        if commit_id:
            notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
            # do a lookup, so we don't pass something bad here
            commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
            comment.revision = commit_obj.raw_id

        elif pull_request_id:
            notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
            pull_request_obj = self.__get_pull_request(pull_request_id)
            comment.pull_request = pull_request_obj
        else:
            raise Exception('Please specify commit or pull_request_id')

        # flush so comment.comment_id is assigned before building kwargs
        Session().add(comment)
        Session().flush()
        kwargs = {
            'user': user,
            'renderer_type': renderer,
            'repo_name': repo.repo_name,
            'status_change': status_change,
            'status_change_type': status_change_type,
            'comment_body': text,
            'comment_file': f_path,
            'comment_line': line_no,
            'comment_type': comment_type or 'note',
            'comment_id': comment.comment_id
        }

        if commit_obj:
            recipients = ChangesetComment.get_users(
                revision=commit_obj.raw_id)
            # add commit author if it's in RhodeCode system
            cs_author = User.get_from_cs_author(commit_obj.author)
            if not cs_author:
                # use repo owner if we cannot extract the author correctly
                cs_author = repo.user
            recipients += [cs_author]

            commit_comment_url = self.get_url(comment, request=request)
            commit_comment_reply_url = self.get_url(
                comment, request=request,
                anchor=f'comment-{comment.comment_id}/?/ReplyToComment')

            target_repo_url = h.link_to(
                repo.repo_name,
                h.route_url('repo_summary', repo_name=repo.repo_name))

            commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
                                     commit_id=commit_id)

            # commit specifics
            kwargs.update({
                'commit': commit_obj,
                'commit_message': commit_obj.message,
                'commit_target_repo_url': target_repo_url,
                'commit_comment_url': commit_comment_url,
                'commit_comment_reply_url': commit_comment_reply_url,
                'commit_url': commit_url,
                'thread_ids': [commit_url, commit_comment_url],
            })

        elif pull_request_obj:
            # get the current participants of this pull request
            recipients = ChangesetComment.get_users(
                pull_request_id=pull_request_obj.pull_request_id)
            # add pull request author
            recipients += [pull_request_obj.author]

            # add the reviewers to notification
            recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]

            pr_target_repo = pull_request_obj.target_repo
            pr_source_repo = pull_request_obj.source_repo

            pr_comment_url = self.get_url(comment, request=request)
            pr_comment_reply_url = self.get_url(
                comment, request=request,
                anchor=f'comment-{comment.comment_id}/?/ReplyToComment')

            pr_url = h.route_url(
                'pullrequest_show',
                repo_name=pr_target_repo.repo_name,
                pull_request_id=pull_request_obj.pull_request_id, )

            # set some variables for email notification
            pr_target_repo_url = h.route_url(
                'repo_summary', repo_name=pr_target_repo.repo_name)

            pr_source_repo_url = h.route_url(
                'repo_summary', repo_name=pr_source_repo.repo_name)

            # pull request specifics
            kwargs.update({
                'pull_request': pull_request_obj,
                'pr_id': pull_request_obj.pull_request_id,
                'pull_request_url': pr_url,
                'pull_request_target_repo': pr_target_repo,
                'pull_request_target_repo_url': pr_target_repo_url,
                'pull_request_source_repo': pr_source_repo,
                'pull_request_source_repo_url': pr_source_repo_url,
                'pr_comment_url': pr_comment_url,
                'pr_comment_reply_url': pr_comment_reply_url,
                'pr_closing': closing_pr,
                'thread_ids': [pr_url, pr_comment_url],
            })

        if send_email:
            recipients += [self._get_user(u) for u in (extra_recipients or [])]

            # @mentioned users not already among the recipients
            mention_recipients = set(
                self._extract_mentions(text)).difference(recipients)

            # create notification objects, and emails
            NotificationModel().create(
                created_by=user,
                notification_subject='',  # Filled in based on the notification_type
                notification_body='',  # Filled in based on the notification_type
                notification_type=notification_type,
                recipients=recipients,
                mention_recipients=mention_recipients,
                email_kwargs=kwargs,
            )

        Session().flush()
        if comment.pull_request:
            action = 'repo.pull_request.comment.create'
        else:
            action = 'repo.commit.comment.create'

        # drafts are private, so they are not audit-logged until published
        if not is_draft:
            comment_data = comment.get_api_data()

            self._log_audit_action(
                action, {'data': comment_data}, auth_user, comment)

        return comment
497
497
498 def edit(self, comment_id, text, auth_user, version):
498 def edit(self, comment_id, text, auth_user, version):
499 """
499 """
500 Change existing comment for commit or pull request.
500 Change existing comment for commit or pull request.
501
501
502 :param comment_id:
502 :param comment_id:
503 :param text:
503 :param text:
504 :param auth_user: current authenticated user calling this method
504 :param auth_user: current authenticated user calling this method
505 :param version: last comment version
505 :param version: last comment version
506 """
506 """
507 if not text:
507 if not text:
508 log.warning('Missing text for comment, skipping...')
508 log.warning('Missing text for comment, skipping...')
509 return
509 return
510
510
511 comment = ChangesetComment.get(comment_id)
511 comment = ChangesetComment.get(comment_id)
512 old_comment_text = comment.text
512 old_comment_text = comment.text
513 comment.text = text
513 comment.text = text
514 comment.modified_at = datetime.datetime.now()
514 comment.modified_at = datetime.datetime.now()
515 version = safe_int(version)
515 version = safe_int(version)
516
516
517 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
517 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
518 # would return 3 here
518 # would return 3 here
519 comment_version = ChangesetCommentHistory.get_version(comment_id)
519 comment_version = ChangesetCommentHistory.get_version(comment_id)
520
520
521 if isinstance(version, int) and (comment_version - version) != 1:
521 if isinstance(version, int) and (comment_version - version) != 1:
522 log.warning(
522 log.warning(
523 'Version mismatch comment_version {} submitted {}, skipping'.format(
523 'Version mismatch comment_version {} submitted {}, skipping'.format(
524 comment_version-1, # -1 since note above
524 comment_version-1, # -1 since note above
525 version
525 version
526 )
526 )
527 )
527 )
528 raise CommentVersionMismatch()
528 raise CommentVersionMismatch()
529
529
530 comment_history = ChangesetCommentHistory()
530 comment_history = ChangesetCommentHistory()
531 comment_history.comment_id = comment_id
531 comment_history.comment_id = comment_id
532 comment_history.version = comment_version
532 comment_history.version = comment_version
533 comment_history.created_by_user_id = auth_user.user_id
533 comment_history.created_by_user_id = auth_user.user_id
534 comment_history.text = old_comment_text
534 comment_history.text = old_comment_text
535 # TODO add email notification
535 # TODO add email notification
536 Session().add(comment_history)
536 Session().add(comment_history)
537 Session().add(comment)
537 Session().add(comment)
538 Session().flush()
538 Session().flush()
539
539
540 if comment.pull_request:
540 if comment.pull_request:
541 action = 'repo.pull_request.comment.edit'
541 action = 'repo.pull_request.comment.edit'
542 else:
542 else:
543 action = 'repo.commit.comment.edit'
543 action = 'repo.commit.comment.edit'
544
544
545 comment_data = comment.get_api_data()
545 comment_data = comment.get_api_data()
546 comment_data['old_comment_text'] = old_comment_text
546 comment_data['old_comment_text'] = old_comment_text
547 self._log_audit_action(
547 self._log_audit_action(
548 action, {'data': comment_data}, auth_user, comment)
548 action, {'data': comment_data}, auth_user, comment)
549
549
550 return comment_history
550 return comment_history
551
551
552 def delete(self, comment, auth_user):
552 def delete(self, comment, auth_user):
553 """
553 """
554 Deletes given comment
554 Deletes given comment
555 """
555 """
556 comment = self.__get_commit_comment(comment)
556 comment = self.__get_commit_comment(comment)
557 old_data = comment.get_api_data()
557 old_data = comment.get_api_data()
558 Session().delete(comment)
558 Session().delete(comment)
559
559
560 if comment.pull_request:
560 if comment.pull_request:
561 action = 'repo.pull_request.comment.delete'
561 action = 'repo.pull_request.comment.delete'
562 else:
562 else:
563 action = 'repo.commit.comment.delete'
563 action = 'repo.commit.comment.delete'
564
564
565 self._log_audit_action(
565 self._log_audit_action(
566 action, {'old_data': old_data}, auth_user, comment)
566 action, {'old_data': old_data}, auth_user, comment)
567
567
568 return comment
568 return comment
569
569
570 def get_all_comments(self, repo_id, revision=None, pull_request=None,
570 def get_all_comments(self, repo_id, revision=None, pull_request=None,
571 include_drafts=True, count_only=False):
571 include_drafts=True, count_only=False):
572 q = ChangesetComment.query()\
572 q = ChangesetComment.query()\
573 .filter(ChangesetComment.repo_id == repo_id)
573 .filter(ChangesetComment.repo_id == repo_id)
574 if revision:
574 if revision:
575 q = q.filter(ChangesetComment.revision == revision)
575 q = q.filter(ChangesetComment.revision == revision)
576 elif pull_request:
576 elif pull_request:
577 pull_request = self.__get_pull_request(pull_request)
577 pull_request = self.__get_pull_request(pull_request)
578 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
578 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
579 else:
579 else:
580 raise Exception('Please specify commit or pull_request')
580 raise Exception('Please specify commit or pull_request')
581 if not include_drafts:
581 if not include_drafts:
582 q = q.filter(ChangesetComment.draft == false())
582 q = q.filter(ChangesetComment.draft == false())
583 q = q.order_by(ChangesetComment.created_on)
583 q = q.order_by(ChangesetComment.created_on)
584 if count_only:
584 if count_only:
585 return q.count()
585 return q.count()
586
586
587 return q.all()
587 return q.all()
588
588
589 def get_url(self, comment, request=None, permalink=False, anchor=None):
589 def get_url(self, comment, request=None, permalink=False, anchor=None):
590 if not request:
590 if not request:
591 request = get_current_request()
591 request = get_current_request()
592
592
593 comment = self.__get_commit_comment(comment)
593 comment = self.__get_commit_comment(comment)
594 if anchor is None:
594 if anchor is None:
595 anchor = f'comment-{comment.comment_id}'
595 anchor = f'comment-{comment.comment_id}'
596
596
597 if comment.pull_request:
597 if comment.pull_request:
598 pull_request = comment.pull_request
598 pull_request = comment.pull_request
599 if permalink:
599 if permalink:
600 return request.route_url(
600 return request.route_url(
601 'pull_requests_global',
601 'pull_requests_global',
602 pull_request_id=pull_request.pull_request_id,
602 pull_request_id=pull_request.pull_request_id,
603 _anchor=anchor)
603 _anchor=anchor)
604 else:
604 else:
605 return request.route_url(
605 return request.route_url(
606 'pullrequest_show',
606 'pullrequest_show',
607 repo_name=safe_str(pull_request.target_repo.repo_name),
607 repo_name=safe_str(pull_request.target_repo.repo_name),
608 pull_request_id=pull_request.pull_request_id,
608 pull_request_id=pull_request.pull_request_id,
609 _anchor=anchor)
609 _anchor=anchor)
610
610
611 else:
611 else:
612 repo = comment.repo
612 repo = comment.repo
613 commit_id = comment.revision
613 commit_id = comment.revision
614
614
615 if permalink:
615 if permalink:
616 return request.route_url(
616 return request.route_url(
617 'repo_commit', repo_name=safe_str(repo.repo_id),
617 'repo_commit', repo_name=safe_str(repo.repo_id),
618 commit_id=commit_id,
618 commit_id=commit_id,
619 _anchor=anchor)
619 _anchor=anchor)
620
620
621 else:
621 else:
622 return request.route_url(
622 return request.route_url(
623 'repo_commit', repo_name=safe_str(repo.repo_name),
623 'repo_commit', repo_name=safe_str(repo.repo_name),
624 commit_id=commit_id,
624 commit_id=commit_id,
625 _anchor=anchor)
625 _anchor=anchor)
626
626
627 def get_comments(self, repo_id, revision=None, pull_request=None):
627 def get_comments(self, repo_id, revision=None, pull_request=None):
628 """
628 """
629 Gets main comments based on revision or pull_request_id
629 Gets main comments based on revision or pull_request_id
630
630
631 :param repo_id:
631 :param repo_id:
632 :param revision:
632 :param revision:
633 :param pull_request:
633 :param pull_request:
634 """
634 """
635
635
636 q = ChangesetComment.query()\
636 q = ChangesetComment.query()\
637 .filter(ChangesetComment.repo_id == repo_id)\
637 .filter(ChangesetComment.repo_id == repo_id)\
638 .filter(ChangesetComment.line_no == None)\
638 .filter(ChangesetComment.line_no == null())\
639 .filter(ChangesetComment.f_path == None)
639 .filter(ChangesetComment.f_path == null())
640 if revision:
640 if revision:
641 q = q.filter(ChangesetComment.revision == revision)
641 q = q.filter(ChangesetComment.revision == revision)
642 elif pull_request:
642 elif pull_request:
643 pull_request = self.__get_pull_request(pull_request)
643 pull_request = self.__get_pull_request(pull_request)
644 q = q.filter(ChangesetComment.pull_request == pull_request)
644 q = q.filter(ChangesetComment.pull_request == pull_request)
645 else:
645 else:
646 raise Exception('Please specify commit or pull_request')
646 raise Exception('Please specify commit or pull_request')
647 q = q.order_by(ChangesetComment.created_on)
647 q = q.order_by(ChangesetComment.created_on)
648 return q.all()
648 return q.all()
649
649
650 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
650 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
651 q = self._get_inline_comments_query(repo_id, revision, pull_request)
651 q = self._get_inline_comments_query(repo_id, revision, pull_request)
652 return self._group_comments_by_path_and_line_number(q)
652 return self._group_comments_by_path_and_line_number(q)
653
653
654 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
654 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
655 version=None):
655 version=None):
656 inline_comms = []
656 inline_comms = []
657 for fname, per_line_comments in inline_comments.items():
657 for fname, per_line_comments in inline_comments.items():
658 for lno, comments in per_line_comments.items():
658 for lno, comments in per_line_comments.items():
659 for comm in comments:
659 for comm in comments:
660 if not comm.outdated_at_version(version) and skip_outdated:
660 if not comm.outdated_at_version(version) and skip_outdated:
661 inline_comms.append(comm)
661 inline_comms.append(comm)
662
662
663 return inline_comms
663 return inline_comms
664
664
665 def get_outdated_comments(self, repo_id, pull_request):
665 def get_outdated_comments(self, repo_id, pull_request):
666 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
666 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
667 # of a pull request.
667 # of a pull request.
668 q = self._all_inline_comments_of_pull_request(pull_request)
668 q = self._all_inline_comments_of_pull_request(pull_request)
669 q = q.filter(
669 q = q.filter(
670 ChangesetComment.display_state ==
670 ChangesetComment.display_state ==
671 ChangesetComment.COMMENT_OUTDATED
671 ChangesetComment.COMMENT_OUTDATED
672 ).order_by(ChangesetComment.comment_id.asc())
672 ).order_by(ChangesetComment.comment_id.asc())
673
673
674 return self._group_comments_by_path_and_line_number(q)
674 return self._group_comments_by_path_and_line_number(q)
675
675
676 def _get_inline_comments_query(self, repo_id, revision, pull_request):
676 def _get_inline_comments_query(self, repo_id, revision, pull_request):
677 # TODO: johbo: Split this into two methods: One for PR and one for
677 # TODO: johbo: Split this into two methods: One for PR and one for
678 # commit.
678 # commit.
679 if revision:
679 if revision:
680 q = Session().query(ChangesetComment).filter(
680 q = Session().query(ChangesetComment).filter(
681 ChangesetComment.repo_id == repo_id,
681 ChangesetComment.repo_id == repo_id,
682 ChangesetComment.line_no != null(),
682 ChangesetComment.line_no != null(),
683 ChangesetComment.f_path != null(),
683 ChangesetComment.f_path != null(),
684 ChangesetComment.revision == revision)
684 ChangesetComment.revision == revision)
685
685
686 elif pull_request:
686 elif pull_request:
687 pull_request = self.__get_pull_request(pull_request)
687 pull_request = self.__get_pull_request(pull_request)
688 if not CommentsModel.use_outdated_comments(pull_request):
688 if not CommentsModel.use_outdated_comments(pull_request):
689 q = self._visible_inline_comments_of_pull_request(pull_request)
689 q = self._visible_inline_comments_of_pull_request(pull_request)
690 else:
690 else:
691 q = self._all_inline_comments_of_pull_request(pull_request)
691 q = self._all_inline_comments_of_pull_request(pull_request)
692
692
693 else:
693 else:
694 raise Exception('Please specify commit or pull_request_id')
694 raise Exception('Please specify commit or pull_request_id')
695 q = q.order_by(ChangesetComment.comment_id.asc())
695 q = q.order_by(ChangesetComment.comment_id.asc())
696 return q
696 return q
697
697
698 def _group_comments_by_path_and_line_number(self, q):
698 def _group_comments_by_path_and_line_number(self, q):
699 comments = q.all()
699 comments = q.all()
700 paths = collections.defaultdict(lambda: collections.defaultdict(list))
700 paths = collections.defaultdict(lambda: collections.defaultdict(list))
701 for co in comments:
701 for co in comments:
702 paths[co.f_path][co.line_no].append(co)
702 paths[co.f_path][co.line_no].append(co)
703 return paths
703 return paths
704
704
705 @classmethod
705 @classmethod
706 def needed_extra_diff_context(cls):
706 def needed_extra_diff_context(cls):
707 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
707 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
708
708
709 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
709 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
710 if not CommentsModel.use_outdated_comments(pull_request):
710 if not CommentsModel.use_outdated_comments(pull_request):
711 return
711 return
712
712
713 comments = self._visible_inline_comments_of_pull_request(pull_request)
713 comments = self._visible_inline_comments_of_pull_request(pull_request)
714 comments_to_outdate = comments.all()
714 comments_to_outdate = comments.all()
715
715
716 for comment in comments_to_outdate:
716 for comment in comments_to_outdate:
717 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
717 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
718
718
719 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
719 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
720 diff_line = _parse_comment_line_number(comment.line_no)
720 diff_line = _parse_comment_line_number(comment.line_no)
721
721
722 try:
722 try:
723 old_context = old_diff_proc.get_context_of_line(
723 old_context = old_diff_proc.get_context_of_line(
724 path=comment.f_path, diff_line=diff_line)
724 path=comment.f_path, diff_line=diff_line)
725 new_context = new_diff_proc.get_context_of_line(
725 new_context = new_diff_proc.get_context_of_line(
726 path=comment.f_path, diff_line=diff_line)
726 path=comment.f_path, diff_line=diff_line)
727 except (diffs.LineNotInDiffException,
727 except (diffs.LineNotInDiffException,
728 diffs.FileNotInDiffException):
728 diffs.FileNotInDiffException):
729 if not comment.draft:
729 if not comment.draft:
730 comment.display_state = ChangesetComment.COMMENT_OUTDATED
730 comment.display_state = ChangesetComment.COMMENT_OUTDATED
731 return
731 return
732
732
733 if old_context == new_context:
733 if old_context == new_context:
734 return
734 return
735
735
736 if self._should_relocate_diff_line(diff_line):
736 if self._should_relocate_diff_line(diff_line):
737 new_diff_lines = new_diff_proc.find_context(
737 new_diff_lines = new_diff_proc.find_context(
738 path=comment.f_path, context=old_context,
738 path=comment.f_path, context=old_context,
739 offset=self.DIFF_CONTEXT_BEFORE)
739 offset=self.DIFF_CONTEXT_BEFORE)
740 if not new_diff_lines and not comment.draft:
740 if not new_diff_lines and not comment.draft:
741 comment.display_state = ChangesetComment.COMMENT_OUTDATED
741 comment.display_state = ChangesetComment.COMMENT_OUTDATED
742 else:
742 else:
743 new_diff_line = self._choose_closest_diff_line(
743 new_diff_line = self._choose_closest_diff_line(
744 diff_line, new_diff_lines)
744 diff_line, new_diff_lines)
745 comment.line_no = _diff_to_comment_line_number(new_diff_line)
745 comment.line_no = _diff_to_comment_line_number(new_diff_line)
746 else:
746 else:
747 if not comment.draft:
747 if not comment.draft:
748 comment.display_state = ChangesetComment.COMMENT_OUTDATED
748 comment.display_state = ChangesetComment.COMMENT_OUTDATED
749
749
750 def _should_relocate_diff_line(self, diff_line):
750 def _should_relocate_diff_line(self, diff_line):
751 """
751 """
752 Checks if relocation shall be tried for the given `diff_line`.
752 Checks if relocation shall be tried for the given `diff_line`.
753
753
754 If a comment points into the first lines, then we can have a situation
754 If a comment points into the first lines, then we can have a situation
755 that after an update another line has been added on top. In this case
755 that after an update another line has been added on top. In this case
756 we would find the context still and move the comment around. This
756 we would find the context still and move the comment around. This
757 would be wrong.
757 would be wrong.
758 """
758 """
759 should_relocate = (
759 should_relocate = (
760 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
760 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
761 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
761 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
762 return should_relocate
762 return should_relocate
763
763
764 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
764 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
765 candidate = new_diff_lines[0]
765 candidate = new_diff_lines[0]
766 best_delta = _diff_line_delta(diff_line, candidate)
766 best_delta = _diff_line_delta(diff_line, candidate)
767 for new_diff_line in new_diff_lines[1:]:
767 for new_diff_line in new_diff_lines[1:]:
768 delta = _diff_line_delta(diff_line, new_diff_line)
768 delta = _diff_line_delta(diff_line, new_diff_line)
769 if delta < best_delta:
769 if delta < best_delta:
770 candidate = new_diff_line
770 candidate = new_diff_line
771 best_delta = delta
771 best_delta = delta
772 return candidate
772 return candidate
773
773
774 def _visible_inline_comments_of_pull_request(self, pull_request):
774 def _visible_inline_comments_of_pull_request(self, pull_request):
775 comments = self._all_inline_comments_of_pull_request(pull_request)
775 comments = self._all_inline_comments_of_pull_request(pull_request)
776 comments = comments.filter(
776 comments = comments.filter(
777 coalesce(ChangesetComment.display_state, '') !=
777 coalesce(ChangesetComment.display_state, '') !=
778 ChangesetComment.COMMENT_OUTDATED)
778 ChangesetComment.COMMENT_OUTDATED)
779 return comments
779 return comments
780
780
781 def _all_inline_comments_of_pull_request(self, pull_request):
781 def _all_inline_comments_of_pull_request(self, pull_request):
782 comments = Session().query(ChangesetComment)\
782 comments = Session().query(ChangesetComment)\
783 .filter(ChangesetComment.line_no != None)\
783 .filter(ChangesetComment.line_no != null())\
784 .filter(ChangesetComment.f_path != None)\
784 .filter(ChangesetComment.f_path != null())\
785 .filter(ChangesetComment.pull_request == pull_request)
785 .filter(ChangesetComment.pull_request == pull_request)
786 return comments
786 return comments
787
787
788 def _all_general_comments_of_pull_request(self, pull_request):
788 def _all_general_comments_of_pull_request(self, pull_request):
789 comments = Session().query(ChangesetComment)\
789 comments = Session().query(ChangesetComment)\
790 .filter(ChangesetComment.line_no == None)\
790 .filter(ChangesetComment.line_no == null())\
791 .filter(ChangesetComment.f_path == None)\
791 .filter(ChangesetComment.f_path == null())\
792 .filter(ChangesetComment.pull_request == pull_request)
792 .filter(ChangesetComment.pull_request == pull_request)
793
793
794 return comments
794 return comments
795
795
796 @staticmethod
796 @staticmethod
797 def use_outdated_comments(pull_request):
797 def use_outdated_comments(pull_request):
798 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
798 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
799 settings = settings_model.get_general_settings()
799 settings = settings_model.get_general_settings()
800 return settings.get('rhodecode_use_outdated_comments', False)
800 return settings.get('rhodecode_use_outdated_comments', False)
801
801
802 def trigger_commit_comment_hook(self, repo, user, action, data=None):
802 def trigger_commit_comment_hook(self, repo, user, action, data=None):
803 repo = self._get_repo(repo)
803 repo = self._get_repo(repo)
804 target_scm = repo.scm_instance()
804 target_scm = repo.scm_instance()
805 if action == 'create':
805 if action == 'create':
806 trigger_hook = hooks_utils.trigger_comment_commit_hooks
806 trigger_hook = hooks_utils.trigger_comment_commit_hooks
807 elif action == 'edit':
807 elif action == 'edit':
808 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
808 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
809 else:
809 else:
810 return
810 return
811
811
812 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
812 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
813 repo, action, trigger_hook)
813 repo, action, trigger_hook)
814 trigger_hook(
814 trigger_hook(
815 username=user.username,
815 username=user.username,
816 repo_name=repo.repo_name,
816 repo_name=repo.repo_name,
817 repo_type=target_scm.alias,
817 repo_type=target_scm.alias,
818 repo=repo,
818 repo=repo,
819 data=data)
819 data=data)
820
820
821
821
822 def _parse_comment_line_number(line_no):
822 def _parse_comment_line_number(line_no):
823 r"""
823 r"""
824 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
824 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
825 """
825 """
826 old_line = None
826 old_line = None
827 new_line = None
827 new_line = None
828 if line_no.startswith('o'):
828 if line_no.startswith('o'):
829 old_line = int(line_no[1:])
829 old_line = int(line_no[1:])
830 elif line_no.startswith('n'):
830 elif line_no.startswith('n'):
831 new_line = int(line_no[1:])
831 new_line = int(line_no[1:])
832 else:
832 else:
833 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
833 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
834 return diffs.DiffLineNumber(old_line, new_line)
834 return diffs.DiffLineNumber(old_line, new_line)
835
835
836
836
837 def _diff_to_comment_line_number(diff_line):
837 def _diff_to_comment_line_number(diff_line):
838 if diff_line.new is not None:
838 if diff_line.new is not None:
839 return f'n{diff_line.new}'
839 return f'n{diff_line.new}'
840 elif diff_line.old is not None:
840 elif diff_line.old is not None:
841 return f'o{diff_line.old}'
841 return f'o{diff_line.old}'
842 return ''
842 return ''
843
843
844
844
845 def _diff_line_delta(a, b):
845 def _diff_line_delta(a, b):
846 if None not in (a.new, b.new):
846 if None not in (a.new, b.new):
847 return abs(a.new - b.new)
847 return abs(a.new - b.new)
848 elif None not in (a.old, b.old):
848 elif None not in (a.old, b.old):
849 return abs(a.old - b.old)
849 return abs(a.old - b.old)
850 else:
850 else:
851 raise ValueError(
851 raise ValueError(
852 f"Cannot compute delta between {a} and {b}")
852 f"Cannot compute delta between {a} and {b}")
@@ -1,237 +1,237 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 Model for integrations
21 Model for integrations
22 """
22 """
23
23
24
24
25 import logging
25 import logging
26
26
27 from sqlalchemy import or_, and_
27 from sqlalchemy import or_, and_
28
28
29 from rhodecode import events
29 from rhodecode import events
30 from rhodecode.integrations.types.base import EEIntegration
30 from rhodecode.integrations.types.base import EEIntegration
31 from rhodecode.lib.caching_query import FromCache
31 from rhodecode.lib.caching_query import FromCache
32 from rhodecode.model import BaseModel
32 from rhodecode.model import BaseModel
33 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
33 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
34 from rhodecode.integrations import integration_type_registry
34 from rhodecode.integrations import integration_type_registry
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
class IntegrationModel(BaseModel):
    """Business logic for CRUD and event dispatch of ``Integration`` rows.

    ``self.sa`` (provided by ``BaseModel``) is the SQLAlchemy session used
    for all queries in this model.
    """

    cls = Integration

    def __get_integration(self, integration):
        # Resolve ``integration`` to an Integration instance: pass-through
        # for instances, primary-key lookup for ints.  Falsy values (e.g.
        # None) fall through and implicitly return None; any other truthy
        # type raises.
        if isinstance(integration, Integration):
            return integration
        elif isinstance(integration, int):
            return self.sa.query(Integration).get(integration)
        else:
            if integration:
                raise Exception('integration must be int or Instance'
                                ' of Integration got %s' % type(integration))

    def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
        """ Create an IntegrationType integration """
        integration = Integration()
        integration.integration_type = IntegrationType.key
        self.sa.add(integration)
        # field assignment is delegated to update_integration; this is the
        # only method here that commits the session itself
        self.update_integration(integration, name, enabled, repo, repo_group,
                                child_repos_only, settings)
        self.sa.commit()
        return integration

    def update_integration(self, integration, name, enabled, repo, repo_group,
                           child_repos_only, settings):
        """Set all mutable fields of an integration.

        Accepts an ``Integration`` instance or an integration id.  Does NOT
        commit the session; callers are responsible for that.
        """
        integration = self.__get_integration(integration)

        integration.repo = repo
        integration.repo_group = repo_group
        integration.child_repos_only = child_repos_only
        integration.name = name
        integration.enabled = enabled
        integration.settings = settings

        return integration

    def delete(self, integration):
        """Delete an integration (instance or id).

        :return: True when a row was marked deleted, False when the input
            could not be resolved to an integration.  Does not commit.
        """
        integration = self.__get_integration(integration)
        if integration:
            self.sa.delete(integration)
            return True
        return False

    def get_integration_handler(self, integration):
        """Instantiate the registered handler class for an integration.

        Returns None (after logging an error) when the integration type is
        not in the registry, or when it is an EE-only integration that
        cannot be executed in this edition.
        """
        TypeClass = integration_type_registry.get(integration.integration_type)
        if not TypeClass:
            log.error('No class could be found for integration type: {}'.format(
                integration.integration_type))
            return None
        elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
            log.error('EE integration cannot be '
                      'executed for integration type: {}'.format(
                      integration.integration_type))
            return None

        # handler is constructed with the integration's stored settings
        return TypeClass(integration.settings)

    def send_event(self, integration, event):
        """ Send an event to an integration """
        handler = self.get_integration_handler(integration)
        # silently skip when no handler could be created (already logged)
        if handler:
            log.debug(
                'events: sending event %s on integration %s using handler %s',
                event, integration, handler)
            handler.send_event(event)

    def get_integrations(self, scope, IntegrationType=None):
        """
        Return integrations for a scope, which must be one of:

        'all' - every integration, global/repogroup/repo
        'global' - global integrations only
        <Repository> instance - integrations for this repo only
        <RepoGroup> instance - integrations for this repogroup only

        :param IntegrationType: optional integration type class used to
            narrow the results to a single type (matched by ``.key``)
        :return: list of (IntegrationType class or None, Integration) tuples
        """

        if isinstance(scope, Repository):
            query = self.sa.query(Integration).filter(
                Integration.repo == scope)
        elif isinstance(scope, RepoGroup):
            query = self.sa.query(Integration).filter(
                Integration.repo_group == scope)
        elif scope == 'global':
            # global integrations
            query = self.sa.query(Integration).filter(
                and_(Integration.repo_id == null(), Integration.repo_group_id == null())
            )
        elif scope == 'root-repos':
            # global integrations limited to repos at the root level
            query = self.sa.query(Integration).filter(
                and_(Integration.repo_id == null(),
                     Integration.repo_group_id == null(),
                     Integration.child_repos_only == true())
            )
        elif scope == 'all':
            query = self.sa.query(Integration)
        else:
            raise Exception(
                "invalid `scope`, must be one of: "
                "['global', 'all', <Repository>, <RepoGroup>]")

        if IntegrationType is not None:
            query = query.filter(
                Integration.integration_type==IntegrationType.key)

        result = []
        for integration in query.all():
            # IntType may be None when the stored type is no longer registered
            IntType = integration_type_registry.get(integration.integration_type)
            result.append((IntType, integration))
        return result

    def get_for_event(self, event, cache=False):
        """
        Get integrations that match an event

        For repo events the result is ordered by specificity: global (1),
        root-repos (2), repo-group direct children (3), repo-group
        cascading (4), and repo-specific (5) integrations.  Non-repo
        events only receive global integrations.

        :param cache: when True, wrap the query in the short SQL cache
        """
        # base query, only enabled integrations are ever returned
        query = self.sa.query(
            Integration
        ).filter(
            Integration.enabled == true()
        )

        # "global" == bound to neither a repo nor a repo group, cascading
        global_integrations_filter = and_(
            Integration.repo_id == null(),
            Integration.repo_group_id == null(),
            Integration.child_repos_only == false(),
        )

        if isinstance(event, events.RepoEvent):
            # unbound integrations restricted to top-level repos only
            root_repos_integrations_filter = and_(
                Integration.repo_id == null(),
                Integration.repo_group_id == null(),
                Integration.child_repos_only == true(),
            )

            clauses = [
                global_integrations_filter,
            ]
            # (filter, weight) pairs feed the CASE used for ordering below
            cases = [
                (global_integrations_filter, 1),
                (root_repos_integrations_filter, 2),
            ]

            # repo group integrations
            if event.repo.group:
                # repo group with only root level repos
                group_child_repos_filter = and_(
                    Integration.repo_group_id == event.repo.group.group_id,
                    Integration.child_repos_only == true()
                )

                clauses.append(group_child_repos_filter)
                cases.append(
                    (group_child_repos_filter, 3),
                )

                # repo group cascade to kids
                group_recursive_repos_filter = and_(
                    Integration.repo_group_id.in_(
                        [group.group_id for group in event.repo.groups_with_parents]
                    ),
                    Integration.child_repos_only == false()
                )
                clauses.append(group_recursive_repos_filter)
                cases.append(
                    (group_recursive_repos_filter, 4),
                )

            if not event.repo.group:  # root repo
                clauses.append(root_repos_integrations_filter)

            # repo integrations
            if event.repo.repo_id:  # pre create events dont have a repo_id yet
                specific_repo_filter = Integration.repo_id == event.repo.repo_id
                clauses.append(specific_repo_filter)
                cases.append(
                    (specific_repo_filter, 5),
                )

            order_by_criterion = case(cases)

            query = query.filter(or_(*clauses))
            query = query.order_by(order_by_criterion)

            if cache:
                cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
                query = query.options(
                    FromCache("sql_cache_short", cache_key))
        else:  # only global integrations
            order_by_criterion = Integration.integration_id

            query = query.filter(global_integrations_filter)
            query = query.order_by(order_by_criterion)
            if cache:
                query = query.options(
                    FromCache("sql_cache_short", "get_enabled_global_integrations"))

        result = query.all()
        return result
@@ -1,2392 +1,2392 b''
1 # Copyright (C) 2012-2023 RhodeCode GmbH
1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 pull request model for RhodeCode
21 pull request model for RhodeCode
22 """
22 """
23
23
24 import logging
24 import logging
25 import os
25 import os
26
26
27 import datetime
27 import datetime
28 import urllib.request
28 import urllib.request
29 import urllib.parse
29 import urllib.parse
30 import urllib.error
30 import urllib.error
31 import collections
31 import collections
32
32
33 import dataclasses as dataclasses
33 import dataclasses as dataclasses
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from collections import OrderedDict
40 from collections import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.ext_json import sjson as json
42 from rhodecode.lib.ext_json import sjson as json
43 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.hash_utils import md5_safe
45 from rhodecode.lib.hash_utils import md5_safe
46 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.str_utils import safe_str
47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 from rhodecode.lib.vcs.backends.base import (
48 from rhodecode.lib.vcs.backends.base import (
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 TargetRefMissing, SourceRefMissing)
50 TargetRefMissing, SourceRefMissing)
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.exceptions import (
52 from rhodecode.lib.vcs.exceptions import (
53 CommitDoesNotExistError, EmptyRepositoryError)
53 CommitDoesNotExistError, EmptyRepositoryError)
54 from rhodecode.model import BaseModel
54 from rhodecode.model import BaseModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.comment import CommentsModel
56 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.db import (
57 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 from rhodecode.model.meta import Session
60 from rhodecode.model.meta import Session
61 from rhodecode.model.notification import NotificationModel, \
61 from rhodecode.model.notification import NotificationModel, \
62 EmailNotificationModel
62 EmailNotificationModel
63 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.settings import VcsSettingsModel
64 from rhodecode.model.settings import VcsSettingsModel
65
65
66
66
67 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
68
68
69
69
70 # Data structure to hold the response data when updating commits during a pull
70 # Data structure to hold the response data when updating commits during a pull
71 # request update.
71 # request update.
class UpdateResponse(object):
    """Holds the outcome of updating the commits of a pull request.

    Every constructor argument is stored under its own name except
    ``commit_changes``, which is exposed as the ``changes`` attribute.
    """

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # plain value container: map each argument onto its attribute
        for attr_name, value in (
                ('executed', executed),
                ('reason', reason),
                ('new', new),
                ('old', old),
                ('common_ancestor_id', common_ancestor_id),
                ('changes', commit_changes),
                ('source_changed', source_changed),
                ('target_changed', target_changed)):
            setattr(self, attr_name, value)
85
85
86
86
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: repository object of the source side
    :param source_ref: commit id of the source side
    :param target_repo: repository object of the target side
    :param target_ref: commit id of the target side
    :param get_authors: when True, annotate changed files to count the
        original authors of the modified lines (expensive)
    :param get_commit_authors: when True, collect the authors of the
        incoming commits via a compare call
    :raises ValueError: when no common ancestor between the refs exists
    :return: dict with 'commits', 'files', 'stats', 'ancestor',
        'original_authors' and 'commit_authors' keys
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    # limits of 0 + show_full_diff mean nothing gets truncated here
    diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
                                         diff_limit=0, file_limit=0, show_full_diff=True)

    _parsed = diff_processor.prepare()

    # NOTE(review): all_files is collected but only all_files_changes is
    # returned below — all_files appears to be unused here; confirm upstream
    all_files = []
    all_files_changes = []
    changed_lines = {}
    # stats = [lines added, lines deleted] aggregated over all files
    stats = [0, 0]
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        # collect old line numbers of removed/modified lines per file,
        # used for the annotation pass below
        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # memoize commits resolved during annotation, keyed by commit id
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # best-effort: a file missing in the ancestor is skipped
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
234
234
235
235
236 class PullRequestModel(BaseModel):
236 class PullRequestModel(BaseModel):
237
237
    # model class this service layer operates on (consumed by BaseModel helpers)
    cls = PullRequest

    # default number of context lines used when rendering pull-request diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # translatable, user-facing messages for each possible outcome of a
    # pull-request update attempt, keyed by UpdateFailureReason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a pull request source/target may point at
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # ref types for which updating an open pull request is supported.
    # NOTE(review): 'tag' is excluded here although the WRONG_REF_TYPE message
    # above states Tag is allowed -- confirm which of the two is authoritative.
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261
261
262 def __get_pull_request(self, pull_request):
262 def __get_pull_request(self, pull_request):
263 return self._get_instance((
263 return self._get_instance((
264 PullRequest, PullRequestVersion), pull_request)
264 PullRequest, PullRequestVersion), pull_request)
265
265
266 def _check_perms(self, perms, pull_request, user, api=False):
266 def _check_perms(self, perms, pull_request, user, api=False):
267 if not api:
267 if not api:
268 return h.HasRepoPermissionAny(*perms)(
268 return h.HasRepoPermissionAny(*perms)(
269 user=user, repo_name=pull_request.target_repo.repo_name)
269 user=user, repo_name=pull_request.target_repo.repo_name)
270 else:
270 else:
271 return h.HasRepoPermissionAnyApi(*perms)(
271 return h.HasRepoPermissionAnyApi(*perms)(
272 user=user, repo_name=pull_request.target_repo.repo_name)
272 user=user, repo_name=pull_request.target_repo.repo_name)
273
273
274 def check_user_read(self, pull_request, user, api=False):
274 def check_user_read(self, pull_request, user, api=False):
275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 return self._check_perms(_perms, pull_request, user, api)
276 return self._check_perms(_perms, pull_request, user, api)
277
277
278 def check_user_merge(self, pull_request, user, api=False):
278 def check_user_merge(self, pull_request, user, api=False):
279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 return self._check_perms(_perms, pull_request, user, api)
280 return self._check_perms(_perms, pull_request, user, api)
281
281
282 def check_user_update(self, pull_request, user, api=False):
282 def check_user_update(self, pull_request, user, api=False):
283 owner = user.user_id == pull_request.user_id
283 owner = user.user_id == pull_request.user_id
284 return self.check_user_merge(pull_request, user, api) or owner
284 return self.check_user_merge(pull_request, user, api) or owner
285
285
286 def check_user_delete(self, pull_request, user):
286 def check_user_delete(self, pull_request, user):
287 owner = user.user_id == pull_request.user_id
287 owner = user.user_id == pull_request.user_id
288 _perms = ('repository.admin',)
288 _perms = ('repository.admin',)
289 return self._check_perms(_perms, pull_request, user) or owner
289 return self._check_perms(_perms, pull_request, user) or owner
290
290
291 def is_user_reviewer(self, pull_request, user):
291 def is_user_reviewer(self, pull_request, user):
292 return user.user_id in [
292 return user.user_id in [
293 x.user_id for x in
293 x.user_id for x in
294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 if x.user
295 if x.user
296 ]
296 ]
297
297
298 def check_user_change_status(self, pull_request, user, api=False):
298 def check_user_change_status(self, pull_request, user, api=False):
299 return self.check_user_update(pull_request, user, api) \
299 return self.check_user_update(pull_request, user, api) \
300 or self.is_user_reviewer(pull_request, user)
300 or self.is_user_reviewer(pull_request, user)
301
301
302 def check_user_comment(self, pull_request, user):
302 def check_user_comment(self, pull_request, user):
303 owner = user.user_id == pull_request.user_id
303 owner = user.user_id == pull_request.user_id
304 return self.check_user_read(pull_request, user) or owner
304 return self.check_user_read(pull_request, user) or owner
305
305
306 def get(self, pull_request):
306 def get(self, pull_request):
307 return self.__get_pull_request(pull_request)
307 return self.__get_pull_request(pull_request)
308
308
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build (without executing) the base PullRequest query shared by
        count_all() and get_all().

        :param repo_name: target or source repo name; falsy means "all repos"
        :param search_q: free-text filter (id, author username, title, description)
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: list of author user ids
        :param order_by: key of the `order_map` below; unknown keys are ignored
        :param order_dir: 'asc' for ascending, anything else means descending
        :param only_created: restrict to PRs still in the "created" state
        :returns: unexecuted SQLAlchemy query over PullRequest
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            # case-insensitive LIKE over id (cast to text), author username,
            # title and description
            like_expression = u'%{}%'.format(safe_str(search_q))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        # whitelist of sortable columns; anything not listed leaves the
        # result order unspecified
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'id': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
360
360
361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 opened_by=None):
362 opened_by=None):
363 """
363 """
364 Count the number of pull requests for a specific repository.
364 Count the number of pull requests for a specific repository.
365
365
366 :param repo_name: target or source repo
366 :param repo_name: target or source repo
367 :param search_q: filter by text
367 :param search_q: filter by text
368 :param source: boolean flag to specify if repo_name refers to source
368 :param source: boolean flag to specify if repo_name refers to source
369 :param statuses: list of pull request statuses
369 :param statuses: list of pull request statuses
370 :param opened_by: author user of the pull request
370 :param opened_by: author user of the pull request
371 :returns: int number of pull requests
371 :returns: int number of pull requests
372 """
372 """
373 q = self._prepare_get_all_query(
373 q = self._prepare_get_all_query(
374 repo_name, search_q=search_q, source=source, statuses=statuses,
374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 opened_by=opened_by)
375 opened_by=opened_by)
376
376
377 return q.count()
377 return q.count()
378
378
379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 """
381 """
382 Get all pull requests for a specific repository.
382 Get all pull requests for a specific repository.
383
383
384 :param repo_name: target or source repo
384 :param repo_name: target or source repo
385 :param search_q: filter by text
385 :param search_q: filter by text
386 :param source: boolean flag to specify if repo_name refers to source
386 :param source: boolean flag to specify if repo_name refers to source
387 :param statuses: list of pull request statuses
387 :param statuses: list of pull request statuses
388 :param opened_by: author user of the pull request
388 :param opened_by: author user of the pull request
389 :param offset: pagination offset
389 :param offset: pagination offset
390 :param length: length of returned list
390 :param length: length of returned list
391 :param order_by: order of the returned list
391 :param order_by: order of the returned list
392 :param order_dir: 'asc' or 'desc' ordering direction
392 :param order_dir: 'asc' or 'desc' ordering direction
393 :returns: list of pull requests
393 :returns: list of pull requests
394 """
394 """
395 q = self._prepare_get_all_query(
395 q = self._prepare_get_all_query(
396 repo_name, search_q=search_q, source=source, statuses=statuses,
396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398
398
399 if length:
399 if length:
400 pull_requests = q.limit(length).offset(offset).all()
400 pull_requests = q.limit(length).offset(offset).all()
401 else:
401 else:
402 pull_requests = q.all()
402 pull_requests = q.all()
403
403
404 return pull_requests
404 return pull_requests
405
405
406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 """
407 """
408 Count the number of pull requests for a specific repository that are
408 Count the number of pull requests for a specific repository that are
409 awaiting review.
409 awaiting review.
410
410
411 :param repo_name: target or source repo
411 :param repo_name: target or source repo
412 :param search_q: filter by text
412 :param search_q: filter by text
413 :param statuses: list of pull request statuses
413 :param statuses: list of pull request statuses
414 :returns: int number of pull requests
414 :returns: int number of pull requests
415 """
415 """
416 pull_requests = self.get_awaiting_review(
416 pull_requests = self.get_awaiting_review(
417 repo_name, search_q=search_q, statuses=statuses)
417 repo_name, search_q=search_q, statuses=statuses)
418
418
419 return len(pull_requests)
419 return len(pull_requests)
420
420
421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 offset=0, length=None, order_by=None, order_dir='desc'):
422 offset=0, length=None, order_by=None, order_dir='desc'):
423 """
423 """
424 Get all pull requests for a specific repository that are awaiting
424 Get all pull requests for a specific repository that are awaiting
425 review.
425 review.
426
426
427 :param repo_name: target or source repo
427 :param repo_name: target or source repo
428 :param search_q: filter by text
428 :param search_q: filter by text
429 :param statuses: list of pull request statuses
429 :param statuses: list of pull request statuses
430 :param offset: pagination offset
430 :param offset: pagination offset
431 :param length: length of returned list
431 :param length: length of returned list
432 :param order_by: order of the returned list
432 :param order_by: order of the returned list
433 :param order_dir: 'asc' or 'desc' ordering direction
433 :param order_dir: 'asc' or 'desc' ordering direction
434 :returns: list of pull requests
434 :returns: list of pull requests
435 """
435 """
436 pull_requests = self.get_all(
436 pull_requests = self.get_all(
437 repo_name, search_q=search_q, statuses=statuses,
437 repo_name, search_q=search_q, statuses=statuses,
438 order_by=order_by, order_dir=order_dir)
438 order_by=order_by, order_dir=order_dir)
439
439
440 _filtered_pull_requests = []
440 _filtered_pull_requests = []
441 for pr in pull_requests:
441 for pr in pull_requests:
442 status = pr.calculated_review_status()
442 status = pr.calculated_review_status()
443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 ChangesetStatus.STATUS_UNDER_REVIEW]:
444 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 _filtered_pull_requests.append(pr)
445 _filtered_pull_requests.append(pr)
446 if length:
446 if length:
447 return _filtered_pull_requests[offset:offset+length]
447 return _filtered_pull_requests[offset:offset+length]
448 else:
448 else:
449 return _filtered_pull_requests
449 return _filtered_pull_requests
450
450
    def _prepare_awaiting_my_review_review_query(
            self, repo_name, user_id, search_q=None, statuses=None,
            order_by=None, order_dir='desc'):
        """
        Build the query for pull requests in *repo_name* where *user_id* is a
        reviewer and their most recent review status is still "not reviewed"
        or "under review" (or they have no status entry yet).
        """

        # statuses that count as "still awaiting my review"
        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)
        repo_alias = aliased(Repository)

        # correlated subquery selecting one status version per (PR, reviewer).
        # NOTE(review): min() is used to pick the "last" version, which implies
        # version numbers count downward -- confirm against ChangesetStatus
        # versioning semantics.
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join keeps reviewer rows that have no status yet; those rows
        # surface as status_alias.version/status == NULL and are kept below
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .join(repo_alias,
                  repo_alias.repo_id == pull_request_alias.target_repo_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(repo_alias.repo_name == repo_name) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if search_q:
            # case-insensitive LIKE over id, author username, title, description
            like_expression = u'%{}%'.format(safe_str(search_q))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown keys are ignored
        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
513
513
514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 """
515 """
516 Count the number of pull requests for a specific repository that are
516 Count the number of pull requests for a specific repository that are
517 awaiting review from a specific user.
517 awaiting review from a specific user.
518
518
519 :param repo_name: target or source repo
519 :param repo_name: target or source repo
520 :param user_id: reviewer user of the pull request
520 :param user_id: reviewer user of the pull request
521 :param search_q: filter by text
521 :param search_q: filter by text
522 :param statuses: list of pull request statuses
522 :param statuses: list of pull request statuses
523 :returns: int number of pull requests
523 :returns: int number of pull requests
524 """
524 """
525 q = self._prepare_awaiting_my_review_review_query(
525 q = self._prepare_awaiting_my_review_review_query(
526 repo_name, user_id, search_q=search_q, statuses=statuses)
526 repo_name, user_id, search_q=search_q, statuses=statuses)
527 return q.count()
527 return q.count()
528
528
529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 offset=0, length=None, order_by=None, order_dir='desc'):
530 offset=0, length=None, order_by=None, order_dir='desc'):
531 """
531 """
532 Get all pull requests for a specific repository that are awaiting
532 Get all pull requests for a specific repository that are awaiting
533 review from a specific user.
533 review from a specific user.
534
534
535 :param repo_name: target or source repo
535 :param repo_name: target or source repo
536 :param user_id: reviewer user of the pull request
536 :param user_id: reviewer user of the pull request
537 :param search_q: filter by text
537 :param search_q: filter by text
538 :param statuses: list of pull request statuses
538 :param statuses: list of pull request statuses
539 :param offset: pagination offset
539 :param offset: pagination offset
540 :param length: length of returned list
540 :param length: length of returned list
541 :param order_by: order of the returned list
541 :param order_by: order of the returned list
542 :param order_dir: 'asc' or 'desc' ordering direction
542 :param order_dir: 'asc' or 'desc' ordering direction
543 :returns: list of pull requests
543 :returns: list of pull requests
544 """
544 """
545
545
546 q = self._prepare_awaiting_my_review_review_query(
546 q = self._prepare_awaiting_my_review_review_query(
547 repo_name, user_id, search_q=search_q, statuses=statuses,
547 repo_name, user_id, search_q=search_q, statuses=statuses,
548 order_by=order_by, order_dir=order_dir)
548 order_by=order_by, order_dir=order_dir)
549
549
550 if length:
550 if length:
551 pull_requests = q.limit(length).offset(offset).all()
551 pull_requests = q.limit(length).offset(offset).all()
552 else:
552 else:
553 pull_requests = q.all()
553 pull_requests = q.all()
554
554
555 return pull_requests
555 return pull_requests
556
556
    def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
                                        order_by=None, order_dir='desc'):
        """
        Build a query of pull requests where the user is the creator, or was
        added as a reviewer (any role). With a falsy user_id this matches all
        pull requests.
        """
        q = PullRequest.query()
        if user_id:
            # sub-select of PR ids the user reviews; OR-ed with authorship
            base_query = select(PullRequestReviewers)\
                .where(PullRequestReviewers.user_id == user_id)\
                .with_only_columns(PullRequestReviewers.pull_request_id)

            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(base_query)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            # case-insensitive LIKE over id, author username, title, description
            like_expression = u'%{}%'.format(safe_str(query))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown keys are ignored
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
602
602
603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 return q.count()
605 return q.count()
606
606
607 def get_im_participating_in(
607 def get_im_participating_in(
608 self, user_id=None, statuses=None, query='', offset=0,
608 self, user_id=None, statuses=None, query='', offset=0,
609 length=None, order_by=None, order_dir='desc'):
609 length=None, order_by=None, order_dir='desc'):
610 """
610 """
611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
612 """
612 """
613
613
614 q = self._prepare_im_participating_query(
614 q = self._prepare_im_participating_query(
615 user_id, statuses=statuses, query=query, order_by=order_by,
615 user_id, statuses=statuses, query=query, order_by=order_by,
616 order_dir=order_dir)
616 order_dir=order_dir)
617
617
618 if length:
618 if length:
619 pull_requests = q.limit(length).offset(offset).all()
619 pull_requests = q.limit(length).offset(offset).all()
620 else:
620 else:
621 pull_requests = q.all()
621 pull_requests = q.all()
622
622
623 return pull_requests
623 return pull_requests
624
624
    def _prepare_participating_in_for_review_query(
            self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
        """
        Build the query for pull requests (across all repositories) where
        *user_id* is a reviewer and their most recent review status is still
        "not reviewed" or "under review" (or they have no status entry yet).

        Same shape as _prepare_awaiting_my_review_review_query, minus the
        repository filter.
        """

        # statuses that count as "still awaiting my review"
        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)

        # correlated subquery selecting one status version per (PR, reviewer).
        # NOTE(review): min() is used to pick the "last" version, which implies
        # version numbers count downward -- confirm against ChangesetStatus
        # versioning semantics.
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join keeps reviewer rows that have no status yet; those rows
        # surface as status_alias.version/status == NULL and are kept below
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if query:
            # case-insensitive LIKE over id, author username, title, description
            like_expression = u'%{}%'.format(safe_str(query))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        # whitelist of sortable columns; unknown keys are ignored
        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
682
682
683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 return q.count()
685 return q.count()
686
686
687 def get_im_participating_in_for_review(
687 def get_im_participating_in_for_review(
688 self, user_id, statuses=None, query='', offset=0,
688 self, user_id, statuses=None, query='', offset=0,
689 length=None, order_by=None, order_dir='desc'):
689 length=None, order_by=None, order_dir='desc'):
690 """
690 """
691 Get all Pull requests that needs user approval or rejection
691 Get all Pull requests that needs user approval or rejection
692 """
692 """
693
693
694 q = self._prepare_participating_in_for_review_query(
694 q = self._prepare_participating_in_for_review_query(
695 user_id, statuses=statuses, query=query, order_by=order_by,
695 user_id, statuses=statuses, query=query, order_by=order_by,
696 order_dir=order_dir)
696 order_dir=order_dir)
697
697
698 if length:
698 if length:
699 pull_requests = q.limit(length).offset(offset).all()
699 pull_requests = q.limit(length).offset(offset).all()
700 else:
700 else:
701 pull_requests = q.all()
701 pull_requests = q.all()
702
702
703 return pull_requests
703 return pull_requests
704
704
705 def get_versions(self, pull_request):
705 def get_versions(self, pull_request):
706 """
706 """
707 returns version of pull request sorted by ID descending
707 returns version of pull request sorted by ID descending
708 """
708 """
709 return PullRequestVersion.query()\
709 return PullRequestVersion.query()\
710 .filter(PullRequestVersion.pull_request == pull_request)\
710 .filter(PullRequestVersion.pull_request == pull_request)\
711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
712 .all()
712 .all()
713
713
714 def get_pr_version(self, pull_request_id, version=None):
714 def get_pr_version(self, pull_request_id, version=None):
715 at_version = None
715 at_version = None
716
716
717 if version and version == 'latest':
717 if version and version == 'latest':
718 pull_request_ver = PullRequest.get(pull_request_id)
718 pull_request_ver = PullRequest.get(pull_request_id)
719 pull_request_obj = pull_request_ver
719 pull_request_obj = pull_request_ver
720 _org_pull_request_obj = pull_request_obj
720 _org_pull_request_obj = pull_request_obj
721 at_version = 'latest'
721 at_version = 'latest'
722 elif version:
722 elif version:
723 pull_request_ver = PullRequestVersion.get_or_404(version)
723 pull_request_ver = PullRequestVersion.get_or_404(version)
724 pull_request_obj = pull_request_ver
724 pull_request_obj = pull_request_ver
725 _org_pull_request_obj = pull_request_ver.pull_request
725 _org_pull_request_obj = pull_request_ver.pull_request
726 at_version = pull_request_ver.pull_request_version_id
726 at_version = pull_request_ver.pull_request_version_id
727 else:
727 else:
728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
729 pull_request_id)
729 pull_request_id)
730
730
731 pull_request_display_obj = PullRequest.get_pr_display_object(
731 pull_request_display_obj = PullRequest.get_pr_display_object(
732 pull_request_obj, _org_pull_request_obj)
732 pull_request_obj, _org_pull_request_obj)
733
733
734 return _org_pull_request_obj, pull_request_obj, \
734 return _org_pull_request_obj, pull_request_obj, \
735 pull_request_display_obj, at_version
735 pull_request_display_obj, at_version
736
736
737 def pr_commits_versions(self, versions):
737 def pr_commits_versions(self, versions):
738 """
738 """
739 Maps the pull-request commits into all known PR versions. This way we can obtain
739 Maps the pull-request commits into all known PR versions. This way we can obtain
740 each pr version the commit was introduced in.
740 each pr version the commit was introduced in.
741 """
741 """
742 commit_versions = collections.defaultdict(list)
742 commit_versions = collections.defaultdict(list)
743 num_versions = [x.pull_request_version_id for x in versions]
743 num_versions = [x.pull_request_version_id for x in versions]
744 for ver in versions:
744 for ver in versions:
745 for commit_id in ver.revisions:
745 for commit_id in ver.revisions:
746 ver_idx = ChangesetComment.get_index_from_version(
746 ver_idx = ChangesetComment.get_index_from_version(
747 ver.pull_request_version_id, num_versions=num_versions)
747 ver.pull_request_version_id, num_versions=num_versions)
748 commit_versions[commit_id].append(ver_idx)
748 commit_versions[commit_id].append(ver_idx)
749 return commit_versions
749 return commit_versions
750
750
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with its reviewers and observers, set the
        initial commit statuses, run an initial merge simulation and fire the
        'create' hook.

        :param created_by: creating user, resolved via ``self._get_user``
        :param source_repo: source repository, resolved via ``self._get_repo``
        :param source_ref: source reference string
        :param target_repo: target repository, resolved via ``self._get_repo``
        :param target_ref: target reference string
        :param revisions: commit ids making up the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
            tuples describing reviewers
        :param observers: iterable of the same tuple shape describing observers
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed common ancestor commit id
        :param description_renderer: renderer used for the description
        :param reviewer_data: reviewer rule metadata stored on the pull request
        :param translator: translation callable; defaults to the current
            request's translator
        :param auth_user: AuthUser performing the action; defaults to the
            AuthUser of ``created_by``
        :return: the persisted PullRequest object
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        # build the pull request record; state starts as CREATING and is
        # promoted to CREATED after the merge simulation below succeeds
        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush so pull_request gets an id the reviewer rows can reference
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            # NOTE: state_obj is not used; the context manager handles the
            # state transition around the validation call
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
892
892
893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 pull_request = self.__get_pull_request(pull_request)
894 pull_request = self.__get_pull_request(pull_request)
895 target_scm = pull_request.target_repo.scm_instance()
895 target_scm = pull_request.target_repo.scm_instance()
896 if action == 'create':
896 if action == 'create':
897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 elif action == 'merge':
898 elif action == 'merge':
899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 elif action == 'close':
900 elif action == 'close':
901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 elif action == 'review_status_change':
902 elif action == 'review_status_change':
903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 elif action == 'update':
904 elif action == 'update':
905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 elif action == 'comment':
906 elif action == 'comment':
907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 elif action == 'comment_edit':
908 elif action == 'comment_edit':
909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 else:
910 else:
911 return
911 return
912
912
913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 pull_request, action, trigger_hook)
914 pull_request, action, trigger_hook)
915 trigger_hook(
915 trigger_hook(
916 username=user.username,
916 username=user.username,
917 repo_name=pull_request.target_repo.repo_name,
917 repo_name=pull_request.target_repo.repo_name,
918 repo_type=target_scm.alias,
918 repo_type=target_scm.alias,
919 pull_request=pull_request,
919 pull_request=pull_request,
920 data=data)
920 data=data)
921
921
922 def _get_commit_ids(self, pull_request):
922 def _get_commit_ids(self, pull_request):
923 """
923 """
924 Return the commit ids of the merged pull request.
924 Return the commit ids of the merged pull request.
925
925
926 This method is not dealing correctly yet with the lack of autoupdates
926 This method is not dealing correctly yet with the lack of autoupdates
927 nor with the implicit target updates.
927 nor with the implicit target updates.
928 For example: if a commit in the source repo is already in the target it
928 For example: if a commit in the source repo is already in the target it
929 will be reported anyways.
929 will be reported anyways.
930 """
930 """
931 merge_rev = pull_request.merge_rev
931 merge_rev = pull_request.merge_rev
932 if merge_rev is None:
932 if merge_rev is None:
933 raise ValueError('This pull request was not merged yet')
933 raise ValueError('This pull request was not merged yet')
934
934
935 commit_ids = list(pull_request.revisions)
935 commit_ids = list(pull_request.revisions)
936 if merge_rev not in commit_ids:
936 if merge_rev not in commit_ids:
937 commit_ids.append(merge_rev)
937 commit_ids.append(merge_rev)
938
938
939 return commit_ids
939 return commit_ids
940
940
941 def merge_repo(self, pull_request, user, extras):
941 def merge_repo(self, pull_request, user, extras):
942 repo_type = pull_request.source_repo.repo_type
942 repo_type = pull_request.source_repo.repo_type
943 log.debug("Merging pull request %s", pull_request)
943 log.debug("Merging pull request %s", pull_request)
944
944
945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
946 merge_state = self._merge_pull_request(pull_request, user, extras)
946 merge_state = self._merge_pull_request(pull_request, user, extras)
947 if merge_state.executed:
947 if merge_state.executed:
948 log.debug("Merge was successful, updating the pull request comments.")
948 log.debug("Merge was successful, updating the pull request comments.")
949 self._comment_and_close_pr(pull_request, user, merge_state)
949 self._comment_and_close_pr(pull_request, user, merge_state)
950
950
951 self._log_audit_action(
951 self._log_audit_action(
952 'repo.pull_request.merge',
952 'repo.pull_request.merge',
953 {'merge_state': merge_state.__dict__},
953 {'merge_state': merge_state.__dict__},
954 user, pull_request)
954 user, pull_request)
955
955
956 else:
956 else:
957 log.warning("Merge failed, not updating the pull request.")
957 log.warning("Merge failed, not updating the pull request.")
958 return merge_state
958 return merge_state
959
959
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of the pull request inside a callback
        daemon so server-side hooks fire with the proper extras.

        :param pull_request: pull request to merge
        :param user: user performing the merge (name/email go into the commit)
        :param extras: hook extras dict, passed through the callback daemon
        :param merge_msg: optional commit message template; defaults to
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :return: merge state object as returned by ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the (possibly custom) template
        message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            pr_desc=pull_request.description,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        # gather per-repo merge strategy settings
        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target reference points at the current tip
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)

        return merge_state
1002
1002
1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1005 pull_request.updated_on = datetime.datetime.now()
1005 pull_request.updated_on = datetime.datetime.now()
1006 close_msg = close_msg or 'Pull request merged and closed'
1006 close_msg = close_msg or 'Pull request merged and closed'
1007
1007
1008 CommentsModel().create(
1008 CommentsModel().create(
1009 text=safe_str(close_msg),
1009 text=safe_str(close_msg),
1010 repo=pull_request.target_repo.repo_id,
1010 repo=pull_request.target_repo.repo_id,
1011 user=user.user_id,
1011 user=user.user_id,
1012 pull_request=pull_request.pull_request_id,
1012 pull_request=pull_request.pull_request_id,
1013 f_path=None,
1013 f_path=None,
1014 line_no=None,
1014 line_no=None,
1015 closing_pr=True
1015 closing_pr=True
1016 )
1016 )
1017
1017
1018 Session().add(pull_request)
1018 Session().add(pull_request)
1019 Session().flush()
1019 Session().flush()
1020 # TODO: paris: replace invalidation with less radical solution
1020 # TODO: paris: replace invalidation with less radical solution
1021 ScmModel().mark_for_invalidation(
1021 ScmModel().mark_for_invalidation(
1022 pull_request.target_repo.repo_name)
1022 pull_request.target_repo.repo_name)
1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1024
1024
1025 def has_valid_update_type(self, pull_request):
1025 def has_valid_update_type(self, pull_request):
1026 source_ref_type = pull_request.source_ref_parts.type
1026 source_ref_type = pull_request.source_ref_parts.type
1027 return source_ref_type in self.REF_TYPES
1027 return source_ref_type in self.REF_TYPES
1028
1028
1029 def get_flow_commits(self, pull_request):
1029 def get_flow_commits(self, pull_request):
1030
1030
1031 # source repo
1031 # source repo
1032 source_ref_name = pull_request.source_ref_parts.name
1032 source_ref_name = pull_request.source_ref_parts.name
1033 source_ref_type = pull_request.source_ref_parts.type
1033 source_ref_type = pull_request.source_ref_parts.type
1034 source_ref_id = pull_request.source_ref_parts.commit_id
1034 source_ref_id = pull_request.source_ref_parts.commit_id
1035 source_repo = pull_request.source_repo.scm_instance()
1035 source_repo = pull_request.source_repo.scm_instance()
1036
1036
1037 try:
1037 try:
1038 if source_ref_type in self.REF_TYPES:
1038 if source_ref_type in self.REF_TYPES:
1039 source_commit = source_repo.get_commit(
1039 source_commit = source_repo.get_commit(
1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1041 else:
1041 else:
1042 source_commit = source_repo.get_commit(source_ref_id)
1042 source_commit = source_repo.get_commit(source_ref_id)
1043 except CommitDoesNotExistError:
1043 except CommitDoesNotExistError:
1044 raise SourceRefMissing()
1044 raise SourceRefMissing()
1045
1045
1046 # target repo
1046 # target repo
1047 target_ref_name = pull_request.target_ref_parts.name
1047 target_ref_name = pull_request.target_ref_parts.name
1048 target_ref_type = pull_request.target_ref_parts.type
1048 target_ref_type = pull_request.target_ref_parts.type
1049 target_ref_id = pull_request.target_ref_parts.commit_id
1049 target_ref_id = pull_request.target_ref_parts.commit_id
1050 target_repo = pull_request.target_repo.scm_instance()
1050 target_repo = pull_request.target_repo.scm_instance()
1051
1051
1052 try:
1052 try:
1053 if target_ref_type in self.REF_TYPES:
1053 if target_ref_type in self.REF_TYPES:
1054 target_commit = target_repo.get_commit(
1054 target_commit = target_repo.get_commit(
1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1056 else:
1056 else:
1057 target_commit = target_repo.get_commit(target_ref_id)
1057 target_commit = target_repo.get_commit(target_ref_id)
1058 except CommitDoesNotExistError:
1058 except CommitDoesNotExistError:
1059 raise TargetRefMissing()
1059 raise TargetRefMissing()
1060
1060
1061 return source_commit, target_commit
1061 return source_commit, target_commit
1062
1062
1063 def update_commits(self, pull_request, updating_user):
1063 def update_commits(self, pull_request, updating_user):
1064 """
1064 """
1065 Get the updated list of commits for the pull request
1065 Get the updated list of commits for the pull request
1066 and return the new pull request version and the list
1066 and return the new pull request version and the list
1067 of commits processed by this update action
1067 of commits processed by this update action
1068
1068
1069 updating_user is the user_object who triggered the update
1069 updating_user is the user_object who triggered the update
1070 """
1070 """
1071 pull_request = self.__get_pull_request(pull_request)
1071 pull_request = self.__get_pull_request(pull_request)
1072 source_ref_type = pull_request.source_ref_parts.type
1072 source_ref_type = pull_request.source_ref_parts.type
1073 source_ref_name = pull_request.source_ref_parts.name
1073 source_ref_name = pull_request.source_ref_parts.name
1074 source_ref_id = pull_request.source_ref_parts.commit_id
1074 source_ref_id = pull_request.source_ref_parts.commit_id
1075
1075
1076 target_ref_type = pull_request.target_ref_parts.type
1076 target_ref_type = pull_request.target_ref_parts.type
1077 target_ref_name = pull_request.target_ref_parts.name
1077 target_ref_name = pull_request.target_ref_parts.name
1078 target_ref_id = pull_request.target_ref_parts.commit_id
1078 target_ref_id = pull_request.target_ref_parts.commit_id
1079
1079
1080 if not self.has_valid_update_type(pull_request):
1080 if not self.has_valid_update_type(pull_request):
1081 log.debug("Skipping update of pull request %s due to ref type: %s",
1081 log.debug("Skipping update of pull request %s due to ref type: %s",
1082 pull_request, source_ref_type)
1082 pull_request, source_ref_type)
1083 return UpdateResponse(
1083 return UpdateResponse(
1084 executed=False,
1084 executed=False,
1085 reason=UpdateFailureReason.WRONG_REF_TYPE,
1085 reason=UpdateFailureReason.WRONG_REF_TYPE,
1086 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1086 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1087 source_changed=False, target_changed=False)
1087 source_changed=False, target_changed=False)
1088
1088
1089 try:
1089 try:
1090 source_commit, target_commit = self.get_flow_commits(pull_request)
1090 source_commit, target_commit = self.get_flow_commits(pull_request)
1091 except SourceRefMissing:
1091 except SourceRefMissing:
1092 return UpdateResponse(
1092 return UpdateResponse(
1093 executed=False,
1093 executed=False,
1094 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1094 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1095 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1095 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1096 source_changed=False, target_changed=False)
1096 source_changed=False, target_changed=False)
1097 except TargetRefMissing:
1097 except TargetRefMissing:
1098 return UpdateResponse(
1098 return UpdateResponse(
1099 executed=False,
1099 executed=False,
1100 reason=UpdateFailureReason.MISSING_TARGET_REF,
1100 reason=UpdateFailureReason.MISSING_TARGET_REF,
1101 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1101 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1102 source_changed=False, target_changed=False)
1102 source_changed=False, target_changed=False)
1103
1103
1104 source_changed = source_ref_id != source_commit.raw_id
1104 source_changed = source_ref_id != source_commit.raw_id
1105 target_changed = target_ref_id != target_commit.raw_id
1105 target_changed = target_ref_id != target_commit.raw_id
1106
1106
1107 if not (source_changed or target_changed):
1107 if not (source_changed or target_changed):
1108 log.debug("Nothing changed in pull request %s", pull_request)
1108 log.debug("Nothing changed in pull request %s", pull_request)
1109 return UpdateResponse(
1109 return UpdateResponse(
1110 executed=False,
1110 executed=False,
1111 reason=UpdateFailureReason.NO_CHANGE,
1111 reason=UpdateFailureReason.NO_CHANGE,
1112 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1112 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1113 source_changed=target_changed, target_changed=source_changed)
1113 source_changed=target_changed, target_changed=source_changed)
1114
1114
1115 change_in_found = 'target repo' if target_changed else 'source repo'
1115 change_in_found = 'target repo' if target_changed else 'source repo'
1116 log.debug('Updating pull request because of change in %s detected',
1116 log.debug('Updating pull request because of change in %s detected',
1117 change_in_found)
1117 change_in_found)
1118
1118
1119 # Finally there is a need for an update, in case of source change
1119 # Finally there is a need for an update, in case of source change
1120 # we create a new version, else just an update
1120 # we create a new version, else just an update
1121 if source_changed:
1121 if source_changed:
1122 pull_request_version = self._create_version_from_snapshot(pull_request)
1122 pull_request_version = self._create_version_from_snapshot(pull_request)
1123 self._link_comments_to_version(pull_request_version)
1123 self._link_comments_to_version(pull_request_version)
1124 else:
1124 else:
1125 try:
1125 try:
1126 ver = pull_request.versions[-1]
1126 ver = pull_request.versions[-1]
1127 except IndexError:
1127 except IndexError:
1128 ver = None
1128 ver = None
1129
1129
1130 pull_request.pull_request_version_id = \
1130 pull_request.pull_request_version_id = \
1131 ver.pull_request_version_id if ver else None
1131 ver.pull_request_version_id if ver else None
1132 pull_request_version = pull_request
1132 pull_request_version = pull_request
1133
1133
1134 source_repo = pull_request.source_repo.scm_instance()
1134 source_repo = pull_request.source_repo.scm_instance()
1135 target_repo = pull_request.target_repo.scm_instance()
1135 target_repo = pull_request.target_repo.scm_instance()
1136
1136
1137 # re-compute commit ids
1137 # re-compute commit ids
1138 old_commit_ids = pull_request.revisions
1138 old_commit_ids = pull_request.revisions
1139 pre_load = ["author", "date", "message", "branch"]
1139 pre_load = ["author", "date", "message", "branch"]
1140 commit_ranges = target_repo.compare(
1140 commit_ranges = target_repo.compare(
1141 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1141 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1142 pre_load=pre_load)
1142 pre_load=pre_load)
1143
1143
1144 target_ref = target_commit.raw_id
1144 target_ref = target_commit.raw_id
1145 source_ref = source_commit.raw_id
1145 source_ref = source_commit.raw_id
1146 ancestor_commit_id = target_repo.get_common_ancestor(
1146 ancestor_commit_id = target_repo.get_common_ancestor(
1147 target_ref, source_ref, source_repo)
1147 target_ref, source_ref, source_repo)
1148
1148
1149 if not ancestor_commit_id:
1149 if not ancestor_commit_id:
1150 raise ValueError(
1150 raise ValueError(
1151 'cannot calculate diff info without a common ancestor. '
1151 'cannot calculate diff info without a common ancestor. '
1152 'Make sure both repositories are related, and have a common forking commit.')
1152 'Make sure both repositories are related, and have a common forking commit.')
1153
1153
1154 pull_request.common_ancestor_id = ancestor_commit_id
1154 pull_request.common_ancestor_id = ancestor_commit_id
1155
1155
1156 pull_request.source_ref = '%s:%s:%s' % (
1156 pull_request.source_ref = '%s:%s:%s' % (
1157 source_ref_type, source_ref_name, source_commit.raw_id)
1157 source_ref_type, source_ref_name, source_commit.raw_id)
1158 pull_request.target_ref = '%s:%s:%s' % (
1158 pull_request.target_ref = '%s:%s:%s' % (
1159 target_ref_type, target_ref_name, ancestor_commit_id)
1159 target_ref_type, target_ref_name, ancestor_commit_id)
1160
1160
1161 pull_request.revisions = [
1161 pull_request.revisions = [
1162 commit.raw_id for commit in reversed(commit_ranges)]
1162 commit.raw_id for commit in reversed(commit_ranges)]
1163 pull_request.updated_on = datetime.datetime.now()
1163 pull_request.updated_on = datetime.datetime.now()
1164 Session().add(pull_request)
1164 Session().add(pull_request)
1165 new_commit_ids = pull_request.revisions
1165 new_commit_ids = pull_request.revisions
1166
1166
1167 old_diff_data, new_diff_data = self._generate_update_diffs(
1167 old_diff_data, new_diff_data = self._generate_update_diffs(
1168 pull_request, pull_request_version)
1168 pull_request, pull_request_version)
1169
1169
1170 # calculate commit and file changes
1170 # calculate commit and file changes
1171 commit_changes = self._calculate_commit_id_changes(
1171 commit_changes = self._calculate_commit_id_changes(
1172 old_commit_ids, new_commit_ids)
1172 old_commit_ids, new_commit_ids)
1173 file_changes = self._calculate_file_changes(
1173 file_changes = self._calculate_file_changes(
1174 old_diff_data, new_diff_data)
1174 old_diff_data, new_diff_data)
1175
1175
1176 # set comments as outdated if DIFFS changed
1176 # set comments as outdated if DIFFS changed
1177 CommentsModel().outdate_comments(
1177 CommentsModel().outdate_comments(
1178 pull_request, old_diff_data=old_diff_data,
1178 pull_request, old_diff_data=old_diff_data,
1179 new_diff_data=new_diff_data)
1179 new_diff_data=new_diff_data)
1180
1180
1181 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1181 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1182 file_node_changes = (
1182 file_node_changes = (
1183 file_changes.added or file_changes.modified or file_changes.removed)
1183 file_changes.added or file_changes.modified or file_changes.removed)
1184 pr_has_changes = valid_commit_changes or file_node_changes
1184 pr_has_changes = valid_commit_changes or file_node_changes
1185
1185
1186 # Add an automatic comment to the pull request, in case
1186 # Add an automatic comment to the pull request, in case
1187 # anything has changed
1187 # anything has changed
1188 if pr_has_changes:
1188 if pr_has_changes:
1189 update_comment = CommentsModel().create(
1189 update_comment = CommentsModel().create(
1190 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1190 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1191 repo=pull_request.target_repo,
1191 repo=pull_request.target_repo,
1192 user=pull_request.author,
1192 user=pull_request.author,
1193 pull_request=pull_request,
1193 pull_request=pull_request,
1194 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1194 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1195
1195
1196 # Update status to "Under Review" for added commits
1196 # Update status to "Under Review" for added commits
1197 for commit_id in commit_changes.added:
1197 for commit_id in commit_changes.added:
1198 ChangesetStatusModel().set_status(
1198 ChangesetStatusModel().set_status(
1199 repo=pull_request.source_repo,
1199 repo=pull_request.source_repo,
1200 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1200 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1201 comment=update_comment,
1201 comment=update_comment,
1202 user=pull_request.author,
1202 user=pull_request.author,
1203 pull_request=pull_request,
1203 pull_request=pull_request,
1204 revision=commit_id)
1204 revision=commit_id)
1205
1205
1206 # initial commit
1206 # initial commit
1207 Session().commit()
1207 Session().commit()
1208
1208
1209 if pr_has_changes:
1209 if pr_has_changes:
1210 # send update email to users
1210 # send update email to users
1211 try:
1211 try:
1212 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1212 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1213 ancestor_commit_id=ancestor_commit_id,
1213 ancestor_commit_id=ancestor_commit_id,
1214 commit_changes=commit_changes,
1214 commit_changes=commit_changes,
1215 file_changes=file_changes)
1215 file_changes=file_changes)
1216 Session().commit()
1216 Session().commit()
1217 except Exception:
1217 except Exception:
1218 log.exception('Failed to send email notification to users')
1218 log.exception('Failed to send email notification to users')
1219 Session().rollback()
1219 Session().rollback()
1220
1220
1221 log.debug(
1221 log.debug(
1222 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1222 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1223 'removed_ids: %s', pull_request.pull_request_id,
1223 'removed_ids: %s', pull_request.pull_request_id,
1224 commit_changes.added, commit_changes.common, commit_changes.removed)
1224 commit_changes.added, commit_changes.common, commit_changes.removed)
1225 log.debug(
1225 log.debug(
1226 'Updated pull request with the following file changes: %s',
1226 'Updated pull request with the following file changes: %s',
1227 file_changes)
1227 file_changes)
1228
1228
1229 log.info(
1229 log.info(
1230 "Updated pull request %s from commit %s to commit %s, "
1230 "Updated pull request %s from commit %s to commit %s, "
1231 "stored new version %s of this pull request.",
1231 "stored new version %s of this pull request.",
1232 pull_request.pull_request_id, source_ref_id,
1232 pull_request.pull_request_id, source_ref_id,
1233 pull_request.source_ref_parts.commit_id,
1233 pull_request.source_ref_parts.commit_id,
1234 pull_request_version.pull_request_version_id)
1234 pull_request_version.pull_request_version_id)
1235
1235
1236 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1236 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1237
1237
1238 return UpdateResponse(
1238 return UpdateResponse(
1239 executed=True, reason=UpdateFailureReason.NONE,
1239 executed=True, reason=UpdateFailureReason.NONE,
1240 old=pull_request, new=pull_request_version,
1240 old=pull_request, new=pull_request_version,
1241 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1241 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1242 source_changed=source_changed, target_changed=target_changed)
1242 source_changed=source_changed, target_changed=target_changed)
1243
1243
def _create_version_from_snapshot(self, pull_request):
    """
    Snapshot the current state of ``pull_request`` into a new
    `PullRequestVersion` row.

    The version mirrors every descriptive/merge-related field of the pull
    request at this moment; only ``created_on`` is stamped with "now".

    :param pull_request: the `PullRequest` to snapshot
    :return: the flushed `PullRequestVersion` instance
    """
    # Fields copied 1:1 from the pull request onto the version row.
    mirrored_attrs = (
        'title', 'description', 'status', 'pull_request_state',
        'updated_on', 'user_id',
        'source_repo', 'source_ref', 'target_repo', 'target_ref',
        '_last_merge_source_rev', '_last_merge_target_rev',
        'last_merge_status', 'last_merge_metadata',
        'shadow_merge_ref', 'merge_rev', 'reviewer_data',
        'revisions', 'common_ancestor_id',
    )

    version = PullRequestVersion()
    for attr in mirrored_attrs:
        setattr(version, attr, getattr(pull_request, attr))

    # the version itself is freshly created right now
    version.created_on = datetime.datetime.now()
    version.pull_request = pull_request

    Session().add(version)
    # flush so pull_request_version_id is populated for callers
    Session().flush()

    return version
1273
1273
def _generate_update_diffs(self, pull_request, pull_request_version):
    """
    Build prepared diff processors for the previous pull request version
    and for the current pull request state.

    :param pull_request: current `PullRequest`
    :param pull_request_version: the prior `PullRequestVersion` snapshot
    :return: tuple ``(old_diff_data, new_diff_data)`` of prepared
        `diffs.DiffProcessor` instances
    """
    diff_context = (
        self.DIFF_CONTEXT +
        CommentsModel.needed_extra_diff_context())
    hide_whitespace_changes = False

    def _prepared_diff(repo, source_commit_id, target_commit_id):
        # fetch the raw diff, then parse/prepare it
        raw_diff = self._get_diff_from_pr_or_version(
            repo, source_commit_id, target_commit_id,
            hide_whitespace_changes=hide_whitespace_changes,
            diff_context=diff_context)
        # NOTE: this was using diff_format='gitdiff'
        processor = diffs.DiffProcessor(raw_diff, diff_format='newdiff')
        processor.prepare()
        return processor

    # old: taken from the stored version snapshot
    old_diff_data = _prepared_diff(
        pull_request_version.source_repo,
        pull_request_version.source_ref_parts.commit_id,
        pull_request_version.target_ref_parts.commit_id)

    # new: taken from the live pull request
    new_diff_data = _prepared_diff(
        pull_request.source_repo,
        pull_request.source_ref_parts.commit_id,
        pull_request.target_ref_parts.commit_id)

    return old_diff_data, new_diff_data
1302
1302
def _link_comments_to_version(self, pull_request_version):
    """
    Link all unlinked comments of this pull request to the given version.

    :param pull_request_version: The `PullRequestVersion` to which
        the comments shall be linked.
    """
    pull_request = pull_request_version.pull_request
    version_id = pull_request_version.pull_request_version_id

    unlinked_comments = (
        ChangesetComment.query()
        .filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            # use null() rather than == None to keep linters happy
            ChangesetComment.pull_request_version == null())
        .order_by(ChangesetComment.comment_id.asc()))

    # TODO: johbo: Find out why this breaks if it is done in a bulk
    # operation.
    for comment in unlinked_comments:
        comment.pull_request_version_id = version_id
        Session().add(comment)
1328
1328
def _calculate_commit_id_changes(self, old_ids, new_ids):
    """
    Classify commit ids into added/common/removed relative to the
    previous revision list; ``total`` is the full new list.

    :param old_ids: commit ids of the previous state
    :param new_ids: commit ids of the current state
    :return: `ChangeTuple(added, common, removed, total)`
    """
    # set-backed membership keeps the original ordering of the id lists
    old_set, new_set = set(old_ids), set(new_ids)
    added = [cid for cid in new_ids if cid not in old_set]
    common = [cid for cid in new_ids if cid in old_set]
    removed = [cid for cid in old_ids if cid not in new_set]
    return ChangeTuple(added, common, removed, new_ids)
1335
1335
def _calculate_file_changes(self, old_diff_data, new_diff_data):
    """
    Compare two prepared diffs file-by-file (via md5 of each file's raw
    diff) and report which files were added, modified or removed.

    :param old_diff_data: prepared `DiffProcessor` of the old version
    :param new_diff_data: prepared `DiffProcessor` of the new version
    :return: `FileChangeTuple(added, modified, removed)`
    """
    # md5 of each file's raw diff in the OLD version, keyed by filename;
    # entries are consumed as they are matched against the new diff
    pending_old = OrderedDict()
    for entry in old_diff_data.parsed_diff:
        pending_old[entry['filename']] = md5_safe(entry['raw_diff'])

    added_files, modified_files, removed_files = [], [], []

    for entry in new_diff_data.parsed_diff:
        filename = entry['filename']
        new_hash = md5_safe(entry['raw_diff'])

        old_hash = pending_old.get(filename)
        if not old_hash:
            # file is not present in old diff; the parsed diff operations
            # tell us whether this is an ADD or a REMOVE
            if diffs.DEL_FILENODE in entry['stats']['ops']:
                removed_files.append(filename)
            else:
                added_files.append(filename)
        else:
            if new_hash != old_hash:
                modified_files.append(filename)
            # seen in the new diff — drop it from the old snapshot
            del pending_old[filename]

    # files still pending were present in old but never appeared in the
    # new diff, so they count as removed
    removed_files.extend(pending_old.keys())

    return FileChangeTuple(added_files, modified_files, removed_files)
1370
1370
def _render_update_message(self, ancestor_commit_id, changes, file_changes):
    """
    render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
    so it's always looking the same disregarding on which default
    renderer system is using.

    :param ancestor_commit_id: ancestor raw_id
    :param changes: changes named tuple
    :param file_changes: file changes named tuple
    """
    under_review_label = ChangesetStatus.get_status_lbl(
        ChangesetStatus.STATUS_UNDER_REVIEW)

    # flat list of every touched file, in added/modified/removed order
    all_changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    template_params = {
        'under_review_label': under_review_label,
        'added_commits': changes.added,
        'removed_commits': changes.removed,
        'changed_files': all_changed_files,
        'added_files': file_changes.added,
        'modified_files': file_changes.modified,
        'removed_files': file_changes.removed,
        'ancestor_commit_id': ancestor_commit_id,
    }
    return RstTemplateRenderer().render(
        'pull_request_update.mako', **template_params)
1400
1400
def edit(self, pull_request, title, description, description_renderer, user):
    """
    Edit title/description of an open pull request and audit-log the
    change with the previous data.

    :param pull_request: pull request (or its id) to edit
    :param title: new title; falsy values leave the title untouched
    :param description: new description (always applied)
    :param description_renderer: renderer name for the description
    :param user: user performing the edit (for the audit log)
    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    old_data = pull_request.get_api_data(with_merge_state=False)

    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    # only overwrite the title when a non-empty one was supplied
    if title:
        pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)

    self._log_audit_action(
        'repo.pull_request.edit', {'old_data': old_data},
        user, pull_request)
1415
1415
def update_reviewers(self, pull_request, reviewer_data, user):
    """
    Update the reviewers in the pull request

    :param pull_request: the pr to update
    :param reviewer_data: list of tuples
        [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
    :param user: current use who triggers this action
    :return: tuple of (ids_to_add, ids_to_remove) user-id sets
    :raises ValueError: when the pull request is already closed
    """

    pull_request = self.__get_pull_request(pull_request)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    # normalize incoming data to {user_id: {reasons, mandatory, role}}
    reviewers = {}
    for user_id, reasons, mandatory, role, rules in reviewer_data:
        if isinstance(user_id, (int, str)):
            user_id = self._get_user(user_id).user_id
        reviewers[user_id] = {
            'reasons': reasons, 'mandatory': mandatory, 'role': role}

    reviewers_ids = set(reviewers.keys())
    current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
        pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)

    current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

    ids_to_add = reviewers_ids.difference(current_reviewers_ids)
    ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

    log.debug("Adding %s reviewers", ids_to_add)
    log.debug("Removing %s reviewers", ids_to_remove)
    changed = False
    added_audit_reviewers = []
    removed_audit_reviewers = []

    for uid in ids_to_add:
        changed = True
        _usr = self._get_user(uid)
        reviewer = PullRequestReviewers()
        reviewer.user = _usr
        reviewer.pull_request = pull_request
        reviewer.reasons = reviewers[uid]['reasons']
        # NOTE(marcink): mandatory shouldn't be changed now
        # reviewer.mandatory = reviewers[uid]['reasons']
        # NOTE(marcink): role should be hardcoded, so we won't edit it.
        reviewer.role = PullRequestReviewers.ROLE_REVIEWER
        Session().add(reviewer)
        added_audit_reviewers.append(reviewer.get_dict())

    for uid in ids_to_remove:
        changed = True
        # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
        # This is an edge case that handles previous state of having the same reviewer twice.
        # this CAN happen due to the lack of DB checks
        # NOTE(review): renamed from `reviewers`, which shadowed the
        # reviewer-data dict built above
        stale_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.user_id == uid,
                    PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
                    PullRequestReviewers.pull_request == pull_request)\
            .all()

        for obj in stale_reviewers:
            # BUGFIX: these were appended to added_audit_reviewers, which
            # logged removals as 'reviewer.add' and left the
            # 'reviewer.delete' audit loop below with nothing to record
            removed_audit_reviewers.append(obj.get_dict())
            Session().delete(obj)

    if changed:
        Session().expire_all()
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    # finally store audit logs
    for user_data in added_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.add', {'data': user_data},
            user, pull_request)
    for user_data in removed_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.delete', {'old_data': user_data},
            user, pull_request)

    self.notify_reviewers(pull_request, ids_to_add, user)
    return ids_to_add, ids_to_remove
1498
1498
1499 def update_observers(self, pull_request, observer_data, user):
def update_observers(self, pull_request, observer_data, user):
    """
    Update the observers in the pull request.

    Computes the delta between the currently stored observers and the
    submitted ones, adds/removes `PullRequestReviewers` rows accordingly,
    writes audit-log entries, and notifies newly added observers.

    :param pull_request: the pr to update
    :param observer_data: list of tuples
        [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
    :param user: current user who triggers this action
    :return: tuple of (added user ids, removed user ids)
    """
    pull_request = self.__get_pull_request(pull_request)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    observers = {}
    for user_id, reasons, mandatory, role, rules in observer_data:
        # accept raw ids/usernames as well as user objects
        if isinstance(user_id, (int, str)):
            user_id = self._get_user(user_id).user_id
        observers[user_id] = {
            'reasons': reasons, 'observers': mandatory, 'role': role}

    observers_ids = set(observers.keys())
    current_observers = PullRequestReviewers.get_pull_request_reviewers(
        pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)

    current_observers_ids = set([x.user.user_id for x in current_observers])

    ids_to_add = observers_ids.difference(current_observers_ids)
    ids_to_remove = current_observers_ids.difference(observers_ids)

    log.debug("Adding %s observer", ids_to_add)
    log.debug("Removing %s observer", ids_to_remove)
    changed = False
    added_audit_observers = []
    removed_audit_observers = []

    for uid in ids_to_add:
        changed = True
        _usr = self._get_user(uid)
        observer = PullRequestReviewers()
        observer.user = _usr
        observer.pull_request = pull_request
        observer.reasons = observers[uid]['reasons']
        # NOTE(marcink): mandatory shouldn't be changed now
        # observer.mandatory = observer[uid]['reasons']

        # NOTE(marcink): role should be hardcoded, so we won't edit it.
        observer.role = PullRequestReviewers.ROLE_OBSERVER
        Session().add(observer)
        added_audit_observers.append(observer.get_dict())

    for uid in ids_to_remove:
        changed = True
        # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
        # This is an edge case that handles previous state of having the same reviewer twice.
        # this CAN happen due to the lack of DB checks
        # BUGFIX: use a dedicated name; previously this clobbered the
        # `observers` dict built above.
        stale_observers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.user_id == uid,
                    PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
                    PullRequestReviewers.pull_request == pull_request)\
            .all()

        for obj in stale_observers:
            # BUGFIX: deletions were appended to `added_audit_observers`,
            # producing 'observer.add' audit entries for removals while
            # `removed_audit_observers` stayed empty.
            removed_audit_observers.append(obj.get_dict())
            Session().delete(obj)

    if changed:
        # expire cached state so updated_on/relations are re-read from the DB
        Session().expire_all()
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    # finally store audit logs
    for user_data in added_audit_observers:
        self._log_audit_action(
            'repo.pull_request.observer.add', {'data': user_data},
            user, pull_request)
    for user_data in removed_audit_observers:
        self._log_audit_action(
            'repo.pull_request.observer.delete', {'old_data': user_data},
            user, pull_request)

    self.notify_observers(pull_request, ids_to_add, user)
    return ids_to_add, ids_to_remove
1581
1581
def get_url(self, pull_request, request=None, permalink=False):
    """
    Return the URL of *pull_request*.

    With ``permalink=True`` a repo-independent global link is produced,
    otherwise the regular show-view URL within the target repository.
    """
    request = request or get_current_request()

    pr_id = pull_request.pull_request_id
    if permalink:
        return request.route_url('pull_requests_global', pull_request_id=pr_id)
    return request.route_url(
        'pullrequest_show',
        repo_name=safe_str(pull_request.target_repo.repo_name),
        pull_request_id=pr_id)
1594
1594
def get_shadow_clone_url(self, pull_request, request=None):
    """
    Returns qualified url pointing to the shadow repository. If this pull
    request is closed there is no shadow repository and ``None`` will be
    returned.
    """
    if pull_request.is_closed():
        # closed PRs have no shadow repo anymore
        return None

    pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
    return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1606
1606
def _notify_reviewers(self, pull_request, user_ids, role, user):
    """
    Create notification objects (and the corresponding emails) telling
    *user_ids* that they participate in *pull_request* in the given
    reviewer/observer *role*.
    """
    if not user_ids:
        # nobody to notify
        return

    log.debug('Notify following %s users about pull-request %s', role, user_ids)

    pull_request_obj = pull_request
    # get the current participants of this pull request
    recipients = user_ids
    notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

    source_repo = pull_request_obj.source_repo
    target_repo = pull_request_obj.target_repo

    pr_url = h.route_url(
        'pullrequest_show',
        repo_name=target_repo.repo_name,
        pull_request_id=pull_request_obj.pull_request_id)

    # summary URLs used inside the email templates
    target_repo_url = h.route_url('repo_summary', repo_name=target_repo.repo_name)
    source_repo_url = h.route_url('repo_summary', repo_name=source_repo.repo_name)

    # (commit_id, message) pairs for every revision this PR carries
    pull_request_commits = [
        (commit.raw_id, commit.message)
        for commit in map(source_repo.get_commit, pull_request.revisions)]

    kwargs = {
        'user': user,
        'pull_request_author': pull_request.author,
        'pull_request': pull_request_obj,
        'pull_request_commits': pull_request_commits,

        'pull_request_target_repo': target_repo,
        'pull_request_target_repo_url': target_repo_url,

        'pull_request_source_repo': source_repo,
        'pull_request_source_repo_url': source_repo_url,

        'pull_request_url': pr_url,
        'thread_ids': [pr_url],
        'user_role': role,
    }

    # create notification objects, and emails
    NotificationModel().create(
        created_by=user,
        notification_subject='',  # Filled in based on the notification_type
        notification_body='',  # Filled in based on the notification_type
        notification_type=notification_type,
        recipients=recipients,
        email_kwargs=kwargs,
    )
1665
1665
def notify_reviewers(self, pull_request, reviewers_ids, user):
    """Notify *reviewers_ids* about the pull request, in the reviewer role."""
    return self._notify_reviewers(
        pull_request, reviewers_ids, PullRequestReviewers.ROLE_REVIEWER, user)
1669
1669
def notify_observers(self, pull_request, observers_ids, user):
    """Notify *observers_ids* about the pull request, in the observer role."""
    return self._notify_reviewers(
        pull_request, observers_ids, PullRequestReviewers.ROLE_OBSERVER, user)
1673
1673
def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                 commit_changes, file_changes):
    """
    Send a "pull request updated" notification (and email) to every
    reviewer of *pull_request* except the user who performed the update.
    """
    updating_user_id = updating_user.user_id
    reviewer_ids = {
        entry.user.user_id
        for entry in pull_request.get_pull_request_reviewers()}
    # NOTE(marcink): send notification to all other users except to
    # person who updated the PR
    recipients = reviewer_ids - {updating_user_id}

    log.debug('Notify following recipients about pull-request update %s', recipients)

    pull_request_obj = pull_request

    # flat list of every file touched by this update, used in the email
    changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    source_repo = pull_request_obj.source_repo
    target_repo = pull_request_obj.target_repo

    pr_url = h.route_url(
        'pullrequest_show',
        repo_name=target_repo.repo_name,
        pull_request_id=pull_request_obj.pull_request_id)

    # summary URLs used inside the email templates
    target_repo_url = h.route_url('repo_summary', repo_name=target_repo.repo_name)
    source_repo_url = h.route_url('repo_summary', repo_name=source_repo.repo_name)

    email_kwargs = {
        'date': datetime.datetime.now(),
        'updating_user': updating_user,

        'pull_request': pull_request_obj,

        'pull_request_target_repo': target_repo,
        'pull_request_target_repo_url': target_repo_url,

        'pull_request_source_repo': source_repo,
        'pull_request_source_repo_url': source_repo_url,

        'pull_request_url': pr_url,

        'ancestor_commit_id': ancestor_commit_id,
        'added_commits': commit_changes.added,
        'removed_commits': commit_changes.removed,
        'changed_files': changed_files,
        'added_files': file_changes.added,
        'modified_files': file_changes.modified,
        'removed_files': file_changes.removed,
        'thread_ids': [pr_url],
    }

    # create notification objects, and emails
    NotificationModel().create(
        created_by=updating_user,
        notification_subject='',  # Filled in based on the notification_type
        notification_body='',  # Filled in based on the notification_type
        notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
        recipients=recipients,
        email_kwargs=email_kwargs,
    )
1738
1738
def delete(self, pull_request, user=None):
    """
    Delete *pull_request* from the database after cleaning up its merge
    workspace and recording an audit-log entry with the old data.
    """
    # fall back to the currently authenticated user for the audit trail
    user = user or getattr(get_current_rhodecode_user(), 'username', None)

    pull_request = self.__get_pull_request(pull_request)
    old_data = pull_request.get_api_data(with_merge_state=False)
    self._cleanup_merge_workspace(pull_request)
    self._log_audit_action(
        'repo.pull_request.delete', {'old_data': old_data},
        user, pull_request)
    Session().delete(pull_request)
1750
1750
def close_pull_request(self, pull_request, user):
    """
    Mark *pull_request* as closed: clean up the merge workspace, stamp
    the update time, fire the 'close' hook and write an audit-log entry.
    """
    pull_request = self.__get_pull_request(pull_request)
    self._cleanup_merge_workspace(pull_request)
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

    audit_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.close', {'data': audit_data}, user, pull_request)
1762
1762
def close_pull_request_with_comment(
        self, pull_request, user, repo, message=None, auth_user=None):
    """
    Close *pull_request*, leaving a status-changing comment behind.

    The final changeset status is APPROVED only when the calculated
    review status of the pull request is approved; otherwise REJECTED.

    :return: tuple of (comment, status)
    """
    review_status = pull_request.calculated_review_status()

    if review_status == ChangesetStatus.STATUS_APPROVED:
        # approved only if we have voting consent
        status = ChangesetStatus.STATUS_APPROVED
    else:
        status = ChangesetStatus.STATUS_REJECTED
    status_lbl = ChangesetStatus.get_status_lbl(status)

    default_message = (
        'Closing with status change {transition_icon} {status}.'
    ).format(transition_icon='>', status=status_lbl)
    text = message or default_message

    # create a comment, and link it to new status
    comment = CommentsModel().create(
        text=text,
        repo=repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        status_change=status_lbl,
        status_change_type=status,
        closing_pr=True,
        auth_user=auth_user,
    )

    # calculate old status before we change it
    old_calculated_status = pull_request.calculated_review_status()
    ChangesetStatusModel().set_status(
        repo.repo_id,
        status,
        user.user_id,
        comment=comment,
        pull_request=pull_request.pull_request_id
    )

    Session().flush()

    self.trigger_pull_request_hook(pull_request, user, 'comment',
                                   data={'comment': comment})

    # we now calculate the status of pull request again, and based on that
    # calculation trigger status change. This might happen in cases
    # that non-reviewer admin closes a pr, which means his vote doesn't
    # change the status, while if he's a reviewer this might change it.
    calculated_status = pull_request.calculated_review_status()
    if old_calculated_status != calculated_status:
        self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                       data={'status': calculated_status})

    # finally close the PR
    PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

    return comment, status
1820
1820
def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
    """
    Determine whether *pull_request* can be merged server-side.

    :return: tuple of (merge_response_or_None, possible, message)
    """
    _ = translator or get_current_request().translate

    # guard clauses: merging disabled / PR already closed
    if not self._is_merge_enabled(pull_request):
        return None, False, _('Server-side pull request merging is disabled.')

    if pull_request.is_closed():
        return None, False, _('This pull request is closed.')

    merge_possible, msg = self._check_repo_requirements(
        target=pull_request.target_repo, source=pull_request.source_repo,
        translator=_)
    if not merge_possible:
        return None, merge_possible, msg

    try:
        response = self._try_merge(
            pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
        log.debug("Merge response: %s", response)
        return response, response.possible, response.merge_status_message
    except NotImplementedError:
        return None, False, _('Pull request merging is not supported.')
1843
1843
1844 def _check_repo_requirements(self, target, source, translator):
1844 def _check_repo_requirements(self, target, source, translator):
1845 """
1845 """
1846 Check if `target` and `source` have compatible requirements.
1846 Check if `target` and `source` have compatible requirements.
1847
1847
1848 Currently this is just checking for largefiles.
1848 Currently this is just checking for largefiles.
1849 """
1849 """
1850 _ = translator
1850 _ = translator
1851 target_has_largefiles = self._has_largefiles(target)
1851 target_has_largefiles = self._has_largefiles(target)
1852 source_has_largefiles = self._has_largefiles(source)
1852 source_has_largefiles = self._has_largefiles(source)
1853 merge_possible = True
1853 merge_possible = True
1854 message = u''
1854 message = u''
1855
1855
1856 if target_has_largefiles != source_has_largefiles:
1856 if target_has_largefiles != source_has_largefiles:
1857 merge_possible = False
1857 merge_possible = False
1858 if source_has_largefiles:
1858 if source_has_largefiles:
1859 message = _(
1859 message = _(
1860 'Target repository large files support is disabled.')
1860 'Target repository large files support is disabled.')
1861 else:
1861 else:
1862 message = _(
1862 message = _(
1863 'Source repository large files support is disabled.')
1863 'Source repository large files support is disabled.')
1864
1864
1865 return merge_possible, message
1865 return merge_possible, message
1866
1866
def _has_largefiles(self, repo):
    """Return truthy when the repo has the largefiles extension active."""
    settings = VcsSettingsModel(repo=repo).get_ui_settings(
        'extensions', 'largefiles')
    return settings and settings[0].active
1871
1871
def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
    """
    Try to merge the pull request and return the merge status.
    """
    log.debug(
        "Trying out if the pull request %s can be merged. Force_refresh=%s",
        pull_request.pull_request_id, force_shadow_repo_refresh)
    target_vcs = pull_request.target_repo.scm_instance()

    # Refresh the target reference.
    try:
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)
    except CommitDoesNotExistError:
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': pull_request.target_ref_parts})

    target_locked = pull_request.target_repo.locked
    if target_locked and target_locked[0]:
        locked_by = 'user:{}'.format(target_locked[0])
        log.debug("The target repository is locked by %s.", locked_by)
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
            metadata={'locked_by': locked_by})

    if force_shadow_repo_refresh or self._needs_merge_state_refresh(
            pull_request, target_ref):
        log.debug("Refreshing the merge status of the repository.")
        return self._refresh_merge_state(
            pull_request, target_vcs, target_ref)

    # no refresh needed: reuse the persisted result of the last dry-run
    possible = pull_request.last_merge_status == MergeFailureReason.NONE
    metadata = {
        'unresolved_files': '',
        'target_ref': pull_request.target_ref_parts,
        'source_ref': pull_request.source_ref_parts,
    }
    if pull_request.last_merge_metadata:
        metadata.update(pull_request.last_merge_metadata_parsed)

    if not possible and target_ref.type == 'branch':
        # NOTE(marcink): case for mercurial multiple heads on branch
        heads = target_vcs._heads(target_ref.name)
        if len(heads) != 1:
            metadata['heads'] = '\n,'.join(target_vcs._heads(target_ref.name))

    return MergeResponse(
        possible, False, None, pull_request.last_merge_status, metadata=metadata)
1925
1925
def _refresh_reference(self, reference, vcs_repository):
    """
    Re-resolve *reference* against the vcs repository so its commit_id
    points at the current tip for updatable ref types; immutable refs
    are looked up by their stored commit id.
    """
    if reference.type in self.UPDATABLE_REF_TYPES:
        lookup = reference.name
    else:
        lookup = reference.commit_id

    fresh_commit = vcs_repository.get_commit(lookup)
    return Reference(reference.type, reference.name, fresh_commit.raw_id)
1936
1936
1937 def _needs_merge_state_refresh(self, pull_request, target_reference):
1937 def _needs_merge_state_refresh(self, pull_request, target_reference):
1938 return not(
1938 return not(
1939 pull_request.revisions and
1939 pull_request.revisions and
1940 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1940 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1941 target_reference.commit_id == pull_request._last_merge_target_rev)
1941 target_reference.commit_id == pull_request._last_merge_target_rev)
1942
1942
def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
    """
    Run a dry-run merge in the shadow workspace and persist the outcome
    on the pull request, unless the failure reason is UNKNOWN.
    """
    workspace_id = self._workspace_id(pull_request)
    source_vcs = pull_request.source_repo.scm_instance()
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)
    merge_response = target_vcs.merge(
        repo_id, workspace_id,
        target_reference, source_vcs, pull_request.source_ref_parts,
        dry_run=True, use_rebase=use_rebase,
        close_branch=close_branch)

    # Do not store the response if there was an unknown error.
    if merge_response.failure_reason != MergeFailureReason.UNKNOWN:
        pull_request._last_merge_source_rev = \
            pull_request.source_ref_parts.commit_id
        pull_request._last_merge_target_rev = target_reference.commit_id
        pull_request.last_merge_status = merge_response.failure_reason
        pull_request.last_merge_metadata = merge_response.metadata

        pull_request.shadow_merge_ref = merge_response.merge_ref
        Session().add(pull_request)
        Session().commit()

    return merge_response
1968
1968
1969 def _workspace_id(self, pull_request):
1969 def _workspace_id(self, pull_request):
1970 workspace_id = 'pr-%s' % pull_request.pull_request_id
1970 workspace_id = 'pr-%s' % pull_request.pull_request_id
1971 return workspace_id
1971 return workspace_id
1972
1972
1973 def generate_repo_data(self, repo, commit_id=None, branch=None,
1973 def generate_repo_data(self, repo, commit_id=None, branch=None,
1974 bookmark=None, translator=None):
1974 bookmark=None, translator=None):
1975 from rhodecode.model.repo import RepoModel
1975 from rhodecode.model.repo import RepoModel
1976
1976
1977 all_refs, selected_ref = \
1977 all_refs, selected_ref = \
1978 self._get_repo_pullrequest_sources(
1978 self._get_repo_pullrequest_sources(
1979 repo.scm_instance(), commit_id=commit_id,
1979 repo.scm_instance(), commit_id=commit_id,
1980 branch=branch, bookmark=bookmark, translator=translator)
1980 branch=branch, bookmark=bookmark, translator=translator)
1981
1981
1982 refs_select2 = []
1982 refs_select2 = []
1983 for element in all_refs:
1983 for element in all_refs:
1984 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1984 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1985 refs_select2.append({'text': element[1], 'children': children})
1985 refs_select2.append({'text': element[1], 'children': children})
1986
1986
1987 return {
1987 return {
1988 'user': {
1988 'user': {
1989 'user_id': repo.user.user_id,
1989 'user_id': repo.user.user_id,
1990 'username': repo.user.username,
1990 'username': repo.user.username,
1991 'firstname': repo.user.first_name,
1991 'firstname': repo.user.first_name,
1992 'lastname': repo.user.last_name,
1992 'lastname': repo.user.last_name,
1993 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1993 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1994 },
1994 },
1995 'name': repo.repo_name,
1995 'name': repo.repo_name,
1996 'link': RepoModel().get_url(repo),
1996 'link': RepoModel().get_url(repo),
1997 'description': h.chop_at_smart(repo.description_safe, '\n'),
1997 'description': h.chop_at_smart(repo.description_safe, '\n'),
1998 'refs': {
1998 'refs': {
1999 'all_refs': all_refs,
1999 'all_refs': all_refs,
2000 'selected_ref': selected_ref,
2000 'selected_ref': selected_ref,
2001 'select2_refs': refs_select2
2001 'select2_refs': refs_select2
2002 }
2002 }
2003 }
2003 }
2004
2004
2005 def generate_pullrequest_title(self, source, source_ref, target):
2005 def generate_pullrequest_title(self, source, source_ref, target):
2006 return u'{source}#{at_ref} to {target}'.format(
2006 return u'{source}#{at_ref} to {target}'.format(
2007 source=source,
2007 source=source,
2008 at_ref=source_ref,
2008 at_ref=source_ref,
2009 target=target,
2009 target=target,
2010 )
2010 )
2011
2011
2012 def _cleanup_merge_workspace(self, pull_request):
2012 def _cleanup_merge_workspace(self, pull_request):
2013 # Merging related cleanup
2013 # Merging related cleanup
2014 repo_id = pull_request.target_repo.repo_id
2014 repo_id = pull_request.target_repo.repo_id
2015 target_scm = pull_request.target_repo.scm_instance()
2015 target_scm = pull_request.target_repo.scm_instance()
2016 workspace_id = self._workspace_id(pull_request)
2016 workspace_id = self._workspace_id(pull_request)
2017
2017
2018 try:
2018 try:
2019 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2019 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2020 except NotImplementedError:
2020 except NotImplementedError:
2021 pass
2021 pass
2022
2022
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: ``(groups, selected)`` where groups is a list of
            ``([(ref_key, ref_name), ...], group_label)`` tuples and
            selected is the pre-selected ref_key (or None)
        """
        _ = translator or get_current_request().translate

        # normalize lookup values to str, keep None when not provided
        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key encodes type, name and commit id, e.g. branch:default:abcd12
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching either the requested
                    # commit_id or the requested branch/bookmark name
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but not found -> hard error
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the repo's default branch
                selected = u'branch:{}:{}'.format(
                    safe_str(repo.DEFAULT_BRANCH_NAME),
                    safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
2088
2088
2089 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2089 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2090 hide_whitespace_changes, diff_context):
2090 hide_whitespace_changes, diff_context):
2091
2091
2092 return self._get_diff_from_pr_or_version(
2092 return self._get_diff_from_pr_or_version(
2093 source_repo, source_ref_id, target_ref_id,
2093 source_repo, source_ref_id, target_ref_id,
2094 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2094 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2095
2095
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the diff between ``target_ref_id`` and ``source_ref_id``
        inside ``source_repo``.

        :param source_repo: a db ``Repository`` object or a vcs instance
        :param source_ref_id: commit id of the source side
        :param target_ref_id: commit id of the target side
        :param hide_whitespace_changes: forwarded as ``ignore_whitespace``
        :param diff_context: number of context lines for the diff
        :return: the vcs diff object produced by ``vcs_repo.get_diff``
        """

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        # maybe_unreachable: after updates the source commit may only exist
        # in the shadow repository
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        # accept both a db Repository and an already-resolved vcs instance
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_str(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
2126
2126
2127 def _is_merge_enabled(self, pull_request):
2127 def _is_merge_enabled(self, pull_request):
2128 return self._get_general_setting(
2128 return self._get_general_setting(
2129 pull_request, 'rhodecode_pr_merge_enabled')
2129 pull_request, 'rhodecode_pr_merge_enabled')
2130
2130
2131 def _use_rebase_for_merging(self, pull_request):
2131 def _use_rebase_for_merging(self, pull_request):
2132 repo_type = pull_request.target_repo.repo_type
2132 repo_type = pull_request.target_repo.repo_type
2133 if repo_type == 'hg':
2133 if repo_type == 'hg':
2134 return self._get_general_setting(
2134 return self._get_general_setting(
2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2136 elif repo_type == 'git':
2136 elif repo_type == 'git':
2137 return self._get_general_setting(
2137 return self._get_general_setting(
2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2139
2139
2140 return False
2140 return False
2141
2141
2142 def _user_name_for_merging(self, pull_request, user):
2142 def _user_name_for_merging(self, pull_request, user):
2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2145 user_name_attr = env_user_name_attr
2145 user_name_attr = env_user_name_attr
2146 else:
2146 else:
2147 user_name_attr = 'short_contact'
2147 user_name_attr = 'short_contact'
2148
2148
2149 user_name = getattr(user, user_name_attr)
2149 user_name = getattr(user, user_name_attr)
2150 return user_name
2150 return user_name
2151
2151
2152 def _close_branch_before_merging(self, pull_request):
2152 def _close_branch_before_merging(self, pull_request):
2153 repo_type = pull_request.target_repo.repo_type
2153 repo_type = pull_request.target_repo.repo_type
2154 if repo_type == 'hg':
2154 if repo_type == 'hg':
2155 return self._get_general_setting(
2155 return self._get_general_setting(
2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2157 elif repo_type == 'git':
2157 elif repo_type == 'git':
2158 return self._get_general_setting(
2158 return self._get_general_setting(
2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2160
2160
2161 return False
2161 return False
2162
2162
2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2165 settings = settings_model.get_general_settings()
2165 settings = settings_model.get_general_settings()
2166 return settings.get(settings_key, default)
2166 return settings.get(settings_key, default)
2167
2167
2168 def _log_audit_action(self, action, action_data, user, pull_request):
2168 def _log_audit_action(self, action, action_data, user, pull_request):
2169 audit_logger.store(
2169 audit_logger.store(
2170 action=action,
2170 action=action,
2171 action_data=action_data,
2171 action_data=action_data,
2172 user=user,
2172 user=user,
2173 repo=pull_request.target_repo)
2173 repo=pull_request.target_repo)
2174
2174
2175 def get_reviewer_functions(self):
2175 def get_reviewer_functions(self):
2176 """
2176 """
2177 Fetches functions for validation and fetching default reviewers.
2177 Fetches functions for validation and fetching default reviewers.
2178 If available we use the EE package, else we fallback to CE
2178 If available we use the EE package, else we fallback to CE
2179 package functions
2179 package functions
2180 """
2180 """
2181 try:
2181 try:
2182 from rc_reviewers.utils import get_default_reviewers_data
2182 from rc_reviewers.utils import get_default_reviewers_data
2183 from rc_reviewers.utils import validate_default_reviewers
2183 from rc_reviewers.utils import validate_default_reviewers
2184 from rc_reviewers.utils import validate_observers
2184 from rc_reviewers.utils import validate_observers
2185 except ImportError:
2185 except ImportError:
2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2188 from rhodecode.apps.repository.utils import validate_observers
2188 from rhodecode.apps.repository.utils import validate_observers
2189
2189
2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2191
2191
2192
2192
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used in ``error_details`` to classify which check failed
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the pull request (set by validate())
        self.review_status = None
        # outcome of the dry-run merge simulation (set by validate())
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # becomes True as soon as any check pushes an error
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # error_key -> {details, error_type, message}
        self.error_details = OrderedDict()
        # change-tracking info for both sides of the pull request
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check and mark the whole merge check as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for ``pull_request`` on behalf of ``auth_user``.

        Checks, in order: WIP marker in the title, merge permission,
        target-branch permission rules, review approval, unresolved TODOs,
        and finally the dry-run merge status itself.

        :param fail_early: return right after the first failed check
        :param force_shadow_repo_refresh: forwarded to ``merge_status``
        :return: a populated :class:`MergeCheck` instance
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        # approval is only required when there is at least one reviewer
        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether either side moved since the refs were recorded
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # missing refs: leave the commit-change info unset
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe the conditions the merge would run under (merge strategy,
        optional source-branch close/delete) as a dict of translated
        messages for display.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2378
2378
2379
2379
@dataclasses.dataclass
class ChangeTuple:
    """Buckets of elements grouped into added/common/removed plus a total list."""
    # NOTE(review): field semantics inferred from names — confirm at call sites
    added: list
    common: list
    removed: list
    total: list
2386
2386
2387
2387
@dataclasses.dataclass
class FileChangeTuple:
    """Buckets of file entries grouped by modification type."""
    # NOTE(review): field semantics inferred from names — confirm at call sites
    added: list
    modified: list
    removed: list
@@ -1,1044 +1,1044 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Scm model for RhodeCode
20 Scm model for RhodeCode
21 """
21 """
22
22
23 import os.path
23 import os.path
24 import traceback
24 import traceback
25 import logging
25 import logging
26 import io
26 import io
27
27
28 from sqlalchemy import func
28 from sqlalchemy import func
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 or_, false,
49 or_, false, null,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
51 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
class UserTemp(object):
    """Lightweight stand-in for a user, carrying only its database id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.user_id}')>"
64
64
65
65
class RepoTemp(object):
    """Lightweight stand-in for a repository, carrying only its database id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.repo_id}')>"
72
72
73
73
class SimpleCachedRepoList(object):
    """
    Permission-filtered iteration over db repositories, lighter than a full
    scm-backed list: no backend instantiation, cache-friendly, and each item
    is yielded as a plain dict.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        """
        :param db_repo_list: repository db objects to iterate
        :param repos_path: filesystem root of the repositories
        :param order_by: sort key; a leading '-' marks descending order
        :param perm_set: permissions required to yield an entry
        """
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return f'<{self.__class__.__name__} ({len(self)})>'

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission is evaluated lazily, per yielded repository
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
139 # check permission at this level
139 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
class RepoList(_PermCheckIterator):
    """Iterator over repositories the current user can at least read."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name',
            perm_set=perm_set or [
                'repository.read', 'repository.write', 'repository.admin'],
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
159
159
160
160
class RepoGroupList(_PermCheckIterator):
    """Iterator over repository groups the current user can at least read."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name',
            perm_set=perm_set or ['group.read', 'group.write', 'group.admin'],
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
172
172
173
173
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups the current user can at least read."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or [
                'usergroup.read', 'usergroup.write', 'usergroup.admin'],
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
185
185
186
186
187 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
188 """
188 """
189 Generic Scm Model
189 Generic Scm Model
190 """
190 """
191
191
192 @LazyProperty
192 @LazyProperty
193 def repos_path(self):
193 def repos_path(self):
194 """
194 """
195 Gets the repositories root path from database
195 Gets the repositories root path from database
196 """
196 """
197
197
198 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
200
200
201 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
202 """
202 """
203 Listing of repositories in given path. This path should not be a
203 Listing of repositories in given path. This path should not be a
204 repository itself. Return a dictionary of repository objects
204 repository itself. Return a dictionary of repository objects
205
205
206 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
207 """
207 """
208
208
209 if repos_path is None:
209 if repos_path is None:
210 repos_path = self.repos_path
210 repos_path = self.repos_path
211
211
212 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
213
213
214 config = make_db_config()
214 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
216 repos = {}
216 repos = {}
217
217
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # name need to be decomposed and put back together using the /
219 # name need to be decomposed and put back together using the /
220 # since this is internal storage separator for rhodecode
220 # since this is internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
222
222
223 try:
223 try:
224 if name in repos:
224 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
226 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
228 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
230 with_wire={"cache": False})
231 except OSError:
231 except OSError:
232 continue
232 continue
233 except RepositoryError:
233 except RepositoryError:
234 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
235 continue
235 continue
236
236
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from db and for each repo create it's
242 Get all repositories from db and for each repo create it's
243 backend instance and fill that backed with information from database
243 backend instance and fill that backed with information from database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
246 give specific repositories list, good for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == null())\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
259 if all_groups is None:
260 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == null()).all()
262 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
263
263
264 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
265 """
265 """
266 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
267 removes the cache entries
268
268
269 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
270 invalid, or deleted
271 :param delete: delete the entry keys instead of setting bool
271 :param delete: delete the entry keys instead of setting bool
272 flag on them, and also purge caches used by the dogpile
272 flag on them, and also purge caches used by the dogpile
273 """
273 """
274 repo = Repository.get_by_repo_name(repo_name)
274 repo = Repository.get_by_repo_name(repo_name)
275
275
276 if repo:
276 if repo:
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 repo_id=repo.repo_id)
278 repo_id=repo.repo_id)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280
280
281 repo_id = repo.repo_id
281 repo_id = repo.repo_id
282 config = repo._config
282 config = repo._config
283 config.set('extensions', 'largefiles', '')
283 config.set('extensions', 'largefiles', '')
284 repo.update_commit_cache(config=config, cs_cache=None)
284 repo.update_commit_cache(config=config, cs_cache=None)
285 if delete:
285 if delete:
286 cache_namespace_uid = f'cache_repo.{repo_id}'
286 cache_namespace_uid = f'cache_repo.{repo_id}'
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288
288
289 def toggle_following_repo(self, follow_repo_id, user_id):
289 def toggle_following_repo(self, follow_repo_id, user_id):
290
290
291 f = self.sa.query(UserFollowing)\
291 f = self.sa.query(UserFollowing)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.user_id == user_id).scalar()
293 .filter(UserFollowing.user_id == user_id).scalar()
294
294
295 if f is not None:
295 if f is not None:
296 try:
296 try:
297 self.sa.delete(f)
297 self.sa.delete(f)
298 return
298 return
299 except Exception:
299 except Exception:
300 log.error(traceback.format_exc())
300 log.error(traceback.format_exc())
301 raise
301 raise
302
302
303 try:
303 try:
304 f = UserFollowing()
304 f = UserFollowing()
305 f.user_id = user_id
305 f.user_id = user_id
306 f.follows_repo_id = follow_repo_id
306 f.follows_repo_id = follow_repo_id
307 self.sa.add(f)
307 self.sa.add(f)
308 except Exception:
308 except Exception:
309 log.error(traceback.format_exc())
309 log.error(traceback.format_exc())
310 raise
310 raise
311
311
312 def toggle_following_user(self, follow_user_id, user_id):
312 def toggle_following_user(self, follow_user_id, user_id):
313 f = self.sa.query(UserFollowing)\
313 f = self.sa.query(UserFollowing)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 .filter(UserFollowing.user_id == user_id).scalar()
315 .filter(UserFollowing.user_id == user_id).scalar()
316
316
317 if f is not None:
317 if f is not None:
318 try:
318 try:
319 self.sa.delete(f)
319 self.sa.delete(f)
320 return
320 return
321 except Exception:
321 except Exception:
322 log.error(traceback.format_exc())
322 log.error(traceback.format_exc())
323 raise
323 raise
324
324
325 try:
325 try:
326 f = UserFollowing()
326 f = UserFollowing()
327 f.user_id = user_id
327 f.user_id = user_id
328 f.follows_user_id = follow_user_id
328 f.follows_user_id = follow_user_id
329 self.sa.add(f)
329 self.sa.add(f)
330 except Exception:
330 except Exception:
331 log.error(traceback.format_exc())
331 log.error(traceback.format_exc())
332 raise
332 raise
333
333
334 def is_following_repo(self, repo_name, user_id, cache=False):
334 def is_following_repo(self, repo_name, user_id, cache=False):
335 r = self.sa.query(Repository)\
335 r = self.sa.query(Repository)\
336 .filter(Repository.repo_name == repo_name).scalar()
336 .filter(Repository.repo_name == repo_name).scalar()
337
337
338 f = self.sa.query(UserFollowing)\
338 f = self.sa.query(UserFollowing)\
339 .filter(UserFollowing.follows_repository == r)\
339 .filter(UserFollowing.follows_repository == r)\
340 .filter(UserFollowing.user_id == user_id).scalar()
340 .filter(UserFollowing.user_id == user_id).scalar()
341
341
342 return f is not None
342 return f is not None
343
343
344 def is_following_user(self, username, user_id, cache=False):
344 def is_following_user(self, username, user_id, cache=False):
345 u = User.get_by_username(username)
345 u = User.get_by_username(username)
346
346
347 f = self.sa.query(UserFollowing)\
347 f = self.sa.query(UserFollowing)\
348 .filter(UserFollowing.follows_user == u)\
348 .filter(UserFollowing.follows_user == u)\
349 .filter(UserFollowing.user_id == user_id).scalar()
349 .filter(UserFollowing.user_id == user_id).scalar()
350
350
351 return f is not None
351 return f is not None
352
352
353 def get_followers(self, repo):
353 def get_followers(self, repo):
354 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
355
355
356 return self.sa.query(UserFollowing)\
356 return self.sa.query(UserFollowing)\
357 .filter(UserFollowing.follows_repository == repo).count()
357 .filter(UserFollowing.follows_repository == repo).count()
358
358
359 def get_forks(self, repo):
359 def get_forks(self, repo):
360 repo = self._get_repo(repo)
360 repo = self._get_repo(repo)
361 return self.sa.query(Repository)\
361 return self.sa.query(Repository)\
362 .filter(Repository.fork == repo).count()
362 .filter(Repository.fork == repo).count()
363
363
364 def get_pull_requests(self, repo):
364 def get_pull_requests(self, repo):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366 return self.sa.query(PullRequest)\
366 return self.sa.query(PullRequest)\
367 .filter(PullRequest.target_repo == repo)\
367 .filter(PullRequest.target_repo == repo)\
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369
369
370 def get_artifacts(self, repo):
370 def get_artifacts(self, repo):
371 repo = self._get_repo(repo)
371 repo = self._get_repo(repo)
372 return self.sa.query(FileStore)\
372 return self.sa.query(FileStore)\
373 .filter(FileStore.repo == repo)\
373 .filter(FileStore.repo == repo)\
374 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375
375
376 def mark_as_fork(self, repo, fork, user):
376 def mark_as_fork(self, repo, fork, user):
377 repo = self._get_repo(repo)
377 repo = self._get_repo(repo)
378 fork = self._get_repo(fork)
378 fork = self._get_repo(fork)
379 if fork and repo.repo_id == fork.repo_id:
379 if fork and repo.repo_id == fork.repo_id:
380 raise Exception("Cannot set repository as fork of itself")
380 raise Exception("Cannot set repository as fork of itself")
381
381
382 if fork and repo.repo_type != fork.repo_type:
382 if fork and repo.repo_type != fork.repo_type:
383 raise RepositoryError(
383 raise RepositoryError(
384 "Cannot set repository as fork of repository with other type")
384 "Cannot set repository as fork of repository with other type")
385
385
386 repo.fork = fork
386 repo.fork = fork
387 self.sa.add(repo)
387 self.sa.add(repo)
388 return repo
388 return repo
389
389
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
391 dbrepo = self._get_repo(repo)
391 dbrepo = self._get_repo(repo)
392 remote_uri = remote_uri or dbrepo.clone_uri
392 remote_uri = remote_uri or dbrepo.clone_uri
393 if not remote_uri:
393 if not remote_uri:
394 raise Exception("This repository doesn't have a clone uri")
394 raise Exception("This repository doesn't have a clone uri")
395
395
396 repo = dbrepo.scm_instance(cache=False)
396 repo = dbrepo.scm_instance(cache=False)
397 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
398
398
399 try:
399 try:
400 # NOTE(marcink): add extra validation so we skip invalid urls
400 # NOTE(marcink): add extra validation so we skip invalid urls
401 # this is due this tasks can be executed via scheduler without
401 # this is due this tasks can be executed via scheduler without
402 # proper validation of remote_uri
402 # proper validation of remote_uri
403 if validate_uri:
403 if validate_uri:
404 config = make_db_config(clear_session=False)
404 config = make_db_config(clear_session=False)
405 url_validator(remote_uri, dbrepo.repo_type, config)
405 url_validator(remote_uri, dbrepo.repo_type, config)
406 except InvalidCloneUrl:
406 except InvalidCloneUrl:
407 raise
407 raise
408
408
409 repo_name = dbrepo.repo_name
409 repo_name = dbrepo.repo_name
410 try:
410 try:
411 # TODO: we need to make sure those operations call proper hooks !
411 # TODO: we need to make sure those operations call proper hooks !
412 repo.fetch(remote_uri)
412 repo.fetch(remote_uri)
413
413
414 self.mark_for_invalidation(repo_name)
414 self.mark_for_invalidation(repo_name)
415 except Exception:
415 except Exception:
416 log.error(traceback.format_exc())
416 log.error(traceback.format_exc())
417 raise
417 raise
418
418
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 dbrepo = self._get_repo(repo)
420 dbrepo = self._get_repo(repo)
421 remote_uri = remote_uri or dbrepo.push_uri
421 remote_uri = remote_uri or dbrepo.push_uri
422 if not remote_uri:
422 if not remote_uri:
423 raise Exception("This repository doesn't have a clone uri")
423 raise Exception("This repository doesn't have a clone uri")
424
424
425 repo = dbrepo.scm_instance(cache=False)
425 repo = dbrepo.scm_instance(cache=False)
426 repo.config.clear_section('hooks')
426 repo.config.clear_section('hooks')
427
427
428 try:
428 try:
429 # NOTE(marcink): add extra validation so we skip invalid urls
429 # NOTE(marcink): add extra validation so we skip invalid urls
430 # this is due this tasks can be executed via scheduler without
430 # this is due this tasks can be executed via scheduler without
431 # proper validation of remote_uri
431 # proper validation of remote_uri
432 if validate_uri:
432 if validate_uri:
433 config = make_db_config(clear_session=False)
433 config = make_db_config(clear_session=False)
434 url_validator(remote_uri, dbrepo.repo_type, config)
434 url_validator(remote_uri, dbrepo.repo_type, config)
435 except InvalidCloneUrl:
435 except InvalidCloneUrl:
436 raise
436 raise
437
437
438 try:
438 try:
439 repo.push(remote_uri)
439 repo.push(remote_uri)
440 except Exception:
440 except Exception:
441 log.error(traceback.format_exc())
441 log.error(traceback.format_exc())
442 raise
442 raise
443
443
444 def commit_change(self, repo, repo_name, commit, user, author, message,
444 def commit_change(self, repo, repo_name, commit, user, author, message,
445 content: bytes, f_path: bytes):
445 content: bytes, f_path: bytes):
446 """
446 """
447 Commits changes
447 Commits changes
448 """
448 """
449 user = self._get_user(user)
449 user = self._get_user(user)
450
450
451 # message and author needs to be unicode
451 # message and author needs to be unicode
452 # proper backend should then translate that into required type
452 # proper backend should then translate that into required type
453 message = safe_str(message)
453 message = safe_str(message)
454 author = safe_str(author)
454 author = safe_str(author)
455 imc = repo.in_memory_commit
455 imc = repo.in_memory_commit
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
457 try:
457 try:
458 # TODO: handle pre-push action !
458 # TODO: handle pre-push action !
459 tip = imc.commit(
459 tip = imc.commit(
460 message=message, author=author, parents=[commit],
460 message=message, author=author, parents=[commit],
461 branch=commit.branch)
461 branch=commit.branch)
462 except Exception as e:
462 except Exception as e:
463 log.error(traceback.format_exc())
463 log.error(traceback.format_exc())
464 raise IMCCommitError(str(e))
464 raise IMCCommitError(str(e))
465 finally:
465 finally:
466 # always clear caches, if commit fails we want fresh object also
466 # always clear caches, if commit fails we want fresh object also
467 self.mark_for_invalidation(repo_name)
467 self.mark_for_invalidation(repo_name)
468
468
469 # We trigger the post-push action
469 # We trigger the post-push action
470 hooks_utils.trigger_post_push_hook(
470 hooks_utils.trigger_post_push_hook(
471 username=user.username, action='push_local', hook_type='post_push',
471 username=user.username, action='push_local', hook_type='post_push',
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
473 return tip
473 return tip
474
474
475 def _sanitize_path(self, f_path: bytes):
475 def _sanitize_path(self, f_path: bytes):
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 raise NonRelativePathError(b'%b is not an relative path' % f_path)
477 raise NonRelativePathError(b'%b is not an relative path' % f_path)
478 if f_path:
478 if f_path:
479 f_path = os.path.normpath(f_path)
479 f_path = os.path.normpath(f_path)
480 return f_path
480 return f_path
481
481
482 def get_dirnode_metadata(self, request, commit, dir_node):
482 def get_dirnode_metadata(self, request, commit, dir_node):
483 if not dir_node.is_dir():
483 if not dir_node.is_dir():
484 return []
484 return []
485
485
486 data = []
486 data = []
487 for node in dir_node:
487 for node in dir_node:
488 if not node.is_file():
488 if not node.is_file():
489 # we skip file-nodes
489 # we skip file-nodes
490 continue
490 continue
491
491
492 last_commit = node.last_commit
492 last_commit = node.last_commit
493 last_commit_date = last_commit.date
493 last_commit_date = last_commit.date
494 data.append({
494 data.append({
495 'name': node.name,
495 'name': node.name,
496 'size': h.format_byte_size_binary(node.size),
496 'size': h.format_byte_size_binary(node.size),
497 'modified_at': h.format_date(last_commit_date),
497 'modified_at': h.format_date(last_commit_date),
498 'modified_ts': last_commit_date.isoformat(),
498 'modified_ts': last_commit_date.isoformat(),
499 'revision': last_commit.revision,
499 'revision': last_commit.revision,
500 'short_id': last_commit.short_id,
500 'short_id': last_commit.short_id,
501 'message': h.escape(last_commit.message),
501 'message': h.escape(last_commit.message),
502 'author': h.escape(last_commit.author),
502 'author': h.escape(last_commit.author),
503 'user_profile': h.gravatar_with_user(
503 'user_profile': h.gravatar_with_user(
504 request, last_commit.author),
504 request, last_commit.author),
505 })
505 })
506
506
507 return data
507 return data
508
508
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 extended_info=False, content=False, max_file_bytes=None):
510 extended_info=False, content=False, max_file_bytes=None):
511 """
511 """
512 recursive walk in root dir and return a set of all path in that dir
512 recursive walk in root dir and return a set of all path in that dir
513 based on repository walk function
513 based on repository walk function
514
514
515 :param repo_name: name of repository
515 :param repo_name: name of repository
516 :param commit_id: commit id for which to list nodes
516 :param commit_id: commit id for which to list nodes
517 :param root_path: root path to list
517 :param root_path: root path to list
518 :param flat: return as a list, if False returns a dict with description
518 :param flat: return as a list, if False returns a dict with description
519 :param extended_info: show additional info such as md5, binary, size etc
519 :param extended_info: show additional info such as md5, binary, size etc
520 :param content: add nodes content to the return data
520 :param content: add nodes content to the return data
521 :param max_file_bytes: will not return file contents over this limit
521 :param max_file_bytes: will not return file contents over this limit
522
522
523 """
523 """
524 _files = list()
524 _files = list()
525 _dirs = list()
525 _dirs = list()
526
526
527 try:
527 try:
528 _repo = self._get_repo(repo_name)
528 _repo = self._get_repo(repo_name)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 root_path = root_path.lstrip('/')
530 root_path = root_path.lstrip('/')
531
531
532 # get RootNode, inject pre-load options before walking
532 # get RootNode, inject pre-load options before walking
533 top_node = commit.get_node(root_path)
533 top_node = commit.get_node(root_path)
534 extended_info_pre_load = []
534 extended_info_pre_load = []
535 if extended_info:
535 if extended_info:
536 extended_info_pre_load += ['md5']
536 extended_info_pre_load += ['md5']
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
538
538
539 for __, dirs, files in commit.walk(top_node):
539 for __, dirs, files in commit.walk(top_node):
540
540
541 for f in files:
541 for f in files:
542 _content = None
542 _content = None
543 _data = f_name = f.str_path
543 _data = f_name = f.str_path
544
544
545 if not flat:
545 if not flat:
546 _data = {
546 _data = {
547 "name": h.escape(f_name),
547 "name": h.escape(f_name),
548 "type": "file",
548 "type": "file",
549 }
549 }
550 if extended_info:
550 if extended_info:
551 _data.update({
551 _data.update({
552 "md5": f.md5,
552 "md5": f.md5,
553 "binary": f.is_binary,
553 "binary": f.is_binary,
554 "size": f.size,
554 "size": f.size,
555 "extension": f.extension,
555 "extension": f.extension,
556 "mimetype": f.mimetype,
556 "mimetype": f.mimetype,
557 "lines": f.lines()[0]
557 "lines": f.lines()[0]
558 })
558 })
559
559
560 if content:
560 if content:
561 over_size_limit = (max_file_bytes is not None
561 over_size_limit = (max_file_bytes is not None
562 and f.size > max_file_bytes)
562 and f.size > max_file_bytes)
563 full_content = None
563 full_content = None
564 if not f.is_binary and not over_size_limit:
564 if not f.is_binary and not over_size_limit:
565 full_content = f.str_content
565 full_content = f.str_content
566
566
567 _data.update({
567 _data.update({
568 "content": full_content,
568 "content": full_content,
569 })
569 })
570 _files.append(_data)
570 _files.append(_data)
571
571
572 for d in dirs:
572 for d in dirs:
573 _data = d_name = d.str_path
573 _data = d_name = d.str_path
574 if not flat:
574 if not flat:
575 _data = {
575 _data = {
576 "name": h.escape(d_name),
576 "name": h.escape(d_name),
577 "type": "dir",
577 "type": "dir",
578 }
578 }
579 if extended_info:
579 if extended_info:
580 _data.update({
580 _data.update({
581 "md5": "",
581 "md5": "",
582 "binary": False,
582 "binary": False,
583 "size": 0,
583 "size": 0,
584 "extension": "",
584 "extension": "",
585 })
585 })
586 if content:
586 if content:
587 _data.update({
587 _data.update({
588 "content": None
588 "content": None
589 })
589 })
590 _dirs.append(_data)
590 _dirs.append(_data)
591 except RepositoryError:
591 except RepositoryError:
592 log.exception("Exception in get_nodes")
592 log.exception("Exception in get_nodes")
593 raise
593 raise
594
594
595 return _dirs, _files
595 return _dirs, _files
596
596
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 """
598 """
599 Generate files for quick filter in files view
599 Generate files for quick filter in files view
600 """
600 """
601
601
602 _files = list()
602 _files = list()
603 _dirs = list()
603 _dirs = list()
604 try:
604 try:
605 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
608
608
609 top_node = commit.get_node(root_path)
609 top_node = commit.get_node(root_path)
610 top_node.default_pre_load = []
610 top_node.default_pre_load = []
611
611
612 for __, dirs, files in commit.walk(top_node):
612 for __, dirs, files in commit.walk(top_node):
613 for f in files:
613 for f in files:
614
614
615 _data = {
615 _data = {
616 "name": h.escape(f.str_path),
616 "name": h.escape(f.str_path),
617 "type": "file",
617 "type": "file",
618 }
618 }
619
619
620 _files.append(_data)
620 _files.append(_data)
621
621
622 for d in dirs:
622 for d in dirs:
623
623
624 _data = {
624 _data = {
625 "name": h.escape(d.str_path),
625 "name": h.escape(d.str_path),
626 "type": "dir",
626 "type": "dir",
627 }
627 }
628
628
629 _dirs.append(_data)
629 _dirs.append(_data)
630 except RepositoryError:
630 except RepositoryError:
631 log.exception("Exception in get_quick_filter_nodes")
631 log.exception("Exception in get_quick_filter_nodes")
632 raise
632 raise
633
633
634 return _dirs, _files
634 return _dirs, _files
635
635
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        Retrieve a single file node from a commit.

        :param repo_name: name of repository
        :param commit_id: commit to read the node from
        :param file_path: path of the file inside the commit
        :param extended_info: include extension/mimetype in the result
        :param content: include file content (and line count) in the result
        :param max_file_bytes: skip content for files larger than this
        :param cache: read node metadata/content via the cached accessors;
            when False, everything is fetched via ``metadata_uncached()``
        :returns: dict describing the file node
        :raises RepositoryError: if the path is a directory, or on VCS errors
        """

        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            # _content doubles as a marker: it is populated by the uncached
            # metadata fetch below, and reused by the `elif content:` branch
            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # single uncached read also yields the raw content for later use
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
714
714
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 """
716 """
717 Fetch node tree for usage in full text search
717 Fetch node tree for usage in full text search
718 """
718 """
719
719
720 tree_info = list()
720 tree_info = list()
721
721
722 try:
722 try:
723 _repo = self._get_repo(repo_name)
723 _repo = self._get_repo(repo_name)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 root_path = root_path.lstrip('/')
725 root_path = root_path.lstrip('/')
726 top_node = commit.get_node(root_path)
726 top_node = commit.get_node(root_path)
727 top_node.default_pre_load = []
727 top_node.default_pre_load = []
728
728
729 for __, dirs, files in commit.walk(top_node):
729 for __, dirs, files in commit.walk(top_node):
730
730
731 for f in files:
731 for f in files:
732 is_binary, md5, size, _content = f.metadata_uncached()
732 is_binary, md5, size, _content = f.metadata_uncached()
733 _data = {
733 _data = {
734 "name": f.str_path,
734 "name": f.str_path,
735 "md5": md5,
735 "md5": md5,
736 "extension": f.extension,
736 "extension": f.extension,
737 "binary": is_binary,
737 "binary": is_binary,
738 "size": size
738 "size": size
739 }
739 }
740
740
741 tree_info.append(_data)
741 tree_info.append(_data)
742
742
743 except RepositoryError:
743 except RepositoryError:
744 log.exception("Exception in get_nodes")
744 log.exception("Exception in get_nodes")
745 raise
745 raise
746
746
747 return tree_info
747 return tree_info
748
748
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty — then it's
            the initial commit
        :param author: author of commit, can be different than committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        :raises ValueError: if a filename key or content value has a wrong type
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        # first pass: validate all entries before touching the in-memory commit
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError('content key value in nodes needs to be bytes')

        # second pass: sanitize paths and normalize content to bytes
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # NOTE(review): lookup uses the *sanitized* path as the dict key —
            # assumes sanitizing never changes a valid key; verify with callers
            content = nodes[f_path]['content']

            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit

            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                # file-like upload objects are drained into bytes here
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # caches must be refreshed so the new commit is visible
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
827
827
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Apply multiple node operations (add/del/mod, including renames) as a
        single commit into `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod', 'mode': optional}}
        :param parent_commit: parent commit; empty means initial commit
        :param author: author of commit, can differ from committer (git only)
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        :raises IMCCommitError: wraps unexpected commit failures
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        commiter = user.full_contact
        author = safe_str(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    # rename is modelled as remove-old + add-new
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # no-op edits are a caller-visible condition, propagate as-is
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
892
892
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
894 author=None, trigger_push_hook=True):
894 author=None, trigger_push_hook=True):
895 """
895 """
896 Deletes given multiple nodes into `repo`
896 Deletes given multiple nodes into `repo`
897
897
898 :param user: RhodeCode User object or user_id, the committer
898 :param user: RhodeCode User object or user_id, the committer
899 :param repo: RhodeCode Repository object
899 :param repo: RhodeCode Repository object
900 :param message: commit message
900 :param message: commit message
901 :param nodes: mapping {filename:{'content':content},...}
901 :param nodes: mapping {filename:{'content':content},...}
902 :param parent_commit: parent commit, can be empty than it's initial
902 :param parent_commit: parent commit, can be empty than it's initial
903 commit
903 commit
904 :param author: author of commit, cna be different that commiter only
904 :param author: author of commit, cna be different that commiter only
905 for git
905 for git
906 :param trigger_push_hook: trigger push hooks
906 :param trigger_push_hook: trigger push hooks
907
907
908 :returns: new commit after deletion
908 :returns: new commit after deletion
909 """
909 """
910
910
911 user = self._get_user(user)
911 user = self._get_user(user)
912 scm_instance = repo.scm_instance(cache=False)
912 scm_instance = repo.scm_instance(cache=False)
913
913
914 processed_nodes = []
914 processed_nodes = []
915 for f_path in nodes:
915 for f_path in nodes:
916 f_path = self._sanitize_path(f_path)
916 f_path = self._sanitize_path(f_path)
917 # content can be empty but for compatibility it allows same dicts
917 # content can be empty but for compatibility it allows same dicts
918 # structure as add_nodes
918 # structure as add_nodes
919 content = nodes[f_path].get('content')
919 content = nodes[f_path].get('content')
920 processed_nodes.append((safe_bytes(f_path), content))
920 processed_nodes.append((safe_bytes(f_path), content))
921
921
922 message = safe_str(message)
922 message = safe_str(message)
923 commiter = user.full_contact
923 commiter = user.full_contact
924 author = safe_str(author) if author else commiter
924 author = safe_str(author) if author else commiter
925
925
926 imc = scm_instance.in_memory_commit
926 imc = scm_instance.in_memory_commit
927
927
928 if not parent_commit:
928 if not parent_commit:
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
930
930
931 if isinstance(parent_commit, EmptyCommit):
931 if isinstance(parent_commit, EmptyCommit):
932 # EmptyCommit means we we're editing empty repository
932 # EmptyCommit means we we're editing empty repository
933 parents = None
933 parents = None
934 else:
934 else:
935 parents = [parent_commit]
935 parents = [parent_commit]
936 # add multiple nodes
936 # add multiple nodes
937 for path, content in processed_nodes:
937 for path, content in processed_nodes:
938 imc.remove(FileNode(path, content=content))
938 imc.remove(FileNode(path, content=content))
939
939
940 # TODO: handle pre push scenario
940 # TODO: handle pre push scenario
941 tip = imc.commit(message=message,
941 tip = imc.commit(message=message,
942 author=author,
942 author=author,
943 parents=parents,
943 parents=parents,
944 branch=parent_commit.branch)
944 branch=parent_commit.branch)
945
945
946 self.mark_for_invalidation(repo.repo_name)
946 self.mark_for_invalidation(repo.repo_name)
947 if trigger_push_hook:
947 if trigger_push_hook:
948 hooks_utils.trigger_post_push_hook(
948 hooks_utils.trigger_post_push_hook(
949 username=user.username, action='push_local', hook_type='post_push',
949 username=user.username, action='push_local', hook_type='post_push',
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
951 commit_ids=[tip.raw_id])
951 commit_ids=[tip.raw_id])
952 return tip
952 return tip
953
953
954 def strip(self, repo, commit_id, branch):
954 def strip(self, repo, commit_id, branch):
955 scm_instance = repo.scm_instance(cache=False)
955 scm_instance = repo.scm_instance(cache=False)
956 scm_instance.config.clear_section('hooks')
956 scm_instance.config.clear_section('hooks')
957 scm_instance.strip(commit_id, branch)
957 scm_instance.strip(commit_id, branch)
958 self.mark_for_invalidation(repo.repo_name)
958 self.mark_for_invalidation(repo.repo_name)
959
959
960 def get_unread_journal(self):
960 def get_unread_journal(self):
961 return self.sa.query(UserLog).count()
961 return self.sa.query(UserLog).count()
962
962
963 @classmethod
963 @classmethod
964 def backend_landing_ref(cls, repo_type):
964 def backend_landing_ref(cls, repo_type):
965 """
965 """
966 Return a default landing ref based on a repository type.
966 Return a default landing ref based on a repository type.
967 """
967 """
968
968
969 landing_ref = {
969 landing_ref = {
970 'hg': ('branch:default', 'default'),
970 'hg': ('branch:default', 'default'),
971 'git': ('branch:master', 'master'),
971 'git': ('branch:master', 'master'),
972 'svn': ('rev:tip', 'latest tip'),
972 'svn': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
974 }
974 }
975
975
976 return landing_ref.get(repo_type) or landing_ref['default']
976 return landing_ref.get(repo_type) or landing_ref['default']
977
977
978 def get_repo_landing_revs(self, translator, repo=None):
978 def get_repo_landing_revs(self, translator, repo=None):
979 """
979 """
980 Generates select option with tags branches and bookmarks (for hg only)
980 Generates select option with tags branches and bookmarks (for hg only)
981 grouped by type
981 grouped by type
982
982
983 :param repo:
983 :param repo:
984 """
984 """
985 from rhodecode.lib.vcs.backends.git import GitRepository
985 from rhodecode.lib.vcs.backends.git import GitRepository
986
986
987 _ = translator
987 _ = translator
988 repo = self._get_repo(repo)
988 repo = self._get_repo(repo)
989
989
990 if repo:
990 if repo:
991 repo_type = repo.repo_type
991 repo_type = repo.repo_type
992 else:
992 else:
993 repo_type = 'default'
993 repo_type = 'default'
994
994
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
996
996
997 default_ref_options = [
997 default_ref_options = [
998 [default_landing_ref, landing_ref_lbl]
998 [default_landing_ref, landing_ref_lbl]
999 ]
999 ]
1000 default_choices = [
1000 default_choices = [
1001 default_landing_ref
1001 default_landing_ref
1002 ]
1002 ]
1003
1003
1004 if not repo:
1004 if not repo:
1005 # presented at NEW repo creation
1005 # presented at NEW repo creation
1006 return default_choices, default_ref_options
1006 return default_choices, default_ref_options
1007
1007
1008 repo = repo.scm_instance()
1008 repo = repo.scm_instance()
1009
1009
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1011 choices = [default_landing_ref]
1011 choices = [default_landing_ref]
1012
1012
1013 # branches
1013 # branches
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1015 if not branch_group:
1015 if not branch_group:
1016 # new repo, or without maybe a branch?
1016 # new repo, or without maybe a branch?
1017 branch_group = default_ref_options
1017 branch_group = default_ref_options
1018
1018
1019 branches_group = (branch_group, _("Branches"))
1019 branches_group = (branch_group, _("Branches"))
1020 ref_options.append(branches_group)
1020 ref_options.append(branches_group)
1021 choices.extend([x[0] for x in branches_group[0]])
1021 choices.extend([x[0] for x in branches_group[0]])
1022
1022
1023 # bookmarks for HG
1023 # bookmarks for HG
1024 if repo.alias == 'hg':
1024 if repo.alias == 'hg':
1025 bookmarks_group = (
1025 bookmarks_group = (
1026 [(f'book:{safe_str(b)}', safe_str(b))
1026 [(f'book:{safe_str(b)}', safe_str(b))
1027 for b in repo.bookmarks],
1027 for b in repo.bookmarks],
1028 _("Bookmarks"))
1028 _("Bookmarks"))
1029 ref_options.append(bookmarks_group)
1029 ref_options.append(bookmarks_group)
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1031
1031
1032 # tags
1032 # tags
1033 tags_group = (
1033 tags_group = (
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1035 for t in repo.tags],
1035 for t in repo.tags],
1036 _("Tags"))
1036 _("Tags"))
1037 ref_options.append(tags_group)
1037 ref_options.append(tags_group)
1038 choices.extend([x[0] for x in tags_group[0]])
1038 choices.extend([x[0] for x in tags_group[0]])
1039
1039
1040 return choices, ref_options
1040 return choices, ref_options
1041
1041
1042 def get_server_info(self, environ=None):
1042 def get_server_info(self, environ=None):
1043 server_info = get_system_info(environ)
1043 server_info = get_system_info(environ)
1044 return server_info
1044 return server_info
General Comments 0
You need to be logged in to leave comments. Login now