##// END OF EJS Templates
vcs: optimized pre-load attributes for better caching.
marcink -
r3850:0415fef3 default
parent child Browse files
Show More
@@ -1,728 +1,728 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import logging
22 import logging
23 import operator
23 import operator
24
24
25 from pyramid import compat
25 from pyramid import compat
26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
27
27
28 from rhodecode.lib import helpers as h, diffs
28 from rhodecode.lib import helpers as h, diffs
29 from rhodecode.lib.utils2 import (
29 from rhodecode.lib.utils2 import (
30 StrictAttributeDict, str2bool, safe_int, datetime_to_time, safe_unicode)
30 StrictAttributeDict, str2bool, safe_int, datetime_to_time, safe_unicode)
31 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
32 from rhodecode.model import repo
32 from rhodecode.model import repo
33 from rhodecode.model import repo_group
33 from rhodecode.model import repo_group
34 from rhodecode.model import user_group
34 from rhodecode.model import user_group
35 from rhodecode.model import user
35 from rhodecode.model import user
36 from rhodecode.model.db import User
36 from rhodecode.model.db import User
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.settings import VcsSettingsModel
38 from rhodecode.model.settings import VcsSettingsModel
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 ADMIN_PREFIX = '/_admin'
43 ADMIN_PREFIX = '/_admin'
44 STATIC_FILE_PREFIX = '/_static'
44 STATIC_FILE_PREFIX = '/_static'
45
45
46 URL_NAME_REQUIREMENTS = {
46 URL_NAME_REQUIREMENTS = {
47 # group name can have a slash in them, but they must not end with a slash
47 # group name can have a slash in them, but they must not end with a slash
48 'group_name': r'.*?[^/]',
48 'group_name': r'.*?[^/]',
49 'repo_group_name': r'.*?[^/]',
49 'repo_group_name': r'.*?[^/]',
50 # repo names can have a slash in them, but they must not end with a slash
50 # repo names can have a slash in them, but they must not end with a slash
51 'repo_name': r'.*?[^/]',
51 'repo_name': r'.*?[^/]',
52 # file path eats up everything at the end
52 # file path eats up everything at the end
53 'f_path': r'.*',
53 'f_path': r'.*',
54 # reference types
54 # reference types
55 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
55 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
56 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
56 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
57 }
57 }
58
58
59
59
60 def add_route_with_slash(config,name, pattern, **kw):
60 def add_route_with_slash(config,name, pattern, **kw):
61 config.add_route(name, pattern, **kw)
61 config.add_route(name, pattern, **kw)
62 if not pattern.endswith('/'):
62 if not pattern.endswith('/'):
63 config.add_route(name + '_slash', pattern + '/', **kw)
63 config.add_route(name + '_slash', pattern + '/', **kw)
64
64
65
65
66 def add_route_requirements(route_path, requirements=None):
66 def add_route_requirements(route_path, requirements=None):
67 """
67 """
68 Adds regex requirements to pyramid routes using a mapping dict
68 Adds regex requirements to pyramid routes using a mapping dict
69 e.g::
69 e.g::
70 add_route_requirements('{repo_name}/settings')
70 add_route_requirements('{repo_name}/settings')
71 """
71 """
72 requirements = requirements or URL_NAME_REQUIREMENTS
72 requirements = requirements or URL_NAME_REQUIREMENTS
73 for key, regex in requirements.items():
73 for key, regex in requirements.items():
74 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
74 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
75 return route_path
75 return route_path
76
76
77
77
78 def get_format_ref_id(repo):
78 def get_format_ref_id(repo):
79 """Returns a `repo` specific reference formatter function"""
79 """Returns a `repo` specific reference formatter function"""
80 if h.is_svn(repo):
80 if h.is_svn(repo):
81 return _format_ref_id_svn
81 return _format_ref_id_svn
82 else:
82 else:
83 return _format_ref_id
83 return _format_ref_id
84
84
85
85
86 def _format_ref_id(name, raw_id):
86 def _format_ref_id(name, raw_id):
87 """Default formatting of a given reference `name`"""
87 """Default formatting of a given reference `name`"""
88 return name
88 return name
89
89
90
90
91 def _format_ref_id_svn(name, raw_id):
91 def _format_ref_id_svn(name, raw_id):
92 """Special way of formatting a reference for Subversion including path"""
92 """Special way of formatting a reference for Subversion including path"""
93 return '%s@%s' % (name, raw_id)
93 return '%s@%s' % (name, raw_id)
94
94
95
95
96 class TemplateArgs(StrictAttributeDict):
96 class TemplateArgs(StrictAttributeDict):
97 pass
97 pass
98
98
99
99
100 class BaseAppView(object):
100 class BaseAppView(object):
101
101
102 def __init__(self, context, request):
102 def __init__(self, context, request):
103 self.request = request
103 self.request = request
104 self.context = context
104 self.context = context
105 self.session = request.session
105 self.session = request.session
106 if not hasattr(request, 'user'):
106 if not hasattr(request, 'user'):
107 # NOTE(marcink): edge case, we ended up in matched route
107 # NOTE(marcink): edge case, we ended up in matched route
108 # but probably of web-app context, e.g API CALL/VCS CALL
108 # but probably of web-app context, e.g API CALL/VCS CALL
109 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
109 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
110 log.warning('Unable to process request `%s` in this scope', request)
110 log.warning('Unable to process request `%s` in this scope', request)
111 raise HTTPBadRequest()
111 raise HTTPBadRequest()
112
112
113 self._rhodecode_user = request.user # auth user
113 self._rhodecode_user = request.user # auth user
114 self._rhodecode_db_user = self._rhodecode_user.get_instance()
114 self._rhodecode_db_user = self._rhodecode_user.get_instance()
115 self._maybe_needs_password_change(
115 self._maybe_needs_password_change(
116 request.matched_route.name, self._rhodecode_db_user)
116 request.matched_route.name, self._rhodecode_db_user)
117
117
118 def _maybe_needs_password_change(self, view_name, user_obj):
118 def _maybe_needs_password_change(self, view_name, user_obj):
119 log.debug('Checking if user %s needs password change on view %s',
119 log.debug('Checking if user %s needs password change on view %s',
120 user_obj, view_name)
120 user_obj, view_name)
121 skip_user_views = [
121 skip_user_views = [
122 'logout', 'login',
122 'logout', 'login',
123 'my_account_password', 'my_account_password_update'
123 'my_account_password', 'my_account_password_update'
124 ]
124 ]
125
125
126 if not user_obj:
126 if not user_obj:
127 return
127 return
128
128
129 if user_obj.username == User.DEFAULT_USER:
129 if user_obj.username == User.DEFAULT_USER:
130 return
130 return
131
131
132 now = time.time()
132 now = time.time()
133 should_change = user_obj.user_data.get('force_password_change')
133 should_change = user_obj.user_data.get('force_password_change')
134 change_after = safe_int(should_change) or 0
134 change_after = safe_int(should_change) or 0
135 if should_change and now > change_after:
135 if should_change and now > change_after:
136 log.debug('User %s requires password change', user_obj)
136 log.debug('User %s requires password change', user_obj)
137 h.flash('You are required to change your password', 'warning',
137 h.flash('You are required to change your password', 'warning',
138 ignore_duplicate=True)
138 ignore_duplicate=True)
139
139
140 if view_name not in skip_user_views:
140 if view_name not in skip_user_views:
141 raise HTTPFound(
141 raise HTTPFound(
142 self.request.route_path('my_account_password'))
142 self.request.route_path('my_account_password'))
143
143
144 def _log_creation_exception(self, e, repo_name):
144 def _log_creation_exception(self, e, repo_name):
145 _ = self.request.translate
145 _ = self.request.translate
146 reason = None
146 reason = None
147 if len(e.args) == 2:
147 if len(e.args) == 2:
148 reason = e.args[1]
148 reason = e.args[1]
149
149
150 if reason == 'INVALID_CERTIFICATE':
150 if reason == 'INVALID_CERTIFICATE':
151 log.exception(
151 log.exception(
152 'Exception creating a repository: invalid certificate')
152 'Exception creating a repository: invalid certificate')
153 msg = (_('Error creating repository %s: invalid certificate')
153 msg = (_('Error creating repository %s: invalid certificate')
154 % repo_name)
154 % repo_name)
155 else:
155 else:
156 log.exception("Exception creating a repository")
156 log.exception("Exception creating a repository")
157 msg = (_('Error creating repository %s')
157 msg = (_('Error creating repository %s')
158 % repo_name)
158 % repo_name)
159 return msg
159 return msg
160
160
161 def _get_local_tmpl_context(self, include_app_defaults=True):
161 def _get_local_tmpl_context(self, include_app_defaults=True):
162 c = TemplateArgs()
162 c = TemplateArgs()
163 c.auth_user = self.request.user
163 c.auth_user = self.request.user
164 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
164 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
165 c.rhodecode_user = self.request.user
165 c.rhodecode_user = self.request.user
166
166
167 if include_app_defaults:
167 if include_app_defaults:
168 from rhodecode.lib.base import attach_context_attributes
168 from rhodecode.lib.base import attach_context_attributes
169 attach_context_attributes(c, self.request, self.request.user.user_id)
169 attach_context_attributes(c, self.request, self.request.user.user_id)
170
170
171 c.is_super_admin = c.auth_user.is_admin
171 c.is_super_admin = c.auth_user.is_admin
172
172
173 c.can_create_repo = c.is_super_admin
173 c.can_create_repo = c.is_super_admin
174 c.can_create_repo_group = c.is_super_admin
174 c.can_create_repo_group = c.is_super_admin
175 c.can_create_user_group = c.is_super_admin
175 c.can_create_user_group = c.is_super_admin
176
176
177 c.is_delegated_admin = False
177 c.is_delegated_admin = False
178
178
179 if not c.auth_user.is_default and not c.is_super_admin:
179 if not c.auth_user.is_default and not c.is_super_admin:
180 c.can_create_repo = h.HasPermissionAny('hg.create.repository')(
180 c.can_create_repo = h.HasPermissionAny('hg.create.repository')(
181 user=self.request.user)
181 user=self.request.user)
182 repositories = c.auth_user.repositories_admin or c.can_create_repo
182 repositories = c.auth_user.repositories_admin or c.can_create_repo
183
183
184 c.can_create_repo_group = h.HasPermissionAny('hg.repogroup.create.true')(
184 c.can_create_repo_group = h.HasPermissionAny('hg.repogroup.create.true')(
185 user=self.request.user)
185 user=self.request.user)
186 repository_groups = c.auth_user.repository_groups_admin or c.can_create_repo_group
186 repository_groups = c.auth_user.repository_groups_admin or c.can_create_repo_group
187
187
188 c.can_create_user_group = h.HasPermissionAny('hg.usergroup.create.true')(
188 c.can_create_user_group = h.HasPermissionAny('hg.usergroup.create.true')(
189 user=self.request.user)
189 user=self.request.user)
190 user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
190 user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
191 # delegated admin can create, or manage some objects
191 # delegated admin can create, or manage some objects
192 c.is_delegated_admin = repositories or repository_groups or user_groups
192 c.is_delegated_admin = repositories or repository_groups or user_groups
193 return c
193 return c
194
194
195 def _get_template_context(self, tmpl_args, **kwargs):
195 def _get_template_context(self, tmpl_args, **kwargs):
196
196
197 local_tmpl_args = {
197 local_tmpl_args = {
198 'defaults': {},
198 'defaults': {},
199 'errors': {},
199 'errors': {},
200 'c': tmpl_args
200 'c': tmpl_args
201 }
201 }
202 local_tmpl_args.update(kwargs)
202 local_tmpl_args.update(kwargs)
203 return local_tmpl_args
203 return local_tmpl_args
204
204
205 def load_default_context(self):
205 def load_default_context(self):
206 """
206 """
207 example:
207 example:
208
208
209 def load_default_context(self):
209 def load_default_context(self):
210 c = self._get_local_tmpl_context()
210 c = self._get_local_tmpl_context()
211 c.custom_var = 'foobar'
211 c.custom_var = 'foobar'
212
212
213 return c
213 return c
214 """
214 """
215 raise NotImplementedError('Needs implementation in view class')
215 raise NotImplementedError('Needs implementation in view class')
216
216
217
217
218 class RepoAppView(BaseAppView):
218 class RepoAppView(BaseAppView):
219
219
220 def __init__(self, context, request):
220 def __init__(self, context, request):
221 super(RepoAppView, self).__init__(context, request)
221 super(RepoAppView, self).__init__(context, request)
222 self.db_repo = request.db_repo
222 self.db_repo = request.db_repo
223 self.db_repo_name = self.db_repo.repo_name
223 self.db_repo_name = self.db_repo.repo_name
224 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
224 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
225
225
226 def _handle_missing_requirements(self, error):
226 def _handle_missing_requirements(self, error):
227 log.error(
227 log.error(
228 'Requirements are missing for repository %s: %s',
228 'Requirements are missing for repository %s: %s',
229 self.db_repo_name, safe_unicode(error))
229 self.db_repo_name, safe_unicode(error))
230
230
231 def _get_local_tmpl_context(self, include_app_defaults=True):
231 def _get_local_tmpl_context(self, include_app_defaults=True):
232 _ = self.request.translate
232 _ = self.request.translate
233 c = super(RepoAppView, self)._get_local_tmpl_context(
233 c = super(RepoAppView, self)._get_local_tmpl_context(
234 include_app_defaults=include_app_defaults)
234 include_app_defaults=include_app_defaults)
235
235
236 # register common vars for this type of view
236 # register common vars for this type of view
237 c.rhodecode_db_repo = self.db_repo
237 c.rhodecode_db_repo = self.db_repo
238 c.repo_name = self.db_repo_name
238 c.repo_name = self.db_repo_name
239 c.repository_pull_requests = self.db_repo_pull_requests
239 c.repository_pull_requests = self.db_repo_pull_requests
240 c.repository_is_user_following = ScmModel().is_following_repo(
240 c.repository_is_user_following = ScmModel().is_following_repo(
241 self.db_repo_name, self._rhodecode_user.user_id)
241 self.db_repo_name, self._rhodecode_user.user_id)
242 self.path_filter = PathFilter(None)
242 self.path_filter = PathFilter(None)
243
243
244 c.repository_requirements_missing = {}
244 c.repository_requirements_missing = {}
245 try:
245 try:
246 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
246 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
247 # NOTE(marcink):
247 # NOTE(marcink):
248 # comparison to None since if it's an object __bool__ is expensive to
248 # comparison to None since if it's an object __bool__ is expensive to
249 # calculate
249 # calculate
250 if self.rhodecode_vcs_repo is not None:
250 if self.rhodecode_vcs_repo is not None:
251 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
251 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
252 c.auth_user.username)
252 c.auth_user.username)
253 self.path_filter = PathFilter(path_perms)
253 self.path_filter = PathFilter(path_perms)
254 except RepositoryRequirementError as e:
254 except RepositoryRequirementError as e:
255 c.repository_requirements_missing = {'error': str(e)}
255 c.repository_requirements_missing = {'error': str(e)}
256 self._handle_missing_requirements(e)
256 self._handle_missing_requirements(e)
257 self.rhodecode_vcs_repo = None
257 self.rhodecode_vcs_repo = None
258
258
259 c.path_filter = self.path_filter # used by atom_feed_entry.mako
259 c.path_filter = self.path_filter # used by atom_feed_entry.mako
260
260
261 if self.rhodecode_vcs_repo is None:
261 if self.rhodecode_vcs_repo is None:
262 # unable to fetch this repo as vcs instance, report back to user
262 # unable to fetch this repo as vcs instance, report back to user
263 h.flash(_(
263 h.flash(_(
264 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
264 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
265 "Please check if it exist, or is not damaged.") %
265 "Please check if it exist, or is not damaged.") %
266 {'repo_name': c.repo_name},
266 {'repo_name': c.repo_name},
267 category='error', ignore_duplicate=True)
267 category='error', ignore_duplicate=True)
268 if c.repository_requirements_missing:
268 if c.repository_requirements_missing:
269 route = self.request.matched_route.name
269 route = self.request.matched_route.name
270 if route.startswith(('edit_repo', 'repo_summary')):
270 if route.startswith(('edit_repo', 'repo_summary')):
271 # allow summary and edit repo on missing requirements
271 # allow summary and edit repo on missing requirements
272 return c
272 return c
273
273
274 raise HTTPFound(
274 raise HTTPFound(
275 h.route_path('repo_summary', repo_name=self.db_repo_name))
275 h.route_path('repo_summary', repo_name=self.db_repo_name))
276
276
277 else: # redirect if we don't show missing requirements
277 else: # redirect if we don't show missing requirements
278 raise HTTPFound(h.route_path('home'))
278 raise HTTPFound(h.route_path('home'))
279
279
280 c.has_origin_repo_read_perm = False
280 c.has_origin_repo_read_perm = False
281 if self.db_repo.fork:
281 if self.db_repo.fork:
282 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
282 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
283 'repository.write', 'repository.read', 'repository.admin')(
283 'repository.write', 'repository.read', 'repository.admin')(
284 self.db_repo.fork.repo_name, 'summary fork link')
284 self.db_repo.fork.repo_name, 'summary fork link')
285
285
286 return c
286 return c
287
287
288 def _get_f_path_unchecked(self, matchdict, default=None):
288 def _get_f_path_unchecked(self, matchdict, default=None):
289 """
289 """
290 Should only be used by redirects, everything else should call _get_f_path
290 Should only be used by redirects, everything else should call _get_f_path
291 """
291 """
292 f_path = matchdict.get('f_path')
292 f_path = matchdict.get('f_path')
293 if f_path:
293 if f_path:
294 # fix for multiple initial slashes that causes errors for GIT
294 # fix for multiple initial slashes that causes errors for GIT
295 return f_path.lstrip('/')
295 return f_path.lstrip('/')
296
296
297 return default
297 return default
298
298
299 def _get_f_path(self, matchdict, default=None):
299 def _get_f_path(self, matchdict, default=None):
300 f_path_match = self._get_f_path_unchecked(matchdict, default)
300 f_path_match = self._get_f_path_unchecked(matchdict, default)
301 return self.path_filter.assert_path_permissions(f_path_match)
301 return self.path_filter.assert_path_permissions(f_path_match)
302
302
303 def _get_general_setting(self, target_repo, settings_key, default=False):
303 def _get_general_setting(self, target_repo, settings_key, default=False):
304 settings_model = VcsSettingsModel(repo=target_repo)
304 settings_model = VcsSettingsModel(repo=target_repo)
305 settings = settings_model.get_general_settings()
305 settings = settings_model.get_general_settings()
306 return settings.get(settings_key, default)
306 return settings.get(settings_key, default)
307
307
308 def get_recache_flag(self):
308 def get_recache_flag(self):
309 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
309 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
310 flag_val = self.request.GET.get(flag_name)
310 flag_val = self.request.GET.get(flag_name)
311 if str2bool(flag_val):
311 if str2bool(flag_val):
312 return True
312 return True
313 return False
313 return False
314
314
315
315
316 class PathFilter(object):
316 class PathFilter(object):
317
317
318 # Expects and instance of BasePathPermissionChecker or None
318 # Expects and instance of BasePathPermissionChecker or None
319 def __init__(self, permission_checker):
319 def __init__(self, permission_checker):
320 self.permission_checker = permission_checker
320 self.permission_checker = permission_checker
321
321
322 def assert_path_permissions(self, path):
322 def assert_path_permissions(self, path):
323 if path and self.permission_checker and not self.permission_checker.has_access(path):
323 if path and self.permission_checker and not self.permission_checker.has_access(path):
324 raise HTTPForbidden()
324 raise HTTPForbidden()
325 return path
325 return path
326
326
327 def filter_patchset(self, patchset):
327 def filter_patchset(self, patchset):
328 if not self.permission_checker or not patchset:
328 if not self.permission_checker or not patchset:
329 return patchset, False
329 return patchset, False
330 had_filtered = False
330 had_filtered = False
331 filtered_patchset = []
331 filtered_patchset = []
332 for patch in patchset:
332 for patch in patchset:
333 filename = patch.get('filename', None)
333 filename = patch.get('filename', None)
334 if not filename or self.permission_checker.has_access(filename):
334 if not filename or self.permission_checker.has_access(filename):
335 filtered_patchset.append(patch)
335 filtered_patchset.append(patch)
336 else:
336 else:
337 had_filtered = True
337 had_filtered = True
338 if had_filtered:
338 if had_filtered:
339 if isinstance(patchset, diffs.LimitedDiffContainer):
339 if isinstance(patchset, diffs.LimitedDiffContainer):
340 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
340 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
341 return filtered_patchset, True
341 return filtered_patchset, True
342 else:
342 else:
343 return patchset, False
343 return patchset, False
344
344
345 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
345 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
346 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
346 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
347 result = diffset.render_patchset(
347 result = diffset.render_patchset(
348 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
348 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
349 result.has_hidden_changes = has_hidden_changes
349 result.has_hidden_changes = has_hidden_changes
350 return result
350 return result
351
351
352 def get_raw_patch(self, diff_processor):
352 def get_raw_patch(self, diff_processor):
353 if self.permission_checker is None:
353 if self.permission_checker is None:
354 return diff_processor.as_raw()
354 return diff_processor.as_raw()
355 elif self.permission_checker.has_full_access:
355 elif self.permission_checker.has_full_access:
356 return diff_processor.as_raw()
356 return diff_processor.as_raw()
357 else:
357 else:
358 return '# Repository has user-specific filters, raw patch generation is disabled.'
358 return '# Repository has user-specific filters, raw patch generation is disabled.'
359
359
360 @property
360 @property
361 def is_enabled(self):
361 def is_enabled(self):
362 return self.permission_checker is not None
362 return self.permission_checker is not None
363
363
364
364
365 class RepoGroupAppView(BaseAppView):
365 class RepoGroupAppView(BaseAppView):
366 def __init__(self, context, request):
366 def __init__(self, context, request):
367 super(RepoGroupAppView, self).__init__(context, request)
367 super(RepoGroupAppView, self).__init__(context, request)
368 self.db_repo_group = request.db_repo_group
368 self.db_repo_group = request.db_repo_group
369 self.db_repo_group_name = self.db_repo_group.group_name
369 self.db_repo_group_name = self.db_repo_group.group_name
370
370
371 def _get_local_tmpl_context(self, include_app_defaults=True):
371 def _get_local_tmpl_context(self, include_app_defaults=True):
372 _ = self.request.translate
372 _ = self.request.translate
373 c = super(RepoGroupAppView, self)._get_local_tmpl_context(
373 c = super(RepoGroupAppView, self)._get_local_tmpl_context(
374 include_app_defaults=include_app_defaults)
374 include_app_defaults=include_app_defaults)
375 c.repo_group = self.db_repo_group
375 c.repo_group = self.db_repo_group
376 return c
376 return c
377
377
378 def _revoke_perms_on_yourself(self, form_result):
378 def _revoke_perms_on_yourself(self, form_result):
379 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
379 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
380 form_result['perm_updates'])
380 form_result['perm_updates'])
381 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
381 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
382 form_result['perm_additions'])
382 form_result['perm_additions'])
383 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
383 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
384 form_result['perm_deletions'])
384 form_result['perm_deletions'])
385 admin_perm = 'group.admin'
385 admin_perm = 'group.admin'
386 if _updates and _updates[0][1] != admin_perm or \
386 if _updates and _updates[0][1] != admin_perm or \
387 _additions and _additions[0][1] != admin_perm or \
387 _additions and _additions[0][1] != admin_perm or \
388 _deletions and _deletions[0][1] != admin_perm:
388 _deletions and _deletions[0][1] != admin_perm:
389 return True
389 return True
390 return False
390 return False
391
391
392
392
393 class UserGroupAppView(BaseAppView):
393 class UserGroupAppView(BaseAppView):
394 def __init__(self, context, request):
394 def __init__(self, context, request):
395 super(UserGroupAppView, self).__init__(context, request)
395 super(UserGroupAppView, self).__init__(context, request)
396 self.db_user_group = request.db_user_group
396 self.db_user_group = request.db_user_group
397 self.db_user_group_name = self.db_user_group.users_group_name
397 self.db_user_group_name = self.db_user_group.users_group_name
398
398
399
399
400 class UserAppView(BaseAppView):
400 class UserAppView(BaseAppView):
401 def __init__(self, context, request):
401 def __init__(self, context, request):
402 super(UserAppView, self).__init__(context, request)
402 super(UserAppView, self).__init__(context, request)
403 self.db_user = request.db_user
403 self.db_user = request.db_user
404 self.db_user_id = self.db_user.user_id
404 self.db_user_id = self.db_user.user_id
405
405
406 _ = self.request.translate
406 _ = self.request.translate
407 if not request.db_user_supports_default:
407 if not request.db_user_supports_default:
408 if self.db_user.username == User.DEFAULT_USER:
408 if self.db_user.username == User.DEFAULT_USER:
409 h.flash(_("Editing user `{}` is disabled.".format(
409 h.flash(_("Editing user `{}` is disabled.".format(
410 User.DEFAULT_USER)), category='warning')
410 User.DEFAULT_USER)), category='warning')
411 raise HTTPFound(h.route_path('users'))
411 raise HTTPFound(h.route_path('users'))
412
412
413
413
414 class DataGridAppView(object):
414 class DataGridAppView(object):
415 """
415 """
416 Common class to have re-usable grid rendering components
416 Common class to have re-usable grid rendering components
417 """
417 """
418
418
419 def _extract_ordering(self, request, column_map=None):
419 def _extract_ordering(self, request, column_map=None):
420 column_map = column_map or {}
420 column_map = column_map or {}
421 column_index = safe_int(request.GET.get('order[0][column]'))
421 column_index = safe_int(request.GET.get('order[0][column]'))
422 order_dir = request.GET.get(
422 order_dir = request.GET.get(
423 'order[0][dir]', 'desc')
423 'order[0][dir]', 'desc')
424 order_by = request.GET.get(
424 order_by = request.GET.get(
425 'columns[%s][data][sort]' % column_index, 'name_raw')
425 'columns[%s][data][sort]' % column_index, 'name_raw')
426
426
427 # translate datatable to DB columns
427 # translate datatable to DB columns
428 order_by = column_map.get(order_by) or order_by
428 order_by = column_map.get(order_by) or order_by
429
429
430 search_q = request.GET.get('search[value]')
430 search_q = request.GET.get('search[value]')
431 return search_q, order_by, order_dir
431 return search_q, order_by, order_dir
432
432
433 def _extract_chunk(self, request):
433 def _extract_chunk(self, request):
434 start = safe_int(request.GET.get('start'), 0)
434 start = safe_int(request.GET.get('start'), 0)
435 length = safe_int(request.GET.get('length'), 25)
435 length = safe_int(request.GET.get('length'), 25)
436 draw = safe_int(request.GET.get('draw'))
436 draw = safe_int(request.GET.get('draw'))
437 return draw, start, length
437 return draw, start, length
438
438
439 def _get_order_col(self, order_by, model):
439 def _get_order_col(self, order_by, model):
440 if isinstance(order_by, compat.string_types):
440 if isinstance(order_by, compat.string_types):
441 try:
441 try:
442 return operator.attrgetter(order_by)(model)
442 return operator.attrgetter(order_by)(model)
443 except AttributeError:
443 except AttributeError:
444 return None
444 return None
445 else:
445 else:
446 return order_by
446 return order_by
447
447
448
448
449 class BaseReferencesView(RepoAppView):
449 class BaseReferencesView(RepoAppView):
450 """
450 """
451 Base for reference view for branches, tags and bookmarks.
451 Base for reference view for branches, tags and bookmarks.
452 """
452 """
453 def load_default_context(self):
453 def load_default_context(self):
454 c = self._get_local_tmpl_context()
454 c = self._get_local_tmpl_context()
455
455
456
456
457 return c
457 return c
458
458
459 def load_refs_context(self, ref_items, partials_template):
459 def load_refs_context(self, ref_items, partials_template):
460 _render = self.request.get_partial_renderer(partials_template)
460 _render = self.request.get_partial_renderer(partials_template)
461 pre_load = ["author", "date", "message"]
461 pre_load = ["author", "date", "message", "parents"]
462
462
463 is_svn = h.is_svn(self.rhodecode_vcs_repo)
463 is_svn = h.is_svn(self.rhodecode_vcs_repo)
464 is_hg = h.is_hg(self.rhodecode_vcs_repo)
464 is_hg = h.is_hg(self.rhodecode_vcs_repo)
465
465
466 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
466 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
467
467
468 closed_refs = {}
468 closed_refs = {}
469 if is_hg:
469 if is_hg:
470 closed_refs = self.rhodecode_vcs_repo.branches_closed
470 closed_refs = self.rhodecode_vcs_repo.branches_closed
471
471
472 data = []
472 data = []
473 for ref_name, commit_id in ref_items:
473 for ref_name, commit_id in ref_items:
474 commit = self.rhodecode_vcs_repo.get_commit(
474 commit = self.rhodecode_vcs_repo.get_commit(
475 commit_id=commit_id, pre_load=pre_load)
475 commit_id=commit_id, pre_load=pre_load)
476 closed = ref_name in closed_refs
476 closed = ref_name in closed_refs
477
477
478 # TODO: johbo: Unify generation of reference links
478 # TODO: johbo: Unify generation of reference links
479 use_commit_id = '/' in ref_name or is_svn
479 use_commit_id = '/' in ref_name or is_svn
480
480
481 if use_commit_id:
481 if use_commit_id:
482 files_url = h.route_path(
482 files_url = h.route_path(
483 'repo_files',
483 'repo_files',
484 repo_name=self.db_repo_name,
484 repo_name=self.db_repo_name,
485 f_path=ref_name if is_svn else '',
485 f_path=ref_name if is_svn else '',
486 commit_id=commit_id)
486 commit_id=commit_id)
487
487
488 else:
488 else:
489 files_url = h.route_path(
489 files_url = h.route_path(
490 'repo_files',
490 'repo_files',
491 repo_name=self.db_repo_name,
491 repo_name=self.db_repo_name,
492 f_path=ref_name if is_svn else '',
492 f_path=ref_name if is_svn else '',
493 commit_id=ref_name,
493 commit_id=ref_name,
494 _query=dict(at=ref_name))
494 _query=dict(at=ref_name))
495
495
496 data.append({
496 data.append({
497 "name": _render('name', ref_name, files_url, closed),
497 "name": _render('name', ref_name, files_url, closed),
498 "name_raw": ref_name,
498 "name_raw": ref_name,
499 "date": _render('date', commit.date),
499 "date": _render('date', commit.date),
500 "date_raw": datetime_to_time(commit.date),
500 "date_raw": datetime_to_time(commit.date),
501 "author": _render('author', commit.author),
501 "author": _render('author', commit.author),
502 "commit": _render(
502 "commit": _render(
503 'commit', commit.message, commit.raw_id, commit.idx),
503 'commit', commit.message, commit.raw_id, commit.idx),
504 "commit_raw": commit.idx,
504 "commit_raw": commit.idx,
505 "compare": _render(
505 "compare": _render(
506 'compare', format_ref_id(ref_name, commit.raw_id)),
506 'compare', format_ref_id(ref_name, commit.raw_id)),
507 })
507 })
508
508
509 return data
509 return data
510
510
511
511
512 class RepoRoutePredicate(object):
512 class RepoRoutePredicate(object):
513 def __init__(self, val, config):
513 def __init__(self, val, config):
514 self.val = val
514 self.val = val
515
515
516 def text(self):
516 def text(self):
517 return 'repo_route = %s' % self.val
517 return 'repo_route = %s' % self.val
518
518
519 phash = text
519 phash = text
520
520
521 def __call__(self, info, request):
521 def __call__(self, info, request):
522 if hasattr(request, 'vcs_call'):
522 if hasattr(request, 'vcs_call'):
523 # skip vcs calls
523 # skip vcs calls
524 return
524 return
525
525
526 repo_name = info['match']['repo_name']
526 repo_name = info['match']['repo_name']
527 repo_model = repo.RepoModel()
527 repo_model = repo.RepoModel()
528
528
529 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
529 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
530
530
531 def redirect_if_creating(route_info, db_repo):
531 def redirect_if_creating(route_info, db_repo):
532 skip_views = ['edit_repo_advanced_delete']
532 skip_views = ['edit_repo_advanced_delete']
533 route = route_info['route']
533 route = route_info['route']
534 # we should skip delete view so we can actually "remove" repositories
534 # we should skip delete view so we can actually "remove" repositories
535 # if they get stuck in creating state.
535 # if they get stuck in creating state.
536 if route.name in skip_views:
536 if route.name in skip_views:
537 return
537 return
538
538
539 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
539 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
540 repo_creating_url = request.route_path(
540 repo_creating_url = request.route_path(
541 'repo_creating', repo_name=db_repo.repo_name)
541 'repo_creating', repo_name=db_repo.repo_name)
542 raise HTTPFound(repo_creating_url)
542 raise HTTPFound(repo_creating_url)
543
543
544 if by_name_match:
544 if by_name_match:
545 # register this as request object we can re-use later
545 # register this as request object we can re-use later
546 request.db_repo = by_name_match
546 request.db_repo = by_name_match
547 redirect_if_creating(info, by_name_match)
547 redirect_if_creating(info, by_name_match)
548 return True
548 return True
549
549
550 by_id_match = repo_model.get_repo_by_id(repo_name)
550 by_id_match = repo_model.get_repo_by_id(repo_name)
551 if by_id_match:
551 if by_id_match:
552 request.db_repo = by_id_match
552 request.db_repo = by_id_match
553 redirect_if_creating(info, by_id_match)
553 redirect_if_creating(info, by_id_match)
554 return True
554 return True
555
555
556 return False
556 return False
557
557
558
558
559 class RepoForbidArchivedRoutePredicate(object):
559 class RepoForbidArchivedRoutePredicate(object):
560 def __init__(self, val, config):
560 def __init__(self, val, config):
561 self.val = val
561 self.val = val
562
562
563 def text(self):
563 def text(self):
564 return 'repo_forbid_archived = %s' % self.val
564 return 'repo_forbid_archived = %s' % self.val
565
565
566 phash = text
566 phash = text
567
567
568 def __call__(self, info, request):
568 def __call__(self, info, request):
569 _ = request.translate
569 _ = request.translate
570 rhodecode_db_repo = request.db_repo
570 rhodecode_db_repo = request.db_repo
571
571
572 log.debug(
572 log.debug(
573 '%s checking if archived flag for repo for %s',
573 '%s checking if archived flag for repo for %s',
574 self.__class__.__name__, rhodecode_db_repo.repo_name)
574 self.__class__.__name__, rhodecode_db_repo.repo_name)
575
575
576 if rhodecode_db_repo.archived:
576 if rhodecode_db_repo.archived:
577 log.warning('Current view is not supported for archived repo:%s',
577 log.warning('Current view is not supported for archived repo:%s',
578 rhodecode_db_repo.repo_name)
578 rhodecode_db_repo.repo_name)
579
579
580 h.flash(
580 h.flash(
581 h.literal(_('Action not supported for archived repository.')),
581 h.literal(_('Action not supported for archived repository.')),
582 category='warning')
582 category='warning')
583 summary_url = request.route_path(
583 summary_url = request.route_path(
584 'repo_summary', repo_name=rhodecode_db_repo.repo_name)
584 'repo_summary', repo_name=rhodecode_db_repo.repo_name)
585 raise HTTPFound(summary_url)
585 raise HTTPFound(summary_url)
586 return True
586 return True
587
587
588
588
589 class RepoTypeRoutePredicate(object):
589 class RepoTypeRoutePredicate(object):
590 def __init__(self, val, config):
590 def __init__(self, val, config):
591 self.val = val or ['hg', 'git', 'svn']
591 self.val = val or ['hg', 'git', 'svn']
592
592
593 def text(self):
593 def text(self):
594 return 'repo_accepted_type = %s' % self.val
594 return 'repo_accepted_type = %s' % self.val
595
595
596 phash = text
596 phash = text
597
597
598 def __call__(self, info, request):
598 def __call__(self, info, request):
599 if hasattr(request, 'vcs_call'):
599 if hasattr(request, 'vcs_call'):
600 # skip vcs calls
600 # skip vcs calls
601 return
601 return
602
602
603 rhodecode_db_repo = request.db_repo
603 rhodecode_db_repo = request.db_repo
604
604
605 log.debug(
605 log.debug(
606 '%s checking repo type for %s in %s',
606 '%s checking repo type for %s in %s',
607 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
607 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
608
608
609 if rhodecode_db_repo.repo_type in self.val:
609 if rhodecode_db_repo.repo_type in self.val:
610 return True
610 return True
611 else:
611 else:
612 log.warning('Current view is not supported for repo type:%s',
612 log.warning('Current view is not supported for repo type:%s',
613 rhodecode_db_repo.repo_type)
613 rhodecode_db_repo.repo_type)
614 return False
614 return False
615
615
616
616
617 class RepoGroupRoutePredicate(object):
617 class RepoGroupRoutePredicate(object):
618 def __init__(self, val, config):
618 def __init__(self, val, config):
619 self.val = val
619 self.val = val
620
620
621 def text(self):
621 def text(self):
622 return 'repo_group_route = %s' % self.val
622 return 'repo_group_route = %s' % self.val
623
623
624 phash = text
624 phash = text
625
625
626 def __call__(self, info, request):
626 def __call__(self, info, request):
627 if hasattr(request, 'vcs_call'):
627 if hasattr(request, 'vcs_call'):
628 # skip vcs calls
628 # skip vcs calls
629 return
629 return
630
630
631 repo_group_name = info['match']['repo_group_name']
631 repo_group_name = info['match']['repo_group_name']
632 repo_group_model = repo_group.RepoGroupModel()
632 repo_group_model = repo_group.RepoGroupModel()
633 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
633 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
634
634
635 if by_name_match:
635 if by_name_match:
636 # register this as request object we can re-use later
636 # register this as request object we can re-use later
637 request.db_repo_group = by_name_match
637 request.db_repo_group = by_name_match
638 return True
638 return True
639
639
640 return False
640 return False
641
641
642
642
643 class UserGroupRoutePredicate(object):
643 class UserGroupRoutePredicate(object):
644 def __init__(self, val, config):
644 def __init__(self, val, config):
645 self.val = val
645 self.val = val
646
646
647 def text(self):
647 def text(self):
648 return 'user_group_route = %s' % self.val
648 return 'user_group_route = %s' % self.val
649
649
650 phash = text
650 phash = text
651
651
652 def __call__(self, info, request):
652 def __call__(self, info, request):
653 if hasattr(request, 'vcs_call'):
653 if hasattr(request, 'vcs_call'):
654 # skip vcs calls
654 # skip vcs calls
655 return
655 return
656
656
657 user_group_id = info['match']['user_group_id']
657 user_group_id = info['match']['user_group_id']
658 user_group_model = user_group.UserGroup()
658 user_group_model = user_group.UserGroup()
659 by_id_match = user_group_model.get(user_group_id, cache=False)
659 by_id_match = user_group_model.get(user_group_id, cache=False)
660
660
661 if by_id_match:
661 if by_id_match:
662 # register this as request object we can re-use later
662 # register this as request object we can re-use later
663 request.db_user_group = by_id_match
663 request.db_user_group = by_id_match
664 return True
664 return True
665
665
666 return False
666 return False
667
667
668
668
669 class UserRoutePredicateBase(object):
669 class UserRoutePredicateBase(object):
670 supports_default = None
670 supports_default = None
671
671
672 def __init__(self, val, config):
672 def __init__(self, val, config):
673 self.val = val
673 self.val = val
674
674
675 def text(self):
675 def text(self):
676 raise NotImplementedError()
676 raise NotImplementedError()
677
677
678 def __call__(self, info, request):
678 def __call__(self, info, request):
679 if hasattr(request, 'vcs_call'):
679 if hasattr(request, 'vcs_call'):
680 # skip vcs calls
680 # skip vcs calls
681 return
681 return
682
682
683 user_id = info['match']['user_id']
683 user_id = info['match']['user_id']
684 user_model = user.User()
684 user_model = user.User()
685 by_id_match = user_model.get(user_id, cache=False)
685 by_id_match = user_model.get(user_id, cache=False)
686
686
687 if by_id_match:
687 if by_id_match:
688 # register this as request object we can re-use later
688 # register this as request object we can re-use later
689 request.db_user = by_id_match
689 request.db_user = by_id_match
690 request.db_user_supports_default = self.supports_default
690 request.db_user_supports_default = self.supports_default
691 return True
691 return True
692
692
693 return False
693 return False
694
694
695
695
696 class UserRoutePredicate(UserRoutePredicateBase):
696 class UserRoutePredicate(UserRoutePredicateBase):
697 supports_default = False
697 supports_default = False
698
698
699 def text(self):
699 def text(self):
700 return 'user_route = %s' % self.val
700 return 'user_route = %s' % self.val
701
701
702 phash = text
702 phash = text
703
703
704
704
705 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
705 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
706 supports_default = True
706 supports_default = True
707
707
708 def text(self):
708 def text(self):
709 return 'user_with_default_route = %s' % self.val
709 return 'user_with_default_route = %s' % self.val
710
710
711 phash = text
711 phash = text
712
712
713
713
714 def includeme(config):
714 def includeme(config):
715 config.add_route_predicate(
715 config.add_route_predicate(
716 'repo_route', RepoRoutePredicate)
716 'repo_route', RepoRoutePredicate)
717 config.add_route_predicate(
717 config.add_route_predicate(
718 'repo_accepted_types', RepoTypeRoutePredicate)
718 'repo_accepted_types', RepoTypeRoutePredicate)
719 config.add_route_predicate(
719 config.add_route_predicate(
720 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
720 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
721 config.add_route_predicate(
721 config.add_route_predicate(
722 'repo_group_route', RepoGroupRoutePredicate)
722 'repo_group_route', RepoGroupRoutePredicate)
723 config.add_route_predicate(
723 config.add_route_predicate(
724 'user_group_route', UserGroupRoutePredicate)
724 'user_group_route', UserGroupRoutePredicate)
725 config.add_route_predicate(
725 config.add_route_predicate(
726 'user_route_with_default', UserRouteWithDefaultPredicate)
726 'user_route_with_default', UserRouteWithDefaultPredicate)
727 config.add_route_predicate(
727 config.add_route_predicate(
728 'user_route', UserRoutePredicate)
728 'user_route', UserRoutePredicate)
@@ -1,311 +1,311 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
24 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPFound
25 from pyramid.view import view_config
25 from pyramid.view import view_config
26 from pyramid.renderers import render
26 from pyramid.renderers import render
27 from pyramid.response import Response
27 from pyramid.response import Response
28
28
29 from rhodecode.apps._base import RepoAppView
29 from rhodecode.apps._base import RepoAppView
30
30
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib import diffs, codeblocks
32 from rhodecode.lib import diffs, codeblocks
33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 from rhodecode.lib.utils import safe_str
34 from rhodecode.lib.utils import safe_str
35 from rhodecode.lib.utils2 import safe_unicode, str2bool
35 from rhodecode.lib.utils2 import safe_unicode, str2bool
36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
36 from rhodecode.lib.view_utils import parse_path_ref, get_commit_from_ref_name
37 from rhodecode.lib.vcs.exceptions import (
37 from rhodecode.lib.vcs.exceptions import (
38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
38 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
39 NodeDoesNotExistError)
39 NodeDoesNotExistError)
40 from rhodecode.model.db import Repository, ChangesetStatus
40 from rhodecode.model.db import Repository, ChangesetStatus
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 class RepoCompareView(RepoAppView):
45 class RepoCompareView(RepoAppView):
46 def load_default_context(self):
46 def load_default_context(self):
47 c = self._get_local_tmpl_context(include_app_defaults=True)
47 c = self._get_local_tmpl_context(include_app_defaults=True)
48 c.rhodecode_repo = self.rhodecode_vcs_repo
48 c.rhodecode_repo = self.rhodecode_vcs_repo
49 return c
49 return c
50
50
51 def _get_commit_or_redirect(
51 def _get_commit_or_redirect(
52 self, ref, ref_type, repo, redirect_after=True, partial=False):
52 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 """
53 """
54 This is a safe way to get a commit. If an error occurs it
54 This is a safe way to get a commit. If an error occurs it
55 redirects to a commit with a proper message. If partial is set
55 redirects to a commit with a proper message. If partial is set
56 then it does not do redirect raise and throws an exception instead.
56 then it does not do redirect raise and throws an exception instead.
57 """
57 """
58 _ = self.request.translate
58 _ = self.request.translate
59 try:
59 try:
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
61 except EmptyRepositoryError:
61 except EmptyRepositoryError:
62 if not redirect_after:
62 if not redirect_after:
63 return repo.scm_instance().EMPTY_COMMIT
63 return repo.scm_instance().EMPTY_COMMIT
64 h.flash(h.literal(_('There are no commits yet')),
64 h.flash(h.literal(_('There are no commits yet')),
65 category='warning')
65 category='warning')
66 if not partial:
66 if not partial:
67 raise HTTPFound(
67 raise HTTPFound(
68 h.route_path('repo_summary', repo_name=repo.repo_name))
68 h.route_path('repo_summary', repo_name=repo.repo_name))
69 raise HTTPBadRequest()
69 raise HTTPBadRequest()
70
70
71 except RepositoryError as e:
71 except RepositoryError as e:
72 log.exception(safe_str(e))
72 log.exception(safe_str(e))
73 h.flash(safe_str(h.escape(e)), category='warning')
73 h.flash(safe_str(h.escape(e)), category='warning')
74 if not partial:
74 if not partial:
75 raise HTTPFound(
75 raise HTTPFound(
76 h.route_path('repo_summary', repo_name=repo.repo_name))
76 h.route_path('repo_summary', repo_name=repo.repo_name))
77 raise HTTPBadRequest()
77 raise HTTPBadRequest()
78
78
79 @LoginRequired()
79 @LoginRequired()
80 @HasRepoPermissionAnyDecorator(
80 @HasRepoPermissionAnyDecorator(
81 'repository.read', 'repository.write', 'repository.admin')
81 'repository.read', 'repository.write', 'repository.admin')
82 @view_config(
82 @view_config(
83 route_name='repo_compare_select', request_method='GET',
83 route_name='repo_compare_select', request_method='GET',
84 renderer='rhodecode:templates/compare/compare_diff.mako')
84 renderer='rhodecode:templates/compare/compare_diff.mako')
85 def compare_select(self):
85 def compare_select(self):
86 _ = self.request.translate
86 _ = self.request.translate
87 c = self.load_default_context()
87 c = self.load_default_context()
88
88
89 source_repo = self.db_repo_name
89 source_repo = self.db_repo_name
90 target_repo = self.request.GET.get('target_repo', source_repo)
90 target_repo = self.request.GET.get('target_repo', source_repo)
91 c.source_repo = Repository.get_by_repo_name(source_repo)
91 c.source_repo = Repository.get_by_repo_name(source_repo)
92 c.target_repo = Repository.get_by_repo_name(target_repo)
92 c.target_repo = Repository.get_by_repo_name(target_repo)
93
93
94 if c.source_repo is None or c.target_repo is None:
94 if c.source_repo is None or c.target_repo is None:
95 raise HTTPNotFound()
95 raise HTTPNotFound()
96
96
97 c.compare_home = True
97 c.compare_home = True
98 c.commit_ranges = []
98 c.commit_ranges = []
99 c.collapse_all_commits = False
99 c.collapse_all_commits = False
100 c.diffset = None
100 c.diffset = None
101 c.limited_diff = False
101 c.limited_diff = False
102 c.source_ref = c.target_ref = _('Select commit')
102 c.source_ref = c.target_ref = _('Select commit')
103 c.source_ref_type = ""
103 c.source_ref_type = ""
104 c.target_ref_type = ""
104 c.target_ref_type = ""
105 c.commit_statuses = ChangesetStatus.STATUSES
105 c.commit_statuses = ChangesetStatus.STATUSES
106 c.preview_mode = False
106 c.preview_mode = False
107 c.file_path = None
107 c.file_path = None
108
108
109 return self._get_template_context(c)
109 return self._get_template_context(c)
110
110
111 @LoginRequired()
111 @LoginRequired()
112 @HasRepoPermissionAnyDecorator(
112 @HasRepoPermissionAnyDecorator(
113 'repository.read', 'repository.write', 'repository.admin')
113 'repository.read', 'repository.write', 'repository.admin')
114 @view_config(
114 @view_config(
115 route_name='repo_compare', request_method='GET',
115 route_name='repo_compare', request_method='GET',
116 renderer=None)
116 renderer=None)
117 def compare(self):
117 def compare(self):
118 _ = self.request.translate
118 _ = self.request.translate
119 c = self.load_default_context()
119 c = self.load_default_context()
120
120
121 source_ref_type = self.request.matchdict['source_ref_type']
121 source_ref_type = self.request.matchdict['source_ref_type']
122 source_ref = self.request.matchdict['source_ref']
122 source_ref = self.request.matchdict['source_ref']
123 target_ref_type = self.request.matchdict['target_ref_type']
123 target_ref_type = self.request.matchdict['target_ref_type']
124 target_ref = self.request.matchdict['target_ref']
124 target_ref = self.request.matchdict['target_ref']
125
125
126 # source_ref will be evaluated in source_repo
126 # source_ref will be evaluated in source_repo
127 source_repo_name = self.db_repo_name
127 source_repo_name = self.db_repo_name
128 source_path, source_id = parse_path_ref(source_ref)
128 source_path, source_id = parse_path_ref(source_ref)
129
129
130 # target_ref will be evaluated in target_repo
130 # target_ref will be evaluated in target_repo
131 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
131 target_repo_name = self.request.GET.get('target_repo', source_repo_name)
132 target_path, target_id = parse_path_ref(
132 target_path, target_id = parse_path_ref(
133 target_ref, default_path=self.request.GET.get('f_path', ''))
133 target_ref, default_path=self.request.GET.get('f_path', ''))
134
134
135 # if merge is True
135 # if merge is True
136 # Show what changes, measured from the shared ancestor commit of target/source,
136 # Show what changes, measured from the shared ancestor commit of target/source,
137 # the source would get if it were merged with target. Only commits
137 # the source would get if it were merged with target. Only commits
138 # which are in target but not in source will be shown.
138 # which are in target but not in source will be shown.
139 merge = str2bool(self.request.GET.get('merge'))
139 merge = str2bool(self.request.GET.get('merge'))
140 # if merge is False
140 # if merge is False
141 # Show a raw diff of source/target refs even if no ancestor exists
141 # Show a raw diff of source/target refs even if no ancestor exists
142
142
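A minimal illustration (not RhodeCode code) of the merge-mode rule described in the comments above: with merge enabled, only the commits that are in the target but not in the source are listed, i.e. what the source would gain by merging the target.

# Toy model, assuming commits are identified by plain ids.
def commits_shown_when_merge_enabled(source_ids, target_ids):
    # only commits which are in target but not in source are shown
    return sorted(set(target_ids) - set(source_ids))

# Example: target carries one extra commit 'c' on top of the shared history.
assert commits_shown_when_merge_enabled(['a', 'b'], ['a', 'b', 'c']) == ['c']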
143 # c.fulldiff disables cut_off_limit
143 # c.fulldiff disables cut_off_limit
144 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
144 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
145
145
146 # fetch global flags of ignore ws or context lines
146 # fetch global flags of ignore ws or context lines
147 diff_context = diffs.get_diff_context(self.request)
147 diff_context = diffs.get_diff_context(self.request)
148 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
148 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
149
149
150 c.file_path = target_path
150 c.file_path = target_path
151 c.commit_statuses = ChangesetStatus.STATUSES
151 c.commit_statuses = ChangesetStatus.STATUSES
152
152
153 # if partial, returns just compare_commits.html (commits log)
153 # if partial, returns just compare_commits.html (commits log)
154 partial = self.request.is_xhr
154 partial = self.request.is_xhr
155
155
156 # swap url for compare_diff page
156 # swap url for compare_diff page
157 c.swap_url = h.route_path(
157 c.swap_url = h.route_path(
158 'repo_compare',
158 'repo_compare',
159 repo_name=target_repo_name,
159 repo_name=target_repo_name,
160 source_ref_type=target_ref_type,
160 source_ref_type=target_ref_type,
161 source_ref=target_ref,
161 source_ref=target_ref,
162 target_repo=source_repo_name,
162 target_repo=source_repo_name,
163 target_ref_type=source_ref_type,
163 target_ref_type=source_ref_type,
164 target_ref=source_ref,
164 target_ref=source_ref,
165 _query=dict(merge=merge and '1' or '', f_path=target_path))
165 _query=dict(merge=merge and '1' or '', f_path=target_path))
166
166
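The swap URL built above simply exchanges the two sides and re-encodes the merge flag; `merge and '1' or ''` is the old-style spelling of a conditional expression. A hedged sketch of both pieces on plain data (helper names are made up):

# Illustrative only: swap the two sides of a compare and encode the merge flag.
def swap_sides(source, target):
    # each side can be thought of as a (repo_name, ref_type, ref) tuple
    return target, source

def swapped_query(merge, f_path):
    # `merge and '1' or ''` behaves like "'1' if merge else ''" for a boolean merge
    return dict(merge=merge and '1' or '', f_path=f_path)

assert swap_sides(('repo-a', 'branch', 'default'), ('repo-b', 'tag', 'v1.0')) == \
    (('repo-b', 'tag', 'v1.0'), ('repo-a', 'branch', 'default'))
assert swapped_query(True, 'setup.py') == {'merge': '1', 'f_path': 'setup.py'}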
167 source_repo = Repository.get_by_repo_name(source_repo_name)
167 source_repo = Repository.get_by_repo_name(source_repo_name)
168 target_repo = Repository.get_by_repo_name(target_repo_name)
168 target_repo = Repository.get_by_repo_name(target_repo_name)
169
169
170 if source_repo is None:
170 if source_repo is None:
171 log.error('Could not find the source repo: {}'
171 log.error('Could not find the source repo: {}'
172 .format(source_repo_name))
172 .format(source_repo_name))
173 h.flash(_('Could not find the source repo: `{}`')
173 h.flash(_('Could not find the source repo: `{}`')
174 .format(h.escape(source_repo_name)), category='error')
174 .format(h.escape(source_repo_name)), category='error')
175 raise HTTPFound(
175 raise HTTPFound(
176 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
176 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
177
177
178 if target_repo is None:
178 if target_repo is None:
179 log.error('Could not find the target repo: {}'
179 log.error('Could not find the target repo: {}'
180 .format(target_repo_name))
180 .format(target_repo_name))
181 h.flash(_('Could not find the target repo: `{}`')
181 h.flash(_('Could not find the target repo: `{}`')
182 .format(h.escape(target_repo_name)), category='error')
182 .format(h.escape(target_repo_name)), category='error')
183 raise HTTPFound(
183 raise HTTPFound(
184 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
184 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
185
185
186 source_scm = source_repo.scm_instance()
186 source_scm = source_repo.scm_instance()
187 target_scm = target_repo.scm_instance()
187 target_scm = target_repo.scm_instance()
188
188
189 source_alias = source_scm.alias
189 source_alias = source_scm.alias
190 target_alias = target_scm.alias
190 target_alias = target_scm.alias
191 if source_alias != target_alias:
191 if source_alias != target_alias:
192 msg = _('The comparison of two different kinds of remote repos '
192 msg = _('The comparison of two different kinds of remote repos '
193 'is not available')
193 'is not available')
194 log.error(msg)
194 log.error(msg)
195 h.flash(msg, category='error')
195 h.flash(msg, category='error')
196 raise HTTPFound(
196 raise HTTPFound(
197 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
197 h.route_path('repo_compare_select', repo_name=self.db_repo_name))
198
198
199 source_commit = self._get_commit_or_redirect(
199 source_commit = self._get_commit_or_redirect(
200 ref=source_id, ref_type=source_ref_type, repo=source_repo,
200 ref=source_id, ref_type=source_ref_type, repo=source_repo,
201 partial=partial)
201 partial=partial)
202 target_commit = self._get_commit_or_redirect(
202 target_commit = self._get_commit_or_redirect(
203 ref=target_id, ref_type=target_ref_type, repo=target_repo,
203 ref=target_id, ref_type=target_ref_type, repo=target_repo,
204 partial=partial)
204 partial=partial)
205
205
206 c.compare_home = False
206 c.compare_home = False
207 c.source_repo = source_repo
207 c.source_repo = source_repo
208 c.target_repo = target_repo
208 c.target_repo = target_repo
209 c.source_ref = source_ref
209 c.source_ref = source_ref
210 c.target_ref = target_ref
210 c.target_ref = target_ref
211 c.source_ref_type = source_ref_type
211 c.source_ref_type = source_ref_type
212 c.target_ref_type = target_ref_type
212 c.target_ref_type = target_ref_type
213
213
214 pre_load = ["author", "branch", "date", "message"]
214 pre_load = ["author", "date", "message", "branch"]
215 c.ancestor = None
215 c.ancestor = None
216
216
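The pre_load list above (the line this changeset touches) appears to name the commit attributes to fetch eagerly before the commits are handed to compare() below, so later attribute access can be served from a per-commit cache. A rough, hypothetical sketch of that pre-loading idea; none of these class or method names come from the codebase:

# Hypothetical attribute pre-loading: fetch a fixed set of fields once,
# then serve later attribute access from a plain dict.
class ToyBackend(object):
    def fetch(self, commit_id, name):
        return '%s-of-%s' % (name, commit_id)

class LazyCommit(object):
    def __init__(self, backend, commit_id, pre_load=()):
        self._cache = {name: backend.fetch(commit_id, name) for name in pre_load}

    def __getattr__(self, name):
        try:
            return self._cache[name]
        except KeyError:
            raise AttributeError(name)

commit = LazyCommit(ToyBackend(), 'deadbeef',
                    pre_load=["author", "date", "message", "branch"])
assert commit.author == 'author-of-deadbeef'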
217 try:
217 try:
218 c.commit_ranges = source_scm.compare(
218 c.commit_ranges = source_scm.compare(
219 source_commit.raw_id, target_commit.raw_id,
219 source_commit.raw_id, target_commit.raw_id,
220 target_scm, merge, pre_load=pre_load) or []
220 target_scm, merge, pre_load=pre_load) or []
221 if merge:
221 if merge:
222 c.ancestor = source_scm.get_common_ancestor(
222 c.ancestor = source_scm.get_common_ancestor(
223 source_commit.raw_id, target_commit.raw_id, target_scm)
223 source_commit.raw_id, target_commit.raw_id, target_scm)
224 except RepositoryRequirementError:
224 except RepositoryRequirementError:
225 msg = _('Could not compare repos with different '
225 msg = _('Could not compare repos with different '
226 'large file settings')
226 'large file settings')
227 log.error(msg)
227 log.error(msg)
228 if partial:
228 if partial:
229 return Response(msg)
229 return Response(msg)
230 h.flash(msg, category='error')
230 h.flash(msg, category='error')
231 raise HTTPFound(
231 raise HTTPFound(
232 h.route_path('repo_compare_select',
232 h.route_path('repo_compare_select',
233 repo_name=self.db_repo_name))
233 repo_name=self.db_repo_name))
234
234
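get_common_ancestor above is delegated to the VCS backend; as a stand-alone illustration only (not the backend's actual algorithm), common ancestors of two commits can be found by intersecting their ancestor sets in a parent-pointer DAG:

# Toy history: child -> list of parents. Purely illustrative.
PARENTS = {'d': ['b'], 'c': ['b'], 'b': ['a'], 'a': []}

def ancestors(commit, parents=PARENTS):
    seen, stack = set(), [commit]
    while stack:
        node = stack.pop()
        if node not in seen:
            seen.add(node)
            stack.extend(parents.get(node, []))
    return seen

def common_ancestors(c1, c2):
    return ancestors(c1) & ancestors(c2)

# 'd' and 'c' share 'b' (and the root 'a'); a real backend picks the best candidate.
assert common_ancestors('d', 'c') == {'a', 'b'}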
235 c.statuses = self.db_repo.statuses(
235 c.statuses = self.db_repo.statuses(
236 [x.raw_id for x in c.commit_ranges])
236 [x.raw_id for x in c.commit_ranges])
237
237
238 # auto collapse if we have more than limit
238 # auto collapse if we have more than limit
239 collapse_limit = diffs.DiffProcessor._collapse_commits_over
239 collapse_limit = diffs.DiffProcessor._collapse_commits_over
240 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
240 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
241
241
242 if partial: # for PR ajax commits loader
242 if partial: # for PR ajax commits loader
243 if not c.ancestor:
243 if not c.ancestor:
244 return Response('') # cannot merge if there is no ancestor
244 return Response('') # cannot merge if there is no ancestor
245
245
246 html = render(
246 html = render(
247 'rhodecode:templates/compare/compare_commits.mako',
247 'rhodecode:templates/compare/compare_commits.mako',
248 self._get_template_context(c), self.request)
248 self._get_template_context(c), self.request)
249 return Response(html)
249 return Response(html)
250
250
251 if c.ancestor:
251 if c.ancestor:
252 # in this case we want a simple diff without incoming commits,
252 # in this case we want a simple diff without incoming commits,
253 # previewing what will be merged.
253 # previewing what will be merged.
254 # Make the diff on target repo (which is known to have target_ref)
254 # Make the diff on target repo (which is known to have target_ref)
255 log.debug('Using ancestor %s as source_ref instead of %s',
255 log.debug('Using ancestor %s as source_ref instead of %s',
256 c.ancestor, source_ref)
256 c.ancestor, source_ref)
257 source_repo = target_repo
257 source_repo = target_repo
258 source_commit = target_repo.get_commit(commit_id=c.ancestor)
258 source_commit = target_repo.get_commit(commit_id=c.ancestor)
259
259
260 # diff_limit will cut off the whole diff if the limit is applied
260 # diff_limit will cut off the whole diff if the limit is applied
261 # otherwise it will just hide the big files from the front-end
261 # otherwise it will just hide the big files from the front-end
262 diff_limit = c.visual.cut_off_limit_diff
262 diff_limit = c.visual.cut_off_limit_diff
263 file_limit = c.visual.cut_off_limit_file
263 file_limit = c.visual.cut_off_limit_file
264
264
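The comment above distinguishes the two cut-off limits: diff_limit can truncate the whole diff, while file_limit only hides individual oversized files from the front-end. A hedged sketch of that behaviour on plain numbers (all names hypothetical):

# Illustrative only: sizes are arbitrary units.
def apply_limits(file_sizes, diff_limit, file_limit):
    if sum(file_sizes.values()) > diff_limit:
        return {}, True  # whole diff cut off -> "limited diff"
    shown = {name: size for name, size in file_sizes.items() if size <= file_limit}
    return shown, False

shown, limited = apply_limits({'a.py': 10, 'big.bin': 900},
                              diff_limit=2000, file_limit=100)
assert shown == {'a.py': 10} and limited is False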
265 log.debug('calculating diff between '
265 log.debug('calculating diff between '
266 'source_ref:%s and target_ref:%s for repo `%s`',
266 'source_ref:%s and target_ref:%s for repo `%s`',
267 source_commit, target_commit,
267 source_commit, target_commit,
268 safe_unicode(source_repo.scm_instance().path))
268 safe_unicode(source_repo.scm_instance().path))
269
269
270 if source_commit.repository != target_commit.repository:
270 if source_commit.repository != target_commit.repository:
271 msg = _(
271 msg = _(
272 "Repositories unrelated. "
272 "Repositories unrelated. "
273 "Cannot compare commit %(commit1)s from repository %(repo1)s "
273 "Cannot compare commit %(commit1)s from repository %(repo1)s "
274 "with commit %(commit2)s from repository %(repo2)s.") % {
274 "with commit %(commit2)s from repository %(repo2)s.") % {
275 'commit1': h.show_id(source_commit),
275 'commit1': h.show_id(source_commit),
276 'repo1': source_repo.repo_name,
276 'repo1': source_repo.repo_name,
277 'commit2': h.show_id(target_commit),
277 'commit2': h.show_id(target_commit),
278 'repo2': target_repo.repo_name,
278 'repo2': target_repo.repo_name,
279 }
279 }
280 h.flash(msg, category='error')
280 h.flash(msg, category='error')
281 raise HTTPFound(
281 raise HTTPFound(
282 h.route_path('repo_compare_select',
282 h.route_path('repo_compare_select',
283 repo_name=self.db_repo_name))
283 repo_name=self.db_repo_name))
284
284
285 txt_diff = source_repo.scm_instance().get_diff(
285 txt_diff = source_repo.scm_instance().get_diff(
286 commit1=source_commit, commit2=target_commit,
286 commit1=source_commit, commit2=target_commit,
287 path=target_path, path1=source_path,
287 path=target_path, path1=source_path,
288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
289
289
290 diff_processor = diffs.DiffProcessor(
290 diff_processor = diffs.DiffProcessor(
291 txt_diff, format='newdiff', diff_limit=diff_limit,
291 txt_diff, format='newdiff', diff_limit=diff_limit,
292 file_limit=file_limit, show_full_diff=c.fulldiff)
292 file_limit=file_limit, show_full_diff=c.fulldiff)
293 _parsed = diff_processor.prepare()
293 _parsed = diff_processor.prepare()
294
294
295 diffset = codeblocks.DiffSet(
295 diffset = codeblocks.DiffSet(
296 repo_name=source_repo.repo_name,
296 repo_name=source_repo.repo_name,
297 source_node_getter=codeblocks.diffset_node_getter(source_commit),
297 source_node_getter=codeblocks.diffset_node_getter(source_commit),
298 target_repo_name=self.db_repo_name,
298 target_repo_name=self.db_repo_name,
299 target_node_getter=codeblocks.diffset_node_getter(target_commit),
299 target_node_getter=codeblocks.diffset_node_getter(target_commit),
300 )
300 )
301 c.diffset = self.path_filter.render_patchset_filtered(
301 c.diffset = self.path_filter.render_patchset_filtered(
302 diffset, _parsed, source_ref, target_ref)
302 diffset, _parsed, source_ref, target_ref)
303
303
304 c.preview_mode = merge
304 c.preview_mode = merge
305 c.source_commit = source_commit
305 c.source_commit = source_commit
306 c.target_commit = target_commit
306 c.target_commit = target_commit
307
307
308 html = render(
308 html = render(
309 'rhodecode:templates/compare/compare_diff.mako',
309 'rhodecode:templates/compare/compare_diff.mako',
310 self._get_template_context(c), self.request)
310 self._get_template_context(c), self.request)
311 return Response(html)
\ No newline at end of file
311 return Response(html)
@@ -1,1464 +1,1464 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 NotAnonymous, CSRFRequired)
40 NotAnonymous, CSRFRequired)
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, EmptyRepositoryError)
44 RepositoryRequirementError, EmptyRepositoryError)
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
48 ChangesetComment, ChangesetStatus, Repository)
49 from rhodecode.model.forms import PullRequestForm
49 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.meta import Session
50 from rhodecode.model.meta import Session
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58
58
59 def load_default_context(self):
59 def load_default_context(self):
60 c = self._get_local_tmpl_context(include_app_defaults=True)
60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 # backward compat.: for OLD PRs we use a plain renderer
63 # backward compat.: for OLD PRs we use a plain renderer
64 c.renderer = 'plain'
64 c.renderer = 'plain'
65 return c
65 return c
66
66
67 def _get_pull_requests_list(
67 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
68 self, repo_name, source, filter_type, opened_by, statuses):
69
69
70 draw, start, limit = self._extract_chunk(self.request)
70 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
72 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
73 'rhodecode:templates/data_table/_dt_elements.mako')
74
74
75 # pagination
75 # pagination
76
76
77 if filter_type == 'awaiting_review':
77 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, source=source, opened_by=opened_by,
79 repo_name, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
80 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
81 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, source=source, statuses=statuses,
83 repo_name, source=source, statuses=statuses,
84 opened_by=opened_by)
84 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
85 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, source=source, opened_by=opened_by,
87 repo_name, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
89 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
90 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
93 statuses=statuses, opened_by=opened_by)
94 else:
94 else:
95 pull_requests = PullRequestModel().get_all(
95 pull_requests = PullRequestModel().get_all(
96 repo_name, source=source, opened_by=opened_by,
96 repo_name, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
97 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
98 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, source=source, statuses=statuses,
100 repo_name, source=source, statuses=statuses,
101 opened_by=opened_by)
101 opened_by=opened_by)
102
102
103 data = []
103 data = []
104 comments_model = CommentsModel()
104 comments_model = CommentsModel()
105 for pr in pull_requests:
105 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
106 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
107 self.db_repo.repo_id, pull_request=pr)
108
108
109 data.append({
109 data.append({
110 'name': _render('pullrequest_name',
110 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.target_repo.repo_name),
111 pr.pull_request_id, pr.target_repo.repo_name),
112 'name_raw': pr.pull_request_id,
112 'name_raw': pr.pull_request_id,
113 'status': _render('pullrequest_status',
113 'status': _render('pullrequest_status',
114 pr.calculated_review_status()),
114 pr.calculated_review_status()),
115 'title': _render(
115 'title': _render(
116 'pullrequest_title', pr.title, pr.description),
116 'pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
117 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
118 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
119 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
121 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
122 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'author': _render('pullrequest_author',
124 'author': _render('pullrequest_author',
125 pr.author.full_contact, ),
125 pr.author.full_contact, ),
126 'author_raw': pr.author.full_name,
126 'author_raw': pr.author.full_name,
127 'comments': _render('pullrequest_comments', len(comments)),
127 'comments': _render('pullrequest_comments', len(comments)),
128 'comments_raw': len(comments),
128 'comments_raw': len(comments),
129 'closed': pr.is_closed(),
129 'closed': pr.is_closed(),
130 })
130 })
131
131
132 data = ({
132 data = ({
133 'draw': draw,
133 'draw': draw,
134 'data': data,
134 'data': data,
135 'recordsTotal': pull_requests_total_count,
135 'recordsTotal': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
137 })
137 })
138 return data
138 return data
139
139
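The dict assembled above follows the usual server-side data-grid contract (draw, data, recordsTotal, recordsFiltered). A minimal, hedged sketch of producing such a payload from an arbitrary row source:

# Hedged sketch; the keys mirror the dict built in _get_pull_requests_list.
def grid_payload(draw, rows, total_count):
    return {
        'draw': draw,
        'data': rows,
        'recordsTotal': total_count,
        'recordsFiltered': total_count,  # no extra filtering in this sketch
    }

payload = grid_payload(draw=1, rows=[{'name': 'PR #1'}], total_count=1)
assert payload['recordsTotal'] == payload['recordsFiltered'] == 1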
140 @LoginRequired()
140 @LoginRequired()
141 @HasRepoPermissionAnyDecorator(
141 @HasRepoPermissionAnyDecorator(
142 'repository.read', 'repository.write', 'repository.admin')
142 'repository.read', 'repository.write', 'repository.admin')
143 @view_config(
143 @view_config(
144 route_name='pullrequest_show_all', request_method='GET',
144 route_name='pullrequest_show_all', request_method='GET',
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 def pull_request_list(self):
146 def pull_request_list(self):
147 c = self.load_default_context()
147 c = self.load_default_context()
148
148
149 req_get = self.request.GET
149 req_get = self.request.GET
150 c.source = str2bool(req_get.get('source'))
150 c.source = str2bool(req_get.get('source'))
151 c.closed = str2bool(req_get.get('closed'))
151 c.closed = str2bool(req_get.get('closed'))
152 c.my = str2bool(req_get.get('my'))
152 c.my = str2bool(req_get.get('my'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155
155
156 c.active = 'open'
156 c.active = 'open'
157 if c.my:
157 if c.my:
158 c.active = 'my'
158 c.active = 'my'
159 if c.closed:
159 if c.closed:
160 c.active = 'closed'
160 c.active = 'closed'
161 if c.awaiting_review and not c.source:
161 if c.awaiting_review and not c.source:
162 c.active = 'awaiting'
162 c.active = 'awaiting'
163 if c.source and not c.awaiting_review:
163 if c.source and not c.awaiting_review:
164 c.active = 'source'
164 c.active = 'source'
165 if c.awaiting_my_review:
165 if c.awaiting_my_review:
166 c.active = 'awaiting_my'
166 c.active = 'awaiting_my'
167
167
168 return self._get_template_context(c)
168 return self._get_template_context(c)
169
169
170 @LoginRequired()
170 @LoginRequired()
171 @HasRepoPermissionAnyDecorator(
171 @HasRepoPermissionAnyDecorator(
172 'repository.read', 'repository.write', 'repository.admin')
172 'repository.read', 'repository.write', 'repository.admin')
173 @view_config(
173 @view_config(
174 route_name='pullrequest_show_all_data', request_method='GET',
174 route_name='pullrequest_show_all_data', request_method='GET',
175 renderer='json_ext', xhr=True)
175 renderer='json_ext', xhr=True)
176 def pull_request_list_data(self):
176 def pull_request_list_data(self):
177 self.load_default_context()
177 self.load_default_context()
178
178
179 # additional filters
179 # additional filters
180 req_get = self.request.GET
180 req_get = self.request.GET
181 source = str2bool(req_get.get('source'))
181 source = str2bool(req_get.get('source'))
182 closed = str2bool(req_get.get('closed'))
182 closed = str2bool(req_get.get('closed'))
183 my = str2bool(req_get.get('my'))
183 my = str2bool(req_get.get('my'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186
186
187 filter_type = 'awaiting_review' if awaiting_review \
187 filter_type = 'awaiting_review' if awaiting_review \
188 else 'awaiting_my_review' if awaiting_my_review \
188 else 'awaiting_my_review' if awaiting_my_review \
189 else None
189 else None
190
190
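The chained conditional above reads right to left; an equivalent, more explicit spelling of the same selection:

def pick_filter_type(awaiting_review, awaiting_my_review):
    if awaiting_review:
        return 'awaiting_review'
    if awaiting_my_review:
        return 'awaiting_my_review'
    return None

assert pick_filter_type(False, True) == 'awaiting_my_review'
assert pick_filter_type(False, False) is None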
191 opened_by = None
191 opened_by = None
192 if my:
192 if my:
193 opened_by = [self._rhodecode_user.user_id]
193 opened_by = [self._rhodecode_user.user_id]
194
194
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 if closed:
196 if closed:
197 statuses = [PullRequest.STATUS_CLOSED]
197 statuses = [PullRequest.STATUS_CLOSED]
198
198
199 data = self._get_pull_requests_list(
199 data = self._get_pull_requests_list(
200 repo_name=self.db_repo_name, source=source,
200 repo_name=self.db_repo_name, source=source,
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202
202
203 return data
203 return data
204
204
205 def _is_diff_cache_enabled(self, target_repo):
205 def _is_diff_cache_enabled(self, target_repo):
206 caching_enabled = self._get_general_setting(
206 caching_enabled = self._get_general_setting(
207 target_repo, 'rhodecode_diff_cache')
207 target_repo, 'rhodecode_diff_cache')
208 log.debug('Diff caching enabled: %s', caching_enabled)
208 log.debug('Diff caching enabled: %s', caching_enabled)
209 return caching_enabled
209 return caching_enabled
210
210
211 def _get_diffset(self, source_repo_name, source_repo,
211 def _get_diffset(self, source_repo_name, source_repo,
212 source_ref_id, target_ref_id,
212 source_ref_id, target_ref_id,
213 target_commit, source_commit, diff_limit, file_limit,
213 target_commit, source_commit, diff_limit, file_limit,
214 fulldiff, hide_whitespace_changes, diff_context):
214 fulldiff, hide_whitespace_changes, diff_context):
215
215
216 vcs_diff = PullRequestModel().get_diff(
216 vcs_diff = PullRequestModel().get_diff(
217 source_repo, source_ref_id, target_ref_id,
217 source_repo, source_ref_id, target_ref_id,
218 hide_whitespace_changes, diff_context)
218 hide_whitespace_changes, diff_context)
219
219
220 diff_processor = diffs.DiffProcessor(
220 diff_processor = diffs.DiffProcessor(
221 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 file_limit=file_limit, show_full_diff=fulldiff)
222 file_limit=file_limit, show_full_diff=fulldiff)
223
223
224 _parsed = diff_processor.prepare()
224 _parsed = diff_processor.prepare()
225
225
226 diffset = codeblocks.DiffSet(
226 diffset = codeblocks.DiffSet(
227 repo_name=self.db_repo_name,
227 repo_name=self.db_repo_name,
228 source_repo_name=source_repo_name,
228 source_repo_name=source_repo_name,
229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 )
231 )
232 diffset = self.path_filter.render_patchset_filtered(
232 diffset = self.path_filter.render_patchset_filtered(
233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234
234
235 return diffset
235 return diffset
236
236
237 def _get_range_diffset(self, source_scm, source_repo,
237 def _get_range_diffset(self, source_scm, source_repo,
238 commit1, commit2, diff_limit, file_limit,
238 commit1, commit2, diff_limit, file_limit,
239 fulldiff, hide_whitespace_changes, diff_context):
239 fulldiff, hide_whitespace_changes, diff_context):
240 vcs_diff = source_scm.get_diff(
240 vcs_diff = source_scm.get_diff(
241 commit1, commit2,
241 commit1, commit2,
242 ignore_whitespace=hide_whitespace_changes,
242 ignore_whitespace=hide_whitespace_changes,
243 context=diff_context)
243 context=diff_context)
244
244
245 diff_processor = diffs.DiffProcessor(
245 diff_processor = diffs.DiffProcessor(
246 vcs_diff, format='newdiff', diff_limit=diff_limit,
246 vcs_diff, format='newdiff', diff_limit=diff_limit,
247 file_limit=file_limit, show_full_diff=fulldiff)
247 file_limit=file_limit, show_full_diff=fulldiff)
248
248
249 _parsed = diff_processor.prepare()
249 _parsed = diff_processor.prepare()
250
250
251 diffset = codeblocks.DiffSet(
251 diffset = codeblocks.DiffSet(
252 repo_name=source_repo.repo_name,
252 repo_name=source_repo.repo_name,
253 source_node_getter=codeblocks.diffset_node_getter(commit1),
253 source_node_getter=codeblocks.diffset_node_getter(commit1),
254 target_node_getter=codeblocks.diffset_node_getter(commit2))
254 target_node_getter=codeblocks.diffset_node_getter(commit2))
255
255
256 diffset = self.path_filter.render_patchset_filtered(
256 diffset = self.path_filter.render_patchset_filtered(
257 diffset, _parsed, commit1.raw_id, commit2.raw_id)
257 diffset, _parsed, commit1.raw_id, commit2.raw_id)
258
258
259 return diffset
259 return diffset
260
260
261 @LoginRequired()
261 @LoginRequired()
262 @HasRepoPermissionAnyDecorator(
262 @HasRepoPermissionAnyDecorator(
263 'repository.read', 'repository.write', 'repository.admin')
263 'repository.read', 'repository.write', 'repository.admin')
264 @view_config(
264 @view_config(
265 route_name='pullrequest_show', request_method='GET',
265 route_name='pullrequest_show', request_method='GET',
266 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
266 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 def pull_request_show(self):
267 def pull_request_show(self):
268 _ = self.request.translate
268 _ = self.request.translate
269 c = self.load_default_context()
269 c = self.load_default_context()
270
270
271 pull_request = PullRequest.get_or_404(
271 pull_request = PullRequest.get_or_404(
272 self.request.matchdict['pull_request_id'])
272 self.request.matchdict['pull_request_id'])
273 pull_request_id = pull_request.pull_request_id
273 pull_request_id = pull_request.pull_request_id
274
274
275 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
275 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
276 log.debug('show: forbidden because pull request is in state %s',
276 log.debug('show: forbidden because pull request is in state %s',
277 pull_request.pull_request_state)
277 pull_request.pull_request_state)
278 msg = _(u'Cannot show pull requests in state other than `{}`. '
278 msg = _(u'Cannot show pull requests in state other than `{}`. '
279 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
279 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
280 pull_request.pull_request_state)
280 pull_request.pull_request_state)
281 h.flash(msg, category='error')
281 h.flash(msg, category='error')
282 raise HTTPFound(h.route_path('pullrequest_show_all',
282 raise HTTPFound(h.route_path('pullrequest_show_all',
283 repo_name=self.db_repo_name))
283 repo_name=self.db_repo_name))
284
284
285 version = self.request.GET.get('version')
285 version = self.request.GET.get('version')
286 from_version = self.request.GET.get('from_version') or version
286 from_version = self.request.GET.get('from_version') or version
287 merge_checks = self.request.GET.get('merge_checks')
287 merge_checks = self.request.GET.get('merge_checks')
288 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
288 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
289
289
290 # fetch global flags of ignore ws or context lines
290 # fetch global flags of ignore ws or context lines
291 diff_context = diffs.get_diff_context(self.request)
291 diff_context = diffs.get_diff_context(self.request)
292 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
292 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
293
293
294 force_refresh = str2bool(self.request.GET.get('force_refresh'))
294 force_refresh = str2bool(self.request.GET.get('force_refresh'))
295
295
296 (pull_request_latest,
296 (pull_request_latest,
297 pull_request_at_ver,
297 pull_request_at_ver,
298 pull_request_display_obj,
298 pull_request_display_obj,
299 at_version) = PullRequestModel().get_pr_version(
299 at_version) = PullRequestModel().get_pr_version(
300 pull_request_id, version=version)
300 pull_request_id, version=version)
301 pr_closed = pull_request_latest.is_closed()
301 pr_closed = pull_request_latest.is_closed()
302
302
303 if pr_closed and (version or from_version):
303 if pr_closed and (version or from_version):
304 # not allow to browse versions
304 # not allow to browse versions
305 raise HTTPFound(h.route_path(
305 raise HTTPFound(h.route_path(
306 'pullrequest_show', repo_name=self.db_repo_name,
306 'pullrequest_show', repo_name=self.db_repo_name,
307 pull_request_id=pull_request_id))
307 pull_request_id=pull_request_id))
308
308
309 versions = pull_request_display_obj.versions()
309 versions = pull_request_display_obj.versions()
310 # used to store per-commit range diffs
310 # used to store per-commit range diffs
311 c.changes = collections.OrderedDict()
311 c.changes = collections.OrderedDict()
312 c.range_diff_on = self.request.GET.get('range-diff') == "1"
312 c.range_diff_on = self.request.GET.get('range-diff') == "1"
313
313
314 c.at_version = at_version
314 c.at_version = at_version
315 c.at_version_num = (at_version
315 c.at_version_num = (at_version
316 if at_version and at_version != 'latest'
316 if at_version and at_version != 'latest'
317 else None)
317 else None)
318 c.at_version_pos = ChangesetComment.get_index_from_version(
318 c.at_version_pos = ChangesetComment.get_index_from_version(
319 c.at_version_num, versions)
319 c.at_version_num, versions)
320
320
321 (prev_pull_request_latest,
321 (prev_pull_request_latest,
322 prev_pull_request_at_ver,
322 prev_pull_request_at_ver,
323 prev_pull_request_display_obj,
323 prev_pull_request_display_obj,
324 prev_at_version) = PullRequestModel().get_pr_version(
324 prev_at_version) = PullRequestModel().get_pr_version(
325 pull_request_id, version=from_version)
325 pull_request_id, version=from_version)
326
326
327 c.from_version = prev_at_version
327 c.from_version = prev_at_version
328 c.from_version_num = (prev_at_version
328 c.from_version_num = (prev_at_version
329 if prev_at_version and prev_at_version != 'latest'
329 if prev_at_version and prev_at_version != 'latest'
330 else None)
330 else None)
331 c.from_version_pos = ChangesetComment.get_index_from_version(
331 c.from_version_pos = ChangesetComment.get_index_from_version(
332 c.from_version_num, versions)
332 c.from_version_num, versions)
333
333
334 # define if we're in COMPARE mode or VIEW at version mode
334 # define if we're in COMPARE mode or VIEW at version mode
335 compare = at_version != prev_at_version
335 compare = at_version != prev_at_version
336
336
337 # the repo_name the pull request was opened against,
337 # the repo_name the pull request was opened against,
338 # i.e. target_repo must match
338 # i.e. target_repo must match
339 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
339 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
340 raise HTTPNotFound()
340 raise HTTPNotFound()
341
341
342 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
342 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
343 pull_request_at_ver)
343 pull_request_at_ver)
344
344
345 c.pull_request = pull_request_display_obj
345 c.pull_request = pull_request_display_obj
346 c.renderer = pull_request_at_ver.description_renderer or c.renderer
346 c.renderer = pull_request_at_ver.description_renderer or c.renderer
347 c.pull_request_latest = pull_request_latest
347 c.pull_request_latest = pull_request_latest
348
348
349 if compare or (at_version and not at_version == 'latest'):
349 if compare or (at_version and not at_version == 'latest'):
350 c.allowed_to_change_status = False
350 c.allowed_to_change_status = False
351 c.allowed_to_update = False
351 c.allowed_to_update = False
352 c.allowed_to_merge = False
352 c.allowed_to_merge = False
353 c.allowed_to_delete = False
353 c.allowed_to_delete = False
354 c.allowed_to_comment = False
354 c.allowed_to_comment = False
355 c.allowed_to_close = False
355 c.allowed_to_close = False
356 else:
356 else:
357 can_change_status = PullRequestModel().check_user_change_status(
357 can_change_status = PullRequestModel().check_user_change_status(
358 pull_request_at_ver, self._rhodecode_user)
358 pull_request_at_ver, self._rhodecode_user)
359 c.allowed_to_change_status = can_change_status and not pr_closed
359 c.allowed_to_change_status = can_change_status and not pr_closed
360
360
361 c.allowed_to_update = PullRequestModel().check_user_update(
361 c.allowed_to_update = PullRequestModel().check_user_update(
362 pull_request_latest, self._rhodecode_user) and not pr_closed
362 pull_request_latest, self._rhodecode_user) and not pr_closed
363 c.allowed_to_merge = PullRequestModel().check_user_merge(
363 c.allowed_to_merge = PullRequestModel().check_user_merge(
364 pull_request_latest, self._rhodecode_user) and not pr_closed
364 pull_request_latest, self._rhodecode_user) and not pr_closed
365 c.allowed_to_delete = PullRequestModel().check_user_delete(
365 c.allowed_to_delete = PullRequestModel().check_user_delete(
366 pull_request_latest, self._rhodecode_user) and not pr_closed
366 pull_request_latest, self._rhodecode_user) and not pr_closed
367 c.allowed_to_comment = not pr_closed
367 c.allowed_to_comment = not pr_closed
368 c.allowed_to_close = c.allowed_to_merge and not pr_closed
368 c.allowed_to_close = c.allowed_to_merge and not pr_closed
369
369
370 c.forbid_adding_reviewers = False
370 c.forbid_adding_reviewers = False
371 c.forbid_author_to_review = False
371 c.forbid_author_to_review = False
372 c.forbid_commit_author_to_review = False
372 c.forbid_commit_author_to_review = False
373
373
374 if pull_request_latest.reviewer_data and \
374 if pull_request_latest.reviewer_data and \
375 'rules' in pull_request_latest.reviewer_data:
375 'rules' in pull_request_latest.reviewer_data:
376 rules = pull_request_latest.reviewer_data['rules'] or {}
376 rules = pull_request_latest.reviewer_data['rules'] or {}
377 try:
377 try:
378 c.forbid_adding_reviewers = rules.get(
378 c.forbid_adding_reviewers = rules.get(
379 'forbid_adding_reviewers')
379 'forbid_adding_reviewers')
380 c.forbid_author_to_review = rules.get(
380 c.forbid_author_to_review = rules.get(
381 'forbid_author_to_review')
381 'forbid_author_to_review')
382 c.forbid_commit_author_to_review = rules.get(
382 c.forbid_commit_author_to_review = rules.get(
383 'forbid_commit_author_to_review')
383 'forbid_commit_author_to_review')
384 except Exception:
384 except Exception:
385 pass
385 pass
386
386
387 # check merge capabilities
387 # check merge capabilities
388 _merge_check = MergeCheck.validate(
388 _merge_check = MergeCheck.validate(
389 pull_request_latest, auth_user=self._rhodecode_user,
389 pull_request_latest, auth_user=self._rhodecode_user,
390 translator=self.request.translate,
390 translator=self.request.translate,
391 force_shadow_repo_refresh=force_refresh)
391 force_shadow_repo_refresh=force_refresh)
392 c.pr_merge_errors = _merge_check.error_details
392 c.pr_merge_errors = _merge_check.error_details
393 c.pr_merge_possible = not _merge_check.failed
393 c.pr_merge_possible = not _merge_check.failed
394 c.pr_merge_message = _merge_check.merge_msg
394 c.pr_merge_message = _merge_check.merge_msg
395
395
396 c.pr_merge_info = MergeCheck.get_merge_conditions(
396 c.pr_merge_info = MergeCheck.get_merge_conditions(
397 pull_request_latest, translator=self.request.translate)
397 pull_request_latest, translator=self.request.translate)
398
398
399 c.pull_request_review_status = _merge_check.review_status
399 c.pull_request_review_status = _merge_check.review_status
400 if merge_checks:
400 if merge_checks:
401 self.request.override_renderer = \
401 self.request.override_renderer = \
402 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
402 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
403 return self._get_template_context(c)
403 return self._get_template_context(c)
404
404
405 comments_model = CommentsModel()
405 comments_model = CommentsModel()
406
406
407 # reviewers and statuses
407 # reviewers and statuses
408 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
408 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
409 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
409 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
410
410
411 # GENERAL COMMENTS with versions #
411 # GENERAL COMMENTS with versions #
412 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
412 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
413 q = q.order_by(ChangesetComment.comment_id.asc())
413 q = q.order_by(ChangesetComment.comment_id.asc())
414 general_comments = q
414 general_comments = q
415
415
416 # pick comments we want to render at current version
416 # pick comments we want to render at current version
417 c.comment_versions = comments_model.aggregate_comments(
417 c.comment_versions = comments_model.aggregate_comments(
418 general_comments, versions, c.at_version_num)
418 general_comments, versions, c.at_version_num)
419 c.comments = c.comment_versions[c.at_version_num]['until']
419 c.comments = c.comment_versions[c.at_version_num]['until']
420
420
421 # INLINE COMMENTS with versions #
421 # INLINE COMMENTS with versions #
422 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
422 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
423 q = q.order_by(ChangesetComment.comment_id.asc())
423 q = q.order_by(ChangesetComment.comment_id.asc())
424 inline_comments = q
424 inline_comments = q
425
425
426 c.inline_versions = comments_model.aggregate_comments(
426 c.inline_versions = comments_model.aggregate_comments(
427 inline_comments, versions, c.at_version_num, inline=True)
427 inline_comments, versions, c.at_version_num, inline=True)
428
428
429 # inject latest version
429 # inject latest version
430 latest_ver = PullRequest.get_pr_display_object(
430 latest_ver = PullRequest.get_pr_display_object(
431 pull_request_latest, pull_request_latest)
431 pull_request_latest, pull_request_latest)
432
432
433 c.versions = versions + [latest_ver]
433 c.versions = versions + [latest_ver]
434
434
435 # if we view a specific version, do not show comments made later
435 # if we view a specific version, do not show comments made later
436 # than that version
436 # than that version
437 display_inline_comments = collections.defaultdict(
437 display_inline_comments = collections.defaultdict(
438 lambda: collections.defaultdict(list))
438 lambda: collections.defaultdict(list))
439 for co in inline_comments:
439 for co in inline_comments:
440 if c.at_version_num:
440 if c.at_version_num:
441 # pick comments made up to (and including) the given version, so we
441 # pick comments made up to (and including) the given version, so we
442 # don't render comments for a higher version
442 # don't render comments for a higher version
443 should_render = co.pull_request_version_id and \
443 should_render = co.pull_request_version_id and \
444 co.pull_request_version_id <= c.at_version_num
444 co.pull_request_version_id <= c.at_version_num
445 else:
445 else:
446 # showing all, for 'latest'
446 # showing all, for 'latest'
447 should_render = True
447 should_render = True
448
448
449 if should_render:
449 if should_render:
450 display_inline_comments[co.f_path][co.line_no].append(co)
450 display_inline_comments[co.f_path][co.line_no].append(co)
451
451
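The loop above buckets inline comments per file and per line, keeping only those made at or below the viewed version. A self-contained sketch of the same grouping with a stand-in comment type (the namedtuple is illustrative, not the real model):

import collections

ToyComment = collections.namedtuple(
    'ToyComment', 'f_path line_no pull_request_version_id')

def group_inline_comments(comments, at_version_num=None):
    grouped = collections.defaultdict(lambda: collections.defaultdict(list))
    for co in comments:
        if at_version_num:
            should_render = (co.pull_request_version_id and
                             co.pull_request_version_id <= at_version_num)
        else:
            should_render = True  # 'latest' shows everything
        if should_render:
            grouped[co.f_path][co.line_no].append(co)
    return grouped

comments = [ToyComment('a.py', 'n10', 1), ToyComment('a.py', 'n10', 3)]
assert len(group_inline_comments(comments, at_version_num=2)['a.py']['n10']) == 1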
452 # load diff data into template context, if we use compare mode then
452 # load diff data into template context, if we use compare mode then
453 # diff is calculated based on changes between versions of PR
453 # diff is calculated based on changes between versions of PR
454
454
455 source_repo = pull_request_at_ver.source_repo
455 source_repo = pull_request_at_ver.source_repo
456 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
456 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
457
457
458 target_repo = pull_request_at_ver.target_repo
458 target_repo = pull_request_at_ver.target_repo
459 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
459 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
460
460
461 if compare:
461 if compare:
462 # in compare switch the diff base to latest commit from prev version
462 # in compare switch the diff base to latest commit from prev version
463 target_ref_id = prev_pull_request_display_obj.revisions[0]
463 target_ref_id = prev_pull_request_display_obj.revisions[0]
464
464
465 # even though PRs can be opened against bookmarks/branches/tags, we always
465 # even though PRs can be opened against bookmarks/branches/tags, we always
466 # convert this to a rev so later bookmark or branch changes do not affect it
466 # convert this to a rev so later bookmark or branch changes do not affect it
467 c.source_ref_type = 'rev'
467 c.source_ref_type = 'rev'
468 c.source_ref = source_ref_id
468 c.source_ref = source_ref_id
469
469
470 c.target_ref_type = 'rev'
470 c.target_ref_type = 'rev'
471 c.target_ref = target_ref_id
471 c.target_ref = target_ref_id
472
472
473 c.source_repo = source_repo
473 c.source_repo = source_repo
474 c.target_repo = target_repo
474 c.target_repo = target_repo
475
475
476 c.commit_ranges = []
476 c.commit_ranges = []
477 source_commit = EmptyCommit()
477 source_commit = EmptyCommit()
478 target_commit = EmptyCommit()
478 target_commit = EmptyCommit()
479 c.missing_requirements = False
479 c.missing_requirements = False
480
480
481 source_scm = source_repo.scm_instance()
481 source_scm = source_repo.scm_instance()
482 target_scm = target_repo.scm_instance()
482 target_scm = target_repo.scm_instance()
483
483
484 shadow_scm = None
484 shadow_scm = None
485 try:
485 try:
486 shadow_scm = pull_request_latest.get_shadow_repo()
486 shadow_scm = pull_request_latest.get_shadow_repo()
487 except Exception:
487 except Exception:
488 log.debug('Failed to get shadow repo', exc_info=True)
488 log.debug('Failed to get shadow repo', exc_info=True)
489 # try the existing source_repo first, and fall back to the shadow
489 # try the existing source_repo first, and fall back to the shadow
490 # repo if we can obtain one
490 # repo if we can obtain one
491 commits_source_repo = source_scm or shadow_scm
491 commits_source_repo = source_scm or shadow_scm
492
492
493 c.commits_source_repo = commits_source_repo
493 c.commits_source_repo = commits_source_repo
494 c.ancestor = None # set it to None, to hide it from PR view
494 c.ancestor = None # set it to None, to hide it from PR view
495
495
496 # empty version means latest, so we keep this to prevent
496 # empty version means latest, so we keep this to prevent
497 # double caching
497 # double caching
498 version_normalized = version or 'latest'
498 version_normalized = version or 'latest'
499 from_version_normalized = from_version or 'latest'
499 from_version_normalized = from_version or 'latest'
500
500
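Normalizing an empty version to 'latest' keeps the diff-cache key stable, so the same logical view never produces two cache entries. A tiny illustration with a made-up key builder:

# Illustrative cache-key builder: '' and None must map to the same key as 'latest'.
def diff_cache_key(pr_id, version=None, from_version=None):
    return 'pull_request:%s:%s:%s' % (
        pr_id, version or 'latest', from_version or 'latest')

assert diff_cache_key(7) == diff_cache_key(7, version='', from_version=None)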
501 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
501 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
502 cache_file_path = diff_cache_exist(
502 cache_file_path = diff_cache_exist(
503 cache_path, 'pull_request', pull_request_id, version_normalized,
503 cache_path, 'pull_request', pull_request_id, version_normalized,
504 from_version_normalized, source_ref_id, target_ref_id,
504 from_version_normalized, source_ref_id, target_ref_id,
505 hide_whitespace_changes, diff_context, c.fulldiff)
505 hide_whitespace_changes, diff_context, c.fulldiff)
506
506
507 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
507 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
508 force_recache = self.get_recache_flag()
508 force_recache = self.get_recache_flag()
509
509
510 cached_diff = None
510 cached_diff = None
511 if caching_enabled:
511 if caching_enabled:
512 cached_diff = load_cached_diff(cache_file_path)
512 cached_diff = load_cached_diff(cache_file_path)
513
513
514 has_proper_commit_cache = (
514 has_proper_commit_cache = (
515 cached_diff and cached_diff.get('commits')
515 cached_diff and cached_diff.get('commits')
516 and len(cached_diff.get('commits', [])) == 5
516 and len(cached_diff.get('commits', [])) == 5
517 and cached_diff.get('commits')[0]
517 and cached_diff.get('commits')[0]
518 and cached_diff.get('commits')[3])
518 and cached_diff.get('commits')[3])
519
519
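The check above only trusts a cached entry if its 'commits' tuple has the expected five elements (ancestor, commit cache, missing-requirements flag, source commit, target commit) and the ancestor and source entries are present. A hedged stand-alone version of that rule:

def commit_cache_is_proper(cached_diff):
    commits = (cached_diff or {}).get('commits')
    return bool(commits and len(commits) == 5 and commits[0] and commits[3])

assert commit_cache_is_proper({'commits': ['anc', {}, False, 'src', 'tgt']}) is True
assert commit_cache_is_proper({'commits': ['anc', {}, False]}) is False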
520 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
520 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
521 diff_commit_cache = \
521 diff_commit_cache = \
522 (ancestor_commit, commit_cache, missing_requirements,
522 (ancestor_commit, commit_cache, missing_requirements,
523 source_commit, target_commit) = cached_diff['commits']
523 source_commit, target_commit) = cached_diff['commits']
524 else:
524 else:
525 diff_commit_cache = \
525 diff_commit_cache = \
526 (ancestor_commit, commit_cache, missing_requirements,
526 (ancestor_commit, commit_cache, missing_requirements,
527 source_commit, target_commit) = self.get_commits(
527 source_commit, target_commit) = self.get_commits(
528 commits_source_repo,
528 commits_source_repo,
529 pull_request_at_ver,
529 pull_request_at_ver,
530 source_commit,
530 source_commit,
531 source_ref_id,
531 source_ref_id,
532 source_scm,
532 source_scm,
533 target_commit,
533 target_commit,
534 target_ref_id,
534 target_ref_id,
535 target_scm)
535 target_scm)
536
536
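The `x = (a, b, ...) = value` idiom used above binds both the whole tuple (kept for caching) and its individual elements in one statement; a tiny demonstration:

pair = (left, right) = ('source', 'target')
assert pair == ('source', 'target') and left == 'source' and right == 'target'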
537 # register our commit range
537 # register our commit range
538 for comm in commit_cache.values():
538 for comm in commit_cache.values():
539 c.commit_ranges.append(comm)
539 c.commit_ranges.append(comm)
540
540
541 c.missing_requirements = missing_requirements
541 c.missing_requirements = missing_requirements
542 c.ancestor_commit = ancestor_commit
542 c.ancestor_commit = ancestor_commit
543 c.statuses = source_repo.statuses(
543 c.statuses = source_repo.statuses(
544 [x.raw_id for x in c.commit_ranges])
544 [x.raw_id for x in c.commit_ranges])
545
545
546 # auto collapse if we have more than limit
546 # auto collapse if we have more than limit
547 collapse_limit = diffs.DiffProcessor._collapse_commits_over
547 collapse_limit = diffs.DiffProcessor._collapse_commits_over
548 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
548 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
549 c.compare_mode = compare
549 c.compare_mode = compare
550
550
551 # diff_limit is the old behavior, will cut off the whole diff
551 # diff_limit is the old behavior, will cut off the whole diff
552 # if the limit is applied otherwise will just hide the
552 # if the limit is applied otherwise will just hide the
553 # big files from the front-end
553 # big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        c.missing_commits = False
        if (c.missing_requirements
                or isinstance(source_commit, EmptyCommit)
                or source_commit == target_commit):

            c.missing_commits = True
        else:
            c.inline_comments = display_inline_comments

            has_proper_diff_cache = cached_diff and cached_diff.get('commits')
            if not force_recache and has_proper_diff_cache:
                c.diffset = cached_diff['diff']
                (ancestor_commit, commit_cache, missing_requirements,
                 source_commit, target_commit) = cached_diff['commits']
            else:
                c.diffset = self._get_diffset(
                    c.source_repo.repo_name, commits_source_repo,
                    source_ref_id, target_ref_id,
                    target_commit, source_commit,
                    diff_limit, file_limit, c.fulldiff,
                    hide_whitespace_changes, diff_context)

                # save cached diff
                if caching_enabled:
                    cache_diff(cache_file_path, c.diffset, diff_commit_cache)

            c.limited_diff = c.diffset.limited_diff

            # calculate removed files that are bound to comments
            comment_deleted_files = [
                fname for fname in display_inline_comments
                if fname not in c.diffset.file_stats]

            c.deleted_files_comments = collections.defaultdict(dict)
            for fname, per_line_comments in display_inline_comments.items():
                if fname in comment_deleted_files:
                    c.deleted_files_comments[fname]['stats'] = 0
                    c.deleted_files_comments[fname]['comments'] = list()
                    for lno, comments in per_line_comments.items():
                        c.deleted_files_comments[fname]['comments'].extend(comments)

            # maybe calculate the range diff
            if c.range_diff_on:
                # TODO(marcink): set whitespace/context
                context_lcl = 3
                ign_whitespace_lcl = False

                for commit in c.commit_ranges:
                    commit2 = commit
                    commit1 = commit.first_parent

                    range_diff_cache_file_path = diff_cache_exist(
                        cache_path, 'diff', commit.raw_id,
                        ign_whitespace_lcl, context_lcl, c.fulldiff)

                    cached_diff = None
                    if caching_enabled:
                        cached_diff = load_cached_diff(range_diff_cache_file_path)

                    has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                    if not force_recache and has_proper_diff_cache:
                        diffset = cached_diff['diff']
                    else:
                        diffset = self._get_range_diffset(
                            source_scm, source_repo,
                            commit1, commit2, diff_limit, file_limit,
                            c.fulldiff, ign_whitespace_lcl, context_lcl
                        )

                        # save cached diff
                        if caching_enabled:
                            cache_diff(range_diff_cache_file_path, diffset, None)

                    c.changes[commit.raw_id] = diffset

        # this is a hack to properly display links: when creating a PR, the
        # compare view and others use a different notation, and
        # compare_commits.mako renders links based on the target_repo.
        # We need to swap that here to generate the links properly on the HTML side
        c.target_repo = c.source_repo

        c.commit_statuses = ChangesetStatus.STATUSES

        c.show_version_changes = not pr_closed
        if c.show_version_changes:
            cur_obj = pull_request_at_ver
            prev_obj = prev_pull_request_at_ver

            old_commit_ids = prev_obj.revisions
            new_commit_ids = cur_obj.revisions
            commit_changes = PullRequestModel()._calculate_commit_id_changes(
                old_commit_ids, new_commit_ids)
            c.commit_changes_summary = commit_changes

            # calculate the diff for commits between versions
            c.commit_changes = []
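            # `mark` below pairs every commit id with a one-letter flag via
            # izip_longest: 'a' = added, 'r' = removed, 'c' = common to both
            # versions, so the loop can label each commit for the template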
            mark = lambda cs, fw: list(
                h.itertools.izip_longest([], cs, fillvalue=fw))
            for c_type, raw_id in mark(commit_changes.added, 'a') \
                    + mark(commit_changes.removed, 'r') \
                    + mark(commit_changes.common, 'c'):

                if raw_id in commit_cache:
                    commit = commit_cache[raw_id]
                else:
                    try:
                        commit = commits_source_repo.get_commit(raw_id)
                    except CommitDoesNotExistError:
                        # in case extraction fails, still use a "dummy" commit
                        # for display in the commit diff
                        commit = h.AttributeDict(
                            {'raw_id': raw_id,
                             'message': 'EMPTY or MISSING COMMIT'})
                c.commit_changes.append([c_type, commit])

            # current user review statuses for each version
            c.review_versions = {}
            if self._rhodecode_user.user_id in allowed_reviewers:
                for co in general_comments:
                    if co.author.user_id == self._rhodecode_user.user_id:
                        status = co.status_change
                        if status:
                            _ver_pr = status[0].comment.pull_request_version_id
                            c.review_versions[_ver_pr] = status[0]

        return self._get_template_context(c)

    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
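        # collects the PR's own commits into an OrderedDict (pre-loading the
        # attributes the templates read), resolves the source/target commits and
        # their common ancestor, and returns the tuple
        # (ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit)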
        commit_cache = collections.OrderedDict()
        missing_requirements = False
        try:
690 pre_load = ["author", "branch", "date", "message", "parents"]
690 pre_load = ["author", "date", "message", "branch", "parents"]
            show_revs = pull_request_at_ver.revisions
            for rev in show_revs:
                comm = commits_source_repo.get_commit(
                    commit_id=rev, pre_load=pre_load)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id))
        except CommitDoesNotExistError:
            log.warning(
                'Failed to get commit from `{}` repo'.format(
                    commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            log.warning(
                'Failed to get all required data from repo', exc_info=True)
            missing_requirements = True
        ancestor_commit = None
        try:
            ancestor_id = source_scm.get_common_ancestor(
                source_commit.raw_id, target_commit.raw_id, target_scm)
            ancestor_commit = source_scm.get_commit(ancestor_id)
        except Exception:
            ancestor_commit = None
        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit

    def assure_not_empty_repo(self):
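        # guard shared by the PR views: if the repository has no commits yet,
        # flash a warning and redirect to the summary page instead of failing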
        _ = self.request.translate

        try:
            self.db_repo.scm_instance().get_commit()
        except EmptyRepositoryError:
            h.flash(h.literal(_('There are no commits yet')),
                    category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo.repo_name))

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_new', request_method='GET',
        renderer='rhodecode:templates/pullrequests/pullrequest.mako')
    def pull_request_new(self):
        _ = self.request.translate
        c = self.load_default_context()

        self.assure_not_empty_repo()
        source_repo = self.db_repo

        commit_id = self.request.GET.get('commit')
        branch_ref = self.request.GET.get('branch')
        bookmark_ref = self.request.GET.get('bookmark')

        try:
            source_repo_data = PullRequestModel().generate_repo_data(
                source_repo, commit_id=commit_id,
                branch=branch_ref, bookmark=bookmark_ref,
                translator=self.request.translate)
        except CommitDoesNotExistError as e:
            log.exception(e)
            h.flash(_('Commit does not exist'), 'error')
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=source_repo.repo_name))

        default_target_repo = source_repo

        if source_repo.parent and c.has_origin_repo_read_perm:
            parent_vcs_obj = source_repo.parent.scm_instance()
            if parent_vcs_obj and not parent_vcs_obj.is_empty():
                # change default if we have a parent repo
                default_target_repo = source_repo.parent

        target_repo_data = PullRequestModel().generate_repo_data(
            default_target_repo, translator=self.request.translate)

        selected_source_ref = source_repo_data['refs']['selected_ref']
        title_source_ref = ''
        if selected_source_ref:
            title_source_ref = selected_source_ref.split(':', 2)[1]
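            # selected_ref is encoded as `type:name:commit_id`; the middle part
            # is the human-readable name used for the default pull request title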
        c.default_title = PullRequestModel().generate_pullrequest_title(
            source=source_repo.repo_name,
            source_ref=title_source_ref,
            target=default_target_repo.repo_name
        )

        c.default_repo_data = {
            'source_repo_name': source_repo.repo_name,
            'source_refs_json': json.dumps(source_repo_data),
            'target_repo_name': default_target_repo.repo_name,
            'target_refs_json': json.dumps(target_repo_data),
        }
        c.default_source_ref = selected_source_ref

        return self._get_template_context(c)

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_repo_refs', request_method='GET',
        renderer='json_ext', xhr=True)
    def pull_request_repo_refs(self):
        self.load_default_context()
        target_repo_name = self.request.matchdict['target_repo_name']
        repo = Repository.get_by_repo_name(target_repo_name)
        if not repo:
            raise HTTPNotFound()

        target_perm = HasRepoPermissionAny(
            'repository.read', 'repository.write', 'repository.admin')(
                target_repo_name)
        if not target_perm:
            raise HTTPNotFound()

        return PullRequestModel().generate_repo_data(
            repo, translator=self.request.translate)

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_repo_targets', request_method='GET',
        renderer='json_ext', xhr=True)
    def pullrequest_repo_targets(self):
        _ = self.request.translate
        filter_query = self.request.GET.get('query')

        # get the parents
        parent_target_repos = []
        if self.db_repo.parent:
            parents_query = Repository.query() \
                .order_by(func.length(Repository.repo_name)) \
                .filter(Repository.fork_id == self.db_repo.parent.repo_id)
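            # i.e. repositories forked from the same parent (siblings of this
            # repo), ordered with the shortest repo names first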

            if filter_query:
                ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
                parents_query = parents_query.filter(
                    Repository.repo_name.ilike(ilike_expression))
            parents = parents_query.limit(20).all()

            for parent in parents:
                parent_vcs_obj = parent.scm_instance()
                if parent_vcs_obj and not parent_vcs_obj.is_empty():
                    parent_target_repos.append(parent)

        # get other forks, and repo itself
        query = Repository.query() \
            .order_by(func.length(Repository.repo_name)) \
            .filter(
                or_(Repository.repo_id == self.db_repo.repo_id,  # repo itself
                    Repository.fork_id == self.db_repo.repo_id)  # forks of this repo
            ) \
            .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))

        if filter_query:
            ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
            query = query.filter(Repository.repo_name.ilike(ilike_expression))

        limit = max(20 - len(parent_target_repos), 5)  # not less than 5
        target_repos = query.limit(limit).all()

        all_target_repos = target_repos + parent_target_repos

        repos = []
        # This checks permissions to the repositories
        for obj in ScmModel().get_repos(all_target_repos):
            repos.append({
                'id': obj['name'],
                'text': obj['name'],
                'type': 'repo',
                'repo_id': obj['dbrepo']['repo_id'],
                'repo_type': obj['dbrepo']['repo_type'],
                'private': obj['dbrepo']['private'],

            })

        data = {
            'more': False,
            'results': [{
                'text': _('Repositories'),
                'children': repos
            }] if repos else []
        }
        return data

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_create', request_method='POST',
        renderer=None)
    def pull_request_create(self):
        _ = self.request.translate
        self.assure_not_empty_repo()
        self.load_default_context()

        controls = peppercorn.parse(self.request.POST.items())

        try:
            form = PullRequestForm(
                self.request.translate, self.db_repo.repo_id)()
            _form = form.to_python(controls)
        except formencode.Invalid as errors:
            if errors.error_dict.get('revisions'):
                msg = 'Revisions: %s' % errors.error_dict['revisions']
            elif errors.error_dict.get('pullrequest_title'):
                msg = errors.error_dict.get('pullrequest_title')
            else:
                msg = _('Error creating pull request: {}').format(errors)
            log.exception(msg)
            h.flash(msg, 'error')

            # would rather just go back to form ...
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name))

        source_repo = _form['source_repo']
        source_ref = _form['source_ref']
        target_repo = _form['target_repo']
        target_ref = _form['target_ref']
        commit_ids = _form['revisions'][::-1]
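        # reverse the order the form submitted them in, presumably so the
        # revisions end up stored oldest-first on the pull request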

        # find the ancestor for this pr
        source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
        target_db_repo = Repository.get_by_repo_name(_form['target_repo'])

        if not (source_db_repo and target_db_repo):
            h.flash(_('source_repo or target repo not found'), category='error')
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name))

        # re-check permissions here:
        # for the source repo we must have read permissions

        source_perm = HasRepoPermissionAny(
            'repository.read', 'repository.write', 'repository.admin')(
                source_db_repo.repo_name)
        if not source_perm:
            msg = _('Not enough permissions to source repo `{}`.').format(
                source_db_repo.repo_name)
            h.flash(msg, category='error')
            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        # target repo we must have read permissions, and also later on
        # we want to check branch permissions here
        target_perm = HasRepoPermissionAny(
            'repository.read', 'repository.write', 'repository.admin')(
                target_db_repo.repo_name)
        if not target_perm:
            msg = _('Not enough permissions to target repo `{}`.').format(
                target_db_repo.repo_name)
            h.flash(msg, category='error')
            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        source_scm = source_db_repo.scm_instance()
        target_scm = target_db_repo.scm_instance()

        source_commit = source_scm.get_commit(source_ref.split(':')[-1])
        target_commit = target_scm.get_commit(target_ref.split(':')[-1])

        ancestor = source_scm.get_common_ancestor(
            source_commit.raw_id, target_commit.raw_id, target_scm)

        # recalculate target ref based on ancestor
        target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
        target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
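        # the ancestor becomes the commit part of the stored target ref,
        # presumably so the recorded merge base stays stable even when the
        # target branch moves on after the pull request is opened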

        get_default_reviewers_data, validate_default_reviewers = \
            PullRequestModel().get_reviewer_functions()

        # recalculate reviewers logic, to make sure we can validate this
        reviewer_rules = get_default_reviewers_data(
            self._rhodecode_db_user, source_db_repo,
            source_commit, target_db_repo, target_commit)

        given_reviewers = _form['review_members']
        reviewers = validate_default_reviewers(
            given_reviewers, reviewer_rules)

        pullrequest_title = _form['pullrequest_title']
        title_source_ref = source_ref.split(':', 2)[1]
        if not pullrequest_title:
            pullrequest_title = PullRequestModel().generate_pullrequest_title(
                source=source_repo,
                source_ref=title_source_ref,
                target=target_repo
            )

        description = _form['pullrequest_desc']
        description_renderer = _form['description_renderer']

        try:
            pull_request = PullRequestModel().create(
                created_by=self._rhodecode_user.user_id,
                source_repo=source_repo,
                source_ref=source_ref,
                target_repo=target_repo,
                target_ref=target_ref,
                revisions=commit_ids,
                reviewers=reviewers,
                title=pullrequest_title,
                description=description,
                description_renderer=description_renderer,
                reviewer_data=reviewer_rules,
                auth_user=self._rhodecode_user
            )
            Session().commit()

            h.flash(_('Successfully opened new pull request'),
                    category='success')
        except Exception:
            msg = _('Error occurred during creation of this pull request.')
            log.exception(msg)
            h.flash(msg, category='error')

            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        raise HTTPFound(
            h.route_path('pullrequest_show', repo_name=target_repo,
                         pull_request_id=pull_request.pull_request_id))

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_update', request_method='POST',
        renderer='json_ext')
    def pull_request_update(self):
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        self.load_default_context()

        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return True

        if pull_request.pull_request_state != PullRequest.STATE_CREATED:
            log.debug('update: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot update pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            return True

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)
        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())

            if 'review_members' in controls:
                self._update_reviewers(
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                self._update_commits(pull_request)
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                raise HTTPBadRequest()
            return True
        raise HTTPForbidden()

    def _edit_pull_request(self, pull_request):
        _ = self.request.translate

        try:
            PullRequestModel().edit(
                pull_request,
                self.request.POST.get('title'),
                self.request.POST.get('description'),
                self.request.POST.get('description_renderer'),
                self._rhodecode_user)
        except ValueError:
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return
        else:
            Session().commit()

            msg = _(u'Pull request title & description updated.')
            h.flash(msg, category='success')
            return

    def _update_commits(self, pull_request):
        _ = self.request.translate

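        # flip the pull request into the UPDATING state while the new commits
        # are pulled in; the views above refuse updates and merges for any state
        # other than CREATED, so concurrent operations are rejected until done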
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            resp = PullRequestModel().update_commits(pull_request)

        if resp.executed:

            if resp.target_changed and resp.source_changed:
                changed = 'target and source repositories'
            elif resp.target_changed and not resp.source_changed:
                changed = 'target repository'
            elif not resp.target_changed and resp.source_changed:
                changed = 'source repository'
            else:
                changed = 'nothing'

            msg = _(u'Pull request updated to "{source_commit_id}" with '
                    u'{count_added} added, {count_removed} removed commits. '
                    u'Source of changes: {change_source}')
            msg = msg.format(
                source_commit_id=pull_request.source_ref_parts.commit_id,
                count_added=len(resp.changes.added),
                count_removed=len(resp.changes.removed),
                change_source=changed)
            h.flash(msg, category='success')

            channel = '/repo${}$/pr/{}'.format(
                pull_request.target_repo.repo_name, pull_request.pull_request_id)
            message = msg + (
                ' - <a onclick="window.location.reload()">'
                '<strong>{}</strong></a>'.format(_('Reload page')))
            channelstream.post_message(
                channel, message, self._rhodecode_user.username,
                registry=self.request.registry)
        else:
            msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
            warning_reasons = [
                UpdateFailureReason.NO_CHANGE,
                UpdateFailureReason.WRONG_REF_TYPE,
            ]
            category = 'warning' if resp.reason in warning_reasons else 'error'
            h.flash(msg, category=category)

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_merge', request_method='POST',
        renderer='json_ext')
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        if pull_request.pull_request_state != PullRequest.STATE_CREATED:
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
            merge_possible = not check.failed

            for err_type, error_msg in check.errors:
                h.flash(error_msg, category=err_type)

            if merge_possible:
                log.debug("Pre-conditions checked, trying to merge.")
                extras = vcs_operation_context(
                    self.request.environ, repo_name=pull_request.target_repo.repo_name,
                    username=self._rhodecode_db_user.username, action='push',
                    scm=pull_request.target_repo.repo_type)
                with pull_request.set_state(PullRequest.STATE_UPDATING):
                    self._merge_pull_request(
                        pull_request, self._rhodecode_db_user, extras)
            else:
                log.debug("Pre-conditions failed, NOT merging.")

        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))

    def _merge_pull_request(self, pull_request, user, extras):
        _ = self.request.translate
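        # delegate the actual merge to the model; on success close the pull
        # request with a comment, otherwise surface the merge status message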
        merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)

        if merge_resp.executed:
            log.debug("The merge was successful, closing the pull request.")
            PullRequestModel().close_pull_request(
                pull_request.pull_request_id, user)
            Session().commit()
            msg = _('Pull request was successfully merged and closed.')
            h.flash(msg, category='success')
        else:
            log.debug(
                "The merge was not successful. Merge response: %s", merge_resp)
            msg = merge_resp.merge_status_message
            h.flash(msg, category='error')

    def _update_reviewers(self, pull_request, review_members, reviewer_rules):
        _ = self.request.translate

        get_default_reviewers_data, validate_default_reviewers = \
            PullRequestModel().get_reviewer_functions()

        try:
            reviewers = validate_default_reviewers(review_members, reviewer_rules)
        except ValueError as e:
            log.error('Reviewers Validation: {}'.format(e))
            h.flash(e, category='error')
            return

        old_calculated_status = pull_request.calculated_review_status()
        PullRequestModel().update_reviewers(
            pull_request, reviewers, self._rhodecode_user)
        h.flash(_('Pull request reviewers updated.'), category='success')
        Session().commit()

        # trigger status changed if change in reviewers changes the status
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'review_status_change',
                data={'status': calculated_status})

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_delete(self):
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        self.load_default_context()

        pr_closed = pull_request.is_closed()
        allowed_to_delete = PullRequestModel().check_user_delete(
            pull_request, self._rhodecode_user) and not pr_closed

        # only the owner can delete it!
        if allowed_to_delete:
            PullRequestModel().delete(pull_request, self._rhodecode_user)
            Session().commit()
            h.flash(_('Successfully deleted pull request'),
                    category='success')
            raise HTTPFound(h.route_path('pullrequest_show_all',
                                         repo_name=self.db_repo_name))

        log.warning('user %s tried to delete pull request without access',
                    self._rhodecode_user)
        raise HTTPNotFound()

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_create', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_create(self):
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        allowed_to_comment = PullRequestModel().check_user_comment(
            pull_request, self._rhodecode_user)
        if not allowed_to_comment:
            log.debug(
                'comment: forbidden because pull request is from forbidden repo')
            raise HTTPForbidden()

        c = self.load_default_context()

        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
        close_pull_request = self.request.POST.get('close_pull_request')

        # the logic here works as follows: if we submit a close-PR comment, use
        # the `close_pull_request_with_comment` function, otherwise handle the
        # regular comment logic

        if close_pull_request:
            # only owner or admin or person with write permissions
            allowed_to_close = PullRequestModel().check_user_update(
                pull_request, self._rhodecode_user)
            if not allowed_to_close:
                log.debug('comment: forbidden because not allowed to close '
                          'pull request %s', pull_request_id)
                raise HTTPForbidden()

            # This also triggers `review_status_change`
            comment, status = PullRequestModel().close_pull_request_with_comment(
                pull_request, self._rhodecode_user, self.db_repo, message=text,
                auth_user=self._rhodecode_user)
            Session().flush()

            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'comment',
                data={'comment': comment})

        else:
            # regular comment case, could be inline, or one with status.
            # for that one we check also permissions

            allowed_to_change_status = PullRequestModel().check_user_change_status(
                pull_request, self._rhodecode_user)

            if status and allowed_to_change_status:
                message = (_('Status change %(transition_icon)s %(status)s')
                           % {'transition_icon': '>',
                              'status': ChangesetStatus.get_status_lbl(status)})
                text = text or message
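                # when only a status change was submitted without any text, fall
                # back to the generated status-change message as the comment body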

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_user.user_id,
                pull_request=pull_request,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status and allowed_to_change_status else None),
                status_change_type=(status
                                    if status and allowed_to_change_status else None),
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            if allowed_to_change_status:
                # calculate old status before we change it
                old_calculated_status = pull_request.calculated_review_status()

                # get status if set !
                if status:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_user.user_id,
                        comment,
                        pull_request=pull_request
                    )

                Session().flush()
                # this is somehow required to get access to some relationship
                # loaded on comment
                Session().refresh(comment)

                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'comment',
                    data={'comment': comment})

                # we now calculate the status of pull request, and based on that
                # calculation we set the commits status
                calculated_status = pull_request.calculated_review_status()
                if old_calculated_status != calculated_status:
                    PullRequestModel().trigger_pull_request_hook(
                        pull_request, self._rhodecode_user, 'review_status_change',
                        data={'status': calculated_status})

        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_delete(self):
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])

        comment = ChangesetComment.get_or_404(
            self.request.matchdict['comment_id'])
        comment_id = comment.comment_id

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        if comment.pull_request.is_closed():
            # don't allow deleting comments on closed pull request
1442 # don't allow deleting comments on closed pull request
1443 raise HTTPForbidden()
1443 raise HTTPForbidden()
1444
1444
1445 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1445 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1446 super_admin = h.HasPermissionAny('hg.admin')()
1446 super_admin = h.HasPermissionAny('hg.admin')()
1447 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1447 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1448 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1448 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1449 comment_repo_admin = is_repo_admin and is_repo_comment
1449 comment_repo_admin = is_repo_admin and is_repo_comment
1450
1450
1451 if super_admin or comment_owner or comment_repo_admin:
1451 if super_admin or comment_owner or comment_repo_admin:
1452 old_calculated_status = comment.pull_request.calculated_review_status()
1452 old_calculated_status = comment.pull_request.calculated_review_status()
1453 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1453 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1454 Session().commit()
1454 Session().commit()
1455 calculated_status = comment.pull_request.calculated_review_status()
1455 calculated_status = comment.pull_request.calculated_review_status()
1456 if old_calculated_status != calculated_status:
1456 if old_calculated_status != calculated_status:
1457 PullRequestModel().trigger_pull_request_hook(
1457 PullRequestModel().trigger_pull_request_hook(
1458 comment.pull_request, self._rhodecode_user, 'review_status_change',
1458 comment.pull_request, self._rhodecode_user, 'review_status_change',
1459 data={'status': calculated_status})
1459 data={'status': calculated_status})
1460 return True
1460 return True
1461 else:
1461 else:
1462 log.warning('No permissions for user %s to delete comment_id: %s',
1462 log.warning('No permissions for user %s to delete comment_id: %s',
1463 self._rhodecode_db_user, comment_id)
1463 self._rhodecode_db_user, comment_id)
1464 raise HTTPNotFound()
1464 raise HTTPNotFound()
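The permission gate above reduces to a simple boolean rule: a comment may be deleted by a super admin, by its author, or by a repository admin when the comment belongs to that repository. A minimal, self-contained sketch of that rule, where plain boolean arguments stand in for the real RhodeCode permission helpers:

def can_delete_comment(super_admin, comment_owner, repo_admin, is_repo_comment):
    # repo admins may only delete comments that belong to their own repository
    comment_repo_admin = repo_admin and is_repo_comment
    return super_admin or comment_owner or comment_repo_admin

# a repo admin looking at a comment attached to a different repository:
assert can_delete_comment(False, False, True, False) is False
# the comment author can always delete their own comment:
assert can_delete_comment(False, True, False, True) is True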
@@ -1,507 +1,506 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49
49
50 _filter_pre_load = [
50 _filter_pre_load = [
51 # done through a more complex tree walk on parents
51 # done through a more complex tree walk on parents
52 "affected_files",
52 "affected_files",
53 # based on repository cached property
54 "branch",
55 # done through subprocess not remote call
53 # done through subprocess not remote call
56 "children",
54 "children",
57 # done through a more complex tree walk on parents
55 # done through a more complex tree walk on parents
58 "status",
56 "status",
59 # mercurial specific property not supported here
57 # mercurial specific property not supported here
60 "_file_paths",
58 "_file_paths",
61 # mercurial specific property not supported here
59 # mercurial specific property not supported here
62 'obsolete',
60 'obsolete',
63 # mercurial specific property not supported here
61 # mercurial specific property not supported here
64 'phase',
62 'phase',
65 # mercurial specific property not supported here
63 # mercurial specific property not supported here
66 'hidden'
64 'hidden'
67 ]
65 ]
68
66
69 def __init__(self, repository, raw_id, idx, pre_load=None):
67 def __init__(self, repository, raw_id, idx, pre_load=None):
70 self.repository = repository
68 self.repository = repository
71 self._remote = repository._remote
69 self._remote = repository._remote
72 # TODO: johbo: Tweak of raw_id should not be necessary
70 # TODO: johbo: Tweak of raw_id should not be necessary
73 self.raw_id = safe_str(raw_id)
71 self.raw_id = safe_str(raw_id)
74 self.idx = idx
72 self.idx = idx
75
73
76 self._set_bulk_properties(pre_load)
74 self._set_bulk_properties(pre_load)
77
75
78 # caches
76 # caches
79 self._stat_modes = {} # stat info for paths
77 self._stat_modes = {} # stat info for paths
80 self._paths = {} # path processed with parse_tree
78 self._paths = {} # path processed with parse_tree
81 self.nodes = {}
79 self.nodes = {}
82 self._submodules = None
80 self._submodules = None
83
81
84 def _set_bulk_properties(self, pre_load):
82 def _set_bulk_properties(self, pre_load):
83
85 if not pre_load:
84 if not pre_load:
86 return
85 return
87 pre_load = [entry for entry in pre_load
86 pre_load = [entry for entry in pre_load
88 if entry not in self._filter_pre_load]
87 if entry not in self._filter_pre_load]
89 if not pre_load:
88 if not pre_load:
90 return
89 return
91
90
92 result = self._remote.bulk_request(self.raw_id, pre_load)
91 result = self._remote.bulk_request(self.raw_id, pre_load)
93 for attr, value in result.items():
92 for attr, value in result.items():
94 if attr in ["author", "message"]:
93 if attr in ["author", "message"]:
95 if value:
94 if value:
96 value = safe_unicode(value)
95 value = safe_unicode(value)
97 elif attr == "date":
96 elif attr == "date":
98 value = utcdate_fromtimestamp(*value)
97 value = utcdate_fromtimestamp(*value)
99 elif attr == "parents":
98 elif attr == "parents":
100 value = self._make_commits(value)
99 value = self._make_commits(value)
100 elif attr == "branch":
101 value = value[0] if value else None
101 self.__dict__[attr] = value
102 self.__dict__[attr] = value
102
103
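The bulk pre-load above only works because the lazily computed attributes cache themselves in the instance __dict__; seeding __dict__ up front means the per-attribute remote calls never fire. A stand-alone sketch of that caching pattern, using a minimal home-grown descriptor rather than the real zope LazyProperty:

class lazy_property(object):
    """Non-data descriptor: caches the computed value in the instance __dict__."""
    def __init__(self, func):
        self.func = func
        self.__name__ = func.__name__

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        value = self.func(obj)
        obj.__dict__[self.__name__] = value  # later lookups hit __dict__ directly
        return value


class DemoCommit(object):
    @lazy_property
    def author(self):
        print('expensive remote call')
        return 'Jane <jane@example.com>'


commit = DemoCommit()
commit.__dict__['author'] = 'pre-loaded value'  # what _set_bulk_properties does
print(commit.author)  # 'pre-loaded value', no remote call is made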
103 @LazyProperty
104 @LazyProperty
104 def _commit(self):
105 def _commit(self):
105 return self._remote[self.raw_id]
106 return self._remote[self.raw_id]
106
107
107 @LazyProperty
108 @LazyProperty
108 def _tree_id(self):
109 def _tree_id(self):
109 return self._remote[self._commit['tree']]['id']
110 return self._remote[self._commit['tree']]['id']
110
111
111 @LazyProperty
112 @LazyProperty
112 def id(self):
113 def id(self):
113 return self.raw_id
114 return self.raw_id
114
115
115 @LazyProperty
116 @LazyProperty
116 def short_id(self):
117 def short_id(self):
117 return self.raw_id[:12]
118 return self.raw_id[:12]
118
119
119 @LazyProperty
120 @LazyProperty
120 def message(self):
121 def message(self):
121 return safe_unicode(self._remote.message(self.id))
122 return safe_unicode(self._remote.message(self.id))
122
123
123 @LazyProperty
124 @LazyProperty
124 def committer(self):
125 def committer(self):
125 return safe_unicode(self._remote.author(self.id))
126 return safe_unicode(self._remote.author(self.id))
126
127
127 @LazyProperty
128 @LazyProperty
128 def author(self):
129 def author(self):
129 return safe_unicode(self._remote.author(self.id))
130 return safe_unicode(self._remote.author(self.id))
130
131
131 @LazyProperty
132 @LazyProperty
132 def date(self):
133 def date(self):
133 unix_ts, tz = self._remote.date(self.raw_id)
134 unix_ts, tz = self._remote.date(self.raw_id)
134 return utcdate_fromtimestamp(unix_ts, tz)
135 return utcdate_fromtimestamp(unix_ts, tz)
135
136
136 @LazyProperty
137 @LazyProperty
137 def status(self):
138 def status(self):
138 """
139 """
139 Returns modified, added and removed files for the current commit
140 Returns modified, added and removed files for the current commit
140 """
141 """
141 return self.changed, self.added, self.removed
142 return self.changed, self.added, self.removed
142
143
143 @LazyProperty
144 @LazyProperty
144 def tags(self):
145 def tags(self):
145 tags = [safe_unicode(name) for name,
146 tags = [safe_unicode(name) for name,
146 commit_id in self.repository.tags.iteritems()
147 commit_id in self.repository.tags.iteritems()
147 if commit_id == self.raw_id]
148 if commit_id == self.raw_id]
148 return tags
149 return tags
149
150
150 @LazyProperty
151 @LazyProperty
151 def commit_branches(self):
152 def commit_branches(self):
152 branches = []
153 branches = []
153 for name, commit_id in self.repository.branches.iteritems():
154 for name, commit_id in self.repository.branches.iteritems():
154 if commit_id == self.raw_id:
155 if commit_id == self.raw_id:
155 branches.append(name)
156 branches.append(name)
156 return branches
157 return branches
157
158
158 @LazyProperty
159 @LazyProperty
159 def branch(self):
160 def branch(self):
160 # actually commit can have multiple branches
161 branches = safe_unicode(self._remote.branch(self.raw_id))
161 branches = self.commit_branches
162 if branches:
162 if branches:
163 return branches[0]
163 # actually commit can have multiple branches in git
164
164 return safe_unicode(branches[0])
165 return None
166
165
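With this change the branch name is derived from the repository's cached branch map (via commit_branches) instead of a separate remote call per commit. A toy illustration of that lookup, assuming a plain dict mapping branch names to head commit ids (the real repository.branches property is more involved):

def branches_containing(commit_id, branches):
    # in git a commit can be the head of several branches at once
    return [name for name, head_id in branches.items() if head_id == commit_id]


branches = {'master': 'abc123', 'stable': 'abc123', 'feature-x': 'def456'}
heads = branches_containing('abc123', branches)
print(sorted(heads))                # ['master', 'stable']
print(heads[0] if heads else None)  # first match, or None when not a branch head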
167 def _get_tree_id_for_path(self, path):
166 def _get_tree_id_for_path(self, path):
168 path = safe_str(path)
167 path = safe_str(path)
169 if path in self._paths:
168 if path in self._paths:
170 return self._paths[path]
169 return self._paths[path]
171
170
172 tree_id = self._tree_id
171 tree_id = self._tree_id
173
172
174 path = path.strip('/')
173 path = path.strip('/')
175 if path == '':
174 if path == '':
176 data = [tree_id, "tree"]
175 data = [tree_id, "tree"]
177 self._paths[''] = data
176 self._paths[''] = data
178 return data
177 return data
179
178
180 tree_id, tree_type, tree_mode = \
179 tree_id, tree_type, tree_mode = \
181 self._remote.tree_and_type_for_path(self.raw_id, path)
180 self._remote.tree_and_type_for_path(self.raw_id, path)
182 if tree_id is None:
181 if tree_id is None:
183 raise self.no_node_at_path(path)
182 raise self.no_node_at_path(path)
184
183
185 self._paths[path] = [tree_id, tree_type]
184 self._paths[path] = [tree_id, tree_type]
186 self._stat_modes[path] = tree_mode
185 self._stat_modes[path] = tree_mode
187
186
188 if path not in self._paths:
187 if path not in self._paths:
189 raise self.no_node_at_path(path)
188 raise self.no_node_at_path(path)
190
189
191 return self._paths[path]
190 return self._paths[path]
192
191
193 def _get_kind(self, path):
192 def _get_kind(self, path):
194 tree_id, type_ = self._get_tree_id_for_path(path)
193 tree_id, type_ = self._get_tree_id_for_path(path)
195 if type_ == 'blob':
194 if type_ == 'blob':
196 return NodeKind.FILE
195 return NodeKind.FILE
197 elif type_ == 'tree':
196 elif type_ == 'tree':
198 return NodeKind.DIR
197 return NodeKind.DIR
199 elif type_ == 'link':
198 elif type_ == 'link':
200 return NodeKind.SUBMODULE
199 return NodeKind.SUBMODULE
201 return None
200 return None
202
201
203 def _get_filectx(self, path):
202 def _get_filectx(self, path):
204 path = self._fix_path(path)
203 path = self._fix_path(path)
205 if self._get_kind(path) != NodeKind.FILE:
204 if self._get_kind(path) != NodeKind.FILE:
206 raise CommitError(
205 raise CommitError(
207 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
206 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
208 return path
207 return path
209
208
210 def _get_file_nodes(self):
209 def _get_file_nodes(self):
211 return chain(*(t[2] for t in self.walk()))
210 return chain(*(t[2] for t in self.walk()))
212
211
213 @LazyProperty
212 @LazyProperty
214 def parents(self):
213 def parents(self):
215 """
214 """
216 Returns list of parent commits.
215 Returns list of parent commits.
217 """
216 """
218 parent_ids = self._remote.parents(self.id)
217 parent_ids = self._remote.parents(self.id)
219 return self._make_commits(parent_ids)
218 return self._make_commits(parent_ids)
220
219
221 @LazyProperty
220 @LazyProperty
222 def children(self):
221 def children(self):
223 """
222 """
224 Returns list of child commits.
223 Returns list of child commits.
225 """
224 """
226 rev_filter = settings.GIT_REV_FILTER
225 rev_filter = settings.GIT_REV_FILTER
227 output, __ = self.repository.run_git_command(
226 output, __ = self.repository.run_git_command(
228 ['rev-list', '--children'] + rev_filter)
227 ['rev-list', '--children'] + rev_filter)
229
228
230 child_ids = []
229 child_ids = []
231 pat = re.compile(r'^%s' % self.raw_id)
230 pat = re.compile(r'^%s' % self.raw_id)
232 for l in output.splitlines():
231 for l in output.splitlines():
233 if pat.match(l):
232 if pat.match(l):
234 found_ids = l.split(' ')[1:]
233 found_ids = l.split(' ')[1:]
235 child_ids.extend(found_ids)
234 child_ids.extend(found_ids)
236 return self._make_commits(child_ids)
235 return self._make_commits(child_ids)
237
236
238 def _make_commits(self, commit_ids):
237 def _make_commits(self, commit_ids):
239 def commit_maker(_commit_id):
238 def commit_maker(_commit_id):
240 return self.repository.get_commit(commit_id=_commit_id)
239 return self.repository.get_commit(commit_id=_commit_id)
241
240
242 return [commit_maker(commit_id) for commit_id in commit_ids]
241 return [commit_maker(commit_id) for commit_id in commit_ids]
243
242
244 def get_file_mode(self, path):
243 def get_file_mode(self, path):
245 """
244 """
246 Returns stat mode of the file at the given `path`.
245 Returns stat mode of the file at the given `path`.
247 """
246 """
248 path = safe_str(path)
247 path = safe_str(path)
249 # ensure path is traversed
248 # ensure path is traversed
250 self._get_tree_id_for_path(path)
249 self._get_tree_id_for_path(path)
251 return self._stat_modes[path]
250 return self._stat_modes[path]
252
251
253 def is_link(self, path):
252 def is_link(self, path):
254 return stat.S_ISLNK(self.get_file_mode(path))
253 return stat.S_ISLNK(self.get_file_mode(path))
255
254
256 def get_file_content(self, path):
255 def get_file_content(self, path):
257 """
256 """
258 Returns content of the file at given `path`.
257 Returns content of the file at given `path`.
259 """
258 """
260 tree_id, _ = self._get_tree_id_for_path(path)
259 tree_id, _ = self._get_tree_id_for_path(path)
261 return self._remote.blob_as_pretty_string(tree_id)
260 return self._remote.blob_as_pretty_string(tree_id)
262
261
263 def get_file_size(self, path):
262 def get_file_size(self, path):
264 """
263 """
265 Returns size of the file at given `path`.
264 Returns size of the file at given `path`.
266 """
265 """
267 tree_id, _ = self._get_tree_id_for_path(path)
266 tree_id, _ = self._get_tree_id_for_path(path)
268 return self._remote.blob_raw_length(tree_id)
267 return self._remote.blob_raw_length(tree_id)
269
268
270 def get_path_history(self, path, limit=None, pre_load=None):
269 def get_path_history(self, path, limit=None, pre_load=None):
271 """
270 """
272 Returns history of file as reversed list of `GitCommit` objects for
271 Returns history of file as reversed list of `GitCommit` objects for
273 which file at given `path` has been modified.
272 which file at given `path` has been modified.
274
273
275 TODO: This function now uses an underlying 'git' command, which works
274 TODO: This function now uses an underlying 'git' command, which works
276 quickly, but ideally we should replace it with an algorithm.
275 quickly, but ideally we should replace it with an algorithm.
277 """
276 """
278 self._get_filectx(path)
277 self._get_filectx(path)
279 f_path = safe_str(path)
278 f_path = safe_str(path)
280
279
281 # optimize for n==1, rev-list is much faster for that use-case
280 # optimize for n==1, rev-list is much faster for that use-case
282 if limit == 1:
281 if limit == 1:
283 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
282 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
284 else:
283 else:
285 cmd = ['log']
284 cmd = ['log']
286 if limit:
285 if limit:
287 cmd.extend(['-n', str(safe_int(limit, 0))])
286 cmd.extend(['-n', str(safe_int(limit, 0))])
288 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
287 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
289
288
290 output, __ = self.repository.run_git_command(cmd)
289 output, __ = self.repository.run_git_command(cmd)
291 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
290 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
292
291
293 return [
292 return [
294 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
293 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
295 for commit_id in commit_ids]
294 for commit_id in commit_ids]
296
295
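The history is assembled by shelling out to git and scraping full 40-character hashes from whatever the command prints; a tiny self-contained illustration of that extraction step on fabricated output:

import re

fake_git_output = """\
 1111111111111111111111111111111111111111
 2222222222222222222222222222222222222222
"""
commit_ids = re.findall(r'[0-9a-fA-F]{40}', fake_git_output)
print(commit_ids)  # ['1111...1111', '2222...2222'], in the order git printed them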
297 def get_file_annotate(self, path, pre_load=None):
296 def get_file_annotate(self, path, pre_load=None):
298 """
297 """
299 Returns a generator of four element tuples with
298 Returns a generator of four element tuples with
300 lineno, commit_id, commit lazy loader and line
299 lineno, commit_id, commit lazy loader and line
301
300
302 TODO: This function now uses an underlying 'git' command, which is
301 TODO: This function now uses an underlying 'git' command, which is
303 generally not good. It should be replaced with an algorithm iterating
302 generally not good. It should be replaced with an algorithm iterating
304 over commits.
303 over commits.
305 """
304 """
306 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
305 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
307 # -l ==> outputs long shas (and we need all 40 characters)
306 # -l ==> outputs long shas (and we need all 40 characters)
308 # --root ==> doesn't put '^' character for boundaries
307 # --root ==> doesn't put '^' character for boundaries
309 # -r commit_id ==> blames for the given commit
308 # -r commit_id ==> blames for the given commit
310 output, __ = self.repository.run_git_command(cmd)
309 output, __ = self.repository.run_git_command(cmd)
311
310
312 for i, blame_line in enumerate(output.split('\n')[:-1]):
311 for i, blame_line in enumerate(output.split('\n')[:-1]):
313 line_no = i + 1
312 line_no = i + 1
314 commit_id, line = re.split(r' ', blame_line, 1)
313 commit_id, line = re.split(r' ', blame_line, 1)
315 yield (
314 yield (
316 line_no, commit_id,
315 line_no, commit_id,
317 lambda commit_id=commit_id: self.repository.get_commit(
316 lambda commit_id=commit_id: self.repository.get_commit(
318 commit_id=commit_id, pre_load=pre_load),
317 commit_id=commit_id, pre_load=pre_load),
319 line)
318 line)
320
319
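Because `-l` makes blame print the full 40-character sha at the start of every line, splitting each line once on the first space is enough to separate the commit id from the rest. A stand-alone parse of fabricated blame output (the real output also carries author and date inside the parentheses):

import re

blame_output = (
    "1111111111111111111111111111111111111111 (Jane 2019-01-01 1) first line\n"
    "2222222222222222222222222222222222222222 (John 2019-01-02 2) second line\n"
)
for i, blame_line in enumerate(blame_output.splitlines()):
    commit_id, rest = re.split(r' ', blame_line, 1)
    print('%d %s %s' % (i + 1, commit_id[:8], rest))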
321 def get_nodes(self, path):
320 def get_nodes(self, path):
322
321
323 if self._get_kind(path) != NodeKind.DIR:
322 if self._get_kind(path) != NodeKind.DIR:
324 raise CommitError(
323 raise CommitError(
325 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
324 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
326 path = self._fix_path(path)
325 path = self._fix_path(path)
327
326
328 tree_id, _ = self._get_tree_id_for_path(path)
327 tree_id, _ = self._get_tree_id_for_path(path)
329
328
330 dirnodes = []
329 dirnodes = []
331 filenodes = []
330 filenodes = []
332
331
333 # extracted tree ID gives us our files...
332 # extracted tree ID gives us our files...
334 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
333 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
335 if type_ == 'link':
334 if type_ == 'link':
336 url = self._get_submodule_url('/'.join((path, name)))
335 url = self._get_submodule_url('/'.join((path, name)))
337 dirnodes.append(SubModuleNode(
336 dirnodes.append(SubModuleNode(
338 name, url=url, commit=id_, alias=self.repository.alias))
337 name, url=url, commit=id_, alias=self.repository.alias))
339 continue
338 continue
340
339
341 if path != '':
340 if path != '':
342 obj_path = '/'.join((path, name))
341 obj_path = '/'.join((path, name))
343 else:
342 else:
344 obj_path = name
343 obj_path = name
345 if obj_path not in self._stat_modes:
344 if obj_path not in self._stat_modes:
346 self._stat_modes[obj_path] = stat_
345 self._stat_modes[obj_path] = stat_
347
346
348 if type_ == 'tree':
347 if type_ == 'tree':
349 dirnodes.append(DirNode(obj_path, commit=self))
348 dirnodes.append(DirNode(obj_path, commit=self))
350 elif type_ == 'blob':
349 elif type_ == 'blob':
351 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
350 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
352 else:
351 else:
353 raise CommitError(
352 raise CommitError(
354 "Requested object should be Tree or Blob, is %s", type_)
353 "Requested object should be Tree or Blob, is %s", type_)
355
354
356 nodes = dirnodes + filenodes
355 nodes = dirnodes + filenodes
357 for node in nodes:
356 for node in nodes:
358 if node.path not in self.nodes:
357 if node.path not in self.nodes:
359 self.nodes[node.path] = node
358 self.nodes[node.path] = node
360 nodes.sort()
359 nodes.sort()
361 return nodes
360 return nodes
362
361
363 def get_node(self, path, pre_load=None):
362 def get_node(self, path, pre_load=None):
364 if isinstance(path, unicode):
363 if isinstance(path, unicode):
365 path = path.encode('utf-8')
364 path = path.encode('utf-8')
366 path = self._fix_path(path)
365 path = self._fix_path(path)
367 if path not in self.nodes:
366 if path not in self.nodes:
368 try:
367 try:
369 tree_id, type_ = self._get_tree_id_for_path(path)
368 tree_id, type_ = self._get_tree_id_for_path(path)
370 except CommitError:
369 except CommitError:
371 raise NodeDoesNotExistError(
370 raise NodeDoesNotExistError(
372 "Cannot find one of parents' directories for a given "
371 "Cannot find one of parents' directories for a given "
373 "path: %s" % path)
372 "path: %s" % path)
374
373
375 if type_ == 'link':
374 if type_ == 'link':
376 url = self._get_submodule_url(path)
375 url = self._get_submodule_url(path)
377 node = SubModuleNode(path, url=url, commit=tree_id,
376 node = SubModuleNode(path, url=url, commit=tree_id,
378 alias=self.repository.alias)
377 alias=self.repository.alias)
379 elif type_ == 'tree':
378 elif type_ == 'tree':
380 if path == '':
379 if path == '':
381 node = RootNode(commit=self)
380 node = RootNode(commit=self)
382 else:
381 else:
383 node = DirNode(path, commit=self)
382 node = DirNode(path, commit=self)
384 elif type_ == 'blob':
383 elif type_ == 'blob':
385 node = FileNode(path, commit=self, pre_load=pre_load)
384 node = FileNode(path, commit=self, pre_load=pre_load)
386 self._stat_modes[path] = node.mode
385 self._stat_modes[path] = node.mode
387 else:
386 else:
388 raise self.no_node_at_path(path)
387 raise self.no_node_at_path(path)
389
388
390 # cache node
389 # cache node
391 self.nodes[path] = node
390 self.nodes[path] = node
392
391
393 return self.nodes[path]
392 return self.nodes[path]
394
393
395 def get_largefile_node(self, path):
394 def get_largefile_node(self, path):
396 tree_id, _ = self._get_tree_id_for_path(path)
395 tree_id, _ = self._get_tree_id_for_path(path)
397 pointer_spec = self._remote.is_large_file(tree_id)
396 pointer_spec = self._remote.is_large_file(tree_id)
398
397
399 if pointer_spec:
398 if pointer_spec:
400 # the content of that regular FileNode is the hash of the largefile
399 # the content of that regular FileNode is the hash of the largefile
401 file_id = pointer_spec.get('oid_hash')
400 file_id = pointer_spec.get('oid_hash')
402 if self._remote.in_largefiles_store(file_id):
401 if self._remote.in_largefiles_store(file_id):
403 lf_path = self._remote.store_path(file_id)
402 lf_path = self._remote.store_path(file_id)
404 return LargeFileNode(lf_path, commit=self, org_path=path)
403 return LargeFileNode(lf_path, commit=self, org_path=path)
405
404
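is_large_file on the vcsserver side inspects the blob for a git-lfs pointer and, when the object exists in the local largefiles store, the node is served from there. A rough, hypothetical parser for the documented git-lfs pointer format, shown only to illustrate what such a pointer looks like (this is not the actual vcsserver implementation, and the oid below is made up):

def parse_lfs_pointer(blob):
    if not blob.startswith('version https://git-lfs.github.com/spec/v1'):
        return None  # a regular blob, not an LFS pointer
    fields = {}
    for line in blob.splitlines()[1:]:
        if ' ' in line:
            key, value = line.split(' ', 1)
            fields[key] = value
    return {'oid_hash': fields.get('oid', '').replace('sha256:', ''),
            'size': int(fields.get('size', 0))}


pointer = ("version https://git-lfs.github.com/spec/v1\n"
           "oid sha256:deadbeef\n"
           "size 12345\n")
print(parse_lfs_pointer(pointer))  # {'oid_hash': 'deadbeef', 'size': 12345}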
406 @LazyProperty
405 @LazyProperty
407 def affected_files(self):
406 def affected_files(self):
408 """
407 """
409 Gets a fast-accessible list of file changes for the given commit
408 Gets a fast-accessible list of file changes for the given commit
410 """
409 """
411 added, modified, deleted = self._changes_cache
410 added, modified, deleted = self._changes_cache
412 return list(added.union(modified).union(deleted))
411 return list(added.union(modified).union(deleted))
413
412
414 @LazyProperty
413 @LazyProperty
415 def _changes_cache(self):
414 def _changes_cache(self):
416 added = set()
415 added = set()
417 modified = set()
416 modified = set()
418 deleted = set()
417 deleted = set()
419 _r = self._remote
418 _r = self._remote
420
419
421 parents = self.parents
420 parents = self.parents
422 if not self.parents:
421 if not self.parents:
423 parents = [base.EmptyCommit()]
422 parents = [base.EmptyCommit()]
424 for parent in parents:
423 for parent in parents:
425 if isinstance(parent, base.EmptyCommit):
424 if isinstance(parent, base.EmptyCommit):
426 oid = None
425 oid = None
427 else:
426 else:
428 oid = parent.raw_id
427 oid = parent.raw_id
429 changes = _r.tree_changes(oid, self.raw_id)
428 changes = _r.tree_changes(oid, self.raw_id)
430 for (oldpath, newpath), (_, _), (_, _) in changes:
429 for (oldpath, newpath), (_, _), (_, _) in changes:
431 if newpath and oldpath:
430 if newpath and oldpath:
432 modified.add(newpath)
431 modified.add(newpath)
433 elif newpath and not oldpath:
432 elif newpath and not oldpath:
434 added.add(newpath)
433 added.add(newpath)
435 elif not newpath and oldpath:
434 elif not newpath and oldpath:
436 deleted.add(oldpath)
435 deleted.add(oldpath)
437 return added, modified, deleted
436 return added, modified, deleted
438
437
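The added/modified/deleted split only looks at which side of each (oldpath, newpath) pair is set: both present means modified, only the new path means added, only the old path means deleted. A minimal sketch over hand-written pairs (the real pairs come from the remote tree_changes call and also carry mode and sha columns):

def classify(changes):
    added, modified, deleted = set(), set(), set()
    for oldpath, newpath in changes:
        if oldpath and newpath:
            modified.add(newpath)
        elif newpath:
            added.add(newpath)
        elif oldpath:
            deleted.add(oldpath)
    return added, modified, deleted


changes = [(None, 'new.txt'), ('docs/index.rst', 'docs/index.rst'), ('gone.py', None)]
print(classify(changes))  # ({'new.txt'}, {'docs/index.rst'}, {'gone.py'})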
439 def _get_paths_for_status(self, status):
438 def _get_paths_for_status(self, status):
440 """
439 """
441 Returns sorted list of paths for given ``status``.
440 Returns sorted list of paths for given ``status``.
442
441
443 :param status: one of: *added*, *modified* or *deleted*
442 :param status: one of: *added*, *modified* or *deleted*
444 """
443 """
445 added, modified, deleted = self._changes_cache
444 added, modified, deleted = self._changes_cache
446 return sorted({
445 return sorted({
447 'added': list(added),
446 'added': list(added),
448 'modified': list(modified),
447 'modified': list(modified),
449 'deleted': list(deleted)}[status]
448 'deleted': list(deleted)}[status]
450 )
449 )
451
450
452 @LazyProperty
451 @LazyProperty
453 def added(self):
452 def added(self):
454 """
453 """
455 Returns list of added ``FileNode`` objects.
454 Returns list of added ``FileNode`` objects.
456 """
455 """
457 if not self.parents:
456 if not self.parents:
458 return list(self._get_file_nodes())
457 return list(self._get_file_nodes())
459 return AddedFileNodesGenerator(
458 return AddedFileNodesGenerator(
460 [n for n in self._get_paths_for_status('added')], self)
459 [n for n in self._get_paths_for_status('added')], self)
461
460
462 @LazyProperty
461 @LazyProperty
463 def changed(self):
462 def changed(self):
464 """
463 """
465 Returns list of modified ``FileNode`` objects.
464 Returns list of modified ``FileNode`` objects.
466 """
465 """
467 if not self.parents:
466 if not self.parents:
468 return []
467 return []
469 return ChangedFileNodesGenerator(
468 return ChangedFileNodesGenerator(
470 [n for n in self._get_paths_for_status('modified')], self)
469 [n for n in self._get_paths_for_status('modified')], self)
471
470
472 @LazyProperty
471 @LazyProperty
473 def removed(self):
472 def removed(self):
474 """
473 """
475 Returns list of removed ``FileNode`` objects.
474 Returns list of removed ``FileNode`` objects.
476 """
475 """
477 if not self.parents:
476 if not self.parents:
478 return []
477 return []
479 return RemovedFileNodesGenerator(
478 return RemovedFileNodesGenerator(
480 [n for n in self._get_paths_for_status('deleted')], self)
479 [n for n in self._get_paths_for_status('deleted')], self)
481
480
482 def _get_submodule_url(self, submodule_path):
481 def _get_submodule_url(self, submodule_path):
483 git_modules_path = '.gitmodules'
482 git_modules_path = '.gitmodules'
484
483
485 if self._submodules is None:
484 if self._submodules is None:
486 self._submodules = {}
485 self._submodules = {}
487
486
488 try:
487 try:
489 submodules_node = self.get_node(git_modules_path)
488 submodules_node = self.get_node(git_modules_path)
490 except NodeDoesNotExistError:
489 except NodeDoesNotExistError:
491 return None
490 return None
492
491
493 content = submodules_node.content
492 content = submodules_node.content
494
493
495 # ConfigParser fails if there are whitespaces
494 # ConfigParser fails if there are whitespaces
496 content = '\n'.join(l.strip() for l in content.split('\n'))
495 content = '\n'.join(l.strip() for l in content.split('\n'))
497
496
498 parser = configparser.ConfigParser()
497 parser = configparser.ConfigParser()
499 parser.readfp(StringIO(content))
498 parser.readfp(StringIO(content))
500
499
501 for section in parser.sections():
500 for section in parser.sections():
502 path = parser.get(section, 'path')
501 path = parser.get(section, 'path')
503 url = parser.get(section, 'url')
502 url = parser.get(section, 'url')
504 if path and url:
503 if path and url:
505 self._submodules[path.strip('/')] = url
504 self._submodules[path.strip('/')] = url
506
505
507 return self._submodules.get(submodule_path.strip('/'))
506 return self._submodules.get(submodule_path.strip('/'))
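.gitmodules is ordinary INI, so once the indentation is stripped it can be read with the standard ConfigParser, exactly as done above. A self-contained sketch written against the Python 3 module name (the codebase itself goes through its configparser compat shim and the readfp API):

import io
import configparser

content = """\
[submodule "vendor/lib"]
    path = vendor/lib
    url = https://example.com/vendor/lib.git
"""
# ConfigParser fails on leading whitespace, so strip every line first
content = '\n'.join(l.strip() for l in content.split('\n'))

parser = configparser.ConfigParser()
parser.read_file(io.StringIO(content))

submodules = {}
for section in parser.sections():
    submodules[parser.get(section, 'path').strip('/')] = parser.get(section, 'url')
print(submodules)  # {'vendor/lib': 'https://example.com/vendor/lib.git'}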
@@ -1,850 +1,850 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module holding everything related to vcs nodes, with vcs2 architecture.
22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 """
23 """
24
24
25 import os
25 import os
26 import stat
26 import stat
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 from rhodecode.lib.utils import safe_unicode, safe_str
31 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.utils2 import md5
32 from rhodecode.lib.utils2 import md5
33 from rhodecode.lib.vcs import path as vcspath
33 from rhodecode.lib.vcs import path as vcspath
34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37
37
38 LARGEFILE_PREFIX = '.hglf'
38 LARGEFILE_PREFIX = '.hglf'
39
39
40
40
41 class NodeKind:
41 class NodeKind:
42 SUBMODULE = -1
42 SUBMODULE = -1
43 DIR = 1
43 DIR = 1
44 FILE = 2
44 FILE = 2
45 LARGEFILE = 3
45 LARGEFILE = 3
46
46
47
47
48 class NodeState:
48 class NodeState:
49 ADDED = u'added'
49 ADDED = u'added'
50 CHANGED = u'changed'
50 CHANGED = u'changed'
51 NOT_CHANGED = u'not changed'
51 NOT_CHANGED = u'not changed'
52 REMOVED = u'removed'
52 REMOVED = u'removed'
53
53
54
54
55 class NodeGeneratorBase(object):
55 class NodeGeneratorBase(object):
56 """
56 """
57 Base class for removed, added and changed filenodes; it's a lazy generator
57 Base class for removed, added and changed filenodes; it's a lazy generator
58 class that will create filenodes only on iteration or call
58 class that will create filenodes only on iteration or call
59
59
60 The len method doesn't need to create filenodes at all
60 The len method doesn't need to create filenodes at all
61 """
61 """
62
62
63 def __init__(self, current_paths, cs):
63 def __init__(self, current_paths, cs):
64 self.cs = cs
64 self.cs = cs
65 self.current_paths = current_paths
65 self.current_paths = current_paths
66
66
67 def __call__(self):
67 def __call__(self):
68 return [n for n in self]
68 return [n for n in self]
69
69
70 def __getslice__(self, i, j):
70 def __getslice__(self, i, j):
71 for p in self.current_paths[i:j]:
71 for p in self.current_paths[i:j]:
72 yield self.cs.get_node(p)
72 yield self.cs.get_node(p)
73
73
74 def __len__(self):
74 def __len__(self):
75 return len(self.current_paths)
75 return len(self.current_paths)
76
76
77 def __iter__(self):
77 def __iter__(self):
78 for p in self.current_paths:
78 for p in self.current_paths:
79 yield self.cs.get_node(p)
79 yield self.cs.get_node(p)
80
80
81
81
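These generator classes hold only the list of paths and ask the commit for real nodes on demand, which is what keeps len() free of any node construction. A stripped-down stand-alone sketch of the same idea (DemoCommit stands in for a real commit object):

class LazyNodes(object):
    def __init__(self, current_paths, cs):
        self.current_paths = current_paths
        self.cs = cs

    def __len__(self):
        # no nodes are built just to count them
        return len(self.current_paths)

    def __iter__(self):
        # nodes are built one at a time, only when iterated
        for p in self.current_paths:
            yield self.cs.get_node(p)


class DemoCommit(object):
    def get_node(self, path):
        print('building node for ' + path)
        return path.upper()


nodes = LazyNodes(['a.txt', 'b.txt'], DemoCommit())
print(len(nodes))   # 2, nothing built yet
print(list(nodes))  # builds both nodes now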
82 class AddedFileNodesGenerator(NodeGeneratorBase):
82 class AddedFileNodesGenerator(NodeGeneratorBase):
83 """
83 """
84 Class holding added files for current commit
84 Class holding added files for current commit
85 """
85 """
86
86
87
87
88 class ChangedFileNodesGenerator(NodeGeneratorBase):
88 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 """
89 """
90 Class holding changed files for current commit
90 Class holding changed files for current commit
91 """
91 """
92
92
93
93
94 class RemovedFileNodesGenerator(NodeGeneratorBase):
94 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 """
95 """
96 Class holding removed files for current commit
96 Class holding removed files for current commit
97 """
97 """
98 def __iter__(self):
98 def __iter__(self):
99 for p in self.current_paths:
99 for p in self.current_paths:
100 yield RemovedFileNode(path=p)
100 yield RemovedFileNode(path=p)
101
101
102 def __getslice__(self, i, j):
102 def __getslice__(self, i, j):
103 for p in self.current_paths[i:j]:
103 for p in self.current_paths[i:j]:
104 yield RemovedFileNode(path=p)
104 yield RemovedFileNode(path=p)
105
105
106
106
107 class Node(object):
107 class Node(object):
108 """
108 """
109 Simplest class representing file or directory on repository. SCM backends
109 Simplest class representing file or directory on repository. SCM backends
110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 directly.
111 directly.
112
112
113 Node's ``path`` cannot start with slash as we operate on *relative* paths
113 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 only. Moreover, every single node is identified by the ``path`` attribute,
114 only. Moreover, every single node is identified by the ``path`` attribute,
115 so it cannot end with a slash either. Otherwise, the path could lead to mistakes.
115 so it cannot end with a slash either. Otherwise, the path could lead to mistakes.
116 """
116 """
117 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
117 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 # security attacks could be used with this
118 # security attacks could be used with this
119 commit = None
119 commit = None
120
120
121 def __init__(self, path, kind):
121 def __init__(self, path, kind):
122 self._validate_path(path) # can throw exception if path is invalid
122 self._validate_path(path) # can throw exception if path is invalid
123 self.path = safe_str(path.rstrip('/')) # we store paths as str
123 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 if path == '' and kind != NodeKind.DIR:
124 if path == '' and kind != NodeKind.DIR:
125 raise NodeError("Only DirNode and its subclasses may be "
125 raise NodeError("Only DirNode and its subclasses may be "
126 "initialized with empty path")
126 "initialized with empty path")
127 self.kind = kind
127 self.kind = kind
128
128
129 if self.is_root() and not self.is_dir():
129 if self.is_root() and not self.is_dir():
130 raise NodeError("Root node cannot be FILE kind")
130 raise NodeError("Root node cannot be FILE kind")
131
131
132 def _validate_path(self, path):
132 def _validate_path(self, path):
133 if path.startswith('/'):
133 if path.startswith('/'):
134 raise NodeError(
134 raise NodeError(
135 "Cannot initialize Node objects with slash at "
135 "Cannot initialize Node objects with slash at "
136 "the beginning as only relative paths are supported. "
136 "the beginning as only relative paths are supported. "
137 "Got %s" % (path,))
137 "Got %s" % (path,))
138
138
139 @LazyProperty
139 @LazyProperty
140 def parent(self):
140 def parent(self):
141 parent_path = self.get_parent_path()
141 parent_path = self.get_parent_path()
142 if parent_path:
142 if parent_path:
143 if self.commit:
143 if self.commit:
144 return self.commit.get_node(parent_path)
144 return self.commit.get_node(parent_path)
145 return DirNode(parent_path)
145 return DirNode(parent_path)
146 return None
146 return None
147
147
148 @LazyProperty
148 @LazyProperty
149 def unicode_path(self):
149 def unicode_path(self):
150 return safe_unicode(self.path)
150 return safe_unicode(self.path)
151
151
152 @LazyProperty
152 @LazyProperty
153 def has_rtlo(self):
153 def has_rtlo(self):
154 """Detects if a path has right-to-left-override marker"""
154 """Detects if a path has right-to-left-override marker"""
155 return self.RTLO_MARKER in self.unicode_path
155 return self.RTLO_MARKER in self.unicode_path
156
156
157 @LazyProperty
157 @LazyProperty
158 def unicode_path_safe(self):
158 def unicode_path_safe(self):
159 """
159 """
160 Special SAFE representation of path without the right-to-left-override.
160 Special SAFE representation of path without the right-to-left-override.
161 This should be only used for "showing" the file, cannot be used for any
161 This should be only used for "showing" the file, cannot be used for any
162 urls etc.
162 urls etc.
163 """
163 """
164 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
164 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165
165
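The RTLO handling above exists because a U+202E character in a filename visually reverses everything after it, which can disguise an executable as a harmless document. A quick stand-alone demonstration of the detection and of the display-only stripping:

RTLO_MARKER = u"\u202E"

path = u"invoice" + RTLO_MARKER + u"fdp.exe"   # renders roughly as "invoiceexe.pdf"
print(RTLO_MARKER in path)                     # True, i.e. has_rtlo
print(path.replace(RTLO_MARKER, u""))          # safe form for display, not for URLs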
166 @LazyProperty
166 @LazyProperty
167 def dir_path(self):
167 def dir_path(self):
168 """
168 """
169 Returns name of the directory from full path of this vcs node. Empty
169 Returns name of the directory from full path of this vcs node. Empty
170 string is returned if there's no directory in the path
170 string is returned if there's no directory in the path
171 """
171 """
172 _parts = self.path.rstrip('/').rsplit('/', 1)
172 _parts = self.path.rstrip('/').rsplit('/', 1)
173 if len(_parts) == 2:
173 if len(_parts) == 2:
174 return safe_unicode(_parts[0])
174 return safe_unicode(_parts[0])
175 return u''
175 return u''
176
176
177 @LazyProperty
177 @LazyProperty
178 def name(self):
178 def name(self):
179 """
179 """
180 Returns name of the node, so if its path is 'some/dir/file.txt'
180 Returns name of the node, so if its path is 'some/dir/file.txt'
181 then only 'file.txt' is returned.
181 then only 'file.txt' is returned.
182 """
182 """
183 return safe_unicode(self.path.rstrip('/').split('/')[-1])
183 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184
184
185 @property
185 @property
186 def kind(self):
186 def kind(self):
187 return self._kind
187 return self._kind
188
188
189 @kind.setter
189 @kind.setter
190 def kind(self, kind):
190 def kind(self, kind):
191 if hasattr(self, '_kind'):
191 if hasattr(self, '_kind'):
192 raise NodeError("Cannot change node's kind")
192 raise NodeError("Cannot change node's kind")
193 else:
193 else:
194 self._kind = kind
194 self._kind = kind
195 # Post setter check (path's trailing slash)
195 # Post setter check (path's trailing slash)
196 if self.path.endswith('/'):
196 if self.path.endswith('/'):
197 raise NodeError("Node's path cannot end with slash")
197 raise NodeError("Node's path cannot end with slash")
198
198
199 def __cmp__(self, other):
199 def __cmp__(self, other):
200 """
200 """
201 Comparator using name of the node, needed for quick list sorting.
201 Comparator using name of the node, needed for quick list sorting.
202 """
202 """
203
203
204 kind_cmp = cmp(self.kind, other.kind)
204 kind_cmp = cmp(self.kind, other.kind)
205 if kind_cmp:
205 if kind_cmp:
206 if isinstance(self, SubModuleNode):
206 if isinstance(self, SubModuleNode):
207 # we make submodules equal to dirnode for "sorting" purposes
207 # we make submodules equal to dirnode for "sorting" purposes
208 return NodeKind.DIR
208 return NodeKind.DIR
209 return kind_cmp
209 return kind_cmp
210 return cmp(self.name, other.name)
210 return cmp(self.name, other.name)
211
211
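Since NodeKind.DIR (1) sorts below NodeKind.FILE (2), comparing by kind first and name second lists directories before files, which is what get_nodes relies on when it calls nodes.sort(). An equivalent key-based sort over plain tuples, for illustration only:

DIR, FILE = 1, 2

entries = [('zebra.txt', FILE), ('alpha', DIR), ('beta.txt', FILE), ('src', DIR)]
entries.sort(key=lambda entry: (entry[1], entry[0]))  # kind first, then name
print([name for name, _ in entries])  # ['alpha', 'src', 'beta.txt', 'zebra.txt']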
212 def __eq__(self, other):
212 def __eq__(self, other):
213 for attr in ['name', 'path', 'kind']:
213 for attr in ['name', 'path', 'kind']:
214 if getattr(self, attr) != getattr(other, attr):
214 if getattr(self, attr) != getattr(other, attr):
215 return False
215 return False
216 if self.is_file():
216 if self.is_file():
217 if self.content != other.content:
217 if self.content != other.content:
218 return False
218 return False
219 else:
219 else:
220 # For DirNode's check without entering each dir
220 # For DirNode's check without entering each dir
221 self_nodes_paths = list(sorted(n.path for n in self.nodes))
221 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 other_nodes_paths = list(sorted(n.path for n in other.nodes))
222 other_nodes_paths = list(sorted(n.path for n in other.nodes))
223 if self_nodes_paths != other_nodes_paths:
223 if self_nodes_paths != other_nodes_paths:
224 return False
224 return False
225 return True
225 return True
226
226
227 def __ne__(self, other):
227 def __ne__(self, other):
228 return not self.__eq__(other)
228 return not self.__eq__(other)
229
229
230 def __repr__(self):
230 def __repr__(self):
231 return '<%s %r>' % (self.__class__.__name__, self.path)
231 return '<%s %r>' % (self.__class__.__name__, self.path)
232
232
233 def __str__(self):
233 def __str__(self):
234 return self.__repr__()
234 return self.__repr__()
235
235
236 def __unicode__(self):
236 def __unicode__(self):
237 return self.name
237 return self.name
238
238
239 def get_parent_path(self):
239 def get_parent_path(self):
240 """
240 """
241 Returns node's parent path or empty string if node is root.
241 Returns node's parent path or empty string if node is root.
242 """
242 """
243 if self.is_root():
243 if self.is_root():
244 return ''
244 return ''
245 return vcspath.dirname(self.path.rstrip('/')) + '/'
245 return vcspath.dirname(self.path.rstrip('/')) + '/'
246
246
247 def is_file(self):
247 def is_file(self):
248 """
248 """
249 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
249 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 otherwise.
250 otherwise.
251 """
251 """
252 return self.kind == NodeKind.FILE
252 return self.kind == NodeKind.FILE
253
253
254 def is_dir(self):
254 def is_dir(self):
255 """
255 """
256 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
256 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 otherwise.
257 otherwise.
258 """
258 """
259 return self.kind == NodeKind.DIR
259 return self.kind == NodeKind.DIR
260
260
261 def is_root(self):
261 def is_root(self):
262 """
262 """
263 Returns ``True`` if node is a root node and ``False`` otherwise.
263 Returns ``True`` if node is a root node and ``False`` otherwise.
264 """
264 """
265 return self.kind == NodeKind.DIR and self.path == ''
265 return self.kind == NodeKind.DIR and self.path == ''
266
266
267 def is_submodule(self):
267 def is_submodule(self):
268 """
268 """
269 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
269 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 otherwise.
270 otherwise.
271 """
271 """
272 return self.kind == NodeKind.SUBMODULE
272 return self.kind == NodeKind.SUBMODULE
273
273
274 def is_largefile(self):
274 def is_largefile(self):
275 """
275 """
276 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
276 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 otherwise
277 otherwise
278 """
278 """
279 return self.kind == NodeKind.LARGEFILE
279 return self.kind == NodeKind.LARGEFILE
280
280
281 def is_link(self):
281 def is_link(self):
282 if self.commit:
282 if self.commit:
283 return self.commit.is_link(self.path)
283 return self.commit.is_link(self.path)
284 return False
284 return False
285
285
286 @LazyProperty
286 @LazyProperty
287 def added(self):
287 def added(self):
288 return self.state is NodeState.ADDED
288 return self.state is NodeState.ADDED
289
289
290 @LazyProperty
290 @LazyProperty
291 def changed(self):
291 def changed(self):
292 return self.state is NodeState.CHANGED
292 return self.state is NodeState.CHANGED
293
293
294 @LazyProperty
294 @LazyProperty
295 def not_changed(self):
295 def not_changed(self):
296 return self.state is NodeState.NOT_CHANGED
296 return self.state is NodeState.NOT_CHANGED
297
297
298 @LazyProperty
298 @LazyProperty
299 def removed(self):
299 def removed(self):
300 return self.state is NodeState.REMOVED
300 return self.state is NodeState.REMOVED
301
301
302
302
303 class FileNode(Node):
303 class FileNode(Node):
304 """
304 """
305 Class representing file nodes.
305 Class representing file nodes.
306
306
307 :attribute: path: path to the node, relative to repository's root
307 :attribute: path: path to the node, relative to repository's root
308 :attribute: content: if given, sets arbitrary content of the file
308 :attribute: content: if given, sets arbitrary content of the file
309 :attribute: commit: if given, content is fetched from it on first access
309 :attribute: commit: if given, content is fetched from it on first access
310 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
310 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
311 """
311 """
312 _filter_pre_load = []
312 _filter_pre_load = []
313
313
314 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
314 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
315 """
315 """
316 Only one of ``content`` and ``commit`` may be given. Passing both
316 Only one of ``content`` and ``commit`` may be given. Passing both
317 would raise ``NodeError`` exception.
317 would raise ``NodeError`` exception.
318
318
319 :param path: relative path to the node
319 :param path: relative path to the node
320 :param content: content may be passed to constructor
320 :param content: content may be passed to constructor
321 :param commit: if given, will use it to lazily fetch content
321 :param commit: if given, will use it to lazily fetch content
322 :param mode: ST_MODE (i.e. 0100644)
322 :param mode: ST_MODE (i.e. 0100644)
323 """
323 """
324 if content and commit:
324 if content and commit:
325 raise NodeError("Cannot use both content and commit")
325 raise NodeError("Cannot use both content and commit")
326 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
326 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
327 self.commit = commit
327 self.commit = commit
328 self._content = content
328 self._content = content
329 self._mode = mode or FILEMODE_DEFAULT
329 self._mode = mode or FILEMODE_DEFAULT
330
330
331 self._set_bulk_properties(pre_load)
331 self._set_bulk_properties(pre_load)
332
332
333 def _set_bulk_properties(self, pre_load):
333 def _set_bulk_properties(self, pre_load):
334 if not pre_load:
334 if not pre_load:
335 return
335 return
336 pre_load = [entry for entry in pre_load
336 pre_load = [entry for entry in pre_load
337 if entry not in self._filter_pre_load]
337 if entry not in self._filter_pre_load]
338 if not pre_load:
338 if not pre_load:
339 return
339 return
340
340
341 for attr_name in pre_load:
341 for attr_name in pre_load:
342 result = getattr(self, attr_name)
342 result = getattr(self, attr_name)
343 if callable(result):
343 if callable(result):
344 result = result()
344 result = result()
345 self.__dict__[attr_name] = result
345 self.__dict__[attr_name] = result
346
346
347 @LazyProperty
347 @LazyProperty
348 def mode(self):
348 def mode(self):
349 """
349 """
350 Returns lazily mode of the FileNode. If `commit` is not set, would
350 Returns lazily mode of the FileNode. If `commit` is not set, would
351 use value given at initialization or `FILEMODE_DEFAULT` (default).
351 use value given at initialization or `FILEMODE_DEFAULT` (default).
352 """
352 """
353 if self.commit:
353 if self.commit:
354 mode = self.commit.get_file_mode(self.path)
354 mode = self.commit.get_file_mode(self.path)
355 else:
355 else:
356 mode = self._mode
356 mode = self._mode
357 return mode
357 return mode
358
358
359 @LazyProperty
359 @LazyProperty
360 def raw_bytes(self):
360 def raw_bytes(self):
361 """
361 """
362 Returns lazily the raw bytes of the FileNode.
362 Returns lazily the raw bytes of the FileNode.
363 """
363 """
364 if self.commit:
364 if self.commit:
365 if self._content is None:
365 if self._content is None:
366 self._content = self.commit.get_file_content(self.path)
366 self._content = self.commit.get_file_content(self.path)
367 content = self._content
367 content = self._content
368 else:
368 else:
369 content = self._content
369 content = self._content
370 return content
370 return content
371
371
372 @LazyProperty
372 @LazyProperty
373 def md5(self):
373 def md5(self):
374 """
374 """
375 Returns md5 of the file node.
375 Returns md5 of the file node.
376 """
376 """
377 return md5(self.raw_bytes)
377 return md5(self.raw_bytes)
378
378
379 def metadata_uncached(self):
379 def metadata_uncached(self):
380 """
380 """
381 Returns the binary flag, md5, size and content of the file node, without any cache usage.
381 Returns the binary flag, md5, size and content of the file node, without any cache usage.
382 """
382 """
383
383
384 content = self.content_uncached()
384 content = self.content_uncached()
385
385
386 is_binary = content and '\0' in content
386 is_binary = content and '\0' in content
387 size = 0
387 size = 0
388 if content:
388 if content:
389 size = len(content)
389 size = len(content)
390
390
391 return is_binary, md5(content), size, content
391 return is_binary, md5(content), size, content
392
392
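# NOTE: annotation, not part of the original module. ``metadata_uncached``
# deliberately bypasses the LazyProperty caches and computes everything from a
# single fresh read; a hedged sketch of how a caller might unpack the result:
#
#   is_binary, md5_hash, size, content = node.metadata_uncached()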
393 def content_uncached(self):
393 def content_uncached(self):
394 """
394 """
395 Returns content of the FileNode without any caching: it is re-read
395 Returns content of the FileNode without any caching: it is re-read
396 from the commit (or from the value given at initialization) on every call.
396 from the commit (or from the value given at initialization) on every call.
397 """
397 """
398 if self.commit:
398 if self.commit:
399 content = self.commit.get_file_content(self.path)
399 content = self.commit.get_file_content(self.path)
400 else:
400 else:
401 content = self._content
401 content = self._content
402 return content
402 return content
403
403
404 @LazyProperty
404 @LazyProperty
405 def content(self):
405 def content(self):
406 """
406 """
407 Lazily returns the content of the FileNode. Unless the file is binary,
407 Lazily returns the content of the FileNode. Unless the file is binary,
408 the content is decoded to unicode (UTF-8 where possible).
408 the content is decoded to unicode (UTF-8 where possible).
409 """
409 """
410 content = self.raw_bytes
410 content = self.raw_bytes
411
411
412 if self.is_binary:
412 if self.is_binary:
413 return content
413 return content
414 return safe_unicode(content)
414 return safe_unicode(content)
415
415
416 @LazyProperty
416 @LazyProperty
417 def size(self):
417 def size(self):
418 if self.commit:
418 if self.commit:
419 return self.commit.get_file_size(self.path)
419 return self.commit.get_file_size(self.path)
420 raise NodeError(
420 raise NodeError(
421 "Cannot retrieve size of the file without related "
421 "Cannot retrieve size of the file without related "
422 "commit attribute")
422 "commit attribute")
423
423
424 @LazyProperty
424 @LazyProperty
425 def message(self):
425 def message(self):
426 if self.commit:
426 if self.commit:
427 return self.last_commit.message
427 return self.last_commit.message
428 raise NodeError(
428 raise NodeError(
429 "Cannot retrieve message of the file without related "
429 "Cannot retrieve message of the file without related "
430 "commit attribute")
430 "commit attribute")
431
431
432 @LazyProperty
432 @LazyProperty
433 def last_commit(self):
433 def last_commit(self):
434 if self.commit:
434 if self.commit:
435 pre_load = ["author", "date", "message"]
435 pre_load = ["author", "date", "message", "parents"]
436 return self.commit.get_path_commit(self.path, pre_load=pre_load)
436 return self.commit.get_path_commit(self.path, pre_load=pre_load)
437 raise NodeError(
437 raise NodeError(
438 "Cannot retrieve last commit of the file without "
438 "Cannot retrieve last commit of the file without "
439 "related commit attribute")
439 "related commit attribute")
440
440
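# NOTE: annotation, not part of the original module. This changeset extends the
# pre_load list with "parents", so the commit returned by ``get_path_commit``
# already carries author, date, message and parents from a single backend call
# instead of lazily fetching each attribute later. The same change is applied
# to ``annotate`` and ``DirNode.last_commit`` below.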
441 def get_mimetype(self):
441 def get_mimetype(self):
442 """
442 """
443 Mimetype is calculated based on the file's content. If ``_mimetype``
443 Mimetype is calculated based on the file's content. If ``_mimetype``
444 attribute is available, it will be returned (backends which store
444 attribute is available, it will be returned (backends which store
445 mimetypes or can easily recognize them, should set this private
445 mimetypes or can easily recognize them, should set this private
446 attribute to indicate that type should *NOT* be calculated).
446 attribute to indicate that type should *NOT* be calculated).
447 """
447 """
448
448
449 if hasattr(self, '_mimetype'):
449 if hasattr(self, '_mimetype'):
450 if (isinstance(self._mimetype, (tuple, list,)) and
450 if (isinstance(self._mimetype, (tuple, list,)) and
451 len(self._mimetype) == 2):
451 len(self._mimetype) == 2):
452 return self._mimetype
452 return self._mimetype
453 else:
453 else:
454 raise NodeError('given _mimetype attribute must be a 2-'
454 raise NodeError('given _mimetype attribute must be a 2-'
455 'element list or tuple')
455 'element list or tuple')
456
456
457 db = get_mimetypes_db()
457 db = get_mimetypes_db()
458 mtype, encoding = db.guess_type(self.name)
458 mtype, encoding = db.guess_type(self.name)
459
459
460 if mtype is None:
460 if mtype is None:
461 if self.is_binary:
461 if self.is_binary:
462 mtype = 'application/octet-stream'
462 mtype = 'application/octet-stream'
463 encoding = None
463 encoding = None
464 else:
464 else:
465 mtype = 'text/plain'
465 mtype = 'text/plain'
466 encoding = None
466 encoding = None
467
467
468 # try with pygments
468 # try with pygments
469 try:
469 try:
470 from pygments.lexers import get_lexer_for_filename
470 from pygments.lexers import get_lexer_for_filename
471 mt = get_lexer_for_filename(self.name).mimetypes
471 mt = get_lexer_for_filename(self.name).mimetypes
472 except Exception:
472 except Exception:
473 mt = None
473 mt = None
474
474
475 if mt:
475 if mt:
476 mtype = mt[0]
476 mtype = mt[0]
477
477
478 return mtype, encoding
478 return mtype, encoding
479
479
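# NOTE: annotation, not part of the original module. The lookup order above is:
# explicit ``_mimetype`` -> mimetypes database -> binary/text fallback ->
# pygments refinement. A rough sketch with illustrative, content-only nodes
# (exact mimetype strings depend on the local mimetypes db and pygments):
#
#   FileNode('setup.py', content='print(1)\n').get_mimetype()
#   # probably ('text/x-python', None)
#   FileNode('blob', content='\x00\x01').get_mimetype()
#   # ('application/octet-stream', None) via the binary fallback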
480 @LazyProperty
480 @LazyProperty
481 def mimetype(self):
481 def mimetype(self):
482 """
482 """
483 Wrapper around full mimetype info. It returns only the type of the
483 Wrapper around full mimetype info. It returns only the type of the
484 fetched mimetype, without the encoding part. Use the get_mimetype
484 fetched mimetype, without the encoding part. Use the get_mimetype
485 function to fetch the full (type, encoding) tuple.
485 function to fetch the full (type, encoding) tuple.
486 """
486 """
487 return self.get_mimetype()[0]
487 return self.get_mimetype()[0]
488
488
489 @LazyProperty
489 @LazyProperty
490 def mimetype_main(self):
490 def mimetype_main(self):
491 return self.mimetype.split('/')[0]
491 return self.mimetype.split('/')[0]
492
492
493 @classmethod
493 @classmethod
494 def get_lexer(cls, filename, content=None):
494 def get_lexer(cls, filename, content=None):
495 from pygments import lexers
495 from pygments import lexers
496
496
497 extension = filename.split('.')[-1]
497 extension = filename.split('.')[-1]
498 lexer = None
498 lexer = None
499
499
500 try:
500 try:
501 lexer = lexers.guess_lexer_for_filename(
501 lexer = lexers.guess_lexer_for_filename(
502 filename, content, stripnl=False)
502 filename, content, stripnl=False)
503 except lexers.ClassNotFound:
503 except lexers.ClassNotFound:
504 lexer = None
504 lexer = None
505
505
506 # try our EXTENSION_MAP
506 # try our EXTENSION_MAP
507 if not lexer:
507 if not lexer:
508 try:
508 try:
509 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
509 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
510 if lexer_class:
510 if lexer_class:
511 lexer = lexers.get_lexer_by_name(lexer_class[0])
511 lexer = lexers.get_lexer_by_name(lexer_class[0])
512 except lexers.ClassNotFound:
512 except lexers.ClassNotFound:
513 lexer = None
513 lexer = None
514
514
515 if not lexer:
515 if not lexer:
516 lexer = lexers.TextLexer(stripnl=False)
516 lexer = lexers.TextLexer(stripnl=False)
517
517
518 return lexer
518 return lexer
519
519
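# NOTE: annotation, not part of the original module. ``get_lexer`` falls back
# from pygments' filename/content guess to the internal
# LANGUAGES_EXTENSIONS_MAP and finally to a plain TextLexer; a small
# illustrative sketch (lexer choices are what pygments typically returns):
#
#   FileNode.get_lexer('app.py', 'import os\n')        # a Python lexer
#   FileNode.get_lexer('notes.noext', 'plain text')    # TextLexer fallback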
520 @LazyProperty
520 @LazyProperty
521 def lexer(self):
521 def lexer(self):
522 """
522 """
523 Returns a pygments lexer instance. Tries to guess the lexer based on
523 Returns a pygments lexer instance. Tries to guess the lexer based on
524 the file's content, name and mimetype.
524 the file's content, name and mimetype.
525 """
525 """
526 return self.get_lexer(self.name, self.content)
526 return self.get_lexer(self.name, self.content)
527
527
528 @LazyProperty
528 @LazyProperty
529 def lexer_alias(self):
529 def lexer_alias(self):
530 """
530 """
531 Returns first alias of the lexer guessed for this file.
531 Returns first alias of the lexer guessed for this file.
532 """
532 """
533 return self.lexer.aliases[0]
533 return self.lexer.aliases[0]
534
534
535 @LazyProperty
535 @LazyProperty
536 def history(self):
536 def history(self):
537 """
537 """
538 Returns a list of commits in which this file was changed
538 Returns a list of commits in which this file was changed
539 """
539 """
540 if self.commit is None:
540 if self.commit is None:
541 raise NodeError('Unable to get commit for this FileNode')
541 raise NodeError('Unable to get commit for this FileNode')
542 return self.commit.get_path_history(self.path)
542 return self.commit.get_path_history(self.path)
543
543
544 @LazyProperty
544 @LazyProperty
545 def annotate(self):
545 def annotate(self):
546 """
546 """
547 Returns a list of three element tuples with lineno, commit and line
547 Returns a list of three element tuples with lineno, commit and line
548 """
548 """
549 if self.commit is None:
549 if self.commit is None:
550 raise NodeError('Unable to get commit for this FileNode')
550 raise NodeError('Unable to get commit for this FileNode')
551 pre_load = ["author", "date", "message"]
551 pre_load = ["author", "date", "message", "parents"]
552 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
552 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
553
553
554 @LazyProperty
554 @LazyProperty
555 def state(self):
555 def state(self):
556 if not self.commit:
556 if not self.commit:
557 raise NodeError(
557 raise NodeError(
558 "Cannot check state of the node if it's not "
558 "Cannot check state of the node if it's not "
559 "linked with commit")
559 "linked with commit")
560 elif self.path in (node.path for node in self.commit.added):
560 elif self.path in (node.path for node in self.commit.added):
561 return NodeState.ADDED
561 return NodeState.ADDED
562 elif self.path in (node.path for node in self.commit.changed):
562 elif self.path in (node.path for node in self.commit.changed):
563 return NodeState.CHANGED
563 return NodeState.CHANGED
564 else:
564 else:
565 return NodeState.NOT_CHANGED
565 return NodeState.NOT_CHANGED
566
566
567 @LazyProperty
567 @LazyProperty
568 def is_binary(self):
568 def is_binary(self):
569 """
569 """
570 Returns True if file has binary content.
570 Returns True if file has binary content.
571 """
571 """
572 _bin = self.raw_bytes and '\0' in self.raw_bytes
572 _bin = self.raw_bytes and '\0' in self.raw_bytes
573 return _bin
573 return _bin
574
574
575 @LazyProperty
575 @LazyProperty
576 def extension(self):
576 def extension(self):
577 """Returns filenode extension"""
577 """Returns filenode extension"""
578 return self.name.split('.')[-1]
578 return self.name.split('.')[-1]
579
579
580 @property
580 @property
581 def is_executable(self):
581 def is_executable(self):
582 """
582 """
583 Returns ``True`` if file has executable flag turned on.
583 Returns ``True`` if file has executable flag turned on.
584 """
584 """
585 return bool(self.mode & stat.S_IXUSR)
585 return bool(self.mode & stat.S_IXUSR)
586
586
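# NOTE: annotation, not part of the original module. The executable check only
# inspects the owner execute bit of the stat mode, e.g.:
#
#   bool(0o100755 & stat.S_IXUSR)   # True  - regular file with mode 755
#   bool(0o100644 & stat.S_IXUSR)   # False - regular file with mode 644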
587 def get_largefile_node(self):
587 def get_largefile_node(self):
588 """
588 """
589 Try to return a Mercurial FileNode from this node. It performs internal
589 Try to return a Mercurial FileNode from this node. It performs internal
590 checks inside the largefile store; if the file exists there, it
590 checks inside the largefile store; if the file exists there, it
591 creates a special LargeFileNode instance which can get its content
591 creates a special LargeFileNode instance which can get its content
592 from the LF store.
592 from the LF store.
593 """
593 """
594 if self.commit:
594 if self.commit:
595 return self.commit.get_largefile_node(self.path)
595 return self.commit.get_largefile_node(self.path)
596
596
597 def lines(self, count_empty=False):
597 def lines(self, count_empty=False):
598 all_lines, empty_lines = 0, 0
598 all_lines, empty_lines = 0, 0
599
599
600 if not self.is_binary:
600 if not self.is_binary:
601 content = self.content
601 content = self.content
602 if count_empty:
602 if count_empty:
603 all_lines = 0
603 all_lines = 0
604 empty_lines = 0
604 empty_lines = 0
605 for line in content.splitlines(True):
605 for line in content.splitlines(True):
606 if line == '\n':
606 if line == '\n':
607 empty_lines += 1
607 empty_lines += 1
608 all_lines += 1
608 all_lines += 1
609
609
610 return all_lines, all_lines - empty_lines
610 return all_lines, all_lines - empty_lines
611 else:
611 else:
612 # fast method
612 # fast method
613 empty_lines = all_lines = content.count('\n')
613 empty_lines = all_lines = content.count('\n')
614 if all_lines == 0 and content:
614 if all_lines == 0 and content:
615 # one-line without a newline
615 # one-line without a newline
616 empty_lines = all_lines = 1
616 empty_lines = all_lines = 1
617
617
618 return all_lines, empty_lines
618 return all_lines, empty_lines
619
619
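# NOTE: annotation, not part of the original module. With ``count_empty=True``
# the method returns ``(all_lines, non_empty_lines)``; otherwise it takes the
# cheaper path and returns the raw newline count for both values. A sketch
# with a content-only node:
#
#   FileNode('a.txt', content='x\n\ny\n').lines(count_empty=True)   # (3, 2)
#   FileNode('a.txt', content='x\n\ny\n').lines()                   # (3, 3)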
620 def __repr__(self):
620 def __repr__(self):
621 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
621 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
622 getattr(self.commit, 'short_id', ''))
622 getattr(self.commit, 'short_id', ''))
623
623
624
624
625 class RemovedFileNode(FileNode):
625 class RemovedFileNode(FileNode):
626 """
626 """
627 Dummy FileNode class - trying to access any public attribute except path,
627 Dummy FileNode class - trying to access any public attribute except path,
628 name, kind or state (or methods/attributes checking those) would raise
628 name, kind or state (or methods/attributes checking those) would raise
629 RemovedFileNodeError.
629 RemovedFileNodeError.
630 """
630 """
631 ALLOWED_ATTRIBUTES = [
631 ALLOWED_ATTRIBUTES = [
632 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
632 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
633 'added', 'changed', 'not_changed', 'removed'
633 'added', 'changed', 'not_changed', 'removed'
634 ]
634 ]
635
635
636 def __init__(self, path):
636 def __init__(self, path):
637 """
637 """
638 :param path: relative path to the node
638 :param path: relative path to the node
639 """
639 """
640 super(RemovedFileNode, self).__init__(path=path)
640 super(RemovedFileNode, self).__init__(path=path)
641
641
642 def __getattribute__(self, attr):
642 def __getattribute__(self, attr):
643 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
643 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
644 return super(RemovedFileNode, self).__getattribute__(attr)
644 return super(RemovedFileNode, self).__getattribute__(attr)
645 raise RemovedFileNodeError(
645 raise RemovedFileNodeError(
646 "Cannot access attribute %s on RemovedFileNode" % attr)
646 "Cannot access attribute %s on RemovedFileNode" % attr)
647
647
648 @LazyProperty
648 @LazyProperty
649 def state(self):
649 def state(self):
650 return NodeState.REMOVED
650 return NodeState.REMOVED
651
651
652
652
653 class DirNode(Node):
653 class DirNode(Node):
654 """
654 """
655 DirNode stores list of files and directories within this node.
655 DirNode stores list of files and directories within this node.
656 Nodes may be used standalone but within repository context they
656 Nodes may be used standalone but within repository context they
657 lazily fetch data within the same repository's commit.
657 lazily fetch data within the same repository's commit.
658 """
658 """
659
659
660 def __init__(self, path, nodes=(), commit=None):
660 def __init__(self, path, nodes=(), commit=None):
661 """
661 """
662 Only one of ``nodes`` and ``commit`` may be given. Passing both
662 Only one of ``nodes`` and ``commit`` may be given. Passing both
663 would raise ``NodeError`` exception.
663 would raise ``NodeError`` exception.
664
664
665 :param path: relative path to the node
665 :param path: relative path to the node
666 :param nodes: content may be passed to constructor
666 :param nodes: content may be passed to constructor
667 :param commit: if given, will use it to lazily fetch content
667 :param commit: if given, will use it to lazily fetch content
668 """
668 """
669 if nodes and commit:
669 if nodes and commit:
670 raise NodeError("Cannot use both nodes and commit")
670 raise NodeError("Cannot use both nodes and commit")
671 super(DirNode, self).__init__(path, NodeKind.DIR)
671 super(DirNode, self).__init__(path, NodeKind.DIR)
672 self.commit = commit
672 self.commit = commit
673 self._nodes = nodes
673 self._nodes = nodes
674
674
675 @LazyProperty
675 @LazyProperty
676 def content(self):
676 def content(self):
677 raise NodeError(
677 raise NodeError(
678 "%s represents a dir and has no `content` attribute" % self)
678 "%s represents a dir and has no `content` attribute" % self)
679
679
680 @LazyProperty
680 @LazyProperty
681 def nodes(self):
681 def nodes(self):
682 if self.commit:
682 if self.commit:
683 nodes = self.commit.get_nodes(self.path)
683 nodes = self.commit.get_nodes(self.path)
684 else:
684 else:
685 nodes = self._nodes
685 nodes = self._nodes
686 self._nodes_dict = dict((node.path, node) for node in nodes)
686 self._nodes_dict = dict((node.path, node) for node in nodes)
687 return sorted(nodes)
687 return sorted(nodes)
688
688
689 @LazyProperty
689 @LazyProperty
690 def files(self):
690 def files(self):
691 return sorted((node for node in self.nodes if node.is_file()))
691 return sorted((node for node in self.nodes if node.is_file()))
692
692
693 @LazyProperty
693 @LazyProperty
694 def dirs(self):
694 def dirs(self):
695 return sorted((node for node in self.nodes if node.is_dir()))
695 return sorted((node for node in self.nodes if node.is_dir()))
696
696
697 def __iter__(self):
697 def __iter__(self):
698 for node in self.nodes:
698 for node in self.nodes:
699 yield node
699 yield node
700
700
701 def get_node(self, path):
701 def get_node(self, path):
702 """
702 """
703 Returns a node from within this particular ``DirNode``, so it is now
703 Returns a node from within this particular ``DirNode``, so it is now
704 possible to fetch e.g. the node located at 'docs/api/index.rst' from
704 possible to fetch e.g. the node located at 'docs/api/index.rst' from
705 the 'docs' node. In order to access deeper nodes one must fetch the
705 the 'docs' node. In order to access deeper nodes one must fetch the
706 nodes between them first - this would work::
706 nodes between them first - this would work::
707
707
708 docs = root.get_node('docs')
708 docs = root.get_node('docs')
709 docs.get_node('api').get_node('index.rst')
709 docs.get_node('api').get_node('index.rst')
710
710
711 :param path: path relative to the current node
711 :param path: path relative to the current node
712
712
713 .. note::
713 .. note::
714 To access nodes lazily (as in the example above) the node has to be
714 To access nodes lazily (as in the example above) the node has to be
715 initialized with a related commit object - without it the node is out
715 initialized with a related commit object - without it the node is out
716 of context and may know nothing about anything other than its nearest
716 of context and may know nothing about anything other than its nearest
717 (located at the same level) nodes.
717 (located at the same level) nodes.
718 """
718 """
719 try:
719 try:
720 path = path.rstrip('/')
720 path = path.rstrip('/')
721 if path == '':
721 if path == '':
722 raise NodeError("Cannot retrieve node without path")
722 raise NodeError("Cannot retrieve node without path")
723 self.nodes # access nodes first in order to set _nodes_dict
723 self.nodes # access nodes first in order to set _nodes_dict
724 paths = path.split('/')
724 paths = path.split('/')
725 if len(paths) == 1:
725 if len(paths) == 1:
726 if not self.is_root():
726 if not self.is_root():
727 path = '/'.join((self.path, paths[0]))
727 path = '/'.join((self.path, paths[0]))
728 else:
728 else:
729 path = paths[0]
729 path = paths[0]
730 return self._nodes_dict[path]
730 return self._nodes_dict[path]
731 elif len(paths) > 1:
731 elif len(paths) > 1:
732 if self.commit is None:
732 if self.commit is None:
733 raise NodeError(
733 raise NodeError(
734 "Cannot access deeper nodes without commit")
734 "Cannot access deeper nodes without commit")
735 else:
735 else:
736 path1, path2 = paths[0], '/'.join(paths[1:])
736 path1, path2 = paths[0], '/'.join(paths[1:])
737 return self.get_node(path1).get_node(path2)
737 return self.get_node(path1).get_node(path2)
738 else:
738 else:
739 raise KeyError
739 raise KeyError
740 except KeyError:
740 except KeyError:
741 raise NodeError("Node does not exist at %s" % path)
741 raise NodeError("Node does not exist at %s" % path)
742
742
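# NOTE: annotation, not part of the original module. When a ``commit`` is
# attached, a deep path such as 'docs/api/index.rst' is resolved recursively
# (split into 'docs' and 'api/index.rst'); without a commit only nodes at the
# same level are reachable and deeper lookups raise NodeError.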
743 @LazyProperty
743 @LazyProperty
744 def state(self):
744 def state(self):
745 raise NodeError("Cannot access state of DirNode")
745 raise NodeError("Cannot access state of DirNode")
746
746
747 @LazyProperty
747 @LazyProperty
748 def size(self):
748 def size(self):
749 size = 0
749 size = 0
750 for root, dirs, files in self.commit.walk(self.path):
750 for root, dirs, files in self.commit.walk(self.path):
751 for f in files:
751 for f in files:
752 size += f.size
752 size += f.size
753
753
754 return size
754 return size
755
755
756 @LazyProperty
756 @LazyProperty
757 def last_commit(self):
757 def last_commit(self):
758 if self.commit:
758 if self.commit:
759 pre_load = ["author", "date", "message"]
759 pre_load = ["author", "date", "message", "parents"]
760 return self.commit.get_path_commit(self.path, pre_load=pre_load)
760 return self.commit.get_path_commit(self.path, pre_load=pre_load)
761 raise NodeError(
761 raise NodeError(
762 "Cannot retrieve last commit of the file without "
762 "Cannot retrieve last commit of the file without "
763 "related commit attribute")
763 "related commit attribute")
764
764
765 def __repr__(self):
765 def __repr__(self):
766 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
766 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
767 getattr(self.commit, 'short_id', ''))
767 getattr(self.commit, 'short_id', ''))
768
768
769
769
770 class RootNode(DirNode):
770 class RootNode(DirNode):
771 """
771 """
772 DirNode being the root node of the repository.
772 DirNode being the root node of the repository.
773 """
773 """
774
774
775 def __init__(self, nodes=(), commit=None):
775 def __init__(self, nodes=(), commit=None):
776 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
776 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
777
777
778 def __repr__(self):
778 def __repr__(self):
779 return '<%s>' % self.__class__.__name__
779 return '<%s>' % self.__class__.__name__
780
780
781
781
782 class SubModuleNode(Node):
782 class SubModuleNode(Node):
783 """
783 """
784 represents a SubModule of Git or SubRepo of Mercurial
784 represents a SubModule of Git or SubRepo of Mercurial
785 """
785 """
786 is_binary = False
786 is_binary = False
787 size = 0
787 size = 0
788
788
789 def __init__(self, name, url=None, commit=None, alias=None):
789 def __init__(self, name, url=None, commit=None, alias=None):
790 self.path = name
790 self.path = name
791 self.kind = NodeKind.SUBMODULE
791 self.kind = NodeKind.SUBMODULE
792 self.alias = alias
792 self.alias = alias
793
793
794 # we have to use EmptyCommit here since this can point to svn/git/hg
794 # we have to use EmptyCommit here since this can point to svn/git/hg
795 # submodules we cannot get from repository
795 # submodules we cannot get from repository
796 self.commit = EmptyCommit(str(commit), alias=alias)
796 self.commit = EmptyCommit(str(commit), alias=alias)
797 self.url = url or self._extract_submodule_url()
797 self.url = url or self._extract_submodule_url()
798
798
799 def __repr__(self):
799 def __repr__(self):
800 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
800 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
801 getattr(self.commit, 'short_id', ''))
801 getattr(self.commit, 'short_id', ''))
802
802
803 def _extract_submodule_url(self):
803 def _extract_submodule_url(self):
804 # TODO: find a way to parse gits submodule file and extract the
804 # TODO: find a way to parse gits submodule file and extract the
805 # linking URL
805 # linking URL
806 return self.path
806 return self.path
807
807
808 @LazyProperty
808 @LazyProperty
809 def name(self):
809 def name(self):
810 """
810 """
811 Returns the name of the node, so if it is a path
811 Returns the name of the node, so if it is a path
812 only the last part is returned.
812 only the last part is returned.
813 """
813 """
814 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
814 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
815 return u'%s @ %s' % (org, self.commit.short_id)
815 return u'%s @ %s' % (org, self.commit.short_id)
816
816
817
817
818 class LargeFileNode(FileNode):
818 class LargeFileNode(FileNode):
819
819
820 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
820 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
821 self.path = path
821 self.path = path
822 self.org_path = org_path
822 self.org_path = org_path
823 self.kind = NodeKind.LARGEFILE
823 self.kind = NodeKind.LARGEFILE
824 self.alias = alias
824 self.alias = alias
825
825
826 def _validate_path(self, path):
826 def _validate_path(self, path):
827 """
827 """
828 we override check since the LargeFileNode path is system absolute
828 we override check since the LargeFileNode path is system absolute
829 """
829 """
830 pass
830 pass
831
831
832 def __repr__(self):
832 def __repr__(self):
833 return '<%s %r>' % (self.__class__.__name__, self.path)
833 return '<%s %r>' % (self.__class__.__name__, self.path)
834
834
835 @LazyProperty
835 @LazyProperty
836 def size(self):
836 def size(self):
837 return os.stat(self.path).st_size
837 return os.stat(self.path).st_size
838
838
839 @LazyProperty
839 @LazyProperty
840 def raw_bytes(self):
840 def raw_bytes(self):
841 with open(self.path, 'rb') as f:
841 with open(self.path, 'rb') as f:
842 content = f.read()
842 content = f.read()
843 return content
843 return content
844
844
845 @LazyProperty
845 @LazyProperty
846 def name(self):
846 def name(self):
847 """
847 """
848 Overrides name to be the original largefile path
848 Overrides name to be the original largefile path
849 """
849 """
850 return self.org_path
850 return self.org_path
@@ -1,1742 +1,1742 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid import compat
33 from pyramid import compat
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.compat import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.markup_renderer import (
42 from rhodecode.lib.markup_renderer import (
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.vcs.backends.base import (
45 from rhodecode.lib.vcs.backends.base import (
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 CommitDoesNotExistError, EmptyRepositoryError)
49 CommitDoesNotExistError, EmptyRepositoryError)
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.comment import CommentsModel
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.notification import NotificationModel, \
57 from rhodecode.model.notification import NotificationModel, \
58 EmailNotificationModel
58 EmailNotificationModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.settings import VcsSettingsModel
60 from rhodecode.model.settings import VcsSettingsModel
61
61
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 # Data structure to hold the response data when updating commits during a pull
66 # Data structure to hold the response data when updating commits during a pull
67 # request update.
67 # request update.
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 'executed', 'reason', 'new', 'old', 'changes',
69 'executed', 'reason', 'new', 'old', 'changes',
70 'source_changed', 'target_changed'])
70 'source_changed', 'target_changed'])
71
71
72
72
73 class PullRequestModel(BaseModel):
73 class PullRequestModel(BaseModel):
74
74
75 cls = PullRequest
75 cls = PullRequest
76
76
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78
78
79 UPDATE_STATUS_MESSAGES = {
79 UPDATE_STATUS_MESSAGES = {
80 UpdateFailureReason.NONE: lazy_ugettext(
80 UpdateFailureReason.NONE: lazy_ugettext(
81 'Pull request update successful.'),
81 'Pull request update successful.'),
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 'Pull request update failed because of an unknown error.'),
83 'Pull request update failed because of an unknown error.'),
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 'No update needed because the source and target have not changed.'),
85 'No update needed because the source and target have not changed.'),
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 'Pull request cannot be updated because the reference type is '
87 'Pull request cannot be updated because the reference type is '
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 'This pull request cannot be updated because the target '
90 'This pull request cannot be updated because the target '
91 'reference is missing.'),
91 'reference is missing.'),
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 'This pull request cannot be updated because the source '
93 'This pull request cannot be updated because the source '
94 'reference is missing.'),
94 'reference is missing.'),
95 }
95 }
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98
98
99 def __get_pull_request(self, pull_request):
99 def __get_pull_request(self, pull_request):
100 return self._get_instance((
100 return self._get_instance((
101 PullRequest, PullRequestVersion), pull_request)
101 PullRequest, PullRequestVersion), pull_request)
102
102
103 def _check_perms(self, perms, pull_request, user, api=False):
103 def _check_perms(self, perms, pull_request, user, api=False):
104 if not api:
104 if not api:
105 return h.HasRepoPermissionAny(*perms)(
105 return h.HasRepoPermissionAny(*perms)(
106 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
107 else:
107 else:
108 return h.HasRepoPermissionAnyApi(*perms)(
108 return h.HasRepoPermissionAnyApi(*perms)(
109 user=user, repo_name=pull_request.target_repo.repo_name)
109 user=user, repo_name=pull_request.target_repo.repo_name)
110
110
111 def check_user_read(self, pull_request, user, api=False):
111 def check_user_read(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_merge(self, pull_request, user, api=False):
115 def check_user_merge(self, pull_request, user, api=False):
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 return self._check_perms(_perms, pull_request, user, api)
117 return self._check_perms(_perms, pull_request, user, api)
118
118
119 def check_user_update(self, pull_request, user, api=False):
119 def check_user_update(self, pull_request, user, api=False):
120 owner = user.user_id == pull_request.user_id
120 owner = user.user_id == pull_request.user_id
121 return self.check_user_merge(pull_request, user, api) or owner
121 return self.check_user_merge(pull_request, user, api) or owner
122
122
123 def check_user_delete(self, pull_request, user):
123 def check_user_delete(self, pull_request, user):
124 owner = user.user_id == pull_request.user_id
124 owner = user.user_id == pull_request.user_id
125 _perms = ('repository.admin',)
125 _perms = ('repository.admin',)
126 return self._check_perms(_perms, pull_request, user) or owner
126 return self._check_perms(_perms, pull_request, user) or owner
127
127
128 def check_user_change_status(self, pull_request, user, api=False):
128 def check_user_change_status(self, pull_request, user, api=False):
129 reviewer = user.user_id in [x.user_id for x in
129 reviewer = user.user_id in [x.user_id for x in
130 pull_request.reviewers]
130 pull_request.reviewers]
131 return self.check_user_update(pull_request, user, api) or reviewer
131 return self.check_user_update(pull_request, user, api) or reviewer
132
132
133 def check_user_comment(self, pull_request, user):
133 def check_user_comment(self, pull_request, user):
134 owner = user.user_id == pull_request.user_id
134 owner = user.user_id == pull_request.user_id
135 return self.check_user_read(pull_request, user) or owner
135 return self.check_user_read(pull_request, user) or owner
136
136
137 def get(self, pull_request):
137 def get(self, pull_request):
138 return self.__get_pull_request(pull_request)
138 return self.__get_pull_request(pull_request)
139
139
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 opened_by=None, order_by=None,
141 opened_by=None, order_by=None,
142 order_dir='desc', only_created=True):
142 order_dir='desc', only_created=True):
143 repo = None
143 repo = None
144 if repo_name:
144 if repo_name:
145 repo = self._get_repo(repo_name)
145 repo = self._get_repo(repo_name)
146
146
147 q = PullRequest.query()
147 q = PullRequest.query()
148
148
149 # source or target
149 # source or target
150 if repo and source:
150 if repo and source:
151 q = q.filter(PullRequest.source_repo == repo)
151 q = q.filter(PullRequest.source_repo == repo)
152 elif repo:
152 elif repo:
153 q = q.filter(PullRequest.target_repo == repo)
153 q = q.filter(PullRequest.target_repo == repo)
154
154
155 # closed,opened
155 # closed,opened
156 if statuses:
156 if statuses:
157 q = q.filter(PullRequest.status.in_(statuses))
157 q = q.filter(PullRequest.status.in_(statuses))
158
158
159 # opened by filter
159 # opened by filter
160 if opened_by:
160 if opened_by:
161 q = q.filter(PullRequest.user_id.in_(opened_by))
161 q = q.filter(PullRequest.user_id.in_(opened_by))
162
162
163 # only get those that are in "created" state
163 # only get those that are in "created" state
164 if only_created:
164 if only_created:
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166
166
167 if order_by:
167 if order_by:
168 order_map = {
168 order_map = {
169 'name_raw': PullRequest.pull_request_id,
169 'name_raw': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
171 'title': PullRequest.title,
171 'title': PullRequest.title,
172 'updated_on_raw': PullRequest.updated_on,
172 'updated_on_raw': PullRequest.updated_on,
173 'target_repo': PullRequest.target_repo_id
173 'target_repo': PullRequest.target_repo_id
174 }
174 }
175 if order_dir == 'asc':
175 if order_dir == 'asc':
176 q = q.order_by(order_map[order_by].asc())
176 q = q.order_by(order_map[order_by].asc())
177 else:
177 else:
178 q = q.order_by(order_map[order_by].desc())
178 q = q.order_by(order_map[order_by].desc())
179
179
180 return q
180 return q
181
181
182 def count_all(self, repo_name, source=False, statuses=None,
182 def count_all(self, repo_name, source=False, statuses=None,
183 opened_by=None):
183 opened_by=None):
184 """
184 """
185 Count the number of pull requests for a specific repository.
185 Count the number of pull requests for a specific repository.
186
186
187 :param repo_name: target or source repo
187 :param repo_name: target or source repo
188 :param source: boolean flag to specify if repo_name refers to source
188 :param source: boolean flag to specify if repo_name refers to source
189 :param statuses: list of pull request statuses
189 :param statuses: list of pull request statuses
190 :param opened_by: author user of the pull request
190 :param opened_by: author user of the pull request
191 :returns: int number of pull requests
191 :returns: int number of pull requests
192 """
192 """
193 q = self._prepare_get_all_query(
193 q = self._prepare_get_all_query(
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195
195
196 return q.count()
196 return q.count()
197
197
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 offset=0, length=None, order_by=None, order_dir='desc'):
199 offset=0, length=None, order_by=None, order_dir='desc'):
200 """
200 """
201 Get all pull requests for a specific repository.
201 Get all pull requests for a specific repository.
202
202
203 :param repo_name: target or source repo
203 :param repo_name: target or source repo
204 :param source: boolean flag to specify if repo_name refers to source
204 :param source: boolean flag to specify if repo_name refers to source
205 :param statuses: list of pull request statuses
205 :param statuses: list of pull request statuses
206 :param opened_by: author user of the pull request
206 :param opened_by: author user of the pull request
207 :param offset: pagination offset
207 :param offset: pagination offset
208 :param length: length of returned list
208 :param length: length of returned list
209 :param order_by: order of the returned list
209 :param order_by: order of the returned list
210 :param order_dir: 'asc' or 'desc' ordering direction
210 :param order_dir: 'asc' or 'desc' ordering direction
211 :returns: list of pull requests
211 :returns: list of pull requests
212 """
212 """
213 q = self._prepare_get_all_query(
213 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 order_by=order_by, order_dir=order_dir)
215 order_by=order_by, order_dir=order_dir)
216
216
217 if length:
217 if length:
218 pull_requests = q.limit(length).offset(offset).all()
218 pull_requests = q.limit(length).offset(offset).all()
219 else:
219 else:
220 pull_requests = q.all()
220 pull_requests = q.all()
221
221
222 return pull_requests
222 return pull_requests
223
223
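# NOTE: annotation, not part of the original module. A hedged usage sketch of
# the query helpers above; the repository name and status values are purely
# illustrative:
#
#   model = PullRequestModel()
#   open_prs = model.get_all('some/repo', statuses=['new', 'open'],
#                            order_by='updated_on_raw', order_dir='desc',
#                            offset=0, length=20)
#   total = model.count_all('some/repo', statuses=['new', 'open'])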
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 opened_by=None):
225 opened_by=None):
226 """
226 """
227 Count the number of pull requests for a specific repository that are
227 Count the number of pull requests for a specific repository that are
228 awaiting review.
228 awaiting review.
229
229
230 :param repo_name: target or source repo
230 :param repo_name: target or source repo
231 :param source: boolean flag to specify if repo_name refers to source
231 :param source: boolean flag to specify if repo_name refers to source
232 :param statuses: list of pull request statuses
232 :param statuses: list of pull request statuses
233 :param opened_by: author user of the pull request
233 :param opened_by: author user of the pull request
234 :returns: int number of pull requests
234 :returns: int number of pull requests
235 """
235 """
236 pull_requests = self.get_awaiting_review(
236 pull_requests = self.get_awaiting_review(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238
238
239 return len(pull_requests)
239 return len(pull_requests)
240
240
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 opened_by=None, offset=0, length=None,
242 opened_by=None, offset=0, length=None,
243 order_by=None, order_dir='desc'):
243 order_by=None, order_dir='desc'):
244 """
244 """
245 Get all pull requests for a specific repository that are awaiting
245 Get all pull requests for a specific repository that are awaiting
246 review.
246 review.
247
247
248 :param repo_name: target or source repo
248 :param repo_name: target or source repo
249 :param source: boolean flag to specify if repo_name refers to source
249 :param source: boolean flag to specify if repo_name refers to source
250 :param statuses: list of pull request statuses
250 :param statuses: list of pull request statuses
251 :param opened_by: author user of the pull request
251 :param opened_by: author user of the pull request
252 :param offset: pagination offset
252 :param offset: pagination offset
253 :param length: length of returned list
253 :param length: length of returned list
254 :param order_by: order of the returned list
254 :param order_by: order of the returned list
255 :param order_dir: 'asc' or 'desc' ordering direction
255 :param order_dir: 'asc' or 'desc' ordering direction
256 :returns: list of pull requests
256 :returns: list of pull requests
257 """
257 """
258 pull_requests = self.get_all(
258 pull_requests = self.get_all(
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 order_by=order_by, order_dir=order_dir)
260 order_by=order_by, order_dir=order_dir)
261
261
262 _filtered_pull_requests = []
262 _filtered_pull_requests = []
263 for pr in pull_requests:
263 for pr in pull_requests:
264 status = pr.calculated_review_status()
264 status = pr.calculated_review_status()
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 _filtered_pull_requests.append(pr)
267 _filtered_pull_requests.append(pr)
268 if length:
268 if length:
269 return _filtered_pull_requests[offset:offset+length]
269 return _filtered_pull_requests[offset:offset+length]
270 else:
270 else:
271 return _filtered_pull_requests
271 return _filtered_pull_requests
272
272
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 opened_by=None, user_id=None):
274 opened_by=None, user_id=None):
275 """
275 """
276 Count the number of pull requests for a specific repository that are
276 Count the number of pull requests for a specific repository that are
277 awaiting review from a specific user.
277 awaiting review from a specific user.
278
278
279 :param repo_name: target or source repo
279 :param repo_name: target or source repo
280 :param source: boolean flag to specify if repo_name refers to source
280 :param source: boolean flag to specify if repo_name refers to source
281 :param statuses: list of pull request statuses
281 :param statuses: list of pull request statuses
282 :param opened_by: author user of the pull request
282 :param opened_by: author user of the pull request
283 :param user_id: reviewer user of the pull request
283 :param user_id: reviewer user of the pull request
284 :returns: int number of pull requests
284 :returns: int number of pull requests
285 """
285 """
286 pull_requests = self.get_awaiting_my_review(
286 pull_requests = self.get_awaiting_my_review(
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 user_id=user_id)
288 user_id=user_id)
289
289
290 return len(pull_requests)
290 return len(pull_requests)
291
291
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 opened_by=None, user_id=None, offset=0,
293 opened_by=None, user_id=None, offset=0,
294 length=None, order_by=None, order_dir='desc'):
294 length=None, order_by=None, order_dir='desc'):
295 """
295 """
296 Get all pull requests for a specific repository that are awaiting
296 Get all pull requests for a specific repository that are awaiting
297 review from a specific user.
297 review from a specific user.
298
298
299 :param repo_name: target or source repo
299 :param repo_name: target or source repo
300 :param source: boolean flag to specify if repo_name refers to source
300 :param source: boolean flag to specify if repo_name refers to source
301 :param statuses: list of pull request statuses
301 :param statuses: list of pull request statuses
302 :param opened_by: author user of the pull request
302 :param opened_by: author user of the pull request
303 :param user_id: reviewer user of the pull request
303 :param user_id: reviewer user of the pull request
304 :param offset: pagination offset
304 :param offset: pagination offset
305 :param length: length of returned list
305 :param length: length of returned list
306 :param order_by: order of the returned list
306 :param order_by: order of the returned list
307 :param order_dir: 'asc' or 'desc' ordering direction
307 :param order_dir: 'asc' or 'desc' ordering direction
308 :returns: list of pull requests
308 :returns: list of pull requests
309 """
309 """
310 pull_requests = self.get_all(
310 pull_requests = self.get_all(
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 order_by=order_by, order_dir=order_dir)
312 order_by=order_by, order_dir=order_dir)
313
313
314 _my = PullRequestModel().get_not_reviewed(user_id)
314 _my = PullRequestModel().get_not_reviewed(user_id)
315 my_participation = []
315 my_participation = []
316 for pr in pull_requests:
316 for pr in pull_requests:
317 if pr in _my:
317 if pr in _my:
318 my_participation.append(pr)
318 my_participation.append(pr)
319 _filtered_pull_requests = my_participation
319 _filtered_pull_requests = my_participation
320 if length:
320 if length:
321 return _filtered_pull_requests[offset:offset+length]
321 return _filtered_pull_requests[offset:offset+length]
322 else:
322 else:
323 return _filtered_pull_requests
323 return _filtered_pull_requests
324
324
325 def get_not_reviewed(self, user_id):
325 def get_not_reviewed(self, user_id):
326 return [
326 return [
327 x.pull_request for x in PullRequestReviewers.query().filter(
327 x.pull_request for x in PullRequestReviewers.query().filter(
328 PullRequestReviewers.user_id == user_id).all()
328 PullRequestReviewers.user_id == user_id).all()
329 ]
329 ]
330
330
331 def _prepare_participating_query(self, user_id=None, statuses=None,
331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 order_by=None, order_dir='desc'):
332 order_by=None, order_dir='desc'):
333 q = PullRequest.query()
333 q = PullRequest.query()
334 if user_id:
334 if user_id:
335 reviewers_subquery = Session().query(
335 reviewers_subquery = Session().query(
336 PullRequestReviewers.pull_request_id).filter(
336 PullRequestReviewers.pull_request_id).filter(
337 PullRequestReviewers.user_id == user_id).subquery()
337 PullRequestReviewers.user_id == user_id).subquery()
338 user_filter = or_(
338 user_filter = or_(
339 PullRequest.user_id == user_id,
339 PullRequest.user_id == user_id,
340 PullRequest.pull_request_id.in_(reviewers_subquery)
340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 )
341 )
342 q = PullRequest.query().filter(user_filter)
342 q = PullRequest.query().filter(user_filter)
343
343
344 # closed,opened
344 # closed,opened
345 if statuses:
345 if statuses:
346 q = q.filter(PullRequest.status.in_(statuses))
346 q = q.filter(PullRequest.status.in_(statuses))
347
347
348 if order_by:
348 if order_by:
349 order_map = {
349 order_map = {
350 'name_raw': PullRequest.pull_request_id,
350 'name_raw': PullRequest.pull_request_id,
351 'title': PullRequest.title,
351 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
352 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
353 'target_repo': PullRequest.target_repo_id
354 }
354 }
355 if order_dir == 'asc':
355 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
356 q = q.order_by(order_map[order_by].asc())
357 else:
357 else:
358 q = q.order_by(order_map[order_by].desc())
358 q = q.order_by(order_map[order_by].desc())
359
359
360 return q
360 return q
361
361
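# NOTE: annotation, not part of the original module. The participating query
# matches pull requests where the user is either the author
# (PullRequest.user_id) or appears in the reviewers subquery, then applies the
# same status filter and ordering map as the repository-scoped query above.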
362 def count_im_participating_in(self, user_id=None, statuses=None):
362 def count_im_participating_in(self, user_id=None, statuses=None):
363 q = self._prepare_participating_query(user_id, statuses=statuses)
363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 return q.count()
364 return q.count()
365
365
366 def get_im_participating_in(
366 def get_im_participating_in(
367 self, user_id=None, statuses=None, offset=0,
367 self, user_id=None, statuses=None, offset=0,
368 length=None, order_by=None, order_dir='desc'):
368 length=None, order_by=None, order_dir='desc'):
369 """
369 """
370 Get all pull requests that I'm participating in, or I have opened
370 Get all pull requests that I'm participating in, or I have opened
371 """
371 """
372
372
373 q = self._prepare_participating_query(
373 q = self._prepare_participating_query(
374 user_id, statuses=statuses, order_by=order_by,
374 user_id, statuses=statuses, order_by=order_by,
375 order_dir=order_dir)
375 order_dir=order_dir)
376
376
377 if length:
377 if length:
378 pull_requests = q.limit(length).offset(offset).all()
378 pull_requests = q.limit(length).offset(offset).all()
379 else:
379 else:
380 pull_requests = q.all()
380 pull_requests = q.all()
381
381
382 return pull_requests
382 return pull_requests
383
383
384 def get_versions(self, pull_request):
384 def get_versions(self, pull_request):
385 """
385 """
386 returns versions of the pull request sorted by version ID, ascending
386 returns versions of the pull request sorted by version ID, ascending
387 """
387 """
388 return PullRequestVersion.query()\
388 return PullRequestVersion.query()\
389 .filter(PullRequestVersion.pull_request == pull_request)\
389 .filter(PullRequestVersion.pull_request == pull_request)\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 .all()
391 .all()
392
392
393 def get_pr_version(self, pull_request_id, version=None):
393 def get_pr_version(self, pull_request_id, version=None):
394 at_version = None
394 at_version = None
395
395
396 if version and version == 'latest':
396 if version and version == 'latest':
397 pull_request_ver = PullRequest.get(pull_request_id)
397 pull_request_ver = PullRequest.get(pull_request_id)
398 pull_request_obj = pull_request_ver
398 pull_request_obj = pull_request_ver
399 _org_pull_request_obj = pull_request_obj
399 _org_pull_request_obj = pull_request_obj
400 at_version = 'latest'
400 at_version = 'latest'
401 elif version:
401 elif version:
402 pull_request_ver = PullRequestVersion.get_or_404(version)
402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 pull_request_obj = pull_request_ver
403 pull_request_obj = pull_request_ver
404 _org_pull_request_obj = pull_request_ver.pull_request
404 _org_pull_request_obj = pull_request_ver.pull_request
405 at_version = pull_request_ver.pull_request_version_id
405 at_version = pull_request_ver.pull_request_version_id
406 else:
406 else:
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 pull_request_id)
408 pull_request_id)
409
409
410 pull_request_display_obj = PullRequest.get_pr_display_object(
410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 pull_request_obj, _org_pull_request_obj)
411 pull_request_obj, _org_pull_request_obj)
412
412
413 return _org_pull_request_obj, pull_request_obj, \
413 return _org_pull_request_obj, pull_request_obj, \
414 pull_request_display_obj, at_version
414 pull_request_display_obj, at_version
415
415
416 def create(self, created_by, source_repo, source_ref, target_repo,
416 def create(self, created_by, source_repo, source_ref, target_repo,
417 target_ref, revisions, reviewers, title, description=None,
417 target_ref, revisions, reviewers, title, description=None,
418 description_renderer=None,
418 description_renderer=None,
419 reviewer_data=None, translator=None, auth_user=None):
419 reviewer_data=None, translator=None, auth_user=None):
420 translator = translator or get_current_request().translate
420 translator = translator or get_current_request().translate
421
421
422 created_by_user = self._get_user(created_by)
422 created_by_user = self._get_user(created_by)
423 auth_user = auth_user or created_by_user.AuthUser()
423 auth_user = auth_user or created_by_user.AuthUser()
424 source_repo = self._get_repo(source_repo)
424 source_repo = self._get_repo(source_repo)
425 target_repo = self._get_repo(target_repo)
425 target_repo = self._get_repo(target_repo)
426
426
427 pull_request = PullRequest()
427 pull_request = PullRequest()
428 pull_request.source_repo = source_repo
428 pull_request.source_repo = source_repo
429 pull_request.source_ref = source_ref
429 pull_request.source_ref = source_ref
430 pull_request.target_repo = target_repo
430 pull_request.target_repo = target_repo
431 pull_request.target_ref = target_ref
431 pull_request.target_ref = target_ref
432 pull_request.revisions = revisions
432 pull_request.revisions = revisions
433 pull_request.title = title
433 pull_request.title = title
434 pull_request.description = description
434 pull_request.description = description
435 pull_request.description_renderer = description_renderer
435 pull_request.description_renderer = description_renderer
436 pull_request.author = created_by_user
436 pull_request.author = created_by_user
437 pull_request.reviewer_data = reviewer_data
437 pull_request.reviewer_data = reviewer_data
438 pull_request.pull_request_state = pull_request.STATE_CREATING
438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 Session().add(pull_request)
439 Session().add(pull_request)
440 Session().flush()
440 Session().flush()
441
441
442 reviewer_ids = set()
442 reviewer_ids = set()
443 # members / reviewers
443 # members / reviewers
444 for reviewer_object in reviewers:
444 for reviewer_object in reviewers:
445 user_id, reasons, mandatory, rules = reviewer_object
445 user_id, reasons, mandatory, rules = reviewer_object
446 user = self._get_user(user_id)
446 user = self._get_user(user_id)
447
447
448 # skip duplicates
448 # skip duplicates
449 if user.user_id in reviewer_ids:
449 if user.user_id in reviewer_ids:
450 continue
450 continue
451
451
452 reviewer_ids.add(user.user_id)
452 reviewer_ids.add(user.user_id)
453
453
454 reviewer = PullRequestReviewers()
454 reviewer = PullRequestReviewers()
455 reviewer.user = user
455 reviewer.user = user
456 reviewer.pull_request = pull_request
456 reviewer.pull_request = pull_request
457 reviewer.reasons = reasons
457 reviewer.reasons = reasons
458 reviewer.mandatory = mandatory
458 reviewer.mandatory = mandatory
459
459
460 # NOTE(marcink): pick only first rule for now
460 # NOTE(marcink): pick only first rule for now
461 rule_id = list(rules)[0] if rules else None
461 rule_id = list(rules)[0] if rules else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 if rule:
463 if rule:
464 review_group = rule.user_group_vote_rule(user_id)
464 review_group = rule.user_group_vote_rule(user_id)
465 # we check if this particular reviewer is a member of a voting group
465 # we check if this particular reviewer is a member of a voting group
466 if review_group:
466 if review_group:
467 # NOTE(marcink):
467 # NOTE(marcink):
468 # the user can be a member of more than one group, but we pick
468 # the user can be a member of more than one group, but we pick
469 # the first one, same as the default reviewers algorithm
469 # the first one, same as the default reviewers algorithm
470 review_group = review_group[0]
470 review_group = review_group[0]
471
471
472 rule_data = {
472 rule_data = {
473 'rule_name':
473 'rule_name':
474 rule.review_rule_name,
474 rule.review_rule_name,
475 'rule_user_group_entry_id':
475 'rule_user_group_entry_id':
476 review_group.repo_review_rule_users_group_id,
476 review_group.repo_review_rule_users_group_id,
477 'rule_user_group_name':
477 'rule_user_group_name':
478 review_group.users_group.users_group_name,
478 review_group.users_group.users_group_name,
479 'rule_user_group_members':
479 'rule_user_group_members':
480 [x.user.username for x in review_group.users_group.members],
480 [x.user.username for x in review_group.users_group.members],
481 'rule_user_group_members_id':
481 'rule_user_group_members_id':
482 [x.user.user_id for x in review_group.users_group.members],
482 [x.user.user_id for x in review_group.users_group.members],
483 }
483 }
484 # e.g. {'vote_rule': -1, 'mandatory': True}
484 # e.g. {'vote_rule': -1, 'mandatory': True}
485 rule_data.update(review_group.rule_data())
485 rule_data.update(review_group.rule_data())
486
486
487 reviewer.rule_data = rule_data
487 reviewer.rule_data = rule_data
488
488
489 Session().add(reviewer)
489 Session().add(reviewer)
490 Session().flush()
490 Session().flush()
491
491
492 # Set approval status to "Under Review" for all commits which are
492 # Set approval status to "Under Review" for all commits which are
493 # part of this pull request.
493 # part of this pull request.
494 ChangesetStatusModel().set_status(
494 ChangesetStatusModel().set_status(
495 repo=target_repo,
495 repo=target_repo,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 user=created_by_user,
497 user=created_by_user,
498 pull_request=pull_request
498 pull_request=pull_request
499 )
499 )
500 # we commit early here because the queries above take row locks.
500 # we commit early here because the queries above take row locks.
501 # The validate call below can be slow for large repos, so we commit
501 # The validate call below can be slow for large repos, so we commit
502 # and finish the transaction first to avoid holding those row locks
502 # and finish the transaction first to avoid holding those row locks
503 # for the whole duration of the merge check
503 # for the whole duration of the merge check
504 Session().commit()
504 Session().commit()
505
505
506 # prepare workspace, and run initial merge simulation. Set state during that
506 # prepare workspace, and run initial merge simulation. Set state during that
507 # operation
507 # operation
508 pull_request = PullRequest.get(pull_request.pull_request_id)
508 pull_request = PullRequest.get(pull_request.pull_request_id)
509
509
510 # set state to "merging" for the simulation; once it finishes, switch to
510 # set state to "merging" for the simulation; once it finishes, switch to
511 # "created" to mark that the simulation completed successfully
511 # "created" to mark that the simulation completed successfully
512 with pull_request.set_state(PullRequest.STATE_MERGING,
512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 final_state=PullRequest.STATE_CREATED):
513 final_state=PullRequest.STATE_CREATED):
514 MergeCheck.validate(
514 MergeCheck.validate(
515 pull_request, auth_user=auth_user, translator=translator)
515 pull_request, auth_user=auth_user, translator=translator)
516
516
517 self.notify_reviewers(pull_request, reviewer_ids)
517 self.notify_reviewers(pull_request, reviewer_ids)
518 self.trigger_pull_request_hook(
518 self.trigger_pull_request_hook(
519 pull_request, created_by_user, 'create')
519 pull_request, created_by_user, 'create')
520
520
521 creation_data = pull_request.get_api_data(with_merge_state=False)
521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 self._log_audit_action(
522 self._log_audit_action(
523 'repo.pull_request.create', {'data': creation_data},
523 'repo.pull_request.create', {'data': creation_data},
524 auth_user, pull_request)
524 auth_user, pull_request)
525
525
526 return pull_request
526 return pull_request
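# Hedged sketch of the `reviewers` argument expected by create(), based on the
# unpacking above; the user id, reasons and empty rules list are illustrative:
#
#   reviewers = [
#       (2, ['default reviewer'], True, []),  # (user_id, reasons, mandatory, rules)
#   ]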
527
527
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 pull_request = self.__get_pull_request(pull_request)
529 pull_request = self.__get_pull_request(pull_request)
530 target_scm = pull_request.target_repo.scm_instance()
530 target_scm = pull_request.target_repo.scm_instance()
531 if action == 'create':
531 if action == 'create':
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 elif action == 'merge':
533 elif action == 'merge':
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 elif action == 'close':
535 elif action == 'close':
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 elif action == 'review_status_change':
537 elif action == 'review_status_change':
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 elif action == 'update':
539 elif action == 'update':
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 elif action == 'comment':
541 elif action == 'comment':
542 # dummy no-op hook for comments; we want this function to handle all cases
542 # dummy no-op hook for comments; we want this function to handle all cases
543 def trigger_hook(*args, **kwargs):
543 def trigger_hook(*args, **kwargs):
544 pass
544 pass
545 comment = data['comment']
545 comment = data['comment']
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 else:
547 else:
548 return
548 return
549
549
550 trigger_hook(
550 trigger_hook(
551 username=user.username,
551 username=user.username,
552 repo_name=pull_request.target_repo.repo_name,
552 repo_name=pull_request.target_repo.repo_name,
553 repo_alias=target_scm.alias,
553 repo_alias=target_scm.alias,
554 pull_request=pull_request,
554 pull_request=pull_request,
555 data=data)
555 data=data)
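# Example (sketch): the 'comment' action expects the comment object inside
# `data`, as read by the branch above:
#
#   self.trigger_pull_request_hook(
#       pull_request, user, 'comment', data={'comment': comment})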
556
556
557 def _get_commit_ids(self, pull_request):
557 def _get_commit_ids(self, pull_request):
558 """
558 """
559 Return the commit ids of the merged pull request.
559 Return the commit ids of the merged pull request.
560
560
561 This method does not yet deal correctly with the lack of autoupdates
561 This method does not yet deal correctly with the lack of autoupdates
562 nor with implicit target updates.
562 nor with implicit target updates.
563 For example: if a commit in the source repo is already in the target,
563 For example: if a commit in the source repo is already in the target,
564 it will still be reported.
564 it will still be reported.
565 """
565 """
566 merge_rev = pull_request.merge_rev
566 merge_rev = pull_request.merge_rev
567 if merge_rev is None:
567 if merge_rev is None:
568 raise ValueError('This pull request was not merged yet')
568 raise ValueError('This pull request was not merged yet')
569
569
570 commit_ids = list(pull_request.revisions)
570 commit_ids = list(pull_request.revisions)
571 if merge_rev not in commit_ids:
571 if merge_rev not in commit_ids:
572 commit_ids.append(merge_rev)
572 commit_ids.append(merge_rev)
573
573
574 return commit_ids
574 return commit_ids
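# Worked example (sketch): with revisions ['a1', 'b2'] and merge_rev 'c3' this
# returns ['a1', 'b2', 'c3']; a merge_rev already present is not appended twice.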
575
575
576 def merge_repo(self, pull_request, user, extras):
576 def merge_repo(self, pull_request, user, extras):
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 extras['user_agent'] = 'internal-merge'
578 extras['user_agent'] = 'internal-merge'
579 merge_state = self._merge_pull_request(pull_request, user, extras)
579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 if merge_state.executed:
580 if merge_state.executed:
581 log.debug("Merge was successful, updating the pull request comments.")
581 log.debug("Merge was successful, updating the pull request comments.")
582 self._comment_and_close_pr(pull_request, user, merge_state)
582 self._comment_and_close_pr(pull_request, user, merge_state)
583
583
584 self._log_audit_action(
584 self._log_audit_action(
585 'repo.pull_request.merge',
585 'repo.pull_request.merge',
586 {'merge_state': merge_state.__dict__},
586 {'merge_state': merge_state.__dict__},
587 user, pull_request)
587 user, pull_request)
588
588
589 else:
589 else:
590 log.warn("Merge failed, not updating the pull request.")
590 log.warn("Merge failed, not updating the pull request.")
591 return merge_state
591 return merge_state
592
592
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 target_vcs = pull_request.target_repo.scm_instance()
594 target_vcs = pull_request.target_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
596
596
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 pr_id=pull_request.pull_request_id,
598 pr_id=pull_request.pull_request_id,
599 pr_title=pull_request.title,
599 pr_title=pull_request.title,
600 source_repo=source_vcs.name,
600 source_repo=source_vcs.name,
601 source_ref_name=pull_request.source_ref_parts.name,
601 source_ref_name=pull_request.source_ref_parts.name,
602 target_repo=target_vcs.name,
602 target_repo=target_vcs.name,
603 target_ref_name=pull_request.target_ref_parts.name,
603 target_ref_name=pull_request.target_ref_parts.name,
604 )
604 )
605
605
606 workspace_id = self._workspace_id(pull_request)
606 workspace_id = self._workspace_id(pull_request)
607 repo_id = pull_request.target_repo.repo_id
607 repo_id = pull_request.target_repo.repo_id
608 use_rebase = self._use_rebase_for_merging(pull_request)
608 use_rebase = self._use_rebase_for_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
610
610
611 target_ref = self._refresh_reference(
611 target_ref = self._refresh_reference(
612 pull_request.target_ref_parts, target_vcs)
612 pull_request.target_ref_parts, target_vcs)
613
613
614 callback_daemon, extras = prepare_callback_daemon(
614 callback_daemon, extras = prepare_callback_daemon(
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 host=vcs_settings.HOOKS_HOST,
616 host=vcs_settings.HOOKS_HOST,
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618
618
619 with callback_daemon:
619 with callback_daemon:
620 # TODO: johbo: Implement a clean way to run a config_override
620 # TODO: johbo: Implement a clean way to run a config_override
621 # for a single call.
621 # for a single call.
622 target_vcs.config.set(
622 target_vcs.config.set(
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624
624
625 user_name = user.short_contact
625 user_name = user.short_contact
626 merge_state = target_vcs.merge(
626 merge_state = target_vcs.merge(
627 repo_id, workspace_id, target_ref, source_vcs,
627 repo_id, workspace_id, target_ref, source_vcs,
628 pull_request.source_ref_parts,
628 pull_request.source_ref_parts,
629 user_name=user_name, user_email=user.email,
629 user_name=user_name, user_email=user.email,
630 message=message, use_rebase=use_rebase,
630 message=message, use_rebase=use_rebase,
631 close_branch=close_branch)
631 close_branch=close_branch)
632 return merge_state
632 return merge_state
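# Sketch of a merge message template compatible with the .format() call above;
# the real default lives in vcs_settings.MERGE_MESSAGE_TMPL and may differ:
#
#   u'Merge pull request !{pr_id} "{pr_title}" from {source_ref_name} into {target_ref_name}'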
633
633
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 pull_request.updated_on = datetime.datetime.now()
636 pull_request.updated_on = datetime.datetime.now()
637 close_msg = close_msg or 'Pull request merged and closed'
637 close_msg = close_msg or 'Pull request merged and closed'
638
638
639 CommentsModel().create(
639 CommentsModel().create(
640 text=safe_unicode(close_msg),
640 text=safe_unicode(close_msg),
641 repo=pull_request.target_repo.repo_id,
641 repo=pull_request.target_repo.repo_id,
642 user=user.user_id,
642 user=user.user_id,
643 pull_request=pull_request.pull_request_id,
643 pull_request=pull_request.pull_request_id,
644 f_path=None,
644 f_path=None,
645 line_no=None,
645 line_no=None,
646 closing_pr=True
646 closing_pr=True
647 )
647 )
648
648
649 Session().add(pull_request)
649 Session().add(pull_request)
650 Session().flush()
650 Session().flush()
651 # TODO: paris: replace invalidation with a less radical solution
651 # TODO: paris: replace invalidation with a less radical solution
652 ScmModel().mark_for_invalidation(
652 ScmModel().mark_for_invalidation(
653 pull_request.target_repo.repo_name)
653 pull_request.target_repo.repo_name)
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655
655
656 def has_valid_update_type(self, pull_request):
656 def has_valid_update_type(self, pull_request):
657 source_ref_type = pull_request.source_ref_parts.type
657 source_ref_type = pull_request.source_ref_parts.type
658 return source_ref_type in self.REF_TYPES
658 return source_ref_type in self.REF_TYPES
659
659
660 def update_commits(self, pull_request):
660 def update_commits(self, pull_request):
661 """
661 """
662 Get the updated list of commits for the pull request
662 Get the updated list of commits for the pull request
663 and return the new pull request version and the list
663 and return the new pull request version and the list
664 of commits processed by this update action
664 of commits processed by this update action
665 """
665 """
666 pull_request = self.__get_pull_request(pull_request)
666 pull_request = self.__get_pull_request(pull_request)
667 source_ref_type = pull_request.source_ref_parts.type
667 source_ref_type = pull_request.source_ref_parts.type
668 source_ref_name = pull_request.source_ref_parts.name
668 source_ref_name = pull_request.source_ref_parts.name
669 source_ref_id = pull_request.source_ref_parts.commit_id
669 source_ref_id = pull_request.source_ref_parts.commit_id
670
670
671 target_ref_type = pull_request.target_ref_parts.type
671 target_ref_type = pull_request.target_ref_parts.type
672 target_ref_name = pull_request.target_ref_parts.name
672 target_ref_name = pull_request.target_ref_parts.name
673 target_ref_id = pull_request.target_ref_parts.commit_id
673 target_ref_id = pull_request.target_ref_parts.commit_id
674
674
675 if not self.has_valid_update_type(pull_request):
675 if not self.has_valid_update_type(pull_request):
676 log.debug("Skipping update of pull request %s due to ref type: %s",
676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 pull_request, source_ref_type)
677 pull_request, source_ref_type)
678 return UpdateResponse(
678 return UpdateResponse(
679 executed=False,
679 executed=False,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 old=pull_request, new=None, changes=None,
681 old=pull_request, new=None, changes=None,
682 source_changed=False, target_changed=False)
682 source_changed=False, target_changed=False)
683
683
684 # source repo
684 # source repo
685 source_repo = pull_request.source_repo.scm_instance()
685 source_repo = pull_request.source_repo.scm_instance()
686
686
687 try:
687 try:
688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
689 except CommitDoesNotExistError:
689 except CommitDoesNotExistError:
690 return UpdateResponse(
690 return UpdateResponse(
691 executed=False,
691 executed=False,
692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
693 old=pull_request, new=None, changes=None,
693 old=pull_request, new=None, changes=None,
694 source_changed=False, target_changed=False)
694 source_changed=False, target_changed=False)
695
695
696 source_changed = source_ref_id != source_commit.raw_id
696 source_changed = source_ref_id != source_commit.raw_id
697
697
698 # target repo
698 # target repo
699 target_repo = pull_request.target_repo.scm_instance()
699 target_repo = pull_request.target_repo.scm_instance()
700
700
701 try:
701 try:
702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
703 except CommitDoesNotExistError:
703 except CommitDoesNotExistError:
704 return UpdateResponse(
704 return UpdateResponse(
705 executed=False,
705 executed=False,
706 reason=UpdateFailureReason.MISSING_TARGET_REF,
706 reason=UpdateFailureReason.MISSING_TARGET_REF,
707 old=pull_request, new=None, changes=None,
707 old=pull_request, new=None, changes=None,
708 source_changed=False, target_changed=False)
708 source_changed=False, target_changed=False)
709 target_changed = target_ref_id != target_commit.raw_id
709 target_changed = target_ref_id != target_commit.raw_id
710
710
711 if not (source_changed or target_changed):
711 if not (source_changed or target_changed):
712 log.debug("Nothing changed in pull request %s", pull_request)
712 log.debug("Nothing changed in pull request %s", pull_request)
713 return UpdateResponse(
713 return UpdateResponse(
714 executed=False,
714 executed=False,
715 reason=UpdateFailureReason.NO_CHANGE,
715 reason=UpdateFailureReason.NO_CHANGE,
716 old=pull_request, new=None, changes=None,
716 old=pull_request, new=None, changes=None,
717 source_changed=source_changed, target_changed=target_changed)
717 source_changed=source_changed, target_changed=target_changed)
718
718
719 change_in_found = 'target repo' if target_changed else 'source repo'
719 change_in_found = 'target repo' if target_changed else 'source repo'
720 log.debug('Updating pull request because of change in %s detected',
720 log.debug('Updating pull request because of change in %s detected',
721 change_in_found)
721 change_in_found)
722
722
723 # Finally, an update is needed: in case of a source change we create
723 # Finally, an update is needed: in case of a source change we create
724 # a new version, otherwise we just update the existing one in place
724 # a new version, otherwise we just update the existing one in place
725 if source_changed:
725 if source_changed:
726 pull_request_version = self._create_version_from_snapshot(pull_request)
726 pull_request_version = self._create_version_from_snapshot(pull_request)
727 self._link_comments_to_version(pull_request_version)
727 self._link_comments_to_version(pull_request_version)
728 else:
728 else:
729 try:
729 try:
730 ver = pull_request.versions[-1]
730 ver = pull_request.versions[-1]
731 except IndexError:
731 except IndexError:
732 ver = None
732 ver = None
733
733
734 pull_request.pull_request_version_id = \
734 pull_request.pull_request_version_id = \
735 ver.pull_request_version_id if ver else None
735 ver.pull_request_version_id if ver else None
736 pull_request_version = pull_request
736 pull_request_version = pull_request
737
737
738 try:
738 try:
739 if target_ref_type in self.REF_TYPES:
739 if target_ref_type in self.REF_TYPES:
740 target_commit = target_repo.get_commit(target_ref_name)
740 target_commit = target_repo.get_commit(target_ref_name)
741 else:
741 else:
742 target_commit = target_repo.get_commit(target_ref_id)
742 target_commit = target_repo.get_commit(target_ref_id)
743 except CommitDoesNotExistError:
743 except CommitDoesNotExistError:
744 return UpdateResponse(
744 return UpdateResponse(
745 executed=False,
745 executed=False,
746 reason=UpdateFailureReason.MISSING_TARGET_REF,
746 reason=UpdateFailureReason.MISSING_TARGET_REF,
747 old=pull_request, new=None, changes=None,
747 old=pull_request, new=None, changes=None,
748 source_changed=source_changed, target_changed=target_changed)
748 source_changed=source_changed, target_changed=target_changed)
749
749
750 # re-compute commit ids
750 # re-compute commit ids
751 old_commit_ids = pull_request.revisions
751 old_commit_ids = pull_request.revisions
752 pre_load = ["author", "branch", "date", "message"]
752 pre_load = ["author", "date", "message", "branch"]
753 commit_ranges = target_repo.compare(
753 commit_ranges = target_repo.compare(
754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
755 pre_load=pre_load)
755 pre_load=pre_load)
756
756
757 ancestor = source_repo.get_common_ancestor(
757 ancestor = source_repo.get_common_ancestor(
758 source_commit.raw_id, target_commit.raw_id, target_repo)
758 source_commit.raw_id, target_commit.raw_id, target_repo)
759
759
760 pull_request.source_ref = '%s:%s:%s' % (
760 pull_request.source_ref = '%s:%s:%s' % (
761 source_ref_type, source_ref_name, source_commit.raw_id)
761 source_ref_type, source_ref_name, source_commit.raw_id)
762 pull_request.target_ref = '%s:%s:%s' % (
762 pull_request.target_ref = '%s:%s:%s' % (
763 target_ref_type, target_ref_name, ancestor)
763 target_ref_type, target_ref_name, ancestor)
764
764
765 pull_request.revisions = [
765 pull_request.revisions = [
766 commit.raw_id for commit in reversed(commit_ranges)]
766 commit.raw_id for commit in reversed(commit_ranges)]
767 pull_request.updated_on = datetime.datetime.now()
767 pull_request.updated_on = datetime.datetime.now()
768 Session().add(pull_request)
768 Session().add(pull_request)
769 new_commit_ids = pull_request.revisions
769 new_commit_ids = pull_request.revisions
770
770
771 old_diff_data, new_diff_data = self._generate_update_diffs(
771 old_diff_data, new_diff_data = self._generate_update_diffs(
772 pull_request, pull_request_version)
772 pull_request, pull_request_version)
773
773
774 # calculate commit and file changes
774 # calculate commit and file changes
775 changes = self._calculate_commit_id_changes(
775 changes = self._calculate_commit_id_changes(
776 old_commit_ids, new_commit_ids)
776 old_commit_ids, new_commit_ids)
777 file_changes = self._calculate_file_changes(
777 file_changes = self._calculate_file_changes(
778 old_diff_data, new_diff_data)
778 old_diff_data, new_diff_data)
779
779
780 # set comments as outdated if DIFFS changed
780 # set comments as outdated if DIFFS changed
781 CommentsModel().outdate_comments(
781 CommentsModel().outdate_comments(
782 pull_request, old_diff_data=old_diff_data,
782 pull_request, old_diff_data=old_diff_data,
783 new_diff_data=new_diff_data)
783 new_diff_data=new_diff_data)
784
784
785 commit_changes = (changes.added or changes.removed)
785 commit_changes = (changes.added or changes.removed)
786 file_node_changes = (
786 file_node_changes = (
787 file_changes.added or file_changes.modified or file_changes.removed)
787 file_changes.added or file_changes.modified or file_changes.removed)
788 pr_has_changes = commit_changes or file_node_changes
788 pr_has_changes = commit_changes or file_node_changes
789
789
790 # Add an automatic comment to the pull request, in case
790 # Add an automatic comment to the pull request, in case
791 # anything has changed
791 # anything has changed
792 if pr_has_changes:
792 if pr_has_changes:
793 update_comment = CommentsModel().create(
793 update_comment = CommentsModel().create(
794 text=self._render_update_message(changes, file_changes),
794 text=self._render_update_message(changes, file_changes),
795 repo=pull_request.target_repo,
795 repo=pull_request.target_repo,
796 user=pull_request.author,
796 user=pull_request.author,
797 pull_request=pull_request,
797 pull_request=pull_request,
798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
799
799
800 # Update status to "Under Review" for added commits
800 # Update status to "Under Review" for added commits
801 for commit_id in changes.added:
801 for commit_id in changes.added:
802 ChangesetStatusModel().set_status(
802 ChangesetStatusModel().set_status(
803 repo=pull_request.source_repo,
803 repo=pull_request.source_repo,
804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
805 comment=update_comment,
805 comment=update_comment,
806 user=pull_request.author,
806 user=pull_request.author,
807 pull_request=pull_request,
807 pull_request=pull_request,
808 revision=commit_id)
808 revision=commit_id)
809
809
810 log.debug(
810 log.debug(
811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
812 'removed_ids: %s', pull_request.pull_request_id,
812 'removed_ids: %s', pull_request.pull_request_id,
813 changes.added, changes.common, changes.removed)
813 changes.added, changes.common, changes.removed)
814 log.debug(
814 log.debug(
815 'Updated pull request with the following file changes: %s',
815 'Updated pull request with the following file changes: %s',
816 file_changes)
816 file_changes)
817
817
818 log.info(
818 log.info(
819 "Updated pull request %s from commit %s to commit %s, "
819 "Updated pull request %s from commit %s to commit %s, "
820 "stored new version %s of this pull request.",
820 "stored new version %s of this pull request.",
821 pull_request.pull_request_id, source_ref_id,
821 pull_request.pull_request_id, source_ref_id,
822 pull_request.source_ref_parts.commit_id,
822 pull_request.source_ref_parts.commit_id,
823 pull_request_version.pull_request_version_id)
823 pull_request_version.pull_request_version_id)
824 Session().commit()
824 Session().commit()
825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
826
826
827 return UpdateResponse(
827 return UpdateResponse(
828 executed=True, reason=UpdateFailureReason.NONE,
828 executed=True, reason=UpdateFailureReason.NONE,
829 old=pull_request, new=pull_request_version, changes=changes,
829 old=pull_request, new=pull_request_version, changes=changes,
830 source_changed=source_changed, target_changed=target_changed)
830 source_changed=source_changed, target_changed=target_changed)
831
831
832 def _create_version_from_snapshot(self, pull_request):
832 def _create_version_from_snapshot(self, pull_request):
833 version = PullRequestVersion()
833 version = PullRequestVersion()
834 version.title = pull_request.title
834 version.title = pull_request.title
835 version.description = pull_request.description
835 version.description = pull_request.description
836 version.status = pull_request.status
836 version.status = pull_request.status
837 version.pull_request_state = pull_request.pull_request_state
837 version.pull_request_state = pull_request.pull_request_state
838 version.created_on = datetime.datetime.now()
838 version.created_on = datetime.datetime.now()
839 version.updated_on = pull_request.updated_on
839 version.updated_on = pull_request.updated_on
840 version.user_id = pull_request.user_id
840 version.user_id = pull_request.user_id
841 version.source_repo = pull_request.source_repo
841 version.source_repo = pull_request.source_repo
842 version.source_ref = pull_request.source_ref
842 version.source_ref = pull_request.source_ref
843 version.target_repo = pull_request.target_repo
843 version.target_repo = pull_request.target_repo
844 version.target_ref = pull_request.target_ref
844 version.target_ref = pull_request.target_ref
845
845
846 version._last_merge_source_rev = pull_request._last_merge_source_rev
846 version._last_merge_source_rev = pull_request._last_merge_source_rev
847 version._last_merge_target_rev = pull_request._last_merge_target_rev
847 version._last_merge_target_rev = pull_request._last_merge_target_rev
848 version.last_merge_status = pull_request.last_merge_status
848 version.last_merge_status = pull_request.last_merge_status
849 version.shadow_merge_ref = pull_request.shadow_merge_ref
849 version.shadow_merge_ref = pull_request.shadow_merge_ref
850 version.merge_rev = pull_request.merge_rev
850 version.merge_rev = pull_request.merge_rev
851 version.reviewer_data = pull_request.reviewer_data
851 version.reviewer_data = pull_request.reviewer_data
852
852
853 version.revisions = pull_request.revisions
853 version.revisions = pull_request.revisions
854 version.pull_request = pull_request
854 version.pull_request = pull_request
855 Session().add(version)
855 Session().add(version)
856 Session().flush()
856 Session().flush()
857
857
858 return version
858 return version
859
859
860 def _generate_update_diffs(self, pull_request, pull_request_version):
860 def _generate_update_diffs(self, pull_request, pull_request_version):
861
861
862 diff_context = (
862 diff_context = (
863 self.DIFF_CONTEXT +
863 self.DIFF_CONTEXT +
864 CommentsModel.needed_extra_diff_context())
864 CommentsModel.needed_extra_diff_context())
865 hide_whitespace_changes = False
865 hide_whitespace_changes = False
866 source_repo = pull_request_version.source_repo
866 source_repo = pull_request_version.source_repo
867 source_ref_id = pull_request_version.source_ref_parts.commit_id
867 source_ref_id = pull_request_version.source_ref_parts.commit_id
868 target_ref_id = pull_request_version.target_ref_parts.commit_id
868 target_ref_id = pull_request_version.target_ref_parts.commit_id
869 old_diff = self._get_diff_from_pr_or_version(
869 old_diff = self._get_diff_from_pr_or_version(
870 source_repo, source_ref_id, target_ref_id,
870 source_repo, source_ref_id, target_ref_id,
871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
872
872
873 source_repo = pull_request.source_repo
873 source_repo = pull_request.source_repo
874 source_ref_id = pull_request.source_ref_parts.commit_id
874 source_ref_id = pull_request.source_ref_parts.commit_id
875 target_ref_id = pull_request.target_ref_parts.commit_id
875 target_ref_id = pull_request.target_ref_parts.commit_id
876
876
877 new_diff = self._get_diff_from_pr_or_version(
877 new_diff = self._get_diff_from_pr_or_version(
878 source_repo, source_ref_id, target_ref_id,
878 source_repo, source_ref_id, target_ref_id,
879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
880
880
881 old_diff_data = diffs.DiffProcessor(old_diff)
881 old_diff_data = diffs.DiffProcessor(old_diff)
882 old_diff_data.prepare()
882 old_diff_data.prepare()
883 new_diff_data = diffs.DiffProcessor(new_diff)
883 new_diff_data = diffs.DiffProcessor(new_diff)
884 new_diff_data.prepare()
884 new_diff_data.prepare()
885
885
886 return old_diff_data, new_diff_data
886 return old_diff_data, new_diff_data
887
887
888 def _link_comments_to_version(self, pull_request_version):
888 def _link_comments_to_version(self, pull_request_version):
889 """
889 """
890 Link all unlinked comments of this pull request to the given version.
890 Link all unlinked comments of this pull request to the given version.
891
891
892 :param pull_request_version: The `PullRequestVersion` to which
892 :param pull_request_version: The `PullRequestVersion` to which
893 the comments shall be linked.
893 the comments shall be linked.
894
894
895 """
895 """
896 pull_request = pull_request_version.pull_request
896 pull_request = pull_request_version.pull_request
897 comments = ChangesetComment.query()\
897 comments = ChangesetComment.query()\
898 .filter(
898 .filter(
899 # TODO: johbo: Should we query for the repo at all here?
899 # TODO: johbo: Should we query for the repo at all here?
900 # Pending decision on how comments of PRs are to be related
900 # Pending decision on how comments of PRs are to be related
901 # to either the source repo, the target repo or no repo at all.
901 # to either the source repo, the target repo or no repo at all.
902 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
902 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
903 ChangesetComment.pull_request == pull_request,
903 ChangesetComment.pull_request == pull_request,
904 ChangesetComment.pull_request_version == None)\
904 ChangesetComment.pull_request_version == None)\
905 .order_by(ChangesetComment.comment_id.asc())
905 .order_by(ChangesetComment.comment_id.asc())
906
906
907 # TODO: johbo: Find out why this breaks if it is done in a bulk
907 # TODO: johbo: Find out why this breaks if it is done in a bulk
908 # operation.
908 # operation.
909 for comment in comments:
909 for comment in comments:
910 comment.pull_request_version_id = (
910 comment.pull_request_version_id = (
911 pull_request_version.pull_request_version_id)
911 pull_request_version.pull_request_version_id)
912 Session().add(comment)
912 Session().add(comment)
913
913
914 def _calculate_commit_id_changes(self, old_ids, new_ids):
914 def _calculate_commit_id_changes(self, old_ids, new_ids):
915 added = [x for x in new_ids if x not in old_ids]
915 added = [x for x in new_ids if x not in old_ids]
916 common = [x for x in new_ids if x in old_ids]
916 common = [x for x in new_ids if x in old_ids]
917 removed = [x for x in old_ids if x not in new_ids]
917 removed = [x for x in old_ids if x not in new_ids]
918 total = new_ids
918 total = new_ids
919 return ChangeTuple(added, common, removed, total)
919 return ChangeTuple(added, common, removed, total)
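# Worked example (sketch): old_ids=['a', 'b', 'c'] and new_ids=['b', 'c', 'd']
# yield added=['d'], common=['b', 'c'], removed=['a'], total=['b', 'c', 'd'].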
920
920
921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
922
922
923 old_files = OrderedDict()
923 old_files = OrderedDict()
924 for diff_data in old_diff_data.parsed_diff:
924 for diff_data in old_diff_data.parsed_diff:
925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
926
926
927 added_files = []
927 added_files = []
928 modified_files = []
928 modified_files = []
929 removed_files = []
929 removed_files = []
930 for diff_data in new_diff_data.parsed_diff:
930 for diff_data in new_diff_data.parsed_diff:
931 new_filename = diff_data['filename']
931 new_filename = diff_data['filename']
932 new_hash = md5_safe(diff_data['raw_diff'])
932 new_hash = md5_safe(diff_data['raw_diff'])
933
933
934 old_hash = old_files.get(new_filename)
934 old_hash = old_files.get(new_filename)
935 if not old_hash:
935 if not old_hash:
936 # file is not present in the old diff, which means it was added
936 # file is not present in the old diff, which means it was added
937 added_files.append(new_filename)
937 added_files.append(new_filename)
938 else:
938 else:
939 if new_hash != old_hash:
939 if new_hash != old_hash:
940 modified_files.append(new_filename)
940 modified_files.append(new_filename)
941 # now remove a file from old, since we have seen it already
941 # now remove a file from old, since we have seen it already
942 del old_files[new_filename]
942 del old_files[new_filename]
943
943
944 # removed files are those present in old but not in new; since we
944 # removed files are those present in old but not in new; since we
945 # delete the old entries that also appear in the new diff, whatever
945 # delete the old entries that also appear in the new diff, whatever
946 # is left over must be the removed files
946 # is left over must be the removed files
947 removed_files.extend(old_files.keys())
947 removed_files.extend(old_files.keys())
948
948
949 return FileChangeTuple(added_files, modified_files, removed_files)
949 return FileChangeTuple(added_files, modified_files, removed_files)
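# Worked example (sketch): if the old diff touched a.py and b.py while the new
# diff touches b.py (with a different hunk) and c.py, the result is
# added=['c.py'], modified=['b.py'], removed=['a.py'].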
950
950
951 def _render_update_message(self, changes, file_changes):
951 def _render_update_message(self, changes, file_changes):
952 """
952 """
953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
954 so it always looks the same regardless of which default renderer
954 so it always looks the same regardless of which default renderer
955 the system is using.
955 the system is using.
956
956
957 :param changes: changes named tuple
957 :param changes: changes named tuple
958 :param file_changes: file changes named tuple
958 :param file_changes: file changes named tuple
959
959
960 """
960 """
961 new_status = ChangesetStatus.get_status_lbl(
961 new_status = ChangesetStatus.get_status_lbl(
962 ChangesetStatus.STATUS_UNDER_REVIEW)
962 ChangesetStatus.STATUS_UNDER_REVIEW)
963
963
964 changed_files = (
964 changed_files = (
965 file_changes.added + file_changes.modified + file_changes.removed)
965 file_changes.added + file_changes.modified + file_changes.removed)
966
966
967 params = {
967 params = {
968 'under_review_label': new_status,
968 'under_review_label': new_status,
969 'added_commits': changes.added,
969 'added_commits': changes.added,
970 'removed_commits': changes.removed,
970 'removed_commits': changes.removed,
971 'changed_files': changed_files,
971 'changed_files': changed_files,
972 'added_files': file_changes.added,
972 'added_files': file_changes.added,
973 'modified_files': file_changes.modified,
973 'modified_files': file_changes.modified,
974 'removed_files': file_changes.removed,
974 'removed_files': file_changes.removed,
975 }
975 }
976 renderer = RstTemplateRenderer()
976 renderer = RstTemplateRenderer()
977 return renderer.render('pull_request_update.mako', **params)
977 return renderer.render('pull_request_update.mako', **params)
978
978
979 def edit(self, pull_request, title, description, description_renderer, user):
979 def edit(self, pull_request, title, description, description_renderer, user):
980 pull_request = self.__get_pull_request(pull_request)
980 pull_request = self.__get_pull_request(pull_request)
981 old_data = pull_request.get_api_data(with_merge_state=False)
981 old_data = pull_request.get_api_data(with_merge_state=False)
982 if pull_request.is_closed():
982 if pull_request.is_closed():
983 raise ValueError('This pull request is closed')
983 raise ValueError('This pull request is closed')
984 if title:
984 if title:
985 pull_request.title = title
985 pull_request.title = title
986 pull_request.description = description
986 pull_request.description = description
987 pull_request.updated_on = datetime.datetime.now()
987 pull_request.updated_on = datetime.datetime.now()
988 pull_request.description_renderer = description_renderer
988 pull_request.description_renderer = description_renderer
989 Session().add(pull_request)
989 Session().add(pull_request)
990 self._log_audit_action(
990 self._log_audit_action(
991 'repo.pull_request.edit', {'old_data': old_data},
991 'repo.pull_request.edit', {'old_data': old_data},
992 user, pull_request)
992 user, pull_request)
993
993
994 def update_reviewers(self, pull_request, reviewer_data, user):
994 def update_reviewers(self, pull_request, reviewer_data, user):
995 """
995 """
996 Update the reviewers in the pull request
996 Update the reviewers in the pull request
997
997
998 :param pull_request: the pr to update
998 :param pull_request: the pr to update
999 :param reviewer_data: list of tuples
999 :param reviewer_data: list of tuples
1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1001 """
1001 """
1002 pull_request = self.__get_pull_request(pull_request)
1002 pull_request = self.__get_pull_request(pull_request)
1003 if pull_request.is_closed():
1003 if pull_request.is_closed():
1004 raise ValueError('This pull request is closed')
1004 raise ValueError('This pull request is closed')
1005
1005
1006 reviewers = {}
1006 reviewers = {}
1007 for user_id, reasons, mandatory, rules in reviewer_data:
1007 for user_id, reasons, mandatory, rules in reviewer_data:
1008 if isinstance(user_id, (int, compat.string_types)):
1008 if isinstance(user_id, (int, compat.string_types)):
1009 user_id = self._get_user(user_id).user_id
1009 user_id = self._get_user(user_id).user_id
1010 reviewers[user_id] = {
1010 reviewers[user_id] = {
1011 'reasons': reasons, 'mandatory': mandatory}
1011 'reasons': reasons, 'mandatory': mandatory}
1012
1012
1013 reviewers_ids = set(reviewers.keys())
1013 reviewers_ids = set(reviewers.keys())
1014 current_reviewers = PullRequestReviewers.query()\
1014 current_reviewers = PullRequestReviewers.query()\
1015 .filter(PullRequestReviewers.pull_request ==
1015 .filter(PullRequestReviewers.pull_request ==
1016 pull_request).all()
1016 pull_request).all()
1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1018
1018
1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1021
1021
1022 log.debug("Adding %s reviewers", ids_to_add)
1022 log.debug("Adding %s reviewers", ids_to_add)
1023 log.debug("Removing %s reviewers", ids_to_remove)
1023 log.debug("Removing %s reviewers", ids_to_remove)
1024 changed = False
1024 changed = False
1025 added_audit_reviewers = []
1025 added_audit_reviewers = []
1026 removed_audit_reviewers = []
1026 removed_audit_reviewers = []
1027
1027
1028 for uid in ids_to_add:
1028 for uid in ids_to_add:
1029 changed = True
1029 changed = True
1030 _usr = self._get_user(uid)
1030 _usr = self._get_user(uid)
1031 reviewer = PullRequestReviewers()
1031 reviewer = PullRequestReviewers()
1032 reviewer.user = _usr
1032 reviewer.user = _usr
1033 reviewer.pull_request = pull_request
1033 reviewer.pull_request = pull_request
1034 reviewer.reasons = reviewers[uid]['reasons']
1034 reviewer.reasons = reviewers[uid]['reasons']
1035 # NOTE(marcink): mandatory shouldn't be changed now
1035 # NOTE(marcink): mandatory shouldn't be changed now
1036 # reviewer.mandatory = reviewers[uid]['reasons']
1036 # reviewer.mandatory = reviewers[uid]['reasons']
1037 Session().add(reviewer)
1037 Session().add(reviewer)
1038 added_audit_reviewers.append(reviewer.get_dict())
1038 added_audit_reviewers.append(reviewer.get_dict())
1039
1039
1040 for uid in ids_to_remove:
1040 for uid in ids_to_remove:
1041 changed = True
1041 changed = True
1042 # NOTE(marcink): we fetch ALL matching reviewers using .all(). This handles
1042 # NOTE(marcink): we fetch ALL matching reviewers using .all(). This handles
1043 # the edge case where the same reviewer was added twice, which CAN
1043 # the edge case where the same reviewer was added twice, which CAN
1044 # happen due to the lack of DB-level uniqueness checks
1044 # happen due to the lack of DB-level uniqueness checks
1045 reviewers = PullRequestReviewers.query()\
1045 reviewers = PullRequestReviewers.query()\
1046 .filter(PullRequestReviewers.user_id == uid,
1046 .filter(PullRequestReviewers.user_id == uid,
1047 PullRequestReviewers.pull_request == pull_request)\
1047 PullRequestReviewers.pull_request == pull_request)\
1048 .all()
1048 .all()
1049
1049
1050 for obj in reviewers:
1050 for obj in reviewers:
1051 removed_audit_reviewers.append(obj.get_dict())
1051 removed_audit_reviewers.append(obj.get_dict())
1052 Session().delete(obj)
1052 Session().delete(obj)
1053
1053
1054 if changed:
1054 if changed:
1055 Session().expire_all()
1055 Session().expire_all()
1056 pull_request.updated_on = datetime.datetime.now()
1056 pull_request.updated_on = datetime.datetime.now()
1057 Session().add(pull_request)
1057 Session().add(pull_request)
1058
1058
1059 # finally store audit logs
1059 # finally store audit logs
1060 for user_data in added_audit_reviewers:
1060 for user_data in added_audit_reviewers:
1061 self._log_audit_action(
1061 self._log_audit_action(
1062 'repo.pull_request.reviewer.add', {'data': user_data},
1062 'repo.pull_request.reviewer.add', {'data': user_data},
1063 user, pull_request)
1063 user, pull_request)
1064 for user_data in removed_audit_reviewers:
1064 for user_data in removed_audit_reviewers:
1065 self._log_audit_action(
1065 self._log_audit_action(
1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1067 user, pull_request)
1067 user, pull_request)
1068
1068
1069 self.notify_reviewers(pull_request, ids_to_add)
1069 self.notify_reviewers(pull_request, ids_to_add)
1070 return ids_to_add, ids_to_remove
1070 return ids_to_add, ids_to_remove
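# Example usage (sketch; the user id and reason are illustrative only):
#
#   ids_to_add, ids_to_remove = self.update_reviewers(
#       pull_request, [(2, ['added by admin'], False, [])], user)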
1071
1071
1072 def get_url(self, pull_request, request=None, permalink=False):
1072 def get_url(self, pull_request, request=None, permalink=False):
1073 if not request:
1073 if not request:
1074 request = get_current_request()
1074 request = get_current_request()
1075
1075
1076 if permalink:
1076 if permalink:
1077 return request.route_url(
1077 return request.route_url(
1078 'pull_requests_global',
1078 'pull_requests_global',
1079 pull_request_id=pull_request.pull_request_id,)
1079 pull_request_id=pull_request.pull_request_id,)
1080 else:
1080 else:
1081 return request.route_url('pullrequest_show',
1081 return request.route_url('pullrequest_show',
1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1083 pull_request_id=pull_request.pull_request_id,)
1083 pull_request_id=pull_request.pull_request_id,)
1084
1084
1085 def get_shadow_clone_url(self, pull_request, request=None):
1085 def get_shadow_clone_url(self, pull_request, request=None):
1086 """
1086 """
1087 Returns a qualified URL pointing to the shadow repository. If this pull
1087 Returns a qualified URL pointing to the shadow repository. If this pull
1088 request is closed there is no shadow repository and ``None`` will be
1088 request is closed there is no shadow repository and ``None`` will be
1089 returned.
1089 returned.
1090 """
1090 """
1091 if pull_request.is_closed():
1091 if pull_request.is_closed():
1092 return None
1092 return None
1093 else:
1093 else:
1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
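# Shape of the result (sketch; host and repo name are illustrative): the pull
# request URL with '/repository' appended, e.g. something like
# https://code.example.com/some-repo/pull-request/1/repository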
1096
1096
1097 def notify_reviewers(self, pull_request, reviewers_ids):
1097 def notify_reviewers(self, pull_request, reviewers_ids):
1098 # notification to reviewers
1098 # notification to reviewers
1099 if not reviewers_ids:
1099 if not reviewers_ids:
1100 return
1100 return
1101
1101
1102 pull_request_obj = pull_request
1102 pull_request_obj = pull_request
1103 # get the current participants of this pull request
1103 # get the current participants of this pull request
1104 recipients = reviewers_ids
1104 recipients = reviewers_ids
1105 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1105 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1106
1106
1107 pr_source_repo = pull_request_obj.source_repo
1107 pr_source_repo = pull_request_obj.source_repo
1108 pr_target_repo = pull_request_obj.target_repo
1108 pr_target_repo = pull_request_obj.target_repo
1109
1109
1110 pr_url = h.route_url('pullrequest_show',
1110 pr_url = h.route_url('pullrequest_show',
1111 repo_name=pr_target_repo.repo_name,
1111 repo_name=pr_target_repo.repo_name,
1112 pull_request_id=pull_request_obj.pull_request_id,)
1112 pull_request_id=pull_request_obj.pull_request_id,)
1113
1113
1114 # set some variables for email notification
1114 # set some variables for email notification
1115 pr_target_repo_url = h.route_url(
1115 pr_target_repo_url = h.route_url(
1116 'repo_summary', repo_name=pr_target_repo.repo_name)
1116 'repo_summary', repo_name=pr_target_repo.repo_name)
1117
1117
1118 pr_source_repo_url = h.route_url(
1118 pr_source_repo_url = h.route_url(
1119 'repo_summary', repo_name=pr_source_repo.repo_name)
1119 'repo_summary', repo_name=pr_source_repo.repo_name)
1120
1120
1121 # pull request specifics
1121 # pull request specifics
1122 pull_request_commits = [
1122 pull_request_commits = [
1123 (x.raw_id, x.message)
1123 (x.raw_id, x.message)
1124 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1124 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1125
1125
1126 kwargs = {
1126 kwargs = {
1127 'user': pull_request.author,
1127 'user': pull_request.author,
1128 'pull_request': pull_request_obj,
1128 'pull_request': pull_request_obj,
1129 'pull_request_commits': pull_request_commits,
1129 'pull_request_commits': pull_request_commits,
1130
1130
1131 'pull_request_target_repo': pr_target_repo,
1131 'pull_request_target_repo': pr_target_repo,
1132 'pull_request_target_repo_url': pr_target_repo_url,
1132 'pull_request_target_repo_url': pr_target_repo_url,
1133
1133
1134 'pull_request_source_repo': pr_source_repo,
1134 'pull_request_source_repo': pr_source_repo,
1135 'pull_request_source_repo_url': pr_source_repo_url,
1135 'pull_request_source_repo_url': pr_source_repo_url,
1136
1136
1137 'pull_request_url': pr_url,
1137 'pull_request_url': pr_url,
1138 }
1138 }
1139
1139
1140 # pre-generate the subject for notification itself
1140 # pre-generate the subject for notification itself
1141 (subject,
1141 (subject,
1142 _h, _e, # we don't care about those
1142 _h, _e, # we don't care about those
1143 body_plaintext) = EmailNotificationModel().render_email(
1143 body_plaintext) = EmailNotificationModel().render_email(
1144 notification_type, **kwargs)
1144 notification_type, **kwargs)
1145
1145
1146 # create notification objects, and emails
1146 # create notification objects, and emails
1147 NotificationModel().create(
1147 NotificationModel().create(
1148 created_by=pull_request.author,
1148 created_by=pull_request.author,
1149 notification_subject=subject,
1149 notification_subject=subject,
1150 notification_body=body_plaintext,
1150 notification_body=body_plaintext,
1151 notification_type=notification_type,
1151 notification_type=notification_type,
1152 recipients=recipients,
1152 recipients=recipients,
1153 email_kwargs=kwargs,
1153 email_kwargs=kwargs,
1154 )
1154 )
1155
1155
1156 def delete(self, pull_request, user):
1156 def delete(self, pull_request, user):
1157 pull_request = self.__get_pull_request(pull_request)
1157 pull_request = self.__get_pull_request(pull_request)
1158 old_data = pull_request.get_api_data(with_merge_state=False)
1158 old_data = pull_request.get_api_data(with_merge_state=False)
1159 self._cleanup_merge_workspace(pull_request)
1159 self._cleanup_merge_workspace(pull_request)
1160 self._log_audit_action(
1160 self._log_audit_action(
1161 'repo.pull_request.delete', {'old_data': old_data},
1161 'repo.pull_request.delete', {'old_data': old_data},
1162 user, pull_request)
1162 user, pull_request)
1163 Session().delete(pull_request)
1163 Session().delete(pull_request)
1164
1164
1165 def close_pull_request(self, pull_request, user):
1165 def close_pull_request(self, pull_request, user):
1166 pull_request = self.__get_pull_request(pull_request)
1166 pull_request = self.__get_pull_request(pull_request)
1167 self._cleanup_merge_workspace(pull_request)
1167 self._cleanup_merge_workspace(pull_request)
1168 pull_request.status = PullRequest.STATUS_CLOSED
1168 pull_request.status = PullRequest.STATUS_CLOSED
1169 pull_request.updated_on = datetime.datetime.now()
1169 pull_request.updated_on = datetime.datetime.now()
1170 Session().add(pull_request)
1170 Session().add(pull_request)
1171 self.trigger_pull_request_hook(
1171 self.trigger_pull_request_hook(
1172 pull_request, pull_request.author, 'close')
1172 pull_request, pull_request.author, 'close')
1173
1173
1174 pr_data = pull_request.get_api_data(with_merge_state=False)
1174 pr_data = pull_request.get_api_data(with_merge_state=False)
1175 self._log_audit_action(
1175 self._log_audit_action(
1176 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1176 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1177
1177
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to the new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # Recalculate the pull request status and, based on that calculation,
        # trigger a status-change hook. This covers the case where a
        # non-reviewer admin closes a PR: their vote does not change the
        # status, while a reviewer's vote might.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status

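    # Illustrative call of close_pull_request_with_comment() above; a sketch
    # only, assuming `pull_request`, `user` and `repo` are already loaded
    # model objects:
    #
    #   comment, status = PullRequestModel().close_pull_request_with_comment(
    #       pull_request, user, repo, message='Closing, no longer needed')
    #
    # The returned `status` is the ChangesetStatus value recorded together
    # with the closing comment.
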
    def merge_status(self, pull_request, translator=None,
                     force_shadow_repo_refresh=False):
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(
                pull_request,
                force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", resp)
            status = resp.possible, resp.merge_status_message
        except NotImplementedError:
            status = False, _('Pull request merging is not supported.')

        return status

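    # Minimal usage sketch for merge_status() above, assuming a request-bound
    # translator (or an explicit `translator` callable):
    #
    #   possible, msg = PullRequestModel().merge_status(pull_request)
    #   if not possible:
    #       log.debug('merge blocked: %s', msg)
    #
    # The first element mirrors MergeResponse.possible, the second is a
    # translated, human-readable message.
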
    def _check_repo_requirements(self, target, source, translator):
        """
        Check if `target` and `source` have compatible requirements.

        Currently this is just checking for largefiles.
        """
        _ = translator
        target_has_largefiles = self._has_largefiles(target)
        source_has_largefiles = self._has_largefiles(source)
        merge_possible = True
        message = u''

        if target_has_largefiles != source_has_largefiles:
            merge_possible = False
            if source_has_largefiles:
                message = _(
                    'Target repository large files support is disabled.')
            else:
                message = _(
                    'Source repository large files support is disabled.')

        return merge_possible, message

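    # Outcome of the largefiles check in _check_repo_requirements() above:
    #
    #   target largefiles | source largefiles | merge_possible
    #   ------------------+-------------------+------------------------------
    #   enabled           | enabled           | True
    #   disabled          | disabled          | True
    #   enabled           | disabled          | False ('Source ... disabled.')
    #   disabled          | enabled           | False ('Target ... disabled.')
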
    def _has_largefiles(self, repo):
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active

    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Checking if pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state

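    # Decision flow of _try_merge() above, in short:
    #   1. target ref cannot be resolved -> MISSING_TARGET_REF response
    #   2. target repository is locked   -> TARGET_IS_LOCKED response
    #   3. forced refresh or stale state -> dry-run merge via _refresh_merge_state()
    #   4. otherwise                     -> reuse the cached last_merge_status
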
    def _refresh_reference(self, reference, vcs_repository):
        if reference.type in self.UPDATABLE_REF_TYPES:
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id

        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference

    def _needs_merge_state_refresh(self, pull_request, target_reference):
        return not (
            pull_request.revisions and
            pull_request.revisions[0] == pull_request._last_merge_source_rev and
            target_reference.commit_id == pull_request._last_merge_target_rev)

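    # The cached merge state above counts as fresh only when both sides are
    # unchanged: the newest PR revision still equals _last_merge_source_rev
    # and the refreshed target ref still points at _last_merge_target_rev.
    # Any other combination triggers a new dry-run merge.
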
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state

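    # _refresh_merge_state() runs a dry-run merge in the shadow repository and
    # caches the outcome (last merge revisions, status and shadow merge ref)
    # on the pull request. Responses with an UNKNOWN failure reason are not
    # cached, presumably so transient errors get re-checked next time.
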
    def _workspace_id(self, pull_request):
        workspace_id = 'pr-%s' % pull_request.pull_request_id
        return workspace_id

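    # e.g. a pull request with pull_request_id == 42 gets workspace_id 'pr-42'.
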
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }

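    # Rough shape of the dict returned by generate_repo_data() above
    # (values abbreviated):
    #
    #   {
    #       'user': {'user_id': ..., 'username': ..., 'firstname': ...,
    #                'lastname': ..., 'gravatar_link': ...},
    #       'name': '<repo name>',
    #       'link': '<repo url>',
    #       'description': '<first line of the description>',
    #       'refs': {'all_refs': [...], 'selected_ref': '...',
    #                'select2_refs': [{'text': ..., 'children': [...]}, ...]},
    #   }
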
    def generate_pullrequest_title(self, source, source_ref, target):
        return u'{source}#{at_ref} to {target}'.format(
            source=source,
            at_ref=source_ref,
            target=target,
        )

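    # e.g. generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
    # returns u'repo-a#feature-x to repo-b'.
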
    def _cleanup_merge_workspace(self, pull_request):
        # Merging related cleanup
        repo_id = pull_request.target_repo.repo_id
        target_scm = pull_request.target_repo.scm_instance()
        workspace_id = self._workspace_id(pull_request)

        try:
            target_scm.cleanup_merge_workspace(repo_id, workspace_id)
        except NotImplementedError:
            pass

    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected

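    # The `groups` structure returned above is a list of
    # ([(ref_key, ref_name), ...], group_label) tuples, where each ref_key has
    # the form u'<type>:<name>:<commit_id>', e.g. u'branch:default:<sha>';
    # `selected` is one such ref_key, or None when the user has to pick one.
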
    def get_diff(self, source_repo, source_ref_id, target_ref_id,
                 hide_whitespace_changes, diff_context):

        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')

    def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

    def _close_branch_before_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, otherwise we fall back to the
        CE package functions.
        """
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers


class MergeCheck(object):
    """
    Performs merge checks and returns a check object which stores
    information about merge errors and merge conditions.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

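    # After push_error(), self.errors holds [error_type, message] pairs and
    # self.error_details maps each error_key (one of the *_CHECK constants)
    # to {'details': ..., 'error_type': ..., 'message': ...}.
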
    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, user has no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

        if fail_early:
            return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

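    # Typical use of validate() above; a sketch only, assuming `pull_request`,
    # `auth_user` and the `_` translator come from the calling view:
    #
    #   merge_check = MergeCheck.validate(
    #       pull_request, auth_user=auth_user, translator=_, fail_early=True)
    #   if merge_check.failed:
    #       for error_type, message in merge_check.errors:
    #           log.debug('%s: %s', error_type, message)
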
    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details


ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])